
librustc: Remove all uses of ~str from librustc.

Patrick Walton 2014-05-09 18:45:36 -07:00
parent e8053b9a7f
commit 6559a3675e
83 changed files with 2014 additions and 1439 deletions
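
The migration follows one mechanical pattern throughout the diff: owned strings move from ~str (built with .to_owned() or format!) to StrBuf (built with .to_strbuf() or format_strbuf!), and call sites that need a borrowed &str view now go through .as_slice(). A condensed before/after, lifted from the output_lib_filename hunk below, sketches the shape of the change (pre-1.0 Rust of this vintage, shown for illustration only; StrBuf later became String):

    // Before: format! builds an owned ~str
    pub fn output_lib_filename(id: &CrateId) -> ~str {
        format!("{}-{}-{}", id.name, crate_id_hash(id), id.version_or_default())
    }

    // After: format_strbuf! builds a StrBuf; callers that need a &str
    // view of the result call .as_slice() on it
    pub fn output_lib_filename(id: &CrateId) -> StrBuf {
        format_strbuf!("{}-{}-{}",
                       id.name,
                       crate_id_hash(id),
                       id.version_or_default())
    }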

@ -129,12 +129,12 @@ impl<'a> Archive<'a> {
} }
/// Lists all files in an archive /// Lists all files in an archive
pub fn files(&self) -> Vec<~str> { pub fn files(&self) -> Vec<StrBuf> {
let output = run_ar(self.sess, "t", None, [&self.dst]); let output = run_ar(self.sess, "t", None, [&self.dst]);
let output = str::from_utf8(output.output.as_slice()).unwrap(); let output = str::from_utf8(output.output.as_slice()).unwrap();
// use lines_any because windows delimits output with `\r\n` instead of // use lines_any because windows delimits output with `\r\n` instead of
// just `\n` // just `\n`
output.lines_any().map(|s| s.to_owned()).collect() output.lines_any().map(|s| s.to_strbuf()).collect()
} }
fn add_archive(&mut self, archive: &Path, name: &str, fn add_archive(&mut self, archive: &Path, name: &str,

@ -13,56 +13,57 @@ use driver::config::cfg_os_to_meta_os;
use metadata::loader::meta_section_name; use metadata::loader::meta_section_name;
use syntax::abi; use syntax::abi;
pub fn get_target_strs(target_triple: ~str, target_os: abi::Os) -> target_strs::t { pub fn get_target_strs(target_triple: StrBuf, target_os: abi::Os) -> target_strs::t {
let cc_args = if target_triple.contains("thumb") { let cc_args = if target_triple.as_slice().contains("thumb") {
vec!("-mthumb".to_owned()) vec!("-mthumb".to_strbuf())
} else { } else {
vec!("-marm".to_owned()) vec!("-marm".to_strbuf())
}; };
return target_strs::t { return target_strs::t {
module_asm: "".to_owned(), module_asm: "".to_strbuf(),
meta_sect_name: meta_section_name(cfg_os_to_meta_os(target_os)).to_owned(), meta_sect_name:
meta_section_name(cfg_os_to_meta_os(target_os)).to_strbuf(),
data_layout: match target_os { data_layout: match target_os {
abi::OsMacos => { abi::OsMacos => {
"e-p:32:32:32".to_owned() + "e-p:32:32:32\
"-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64" + -i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64\
"-f32:32:32-f64:64:64" + -f32:32:32-f64:64:64\
"-v64:64:64-v128:64:128" + -v64:64:64-v128:64:128\
"-a0:0:64-n32" -a0:0:64-n32".to_strbuf()
} }
abi::OsWin32 => { abi::OsWin32 => {
"e-p:32:32:32".to_owned() + "e-p:32:32:32\
"-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64" + -i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64\
"-f32:32:32-f64:64:64" + -f32:32:32-f64:64:64\
"-v64:64:64-v128:64:128" + -v64:64:64-v128:64:128\
"-a0:0:64-n32" -a0:0:64-n32".to_strbuf()
} }
abi::OsLinux => { abi::OsLinux => {
"e-p:32:32:32".to_owned() + "e-p:32:32:32\
"-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64" + -i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64\
"-f32:32:32-f64:64:64" + -f32:32:32-f64:64:64\
"-v64:64:64-v128:64:128" + -v64:64:64-v128:64:128\
"-a0:0:64-n32" -a0:0:64-n32".to_strbuf()
} }
abi::OsAndroid => { abi::OsAndroid => {
"e-p:32:32:32".to_owned() + "e-p:32:32:32\
"-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64" + -i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64\
"-f32:32:32-f64:64:64" + -f32:32:32-f64:64:64\
"-v64:64:64-v128:64:128" + -v64:64:64-v128:64:128\
"-a0:0:64-n32" -a0:0:64-n32".to_strbuf()
} }
abi::OsFreebsd => { abi::OsFreebsd => {
"e-p:32:32:32".to_owned() + "e-p:32:32:32\
"-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64" + -i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64\
"-f32:32:32-f64:64:64" + -f32:32:32-f64:64:64\
"-v64:64:64-v128:64:128" + -v64:64:64-v128:64:128\
"-a0:0:64-n32" -a0:0:64-n32".to_strbuf()
} }
}, },

@ -54,15 +54,15 @@ pub enum OutputType {
OutputTypeExe, OutputTypeExe,
} }
pub fn llvm_err(sess: &Session, msg: ~str) -> ! { pub fn llvm_err(sess: &Session, msg: StrBuf) -> ! {
unsafe { unsafe {
let cstr = llvm::LLVMRustGetLastError(); let cstr = llvm::LLVMRustGetLastError();
if cstr == ptr::null() { if cstr == ptr::null() {
sess.fatal(msg); sess.fatal(msg.as_slice());
} else { } else {
let err = CString::new(cstr, true); let err = CString::new(cstr, true);
let err = str::from_utf8_lossy(err.as_bytes()); let err = str::from_utf8_lossy(err.as_bytes());
sess.fatal(msg + ": " + err.as_slice()); sess.fatal((msg.as_slice() + ": " + err.as_slice()));
} }
} }
} }
@ -79,7 +79,7 @@ pub fn WriteOutputFile(
let result = llvm::LLVMRustWriteOutputFile( let result = llvm::LLVMRustWriteOutputFile(
target, pm, m, output, file_type); target, pm, m, output, file_type);
if !result { if !result {
llvm_err(sess, "could not write output".to_owned()); llvm_err(sess, "could not write output".to_strbuf());
} }
}) })
} }
@ -115,7 +115,7 @@ pub mod write {
fn target_feature<'a>(sess: &'a Session) -> &'a str { fn target_feature<'a>(sess: &'a Session) -> &'a str {
match sess.targ_cfg.os { match sess.targ_cfg.os {
abi::OsAndroid => { abi::OsAndroid => {
if "" == sess.opts.cg.target_feature { if "" == sess.opts.cg.target_feature.as_slice() {
"+v7" "+v7"
} else { } else {
sess.opts.cg.target_feature.as_slice() sess.opts.cg.target_feature.as_slice()
@ -173,8 +173,12 @@ pub mod write {
} }
}; };
let tm = sess.targ_cfg.target_strs.target_triple.with_c_str(|t| { let tm = sess.targ_cfg
sess.opts.cg.target_cpu.with_c_str(|cpu| { .target_strs
.target_triple
.as_slice()
.with_c_str(|t| {
sess.opts.cg.target_cpu.as_slice().with_c_str(|cpu| {
target_feature(sess).with_c_str(|features| { target_feature(sess).with_c_str(|features| {
llvm::LLVMRustCreateTargetMachine( llvm::LLVMRustCreateTargetMachine(
t, cpu, features, t, cpu, features,
@ -201,7 +205,7 @@ pub mod write {
// If we're verifying or linting, add them to the function pass // If we're verifying or linting, add them to the function pass
// manager. // manager.
let addpass = |pass: &str| { let addpass = |pass: &str| {
pass.with_c_str(|s| llvm::LLVMRustAddPass(fpm, s)) pass.as_slice().with_c_str(|s| llvm::LLVMRustAddPass(fpm, s))
}; };
if !sess.no_verify() { assert!(addpass("verify")); } if !sess.no_verify() { assert!(addpass("verify")); }
@ -212,7 +216,7 @@ pub mod write {
} }
for pass in sess.opts.cg.passes.iter() { for pass in sess.opts.cg.passes.iter() {
pass.with_c_str(|s| { pass.as_slice().with_c_str(|s| {
if !llvm::LLVMRustAddPass(mpm, s) { if !llvm::LLVMRustAddPass(mpm, s) {
sess.warn(format!("unknown pass {}, ignoring", *pass)); sess.warn(format!("unknown pass {}, ignoring", *pass));
} }
@ -355,7 +359,7 @@ pub mod write {
assembly.as_str().unwrap().to_owned()]; assembly.as_str().unwrap().to_owned()];
debug!("{} '{}'", cc, args.connect("' '")); debug!("{} '{}'", cc, args.connect("' '"));
match Process::output(cc, args) { match Process::output(cc.as_slice(), args) {
Ok(prog) => { Ok(prog) => {
if !prog.status.success() { if !prog.status.success() {
sess.err(format!("linking with `{}` failed: {}", cc, prog.status)); sess.err(format!("linking with `{}` failed: {}", cc, prog.status));
@ -400,7 +404,7 @@ pub mod write {
if sess.print_llvm_passes() { add("-debug-pass=Structure"); } if sess.print_llvm_passes() { add("-debug-pass=Structure"); }
for arg in sess.opts.cg.llvm_args.iter() { for arg in sess.opts.cg.llvm_args.iter() {
add(*arg); add((*arg).as_slice());
} }
} }
@ -527,19 +531,20 @@ pub fn find_crate_id(attrs: &[ast::Attribute], out_filestem: &str) -> CrateId {
match attr::find_crateid(attrs) { match attr::find_crateid(attrs) {
None => from_str(out_filestem).unwrap_or_else(|| { None => from_str(out_filestem).unwrap_or_else(|| {
let mut s = out_filestem.chars().filter(|c| c.is_XID_continue()); let mut s = out_filestem.chars().filter(|c| c.is_XID_continue());
from_str(s.collect::<~str>()).or(from_str("rust-out")).unwrap() from_str(s.collect::<StrBuf>()
.to_owned()).or(from_str("rust-out")).unwrap()
}), }),
Some(s) => s, Some(s) => s,
} }
} }
pub fn crate_id_hash(crate_id: &CrateId) -> ~str { pub fn crate_id_hash(crate_id: &CrateId) -> StrBuf {
// This calculates CMH as defined above. Note that we don't use the path of // This calculates CMH as defined above. Note that we don't use the path of
// the crate id in the hash because lookups are only done by (name/vers), // the crate id in the hash because lookups are only done by (name/vers),
// not by path. // not by path.
let mut s = Sha256::new(); let mut s = Sha256::new();
s.input_str(crate_id.short_name_with_version().as_slice()); s.input_str(crate_id.short_name_with_version().as_slice());
truncated_hash_result(&mut s).slice_to(8).to_owned() truncated_hash_result(&mut s).as_slice().slice_to(8).to_strbuf()
} }
pub fn build_link_meta(krate: &ast::Crate, out_filestem: &str) -> LinkMeta { pub fn build_link_meta(krate: &ast::Crate, out_filestem: &str) -> LinkMeta {
@ -551,10 +556,10 @@ pub fn build_link_meta(krate: &ast::Crate, out_filestem: &str) -> LinkMeta {
return r; return r;
} }
fn truncated_hash_result(symbol_hasher: &mut Sha256) -> ~str { fn truncated_hash_result(symbol_hasher: &mut Sha256) -> StrBuf {
let output = symbol_hasher.result_bytes(); let output = symbol_hasher.result_bytes();
// 64 bits should be enough to avoid collisions. // 64 bits should be enough to avoid collisions.
output.slice_to(8).to_hex() output.slice_to(8).to_hex().to_strbuf()
} }
@ -563,7 +568,7 @@ fn symbol_hash(tcx: &ty::ctxt,
symbol_hasher: &mut Sha256, symbol_hasher: &mut Sha256,
t: ty::t, t: ty::t,
link_meta: &LinkMeta) link_meta: &LinkMeta)
-> ~str { -> StrBuf {
// NB: do *not* use abbrevs here as we want the symbol names // NB: do *not* use abbrevs here as we want the symbol names
// to be independent of one another in the crate. // to be independent of one another in the crate.
@ -572,16 +577,16 @@ fn symbol_hash(tcx: &ty::ctxt,
symbol_hasher.input_str("-"); symbol_hasher.input_str("-");
symbol_hasher.input_str(link_meta.crate_hash.as_str()); symbol_hasher.input_str(link_meta.crate_hash.as_str());
symbol_hasher.input_str("-"); symbol_hasher.input_str("-");
symbol_hasher.input_str(encoder::encoded_ty(tcx, t)); symbol_hasher.input_str(encoder::encoded_ty(tcx, t).as_slice());
// Prefix with 'h' so that it never blends into adjacent digits // Prefix with 'h' so that it never blends into adjacent digits
let mut hash = StrBuf::from_str("h"); let mut hash = StrBuf::from_str("h");
hash.push_str(truncated_hash_result(symbol_hasher)); hash.push_str(truncated_hash_result(symbol_hasher).as_slice());
hash.into_owned() hash
} }
fn get_symbol_hash(ccx: &CrateContext, t: ty::t) -> ~str { fn get_symbol_hash(ccx: &CrateContext, t: ty::t) -> StrBuf {
match ccx.type_hashcodes.borrow().find(&t) { match ccx.type_hashcodes.borrow().find(&t) {
Some(h) => return h.to_str(), Some(h) => return h.to_strbuf(),
None => {} None => {}
} }
@ -595,7 +600,7 @@ fn get_symbol_hash(ccx: &CrateContext, t: ty::t) -> ~str {
// Name sanitation. LLVM will happily accept identifiers with weird names, but // Name sanitation. LLVM will happily accept identifiers with weird names, but
// gas doesn't! // gas doesn't!
// gas accepts the following characters in symbols: a-z, A-Z, 0-9, ., _, $ // gas accepts the following characters in symbols: a-z, A-Z, 0-9, ., _, $
pub fn sanitize(s: &str) -> ~str { pub fn sanitize(s: &str) -> StrBuf {
let mut result = StrBuf::new(); let mut result = StrBuf::new();
for c in s.chars() { for c in s.chars() {
match c { match c {
@ -630,11 +635,10 @@ pub fn sanitize(s: &str) -> ~str {
} }
// Underscore-qualify anything that didn't start as an ident. // Underscore-qualify anything that didn't start as an ident.
let result = result.into_owned();
if result.len() > 0u && if result.len() > 0u &&
result[0] != '_' as u8 && result.as_slice()[0] != '_' as u8 &&
! char::is_XID_start(result[0] as char) { ! char::is_XID_start(result.as_slice()[0] as char) {
return "_".to_owned() + result; return ("_" + result.as_slice()).to_strbuf();
} }
return result; return result;
@ -642,7 +646,7 @@ pub fn sanitize(s: &str) -> ~str {
pub fn mangle<PI: Iterator<PathElem>>(mut path: PI, pub fn mangle<PI: Iterator<PathElem>>(mut path: PI,
hash: Option<&str>, hash: Option<&str>,
vers: Option<&str>) -> ~str { vers: Option<&str>) -> StrBuf {
// Follow C++ namespace-mangling style, see // Follow C++ namespace-mangling style, see
// http://en.wikipedia.org/wiki/Name_mangling for more info. // http://en.wikipedia.org/wiki/Name_mangling for more info.
// //
@ -679,10 +683,10 @@ pub fn mangle<PI: Iterator<PathElem>>(mut path: PI,
} }
n.push_char('E'); // End name-sequence. n.push_char('E'); // End name-sequence.
n.into_owned() n
} }
pub fn exported_name(path: PathElems, hash: &str, vers: &str) -> ~str { pub fn exported_name(path: PathElems, hash: &str, vers: &str) -> StrBuf {
// The version will get mangled to have a leading '_', but it makes more // The version will get mangled to have a leading '_', but it makes more
// sense to lead with a 'v' b/c this is a version... // sense to lead with a 'v' b/c this is a version...
let vers = if vers.len() > 0 && !char::is_XID_start(vers.char_at(0)) { let vers = if vers.len() > 0 && !char::is_XID_start(vers.char_at(0)) {
@ -695,8 +699,8 @@ pub fn exported_name(path: PathElems, hash: &str, vers: &str) -> ~str {
} }
pub fn mangle_exported_name(ccx: &CrateContext, path: PathElems, pub fn mangle_exported_name(ccx: &CrateContext, path: PathElems,
t: ty::t, id: ast::NodeId) -> ~str { t: ty::t, id: ast::NodeId) -> StrBuf {
let mut hash = StrBuf::from_owned_str(get_symbol_hash(ccx, t)); let mut hash = get_symbol_hash(ccx, t);
// Paths can be completely identical for different nodes, // Paths can be completely identical for different nodes,
// e.g. `fn foo() { { fn a() {} } { fn a() {} } }`, so we // e.g. `fn foo() { { fn a() {} } { fn a() {} } }`, so we
@ -723,25 +727,28 @@ pub fn mangle_exported_name(ccx: &CrateContext, path: PathElems,
pub fn mangle_internal_name_by_type_and_seq(ccx: &CrateContext, pub fn mangle_internal_name_by_type_and_seq(ccx: &CrateContext,
t: ty::t, t: ty::t,
name: &str) -> ~str { name: &str) -> StrBuf {
let s = ppaux::ty_to_str(ccx.tcx(), t); let s = ppaux::ty_to_str(ccx.tcx(), t);
let path = [PathName(token::intern(s)), let path = [PathName(token::intern(s.as_slice())),
gensym_name(name)]; gensym_name(name)];
let hash = get_symbol_hash(ccx, t); let hash = get_symbol_hash(ccx, t);
mangle(ast_map::Values(path.iter()), Some(hash.as_slice()), None) mangle(ast_map::Values(path.iter()), Some(hash.as_slice()), None)
} }
pub fn mangle_internal_name_by_path_and_seq(path: PathElems, flav: &str) -> ~str { pub fn mangle_internal_name_by_path_and_seq(path: PathElems, flav: &str) -> StrBuf {
mangle(path.chain(Some(gensym_name(flav)).move_iter()), None, None) mangle(path.chain(Some(gensym_name(flav)).move_iter()), None, None)
} }
pub fn output_lib_filename(id: &CrateId) -> ~str { pub fn output_lib_filename(id: &CrateId) -> StrBuf {
format!("{}-{}-{}", id.name, crate_id_hash(id), id.version_or_default()) format_strbuf!("{}-{}-{}",
id.name,
crate_id_hash(id),
id.version_or_default())
} }
pub fn get_cc_prog(sess: &Session) -> ~str { pub fn get_cc_prog(sess: &Session) -> StrBuf {
match sess.opts.cg.linker { match sess.opts.cg.linker {
Some(ref linker) => return linker.to_owned(), Some(ref linker) => return linker.to_strbuf(),
None => {} None => {}
} }
@ -750,23 +757,23 @@ pub fn get_cc_prog(sess: &Session) -> ~str {
// instead of hard-coded gcc. // instead of hard-coded gcc.
// For win32, there is no cc command, so we add a condition to make it use gcc. // For win32, there is no cc command, so we add a condition to make it use gcc.
match sess.targ_cfg.os { match sess.targ_cfg.os {
abi::OsWin32 => return "gcc".to_owned(), abi::OsWin32 => return "gcc".to_strbuf(),
_ => {}, _ => {},
} }
get_system_tool(sess, "cc") get_system_tool(sess, "cc")
} }
pub fn get_ar_prog(sess: &Session) -> ~str { pub fn get_ar_prog(sess: &Session) -> StrBuf {
match sess.opts.cg.ar { match sess.opts.cg.ar {
Some(ref ar) => return ar.to_owned(), Some(ref ar) => return (*ar).clone(),
None => {} None => {}
} }
get_system_tool(sess, "ar") get_system_tool(sess, "ar")
} }
fn get_system_tool(sess: &Session, tool: &str) -> ~str { fn get_system_tool(sess: &Session, tool: &str) -> StrBuf {
match sess.targ_cfg.os { match sess.targ_cfg.os {
abi::OsAndroid => match sess.opts.cg.android_cross_path { abi::OsAndroid => match sess.opts.cg.android_cross_path {
Some(ref path) => { Some(ref path) => {
@ -774,14 +781,16 @@ fn get_system_tool(sess: &Session, tool: &str) -> ~str {
"cc" => "gcc", "cc" => "gcc",
_ => tool _ => tool
}; };
format!("{}/bin/arm-linux-androideabi-{}", *path, tool_str) format_strbuf!("{}/bin/arm-linux-androideabi-{}",
*path,
tool_str)
} }
None => { None => {
sess.fatal(format!("need Android NDK path for the '{}' tool \ sess.fatal(format!("need Android NDK path for the '{}' tool \
(-C android-cross-path)", tool)) (-C android-cross-path)", tool))
} }
}, },
_ => tool.to_owned(), _ => tool.to_strbuf(),
} }
} }
@ -1022,7 +1031,7 @@ fn link_staticlib(sess: &Session, obj_filename: &Path, out_filename: &Path) {
continue continue
} }
}; };
a.add_rlib(&p, name, sess.lto()).unwrap(); a.add_rlib(&p, name.as_slice(), sess.lto()).unwrap();
let native_libs = csearch::get_native_libraries(&sess.cstore, cnum); let native_libs = csearch::get_native_libraries(&sess.cstore, cnum);
for &(kind, ref lib) in native_libs.iter() { for &(kind, ref lib) in native_libs.iter() {
let name = match kind { let name = match kind {
@ -1057,7 +1066,11 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool,
// Invoke the system linker // Invoke the system linker
debug!("{} {}", cc_prog, cc_args.connect(" ")); debug!("{} {}", cc_prog, cc_args.connect(" "));
let prog = time(sess.time_passes(), "running linker", (), |()| let prog = time(sess.time_passes(), "running linker", (), |()|
Process::output(cc_prog, cc_args.as_slice())); Process::output(cc_prog.as_slice(),
cc_args.iter()
.map(|x| (*x).to_owned())
.collect::<Vec<_>>()
.as_slice()));
match prog { match prog {
Ok(prog) => { Ok(prog) => {
if !prog.status.success() { if !prog.status.success() {
@ -1096,20 +1109,20 @@ fn link_args(sess: &Session,
tmpdir: &Path, tmpdir: &Path,
trans: &CrateTranslation, trans: &CrateTranslation,
obj_filename: &Path, obj_filename: &Path,
out_filename: &Path) -> Vec<~str> { out_filename: &Path) -> Vec<StrBuf> {
// The default library location, we need this to find the runtime. // The default library location, we need this to find the runtime.
// The location of crates will be determined as needed. // The location of crates will be determined as needed.
// FIXME (#9639): This needs to handle non-utf8 paths // FIXME (#9639): This needs to handle non-utf8 paths
let lib_path = sess.target_filesearch().get_lib_path(); let lib_path = sess.target_filesearch().get_lib_path();
let stage: ~str = "-L".to_owned() + lib_path.as_str().unwrap(); let stage = ("-L".to_owned() + lib_path.as_str().unwrap()).to_strbuf();
let mut args = vec!(stage); let mut args = vec!(stage);
// FIXME (#9639): This needs to handle non-utf8 paths // FIXME (#9639): This needs to handle non-utf8 paths
args.push_all([ args.push_all([
"-o".to_owned(), out_filename.as_str().unwrap().to_owned(), "-o".to_strbuf(), out_filename.as_str().unwrap().to_strbuf(),
obj_filename.as_str().unwrap().to_owned()]); obj_filename.as_str().unwrap().to_strbuf()]);
// Stack growth requires statically linking a __morestack function. Note // Stack growth requires statically linking a __morestack function. Note
// that this is listed *before* all other libraries, even though it may be // that this is listed *before* all other libraries, even though it may be
@ -1126,14 +1139,14 @@ fn link_args(sess: &Session,
// line, but inserting this farther to the left makes the // line, but inserting this farther to the left makes the
// "rust_stack_exhausted" symbol an outstanding undefined symbol, which // "rust_stack_exhausted" symbol an outstanding undefined symbol, which
// flags libstd as a required library (or whatever provides the symbol). // flags libstd as a required library (or whatever provides the symbol).
args.push("-lmorestack".to_owned()); args.push("-lmorestack".to_strbuf());
// When linking a dynamic library, we put the metadata into a section of the // When linking a dynamic library, we put the metadata into a section of the
// executable. This metadata is in a separate object file from the main // executable. This metadata is in a separate object file from the main
// object file, so we link that in here. // object file, so we link that in here.
if dylib { if dylib {
let metadata = obj_filename.with_extension("metadata.o"); let metadata = obj_filename.with_extension("metadata.o");
args.push(metadata.as_str().unwrap().to_owned()); args.push(metadata.as_str().unwrap().to_strbuf());
} }
// We want to prevent the compiler from accidentally leaking in any system // We want to prevent the compiler from accidentally leaking in any system
@ -1144,7 +1157,7 @@ fn link_args(sess: &Session,
// //
// FIXME(#11937) we should invoke the system linker directly // FIXME(#11937) we should invoke the system linker directly
if sess.targ_cfg.os != abi::OsWin32 { if sess.targ_cfg.os != abi::OsWin32 {
args.push("-nodefaultlibs".to_owned()); args.push("-nodefaultlibs".to_strbuf());
} }
// If we're building a dylib, we don't use --gc-sections because LLVM has // If we're building a dylib, we don't use --gc-sections because LLVM has
@ -1152,20 +1165,20 @@ fn link_args(sess: &Session,
// metadata. If we're building an executable, however, --gc-sections drops // metadata. If we're building an executable, however, --gc-sections drops
// the size of hello world from 1.8MB to 597K, a 67% reduction. // the size of hello world from 1.8MB to 597K, a 67% reduction.
if !dylib && sess.targ_cfg.os != abi::OsMacos { if !dylib && sess.targ_cfg.os != abi::OsMacos {
args.push("-Wl,--gc-sections".to_owned()); args.push("-Wl,--gc-sections".to_strbuf());
} }
if sess.targ_cfg.os == abi::OsLinux { if sess.targ_cfg.os == abi::OsLinux {
// GNU-style linkers will use this to omit linking to libraries which // GNU-style linkers will use this to omit linking to libraries which
// don't actually fulfill any relocations, but only for libraries which // don't actually fulfill any relocations, but only for libraries which
// follow this flag. Thus, use it before specifying libraries to link to. // follow this flag. Thus, use it before specifying libraries to link to.
args.push("-Wl,--as-needed".to_owned()); args.push("-Wl,--as-needed".to_strbuf());
// GNU-style linkers support optimization with -O. GNU ld doesn't need a // GNU-style linkers support optimization with -O. GNU ld doesn't need a
// numeric argument, but other linkers do. // numeric argument, but other linkers do.
if sess.opts.optimize == config::Default || if sess.opts.optimize == config::Default ||
sess.opts.optimize == config::Aggressive { sess.opts.optimize == config::Aggressive {
args.push("-Wl,-O1".to_owned()); args.push("-Wl,-O1".to_strbuf());
} }
} else if sess.targ_cfg.os == abi::OsMacos { } else if sess.targ_cfg.os == abi::OsMacos {
// The dead_strip option to the linker specifies that functions and data // The dead_strip option to the linker specifies that functions and data
@ -1178,14 +1191,14 @@ fn link_args(sess: &Session,
// won't get much benefit from dylibs because LLVM will have already // won't get much benefit from dylibs because LLVM will have already
// stripped away as much as it could. This has not been seen to impact // stripped away as much as it could. This has not been seen to impact
// link times negatively. // link times negatively.
args.push("-Wl,-dead_strip".to_owned()); args.push("-Wl,-dead_strip".to_strbuf());
} }
if sess.targ_cfg.os == abi::OsWin32 { if sess.targ_cfg.os == abi::OsWin32 {
// Make sure that we link to the dynamic libgcc, otherwise cross-module // Make sure that we link to the dynamic libgcc, otherwise cross-module
// DWARF stack unwinding will not work. // DWARF stack unwinding will not work.
// This behavior may be overridden by --link-args "-static-libgcc" // This behavior may be overridden by --link-args "-static-libgcc"
args.push("-shared-libgcc".to_owned()); args.push("-shared-libgcc".to_strbuf());
// And here, we see obscure linker flags #45. On windows, it has been // And here, we see obscure linker flags #45. On windows, it has been
// found to be necessary to have this flag to compile liblibc. // found to be necessary to have this flag to compile liblibc.
@ -1212,13 +1225,13 @@ fn link_args(sess: &Session,
// //
// [1] - https://sourceware.org/bugzilla/show_bug.cgi?id=13130 // [1] - https://sourceware.org/bugzilla/show_bug.cgi?id=13130
// [2] - https://code.google.com/p/go/issues/detail?id=2139 // [2] - https://code.google.com/p/go/issues/detail?id=2139
args.push("-Wl,--enable-long-section-names".to_owned()); args.push("-Wl,--enable-long-section-names".to_strbuf());
} }
if sess.targ_cfg.os == abi::OsAndroid { if sess.targ_cfg.os == abi::OsAndroid {
// Many of the symbols defined in compiler-rt are also defined in libgcc. // Many of the symbols defined in compiler-rt are also defined in libgcc.
// Android linker doesn't like that by default. // Android linker doesn't like that by default.
args.push("-Wl,--allow-multiple-definition".to_owned()); args.push("-Wl,--allow-multiple-definition".to_strbuf());
} }
// Take careful note of the ordering of the arguments we pass to the linker // Take careful note of the ordering of the arguments we pass to the linker
@ -1263,22 +1276,23 @@ fn link_args(sess: &Session,
if dylib { if dylib {
// On mac we need to tell the linker to let this library be rpathed // On mac we need to tell the linker to let this library be rpathed
if sess.targ_cfg.os == abi::OsMacos { if sess.targ_cfg.os == abi::OsMacos {
args.push("-dynamiclib".to_owned()); args.push("-dynamiclib".to_strbuf());
args.push("-Wl,-dylib".to_owned()); args.push("-Wl,-dylib".to_strbuf());
// FIXME (#9639): This needs to handle non-utf8 paths // FIXME (#9639): This needs to handle non-utf8 paths
if !sess.opts.cg.no_rpath { if !sess.opts.cg.no_rpath {
args.push("-Wl,-install_name,@rpath/".to_owned() + args.push(format_strbuf!("-Wl,-install_name,@rpath/{}",
out_filename.filename_str().unwrap()); out_filename.filename_str()
.unwrap()));
} }
} else { } else {
args.push("-shared".to_owned()) args.push("-shared".to_strbuf())
} }
} }
if sess.targ_cfg.os == abi::OsFreebsd { if sess.targ_cfg.os == abi::OsFreebsd {
args.push_all(["-L/usr/local/lib".to_owned(), args.push_all(["-L/usr/local/lib".to_strbuf(),
"-L/usr/local/lib/gcc46".to_owned(), "-L/usr/local/lib/gcc46".to_strbuf(),
"-L/usr/local/lib/gcc44".to_owned()]); "-L/usr/local/lib/gcc44".to_strbuf()]);
} }
// FIXME (#2397): At some point we want to rpath our guesses as to // FIXME (#2397): At some point we want to rpath our guesses as to
@ -1295,7 +1309,7 @@ fn link_args(sess: &Session,
// //
// This is the end of the command line, so this library is used to resolve // This is the end of the command line, so this library is used to resolve
// *all* undefined symbols in all other libraries, and this is intentional. // *all* undefined symbols in all other libraries, and this is intentional.
args.push("-lcompiler-rt".to_owned()); args.push("-lcompiler-rt".to_strbuf());
// Finally add all the linker arguments provided on the command line along // Finally add all the linker arguments provided on the command line along
// with any #[link_args] attributes found inside the crate // with any #[link_args] attributes found inside the crate
@ -1317,16 +1331,16 @@ fn link_args(sess: &Session,
// Also note that the native libraries linked here are only the ones located // Also note that the native libraries linked here are only the ones located
// in the current crate. Upstream crates with native library dependencies // in the current crate. Upstream crates with native library dependencies
// may have their native library pulled in above. // may have their native library pulled in above.
fn add_local_native_libraries(args: &mut Vec<~str>, sess: &Session) { fn add_local_native_libraries(args: &mut Vec<StrBuf>, sess: &Session) {
for path in sess.opts.addl_lib_search_paths.borrow().iter() { for path in sess.opts.addl_lib_search_paths.borrow().iter() {
// FIXME (#9639): This needs to handle non-utf8 paths // FIXME (#9639): This needs to handle non-utf8 paths
args.push("-L" + path.as_str().unwrap().to_owned()); args.push(("-L" + path.as_str().unwrap().to_owned()).to_strbuf());
} }
let rustpath = filesearch::rust_path(); let rustpath = filesearch::rust_path();
for path in rustpath.iter() { for path in rustpath.iter() {
// FIXME (#9639): This needs to handle non-utf8 paths // FIXME (#9639): This needs to handle non-utf8 paths
args.push("-L" + path.as_str().unwrap().to_owned()); args.push(("-L" + path.as_str().unwrap().to_owned()).to_strbuf());
} }
// Some platforms take hints about whether a library is static or dynamic. // Some platforms take hints about whether a library is static or dynamic.
@ -1340,21 +1354,21 @@ fn add_local_native_libraries(args: &mut Vec<~str>, sess: &Session) {
cstore::NativeUnknown | cstore::NativeStatic => { cstore::NativeUnknown | cstore::NativeStatic => {
if takes_hints { if takes_hints {
if kind == cstore::NativeStatic { if kind == cstore::NativeStatic {
args.push("-Wl,-Bstatic".to_owned()); args.push("-Wl,-Bstatic".to_strbuf());
} else { } else {
args.push("-Wl,-Bdynamic".to_owned()); args.push("-Wl,-Bdynamic".to_strbuf());
} }
} }
args.push("-l" + *l); args.push(format_strbuf!("-l{}", *l));
} }
cstore::NativeFramework => { cstore::NativeFramework => {
args.push("-framework".to_owned()); args.push("-framework".to_strbuf());
args.push(l.to_owned()); args.push(l.to_strbuf());
} }
} }
} }
if takes_hints { if takes_hints {
args.push("-Wl,-Bdynamic".to_owned()); args.push("-Wl,-Bdynamic".to_strbuf());
} }
} }
@ -1363,7 +1377,7 @@ fn add_local_native_libraries(args: &mut Vec<~str>, sess: &Session) {
// Rust crates are not considered at all when creating an rlib output. All // Rust crates are not considered at all when creating an rlib output. All
// dependencies will be linked when producing the final output (instead of // dependencies will be linked when producing the final output (instead of
// the intermediate rlib version) // the intermediate rlib version)
fn add_upstream_rust_crates(args: &mut Vec<~str>, sess: &Session, fn add_upstream_rust_crates(args: &mut Vec<StrBuf>, sess: &Session,
dylib: bool, tmpdir: &Path, dylib: bool, tmpdir: &Path,
trans: &CrateTranslation) { trans: &CrateTranslation) {
// All of the heavy lifting has previously been accomplished by the // All of the heavy lifting has previously been accomplished by the
@ -1405,16 +1419,16 @@ fn add_upstream_rust_crates(args: &mut Vec<~str>, sess: &Session,
} }
// Converts a library file-stem into a cc -l argument // Converts a library file-stem into a cc -l argument
fn unlib(config: &config::Config, stem: &str) -> ~str { fn unlib(config: &config::Config, stem: &str) -> StrBuf {
if stem.starts_with("lib") && config.os != abi::OsWin32 { if stem.starts_with("lib") && config.os != abi::OsWin32 {
stem.slice(3, stem.len()).to_owned() stem.slice(3, stem.len()).to_strbuf()
} else { } else {
stem.to_owned() stem.to_strbuf()
} }
} }
// Adds the static "rlib" versions of all crates to the command line. // Adds the static "rlib" versions of all crates to the command line.
fn add_static_crate(args: &mut Vec<~str>, sess: &Session, tmpdir: &Path, fn add_static_crate(args: &mut Vec<StrBuf>, sess: &Session, tmpdir: &Path,
cnum: ast::CrateNum, cratepath: Path) { cnum: ast::CrateNum, cratepath: Path) {
// When performing LTO on an executable output, all of the // When performing LTO on an executable output, all of the
// bytecode from the upstream libraries has already been // bytecode from the upstream libraries has already been
@ -1445,21 +1459,21 @@ fn add_upstream_rust_crates(args: &mut Vec<~str>, sess: &Session,
sess.abort_if_errors(); sess.abort_if_errors();
} }
} }
let dst_str = dst.as_str().unwrap().to_owned(); let dst_str = dst.as_str().unwrap().to_strbuf();
let mut archive = Archive::open(sess, dst); let mut archive = Archive::open(sess, dst);
archive.remove_file(format!("{}.o", name)); archive.remove_file(format!("{}.o", name));
let files = archive.files(); let files = archive.files();
if files.iter().any(|s| s.ends_with(".o")) { if files.iter().any(|s| s.as_slice().ends_with(".o")) {
args.push(dst_str); args.push(dst_str);
} }
}); });
} else { } else {
args.push(cratepath.as_str().unwrap().to_owned()); args.push(cratepath.as_str().unwrap().to_strbuf());
} }
} }
// Same thing as above, but for dynamic crates instead of static crates. // Same thing as above, but for dynamic crates instead of static crates.
fn add_dynamic_crate(args: &mut Vec<~str>, sess: &Session, fn add_dynamic_crate(args: &mut Vec<StrBuf>, sess: &Session,
cratepath: Path) { cratepath: Path) {
// If we're performing LTO, then it should have been previously required // If we're performing LTO, then it should have been previously required
// that all upstream rust dependencies were available in an rlib format. // that all upstream rust dependencies were available in an rlib format.
@ -1468,9 +1482,11 @@ fn add_upstream_rust_crates(args: &mut Vec<~str>, sess: &Session,
// Just need to tell the linker about where the library lives and // Just need to tell the linker about where the library lives and
// what its name is // what its name is
let dir = cratepath.dirname_str().unwrap(); let dir = cratepath.dirname_str().unwrap();
if !dir.is_empty() { args.push("-L" + dir); } if !dir.is_empty() {
args.push(format_strbuf!("-L{}", dir));
}
let libarg = unlib(&sess.targ_cfg, cratepath.filestem_str().unwrap()); let libarg = unlib(&sess.targ_cfg, cratepath.filestem_str().unwrap());
args.push("-l" + libarg); args.push(format_strbuf!("-l{}", libarg));
} }
} }
@ -1492,7 +1508,7 @@ fn add_upstream_rust_crates(args: &mut Vec<~str>, sess: &Session,
// generic function calls a native function, then the generic function must // generic function calls a native function, then the generic function must
// be instantiated in the target crate, meaning that the native symbol must // be instantiated in the target crate, meaning that the native symbol must
// also be resolved in the target crate. // also be resolved in the target crate.
fn add_upstream_native_libraries(args: &mut Vec<~str>, sess: &Session) { fn add_upstream_native_libraries(args: &mut Vec<StrBuf>, sess: &Session) {
// Be sure to use a topological sorting of crates because there may be // Be sure to use a topological sorting of crates because there may be
// interdependencies between native libraries. When passing -nodefaultlibs, // interdependencies between native libraries. When passing -nodefaultlibs,
// for example, almost all native libraries depend on libc, so we have to // for example, almost all native libraries depend on libc, so we have to
@ -1507,10 +1523,12 @@ fn add_upstream_native_libraries(args: &mut Vec<~str>, sess: &Session) {
let libs = csearch::get_native_libraries(&sess.cstore, cnum); let libs = csearch::get_native_libraries(&sess.cstore, cnum);
for &(kind, ref lib) in libs.iter() { for &(kind, ref lib) in libs.iter() {
match kind { match kind {
cstore::NativeUnknown => args.push("-l" + *lib), cstore::NativeUnknown => {
args.push(format_strbuf!("-l{}", *lib))
}
cstore::NativeFramework => { cstore::NativeFramework => {
args.push("-framework".to_owned()); args.push("-framework".to_strbuf());
args.push(lib.to_owned()); args.push(lib.to_strbuf());
} }
cstore::NativeStatic => { cstore::NativeStatic => {
sess.bug("statics shouldn't be propagated"); sess.bug("statics shouldn't be propagated");

@ -20,7 +20,7 @@ use libc;
use flate; use flate;
pub fn run(sess: &session::Session, llmod: ModuleRef, pub fn run(sess: &session::Session, llmod: ModuleRef,
tm: TargetMachineRef, reachable: &[~str]) { tm: TargetMachineRef, reachable: &[StrBuf]) {
if sess.opts.cg.prefer_dynamic { if sess.opts.cg.prefer_dynamic {
sess.err("cannot prefer dynamic linking when performing LTO"); sess.err("cannot prefer dynamic linking when performing LTO");
sess.note("only 'staticlib' and 'bin' outputs are supported with LTO"); sess.note("only 'staticlib' and 'bin' outputs are supported with LTO");
@ -67,13 +67,16 @@ pub fn run(sess: &session::Session, llmod: ModuleRef,
if !llvm::LLVMRustLinkInExternalBitcode(llmod, if !llvm::LLVMRustLinkInExternalBitcode(llmod,
ptr as *libc::c_char, ptr as *libc::c_char,
bc.len() as libc::size_t) { bc.len() as libc::size_t) {
link::llvm_err(sess, format!("failed to load bc of `{}`", name)); link::llvm_err(sess,
(format_strbuf!("failed to load bc of `{}`",
name)));
} }
}); });
} }
// Internalize everything but the reachable symbols of the current module // Internalize everything but the reachable symbols of the current module
let cstrs: Vec<::std::c_str::CString> = reachable.iter().map(|s| s.to_c_str()).collect(); let cstrs: Vec<::std::c_str::CString> =
reachable.iter().map(|s| s.as_slice().to_c_str()).collect();
let arr: Vec<*i8> = cstrs.iter().map(|c| c.with_ref(|p| p)).collect(); let arr: Vec<*i8> = cstrs.iter().map(|c| c.with_ref(|p| p)).collect();
let ptr = arr.as_ptr(); let ptr = arr.as_ptr();
unsafe { unsafe {

@ -13,51 +13,52 @@ use driver::config::cfg_os_to_meta_os;
use metadata::loader::meta_section_name; use metadata::loader::meta_section_name;
use syntax::abi; use syntax::abi;
pub fn get_target_strs(target_triple: ~str, target_os: abi::Os) -> target_strs::t { pub fn get_target_strs(target_triple: StrBuf, target_os: abi::Os) -> target_strs::t {
return target_strs::t { return target_strs::t {
module_asm: "".to_owned(), module_asm: "".to_strbuf(),
meta_sect_name: meta_section_name(cfg_os_to_meta_os(target_os)).to_owned(), meta_sect_name:
meta_section_name(cfg_os_to_meta_os(target_os)).to_strbuf(),
data_layout: match target_os { data_layout: match target_os {
abi::OsMacos => { abi::OsMacos => {
"E-p:32:32:32".to_owned() + "E-p:32:32:32\
"-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64" + -i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64\
"-f32:32:32-f64:64:64" + -f32:32:32-f64:64:64\
"-v64:64:64-v128:64:128" + -v64:64:64-v128:64:128\
"-a0:0:64-n32" -a0:0:64-n32".to_strbuf()
} }
abi::OsWin32 => { abi::OsWin32 => {
"E-p:32:32:32".to_owned() + "E-p:32:32:32\
"-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64" + -i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64\
"-f32:32:32-f64:64:64" + -f32:32:32-f64:64:64\
"-v64:64:64-v128:64:128" + -v64:64:64-v128:64:128\
"-a0:0:64-n32" -a0:0:64-n32".to_strbuf()
} }
abi::OsLinux => { abi::OsLinux => {
"E-p:32:32:32".to_owned() + "E-p:32:32:32\
"-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64" + -i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64\
"-f32:32:32-f64:64:64" + -f32:32:32-f64:64:64\
"-v64:64:64-v128:64:128" + -v64:64:64-v128:64:128\
"-a0:0:64-n32" -a0:0:64-n32".to_strbuf()
} }
abi::OsAndroid => { abi::OsAndroid => {
"E-p:32:32:32".to_owned() + "E-p:32:32:32\
"-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64" + -i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64\
"-f32:32:32-f64:64:64" + -f32:32:32-f64:64:64\
"-v64:64:64-v128:64:128" + -v64:64:64-v128:64:128\
"-a0:0:64-n32" -a0:0:64-n32".to_strbuf()
} }
abi::OsFreebsd => { abi::OsFreebsd => {
"E-p:32:32:32".to_owned() + "E-p:32:32:32\
"-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64" + -i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64\
"-f32:32:32-f64:64:64" + -f32:32:32-f64:64:64\
"-v64:64:64-v128:64:128" + -v64:64:64-v128:64:128\
"-a0:0:64-n32" -a0:0:64-n32".to_strbuf()
} }
}, },

@ -22,7 +22,7 @@ fn not_win32(os: abi::Os) -> bool {
os != abi::OsWin32 os != abi::OsWin32
} }
pub fn get_rpath_flags(sess: &Session, out_filename: &Path) -> Vec<~str> { pub fn get_rpath_flags(sess: &Session, out_filename: &Path) -> Vec<StrBuf> {
let os = sess.targ_cfg.os; let os = sess.targ_cfg.os;
// No rpath on windows // No rpath on windows
@ -33,9 +33,9 @@ pub fn get_rpath_flags(sess: &Session, out_filename: &Path) -> Vec<~str> {
let mut flags = Vec::new(); let mut flags = Vec::new();
if sess.targ_cfg.os == abi::OsFreebsd { if sess.targ_cfg.os == abi::OsFreebsd {
flags.push_all(["-Wl,-rpath,/usr/local/lib/gcc46".to_owned(), flags.push_all(["-Wl,-rpath,/usr/local/lib/gcc46".to_strbuf(),
"-Wl,-rpath,/usr/local/lib/gcc44".to_owned(), "-Wl,-rpath,/usr/local/lib/gcc44".to_strbuf(),
"-Wl,-z,origin".to_owned()]); "-Wl,-z,origin".to_strbuf()]);
} }
debug!("preparing the RPATH!"); debug!("preparing the RPATH!");
@ -47,16 +47,19 @@ pub fn get_rpath_flags(sess: &Session, out_filename: &Path) -> Vec<~str> {
l.map(|p| p.clone()) l.map(|p| p.clone())
}).collect::<Vec<_>>(); }).collect::<Vec<_>>();
let rpaths = get_rpaths(os, sysroot, output, libs.as_slice(), let rpaths = get_rpaths(os,
sess.opts.target_triple); sysroot,
output,
libs.as_slice(),
sess.opts.target_triple.as_slice());
flags.push_all(rpaths_to_flags(rpaths.as_slice()).as_slice()); flags.push_all(rpaths_to_flags(rpaths.as_slice()).as_slice());
flags flags
} }
pub fn rpaths_to_flags(rpaths: &[~str]) -> Vec<~str> { pub fn rpaths_to_flags(rpaths: &[StrBuf]) -> Vec<StrBuf> {
let mut ret = Vec::new(); let mut ret = Vec::new();
for rpath in rpaths.iter() { for rpath in rpaths.iter() {
ret.push("-Wl,-rpath," + *rpath); ret.push(("-Wl,-rpath," + (*rpath).as_slice()).to_strbuf());
} }
return ret; return ret;
} }
@ -65,7 +68,7 @@ fn get_rpaths(os: abi::Os,
sysroot: &Path, sysroot: &Path,
output: &Path, output: &Path,
libs: &[Path], libs: &[Path],
target_triple: &str) -> Vec<~str> { target_triple: &str) -> Vec<StrBuf> {
debug!("sysroot: {}", sysroot.display()); debug!("sysroot: {}", sysroot.display());
debug!("output: {}", output.display()); debug!("output: {}", output.display());
debug!("libs:"); debug!("libs:");
@ -82,7 +85,7 @@ fn get_rpaths(os: abi::Os,
// And a final backup rpath to the global library location. // And a final backup rpath to the global library location.
let fallback_rpaths = vec!(get_install_prefix_rpath(sysroot, target_triple)); let fallback_rpaths = vec!(get_install_prefix_rpath(sysroot, target_triple));
fn log_rpaths(desc: &str, rpaths: &[~str]) { fn log_rpaths(desc: &str, rpaths: &[StrBuf]) {
debug!("{} rpaths:", desc); debug!("{} rpaths:", desc);
for rpath in rpaths.iter() { for rpath in rpaths.iter() {
debug!(" {}", *rpath); debug!(" {}", *rpath);
@ -102,14 +105,14 @@ fn get_rpaths(os: abi::Os,
fn get_rpaths_relative_to_output(os: abi::Os, fn get_rpaths_relative_to_output(os: abi::Os,
output: &Path, output: &Path,
libs: &[Path]) -> Vec<~str> { libs: &[Path]) -> Vec<StrBuf> {
libs.iter().map(|a| get_rpath_relative_to_output(os, output, a)).collect() libs.iter().map(|a| get_rpath_relative_to_output(os, output, a)).collect()
} }
pub fn get_rpath_relative_to_output(os: abi::Os, pub fn get_rpath_relative_to_output(os: abi::Os,
output: &Path, output: &Path,
lib: &Path) lib: &Path)
-> ~str { -> StrBuf {
use std::os; use std::os;
assert!(not_win32(os)); assert!(not_win32(os));
@ -129,10 +132,11 @@ pub fn get_rpath_relative_to_output(os: abi::Os,
let relative = lib.path_relative_from(&output); let relative = lib.path_relative_from(&output);
let relative = relative.expect("could not create rpath relative to output"); let relative = relative.expect("could not create rpath relative to output");
// FIXME (#9639): This needs to handle non-utf8 paths // FIXME (#9639): This needs to handle non-utf8 paths
prefix+"/"+relative.as_str().expect("non-utf8 component in path") (prefix + "/" + relative.as_str()
.expect("non-utf8 component in path")).to_strbuf()
} }
pub fn get_install_prefix_rpath(sysroot: &Path, target_triple: &str) -> ~str { pub fn get_install_prefix_rpath(sysroot: &Path, target_triple: &str) -> StrBuf {
let install_prefix = option_env!("CFG_PREFIX").expect("CFG_PREFIX"); let install_prefix = option_env!("CFG_PREFIX").expect("CFG_PREFIX");
let tlib = filesearch::relative_target_lib_path(sysroot, target_triple); let tlib = filesearch::relative_target_lib_path(sysroot, target_triple);
@ -140,10 +144,10 @@ pub fn get_install_prefix_rpath(sysroot: &Path, target_triple: &str) -> ~str {
path.push(&tlib); path.push(&tlib);
let path = os::make_absolute(&path); let path = os::make_absolute(&path);
// FIXME (#9639): This needs to handle non-utf8 paths // FIXME (#9639): This needs to handle non-utf8 paths
path.as_str().expect("non-utf8 component in rpath").to_owned() path.as_str().expect("non-utf8 component in rpath").to_strbuf()
} }
pub fn minimize_rpaths(rpaths: &[~str]) -> Vec<~str> { pub fn minimize_rpaths(rpaths: &[StrBuf]) -> Vec<StrBuf> {
let mut set = HashSet::new(); let mut set = HashSet::new();
let mut minimized = Vec::new(); let mut minimized = Vec::new();
for rpath in rpaths.iter() { for rpath in rpaths.iter() {
@ -163,8 +167,13 @@ mod test {
#[test] #[test]
fn test_rpaths_to_flags() { fn test_rpaths_to_flags() {
let flags = rpaths_to_flags(["path1".to_owned(), "path2".to_owned()]); let flags = rpaths_to_flags([
assert_eq!(flags, vec!("-Wl,-rpath,path1".to_owned(), "-Wl,-rpath,path2".to_owned())); "path1".to_strbuf(),
"path2".to_strbuf()
]);
assert_eq!(flags,
vec!("-Wl,-rpath,path1".to_strbuf(),
"-Wl,-rpath,path2".to_strbuf()));
} }
#[test] #[test]
@ -190,18 +199,37 @@ mod test {
#[test] #[test]
fn test_minimize1() { fn test_minimize1() {
let res = minimize_rpaths(["rpath1".to_owned(), "rpath2".to_owned(), "rpath1".to_owned()]); let res = minimize_rpaths([
assert!(res.as_slice() == ["rpath1".to_owned(), "rpath2".to_owned()]); "rpath1".to_strbuf(),
"rpath2".to_strbuf(),
"rpath1".to_strbuf()
]);
assert!(res.as_slice() == [
"rpath1".to_strbuf(),
"rpath2".to_strbuf()
]);
} }
#[test] #[test]
fn test_minimize2() { fn test_minimize2() {
let res = minimize_rpaths(["1a".to_owned(), "2".to_owned(), "2".to_owned(), let res = minimize_rpaths([
"1a".to_owned(), "4a".to_owned(), "1a".to_owned(), "1a".to_strbuf(),
"2".to_owned(), "3".to_owned(), "4a".to_owned(), "2".to_strbuf(),
"3".to_owned()]); "2".to_strbuf(),
assert!(res.as_slice() == ["1a".to_owned(), "2".to_owned(), "4a".to_owned(), "1a".to_strbuf(),
"3".to_owned()]); "4a".to_strbuf(),
"1a".to_strbuf(),
"2".to_strbuf(),
"3".to_strbuf(),
"4a".to_strbuf(),
"3".to_strbuf()
]);
assert!(res.as_slice() == [
"1a".to_strbuf(),
"2".to_strbuf(),
"4a".to_strbuf(),
"3".to_strbuf()
]);
} }
#[test] #[test]

@ -54,13 +54,13 @@ use syntax::ast;
#[deriving(Clone, Eq)] #[deriving(Clone, Eq)]
pub struct Svh { pub struct Svh {
hash: ~str, hash: StrBuf,
} }
impl Svh { impl Svh {
pub fn new(hash: &str) -> Svh { pub fn new(hash: &str) -> Svh {
assert!(hash.len() == 16); assert!(hash.len() == 16);
Svh { hash: hash.to_owned() } Svh { hash: hash.to_strbuf() }
} }
pub fn as_str<'a>(&'a self) -> &'a str { pub fn as_str<'a>(&'a self) -> &'a str {

@ -11,9 +11,9 @@
#![allow(non_camel_case_types)] #![allow(non_camel_case_types)]
pub struct t { pub struct t {
pub module_asm: ~str, pub module_asm: StrBuf,
pub meta_sect_name: ~str, pub meta_sect_name: StrBuf,
pub data_layout: ~str, pub data_layout: StrBuf,
pub target_triple: ~str, pub target_triple: StrBuf,
pub cc_args: Vec<~str> , pub cc_args: Vec<StrBuf> ,
} }

@ -14,38 +14,41 @@ use driver::config::cfg_os_to_meta_os;
use metadata::loader::meta_section_name; use metadata::loader::meta_section_name;
use syntax::abi; use syntax::abi;
pub fn get_target_strs(target_triple: ~str, target_os: abi::Os) -> target_strs::t { pub fn get_target_strs(target_triple: StrBuf, target_os: abi::Os)
-> target_strs::t {
return target_strs::t { return target_strs::t {
module_asm: "".to_owned(), module_asm: "".to_strbuf(),
meta_sect_name: meta_section_name(cfg_os_to_meta_os(target_os)).to_owned(), meta_sect_name:
meta_section_name(cfg_os_to_meta_os(target_os)).to_strbuf(),
data_layout: match target_os { data_layout: match target_os {
abi::OsMacos => { abi::OsMacos => {
"e-p:32:32:32-i1:8:8-i8:8:8-i16:16:16".to_owned() + "e-p:32:32:32-i1:8:8-i8:8:8-i16:16:16\
"-i32:32:32-i64:32:64" + -i32:32:32-i64:32:64\
"-f32:32:32-f64:32:64-v64:64:64" + -f32:32:32-f64:32:64-v64:64:64\
"-v128:128:128-a0:0:64-f80:128:128" + "-n8:16:32" -v128:128:128-a0:0:64-f80:128:128\
-n8:16:32".to_strbuf()
} }
abi::OsWin32 => { abi::OsWin32 => {
"e-p:32:32-f64:64:64-i64:64:64-f80:32:32-n8:16:32".to_owned() "e-p:32:32-f64:64:64-i64:64:64-f80:32:32-n8:16:32".to_strbuf()
} }
abi::OsLinux => { abi::OsLinux => {
"e-p:32:32-f64:32:64-i64:32:64-f80:32:32-n8:16:32".to_owned() "e-p:32:32-f64:32:64-i64:32:64-f80:32:32-n8:16:32".to_strbuf()
} }
abi::OsAndroid => { abi::OsAndroid => {
"e-p:32:32-f64:32:64-i64:32:64-f80:32:32-n8:16:32".to_owned() "e-p:32:32-f64:32:64-i64:32:64-f80:32:32-n8:16:32".to_strbuf()
} }
abi::OsFreebsd => { abi::OsFreebsd => {
"e-p:32:32-f64:32:64-i64:32:64-f80:32:32-n8:16:32".to_owned() "e-p:32:32-f64:32:64-i64:32:64-f80:32:32-n8:16:32".to_strbuf()
} }
}, },
target_triple: target_triple, target_triple: target_triple,
cc_args: vec!("-m32".to_owned()), cc_args: vec!("-m32".to_strbuf()),
}; };
} }

@ -14,46 +14,47 @@ use driver::config::cfg_os_to_meta_os;
use metadata::loader::meta_section_name; use metadata::loader::meta_section_name;
use syntax::abi; use syntax::abi;
pub fn get_target_strs(target_triple: ~str, target_os: abi::Os) -> target_strs::t { pub fn get_target_strs(target_triple: StrBuf, target_os: abi::Os) -> target_strs::t {
return target_strs::t { return target_strs::t {
module_asm: "".to_owned(), module_asm: "".to_strbuf(),
meta_sect_name: meta_section_name(cfg_os_to_meta_os(target_os)).to_owned(), meta_sect_name:
meta_section_name(cfg_os_to_meta_os(target_os)).to_strbuf(),
data_layout: match target_os { data_layout: match target_os {
abi::OsMacos => { abi::OsMacos => {
"e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-".to_owned()+ "e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-\
"f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-"+ f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-\
"s0:64:64-f80:128:128-n8:16:32:64" s0:64:64-f80:128:128-n8:16:32:64".to_strbuf()
} }
abi::OsWin32 => { abi::OsWin32 => {
// FIXME: Test this. Copied from linux (#2398) // FIXME: Test this. Copied from linux (#2398)
"e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-".to_owned()+ "e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-\
"f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-"+ f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-\
"s0:64:64-f80:128:128-n8:16:32:64-S128" s0:64:64-f80:128:128-n8:16:32:64-S128".to_strbuf()
} }
abi::OsLinux => { abi::OsLinux => {
"e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-".to_owned()+ "e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-\
"f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-"+ f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-\
"s0:64:64-f80:128:128-n8:16:32:64-S128" s0:64:64-f80:128:128-n8:16:32:64-S128".to_strbuf()
} }
abi::OsAndroid => { abi::OsAndroid => {
"e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-".to_owned()+ "e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-\
"f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-"+ f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-\
"s0:64:64-f80:128:128-n8:16:32:64-S128" s0:64:64-f80:128:128-n8:16:32:64-S128".to_strbuf()
} }
abi::OsFreebsd => { abi::OsFreebsd => {
"e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-".to_owned()+ "e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-\
"f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-"+ f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-\
"s0:64:64-f80:128:128-n8:16:32:64-S128" s0:64:64-f80:128:128-n8:16:32:64-S128".to_strbuf()
} }
}, },
target_triple: target_triple, target_triple: target_triple,
cc_args: vec!("-m64".to_owned()), cc_args: vec!("-m64".to_strbuf()),
}; };
} }

@ -76,7 +76,7 @@ pub struct Options {
// this. // this.
pub addl_lib_search_paths: RefCell<HashSet<Path>>, pub addl_lib_search_paths: RefCell<HashSet<Path>>,
pub maybe_sysroot: Option<Path>, pub maybe_sysroot: Option<Path>,
pub target_triple: ~str, pub target_triple: StrBuf,
// User-specified cfg meta items. The compiler itself will add additional // User-specified cfg meta items. The compiler itself will add additional
// items to the crate config, and during parsing the entire crate config // items to the crate config, and during parsing the entire crate config
// will be added to the crate AST node. This should not be used for // will be added to the crate AST node. This should not be used for
@ -105,7 +105,7 @@ pub fn basic_options() -> Options {
output_types: Vec::new(), output_types: Vec::new(),
addl_lib_search_paths: RefCell::new(HashSet::new()), addl_lib_search_paths: RefCell::new(HashSet::new()),
maybe_sysroot: None, maybe_sysroot: None,
target_triple: driver::host_triple().to_owned(), target_triple: driver::host_triple().to_strbuf(),
cfg: Vec::new(), cfg: Vec::new(),
test: false, test: false,
parse_only: false, parse_only: false,
@ -247,26 +247,26 @@ macro_rules! cgoptions(
} }
} }
fn parse_opt_string(slot: &mut Option<~str>, v: Option<&str>) -> bool { fn parse_opt_string(slot: &mut Option<StrBuf>, v: Option<&str>) -> bool {
match v { match v {
Some(s) => { *slot = Some(s.to_owned()); true }, Some(s) => { *slot = Some(s.to_strbuf()); true },
None => false, None => false,
} }
} }
fn parse_string(slot: &mut ~str, v: Option<&str>) -> bool { fn parse_string(slot: &mut StrBuf, v: Option<&str>) -> bool {
match v { match v {
Some(s) => { *slot = s.to_owned(); true }, Some(s) => { *slot = s.to_strbuf(); true },
None => false, None => false,
} }
} }
fn parse_list(slot: &mut Vec<~str>, v: Option<&str>) fn parse_list(slot: &mut Vec<StrBuf>, v: Option<&str>)
-> bool { -> bool {
match v { match v {
Some(s) => { Some(s) => {
for s in s.words() { for s in s.words() {
slot.push(s.to_owned()); slot.push(s.to_strbuf());
} }
true true
}, },
@ -278,23 +278,23 @@ macro_rules! cgoptions(
) ) ) )
cgoptions!( cgoptions!(
ar: Option<~str> = (None, parse_opt_string, ar: Option<StrBuf> = (None, parse_opt_string,
"tool to assemble archives with"), "tool to assemble archives with"),
linker: Option<~str> = (None, parse_opt_string, linker: Option<StrBuf> = (None, parse_opt_string,
"system linker to link outputs with"), "system linker to link outputs with"),
link_args: Vec<~str> = (Vec::new(), parse_list, link_args: Vec<StrBuf> = (Vec::new(), parse_list,
"extra arguments to pass to the linker (space separated)"), "extra arguments to pass to the linker (space separated)"),
target_cpu: ~str = ("generic".to_owned(), parse_string, target_cpu: StrBuf = ("generic".to_strbuf(), parse_string,
"select target processor (llc -mcpu=help for details)"), "select target processor (llc -mcpu=help for details)"),
target_feature: ~str = ("".to_owned(), parse_string, target_feature: StrBuf = ("".to_strbuf(), parse_string,
"target specific attributes (llc -mattr=help for details)"), "target specific attributes (llc -mattr=help for details)"),
passes: Vec<~str> = (Vec::new(), parse_list, passes: Vec<StrBuf> = (Vec::new(), parse_list,
"a list of extra LLVM passes to run (space separated)"), "a list of extra LLVM passes to run (space separated)"),
llvm_args: Vec<~str> = (Vec::new(), parse_list, llvm_args: Vec<StrBuf> = (Vec::new(), parse_list,
"a list of arguments to pass to llvm (space separated)"), "a list of arguments to pass to llvm (space separated)"),
save_temps: bool = (false, parse_bool, save_temps: bool = (false, parse_bool,
"save all temporary output files during compilation"), "save all temporary output files during compilation"),
android_cross_path: Option<~str> = (None, parse_opt_string, android_cross_path: Option<StrBuf> = (None, parse_opt_string,
"the path to the Android NDK"), "the path to the Android NDK"),
no_rpath: bool = (false, parse_bool, no_rpath: bool = (false, parse_bool,
"disables setting the rpath in libs/exes"), "disables setting the rpath in libs/exes"),
@ -310,7 +310,7 @@ cgoptions!(
"prefer dynamic linking to static linking"), "prefer dynamic linking to static linking"),
no_integrated_as: bool = (false, parse_bool, no_integrated_as: bool = (false, parse_bool,
"use an external assembler rather than LLVM's integrated one"), "use an external assembler rather than LLVM's integrated one"),
relocation_model: ~str = ("pic".to_owned(), parse_string, relocation_model: StrBuf = ("pic".to_strbuf(), parse_string,
"choose the relocation model to use (llc -relocation-model for details)"), "choose the relocation model to use (llc -relocation-model for details)"),
) )
@ -456,13 +456,16 @@ static architecture_abis : &'static [(&'static str, abi::Architecture)] = &'stat
("mips", abi::Mips)]; ("mips", abi::Mips)];
pub fn build_target_config(sopts: &Options) -> Config { pub fn build_target_config(sopts: &Options) -> Config {
let os = match get_os(sopts.target_triple) { let os = match get_os(sopts.target_triple.as_slice()) {
Some(os) => os, Some(os) => os,
None => early_error("unknown operating system") None => early_error("unknown operating system")
}; };
let arch = match get_arch(sopts.target_triple) { let arch = match get_arch(sopts.target_triple.as_slice()) {
Some(arch) => arch, Some(arch) => arch,
None => early_error("unknown architecture: " + sopts.target_triple) None => {
early_error("unknown architecture: " +
sopts.target_triple.as_slice())
}
}; };
let (int_type, uint_type) = match arch { let (int_type, uint_type) = match arch {
abi::X86 => (ast::TyI32, ast::TyU32), abi::X86 => (ast::TyI32, ast::TyU32),
@ -541,7 +544,7 @@ pub fn optgroups() -> Vec<getopts::OptGroup> {
// Convert strings provided as --cfg [cfgspec] into a crate_cfg // Convert strings provided as --cfg [cfgspec] into a crate_cfg
fn parse_cfgspecs(cfgspecs: Vec<~str> ) -> ast::CrateConfig { fn parse_cfgspecs(cfgspecs: Vec<StrBuf> ) -> ast::CrateConfig {
cfgspecs.move_iter().map(|s| { cfgspecs.move_iter().map(|s| {
parse::parse_meta_from_source_str("cfgspec".to_strbuf(), parse::parse_meta_from_source_str("cfgspec".to_strbuf(),
s.to_strbuf(), s.to_strbuf(),
@ -639,7 +642,10 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
} }
let sysroot_opt = matches.opt_str("sysroot").map(|m| Path::new(m)); let sysroot_opt = matches.opt_str("sysroot").map(|m| Path::new(m));
let target = matches.opt_str("target").unwrap_or(driver::host_triple().to_owned()); let target = match matches.opt_str("target") {
Some(supplied_target) => supplied_target.to_strbuf(),
None => driver::host_triple().to_strbuf(),
};
let opt_level = { let opt_level = {
if (debugging_opts & NO_OPT) != 0 { if (debugging_opts & NO_OPT) != 0 {
No No
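The target-triple defaulting a few lines above shows the recurring shape of this migration: values that still arrive as old-style owned strings (here from getopts) are converted once at the boundary with to_strbuf(), and callers that want a &str borrow the StrBuf back with as_slice(), as build_target_config now does before calling get_os and get_arch. A hypothetical standalone version of that defaulting, in the same dialect (the parameters stand in for matches.opt_str("target") and driver::host_triple()):

    fn pick_target(supplied: Option<&str>, host: &str) -> StrBuf {
        match supplied {
            Some(t) => t.to_strbuf(),  // explicit --target wins
            None => host.to_strbuf(),  // otherwise fall back to the host triple
        }
    }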
@ -689,10 +695,14 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
Path::new(s.as_slice()) Path::new(s.as_slice())
}).collect(); }).collect();
let cfg = parse_cfgspecs(matches.opt_strs("cfg").move_iter().collect()); let cfg = parse_cfgspecs(matches.opt_strs("cfg")
.move_iter()
.map(|x| x.to_strbuf())
.collect());
let test = matches.opt_present("test"); let test = matches.opt_present("test");
let write_dependency_info = (matches.opt_present("dep-info"), let write_dependency_info = (matches.opt_present("dep-info"),
matches.opt_str("dep-info").map(|p| Path::new(p))); matches.opt_str("dep-info")
.map(|p| Path::new(p)));
let print_metas = (matches.opt_present("crate-id"), let print_metas = (matches.opt_present("crate-id"),
matches.opt_present("crate-name"), matches.opt_present("crate-name"),


@ -71,9 +71,9 @@ pub fn compile_input(sess: Session,
&sess); &sess);
let loader = &mut Loader::new(&sess); let loader = &mut Loader::new(&sess);
let id = link::find_crate_id(krate.attrs.as_slice(), let id = link::find_crate_id(krate.attrs.as_slice(),
outputs.out_filestem); outputs.out_filestem.as_slice());
let (expanded_crate, ast_map) = phase_2_configure_and_expand(&sess, loader, let (expanded_crate, ast_map) =
krate, &id); phase_2_configure_and_expand(&sess, loader, krate, &id);
(outputs, expanded_crate, ast_map) (outputs, expanded_crate, ast_map)
}; };
write_out_deps(&sess, input, &outputs, &expanded_crate); write_out_deps(&sess, input, &outputs, &expanded_crate);
@ -99,14 +99,14 @@ pub fn compile_input(sess: Session,
* The name used for source code that doesn't originate in a file * The name used for source code that doesn't originate in a file
* (e.g. source from stdin or a string) * (e.g. source from stdin or a string)
*/ */
pub fn anon_src() -> ~str { pub fn anon_src() -> StrBuf {
"<anon>".to_str() "<anon>".to_strbuf()
} }
pub fn source_name(input: &Input) -> ~str { pub fn source_name(input: &Input) -> StrBuf {
match *input { match *input {
// FIXME (#9639): This needs to handle non-utf8 paths // FIXME (#9639): This needs to handle non-utf8 paths
FileInput(ref ifile) => ifile.as_str().unwrap().to_str(), FileInput(ref ifile) => ifile.as_str().unwrap().to_strbuf(),
StrInput(_) => anon_src() StrInput(_) => anon_src()
} }
} }
@ -115,14 +115,14 @@ pub enum Input {
/// Load source from file /// Load source from file
FileInput(Path), FileInput(Path),
/// The string is the source /// The string is the source
StrInput(~str) StrInput(StrBuf)
} }
impl Input { impl Input {
fn filestem(&self) -> ~str { fn filestem(&self) -> StrBuf {
match *self { match *self {
FileInput(ref ifile) => ifile.filestem_str().unwrap().to_str(), FileInput(ref ifile) => ifile.filestem_str().unwrap().to_strbuf(),
StrInput(_) => "rust_out".to_owned(), StrInput(_) => "rust_out".to_strbuf(),
} }
} }
} }
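To make the two Input variants concrete: source_name is what diagnostics and the codemap see (the path for FileInput, the "<anon>" marker for StrInput), while filestem feeds the default crate-id and output name, falling back to "rust_out" when compiling from a string. A hypothetical check, as if written inside this module (the path is invented):

    fn illustrate_inputs() {
        let from_file = FileInput(Path::new("src/main.rs"));
        let from_string = StrInput("fn main() {}".to_strbuf());
        assert_eq!(source_name(&from_file).as_slice(), "src/main.rs");
        assert_eq!(from_file.filestem().as_slice(), "main");
        // No file to take a stem from: the output defaults to "rust_out",
        // and diagnostics point at "<anon>".
        assert_eq!(from_string.filestem().as_slice(), "rust_out");
        assert_eq!(source_name(&from_string).as_slice(), "<anon>");
    }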
@ -354,7 +354,7 @@ pub struct CrateTranslation {
pub metadata_module: ModuleRef, pub metadata_module: ModuleRef,
pub link: LinkMeta, pub link: LinkMeta,
pub metadata: Vec<u8>, pub metadata: Vec<u8>,
pub reachable: Vec<~str>, pub reachable: Vec<StrBuf>,
pub crate_formats: dependency_format::Dependencies, pub crate_formats: dependency_format::Dependencies,
} }
@ -450,7 +450,8 @@ fn write_out_deps(sess: &Session,
input: &Input, input: &Input,
outputs: &OutputFilenames, outputs: &OutputFilenames,
krate: &ast::Crate) { krate: &ast::Crate) {
let id = link::find_crate_id(krate.attrs.as_slice(), outputs.out_filestem); let id = link::find_crate_id(krate.attrs.as_slice(),
outputs.out_filestem.as_slice());
let mut out_filenames = Vec::new(); let mut out_filenames = Vec::new();
for output_type in sess.opts.output_types.iter() { for output_type in sess.opts.output_types.iter() {
@ -487,9 +488,9 @@ fn write_out_deps(sess: &Session,
let result = (|| { let result = (|| {
// Build a list of files used to compile the output and // Build a list of files used to compile the output and
// write Makefile-compatible dependency rules // write Makefile-compatible dependency rules
let files: Vec<~str> = sess.codemap().files.borrow() let files: Vec<StrBuf> = sess.codemap().files.borrow()
.iter().filter(|fmap| fmap.is_real_file()) .iter().filter(|fmap| fmap.is_real_file())
.map(|fmap| fmap.name.to_owned()) .map(|fmap| fmap.name.to_strbuf())
.collect(); .collect();
let mut file = try!(io::File::create(&deps_filename)); let mut file = try!(io::File::create(&deps_filename));
for path in out_filenames.iter() { for path in out_filenames.iter() {
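The loop that starts just above (its body is cut off by the hunk) writes one Makefile-compatible rule per requested output, listing every real source file the codemap recorded. The intended shape is roughly the following; the helper is illustrative, not rustc's actual code:

    // e.g. "build/libfoo.rlib: src/lib.rs src/bar.rs"
    fn dep_rule(target: &str, files: Vec<StrBuf>) -> StrBuf {
        format_strbuf!("{}: {}", target, files.connect(" "))
    }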
@ -567,7 +568,9 @@ impl pprust::PpAnn for TypedAnnotation {
try!(pp::word(&mut s.s, "as")); try!(pp::word(&mut s.s, "as"));
try!(pp::space(&mut s.s)); try!(pp::space(&mut s.s));
try!(pp::word(&mut s.s, try!(pp::word(&mut s.s,
ppaux::ty_to_str(tcx, ty::expr_ty(tcx, expr)))); ppaux::ty_to_str(
tcx,
ty::expr_ty(tcx, expr)).as_slice()));
s.pclose() s.pclose()
} }
_ => Ok(()) _ => Ok(())
@ -581,20 +584,26 @@ pub fn pretty_print_input(sess: Session,
ppm: ::driver::PpMode, ppm: ::driver::PpMode,
ofile: Option<Path>) { ofile: Option<Path>) {
let krate = phase_1_parse_input(&sess, cfg, input); let krate = phase_1_parse_input(&sess, cfg, input);
let id = link::find_crate_id(krate.attrs.as_slice(), input.filestem()); let id = link::find_crate_id(krate.attrs.as_slice(),
input.filestem().as_slice());
let (krate, ast_map, is_expanded) = match ppm { let (krate, ast_map, is_expanded) = match ppm {
PpmExpanded | PpmExpandedIdentified | PpmTyped => { PpmExpanded | PpmExpandedIdentified | PpmTyped => {
let loader = &mut Loader::new(&sess); let loader = &mut Loader::new(&sess);
let (krate, ast_map) = phase_2_configure_and_expand(&sess, loader, let (krate, ast_map) = phase_2_configure_and_expand(&sess,
krate, &id); loader,
krate,
&id);
(krate, Some(ast_map), true) (krate, Some(ast_map), true)
} }
_ => (krate, None, false) _ => (krate, None, false)
}; };
let src_name = source_name(input); let src_name = source_name(input);
let src = Vec::from_slice(sess.codemap().get_filemap(src_name).src.as_bytes()); let src = Vec::from_slice(sess.codemap()
.get_filemap(src_name.as_slice())
.src
.as_bytes());
let mut rdr = MemReader::new(src); let mut rdr = MemReader::new(src);
let out = match ofile { let out = match ofile {
@ -666,8 +675,12 @@ pub fn collect_crate_types(session: &Session,
let iter = attrs.iter().filter_map(|a| { let iter = attrs.iter().filter_map(|a| {
if a.name().equiv(&("crate_type")) { if a.name().equiv(&("crate_type")) {
match a.value_str() { match a.value_str() {
Some(ref n) if n.equiv(&("rlib")) => Some(config::CrateTypeRlib), Some(ref n) if n.equiv(&("rlib")) => {
Some(ref n) if n.equiv(&("dylib")) => Some(config::CrateTypeDylib), Some(config::CrateTypeRlib)
}
Some(ref n) if n.equiv(&("dylib")) => {
Some(config::CrateTypeDylib)
}
Some(ref n) if n.equiv(&("lib")) => { Some(ref n) if n.equiv(&("lib")) => {
Some(config::default_lib_output()) Some(config::default_lib_output())
} }
@ -679,12 +692,16 @@ pub fn collect_crate_types(session: &Session,
session.add_lint(lint::UnknownCrateType, session.add_lint(lint::UnknownCrateType,
ast::CRATE_NODE_ID, ast::CRATE_NODE_ID,
a.span, a.span,
"invalid `crate_type` value".to_owned()); "invalid `crate_type` \
value".to_strbuf());
None None
} }
_ => { _ => {
session.add_lint(lint::UnknownCrateType, ast::CRATE_NODE_ID, session.add_lint(lint::UnknownCrateType,
a.span, "`crate_type` requires a value".to_owned()); ast::CRATE_NODE_ID,
a.span,
"`crate_type` requires a \
value".to_strbuf());
None None
} }
} }
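For reference, the nested matches above boil down to a small mapping from the #[crate_type = "..."] string to an output kind, with "lib" deferring to default_lib_output() and anything unrecognised reported through the UnknownCrateType lint. A simplified, self-contained sketch (the local enum stands in for config::CrateType, and kinds not shown in the hunk are elided here too):

    enum Kind { KindRlib, KindDylib }

    fn kind_for(name: &str) -> Option<Kind> {
        match name {
            "rlib" => Some(KindRlib),
            "dylib" => Some(KindDylib),
            "lib" => Some(KindRlib), // stand-in for default_lib_output()
            _ => None,               // caller emits the lint instead
        }
    }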
@ -704,7 +721,7 @@ pub fn collect_crate_types(session: &Session,
pub struct OutputFilenames { pub struct OutputFilenames {
pub out_directory: Path, pub out_directory: Path,
pub out_filestem: ~str, pub out_filestem: StrBuf,
pub single_output_file: Option<Path>, pub single_output_file: Option<Path>,
} }
@ -756,7 +773,7 @@ pub fn build_output_filenames(input: &Input,
let crateid = attr::find_crateid(attrs); let crateid = attr::find_crateid(attrs);
match crateid { match crateid {
None => {} None => {}
Some(crateid) => stem = crateid.name.to_str(), Some(crateid) => stem = crateid.name.to_strbuf(),
} }
OutputFilenames { OutputFilenames {
out_directory: dirpath, out_directory: dirpath,
@ -778,7 +795,7 @@ pub fn build_output_filenames(input: &Input,
} }
OutputFilenames { OutputFilenames {
out_directory: out_file.dir_path(), out_directory: out_file.dir_path(),
out_filestem: out_file.filestem_str().unwrap().to_str(), out_filestem: out_file.filestem_str().unwrap().to_strbuf(),
single_output_file: ofile, single_output_file: ofile,
} }
} }


@ -56,7 +56,8 @@ fn run_compiler(args: &[~str]) {
let ifile = matches.free.get(0).as_slice(); let ifile = matches.free.get(0).as_slice();
if ifile == "-" { if ifile == "-" {
let contents = io::stdin().read_to_end().unwrap(); let contents = io::stdin().read_to_end().unwrap();
let src = str::from_utf8(contents.as_slice()).unwrap().to_owned(); let src = str::from_utf8(contents.as_slice()).unwrap()
.to_strbuf();
(StrInput(src), None) (StrInput(src), None)
} else { } else {
(FileInput(Path::new(ifile)), Some(Path::new(ifile))) (FileInput(Path::new(ifile)), Some(Path::new(ifile)))
@ -249,9 +250,13 @@ fn print_crate_info(sess: &Session,
// these nasty nested conditions are to avoid doing extra work // these nasty nested conditions are to avoid doing extra work
if crate_id || crate_name || crate_file_name { if crate_id || crate_name || crate_file_name {
let attrs = parse_crate_attrs(sess, input); let attrs = parse_crate_attrs(sess, input);
let t_outputs = driver::build_output_filenames(input, odir, ofile, let t_outputs = driver::build_output_filenames(input,
attrs.as_slice(), sess); odir,
let id = link::find_crate_id(attrs.as_slice(), t_outputs.out_filestem); ofile,
attrs.as_slice(),
sess);
let id = link::find_crate_id(attrs.as_slice(),
t_outputs.out_filestem.as_slice());
if crate_id { if crate_id {
println!("{}", id.to_str()); println!("{}", id.to_str());


@ -28,7 +28,6 @@ use syntax::{ast, codemap};
use std::os; use std::os;
use std::cell::{Cell, RefCell}; use std::cell::{Cell, RefCell};
pub struct Session { pub struct Session {
pub targ_cfg: config::Config, pub targ_cfg: config::Config,
pub opts: config::Options, pub opts: config::Options,
@ -43,7 +42,7 @@ pub struct Session {
// expected to be absolute. `None` means that there is no source file. // expected to be absolute. `None` means that there is no source file.
pub local_crate_source_file: Option<Path>, pub local_crate_source_file: Option<Path>,
pub working_dir: Path, pub working_dir: Path,
pub lints: RefCell<NodeMap<Vec<(lint::Lint, codemap::Span, ~str)>>>, pub lints: RefCell<NodeMap<Vec<(lint::Lint, codemap::Span, StrBuf)>>>,
pub node_id: Cell<ast::NodeId>, pub node_id: Cell<ast::NodeId>,
pub crate_types: RefCell<Vec<config::CrateType>>, pub crate_types: RefCell<Vec<config::CrateType>>,
pub features: front::feature_gate::Features, pub features: front::feature_gate::Features,
@ -109,7 +108,7 @@ impl Session {
lint: lint::Lint, lint: lint::Lint,
id: ast::NodeId, id: ast::NodeId,
sp: Span, sp: Span,
msg: ~str) { msg: StrBuf) {
let mut lints = self.lints.borrow_mut(); let mut lints = self.lints.borrow_mut();
match lints.find_mut(&id) { match lints.find_mut(&id) {
Some(arr) => { arr.push((lint, sp, msg)); return; } Some(arr) => { arr.push((lint, sp, msg)); return; }
@ -180,9 +179,8 @@ impl Session {
} }
} }
pub fn target_filesearch<'a>(&'a self) -> filesearch::FileSearch<'a> { pub fn target_filesearch<'a>(&'a self) -> filesearch::FileSearch<'a> {
filesearch::FileSearch::new( filesearch::FileSearch::new(self.sysroot(),
self.sysroot(), self.opts.target_triple.as_slice(),
self.opts.target_triple,
&self.opts.addl_lib_search_paths) &self.opts.addl_lib_search_paths)
} }
pub fn host_filesearch<'a>(&'a self) -> filesearch::FileSearch<'a> { pub fn host_filesearch<'a>(&'a self) -> filesearch::FileSearch<'a> {
@ -245,7 +243,6 @@ pub fn build_session_(sopts: config::Options,
} }
} }
// Seems out of place, but it uses session, so I'm putting it here // Seems out of place, but it uses session, so I'm putting it here
pub fn expect<T:Clone>(sess: &Session, opt: Option<T>, msg: || -> StrBuf) pub fn expect<T:Clone>(sess: &Session, opt: Option<T>, msg: || -> StrBuf)
-> T { -> T {


@ -359,7 +359,7 @@ pub fn check_crate(sess: &Session, krate: &ast::Crate) {
sess.add_lint(lint::UnknownFeatures, sess.add_lint(lint::UnknownFeatures,
ast::CRATE_NODE_ID, ast::CRATE_NODE_ID,
mi.span, mi.span,
"unknown feature".to_owned()); "unknown feature".to_strbuf());
} }
} }
} }


@ -119,7 +119,6 @@ pub mod lib {
pub mod llvmdeps; pub mod llvmdeps;
} }
pub fn main() { pub fn main() {
std::os::set_exit_status(driver::main_args(std::os::args().as_slice())); std::os::set_exit_status(driver::main_args(std::os::args().as_slice()));
} }


@ -1840,7 +1840,7 @@ pub fn SetFunctionAttribute(fn_: ValueRef, attr: Attribute) {
/* Memory-managed object interface to type handles. */ /* Memory-managed object interface to type handles. */
pub struct TypeNames { pub struct TypeNames {
named_types: RefCell<HashMap<~str, TypeRef>>, named_types: RefCell<HashMap<StrBuf, TypeRef>>,
} }
impl TypeNames { impl TypeNames {
@ -1851,33 +1851,34 @@ impl TypeNames {
} }
pub fn associate_type(&self, s: &str, t: &Type) { pub fn associate_type(&self, s: &str, t: &Type) {
assert!(self.named_types.borrow_mut().insert(s.to_owned(), t.to_ref())); assert!(self.named_types.borrow_mut().insert(s.to_strbuf(),
t.to_ref()));
} }
pub fn find_type(&self, s: &str) -> Option<Type> { pub fn find_type(&self, s: &str) -> Option<Type> {
self.named_types.borrow().find_equiv(&s).map(|x| Type::from_ref(*x)) self.named_types.borrow().find_equiv(&s).map(|x| Type::from_ref(*x))
} }
pub fn type_to_str(&self, ty: Type) -> ~str { pub fn type_to_str(&self, ty: Type) -> StrBuf {
unsafe { unsafe {
let s = llvm::LLVMTypeToString(ty.to_ref()); let s = llvm::LLVMTypeToString(ty.to_ref());
let ret = from_c_str(s); let ret = from_c_str(s);
free(s as *mut c_void); free(s as *mut c_void);
ret ret.to_strbuf()
} }
} }
pub fn types_to_str(&self, tys: &[Type]) -> ~str { pub fn types_to_str(&self, tys: &[Type]) -> StrBuf {
let strs: Vec<~str> = tys.iter().map(|t| self.type_to_str(*t)).collect(); let strs: Vec<StrBuf> = tys.iter().map(|t| self.type_to_str(*t)).collect();
format!("[{}]", strs.connect(",")) format_strbuf!("[{}]", strs.connect(",").to_strbuf())
} }
pub fn val_to_str(&self, val: ValueRef) -> ~str { pub fn val_to_str(&self, val: ValueRef) -> StrBuf {
unsafe { unsafe {
let s = llvm::LLVMValueToString(val); let s = llvm::LLVMValueToString(val);
let ret = from_c_str(s); let ret = from_c_str(s);
free(s as *mut c_void); free(s as *mut c_void);
ret ret.to_strbuf()
} }
} }
} }


@ -120,8 +120,11 @@ fn visit_view_item(e: &mut Env, i: &ast::ViewItem) {
match extract_crate_info(e, i) { match extract_crate_info(e, i) {
Some(info) => { Some(info) => {
let (cnum, _, _) = resolve_crate(e, &None, info.ident, let (cnum, _, _) = resolve_crate(e,
&info.crate_id, None, &None,
info.ident.as_slice(),
&info.crate_id,
None,
i.span); i.span);
e.sess.cstore.add_extern_mod_stmt_cnum(info.id, cnum); e.sess.cstore.add_extern_mod_stmt_cnum(info.id, cnum);
} }
@ -130,7 +133,7 @@ fn visit_view_item(e: &mut Env, i: &ast::ViewItem) {
} }
struct CrateInfo { struct CrateInfo {
ident: ~str, ident: StrBuf,
crate_id: CrateId, crate_id: CrateId,
id: ast::NodeId, id: ast::NodeId,
should_link: bool, should_link: bool,
@ -156,7 +159,7 @@ fn extract_crate_info(e: &Env, i: &ast::ViewItem) -> Option<CrateInfo> {
None => from_str(ident.get().to_str()).unwrap() None => from_str(ident.get().to_str()).unwrap()
}; };
Some(CrateInfo { Some(CrateInfo {
ident: ident.get().to_str(), ident: ident.get().to_strbuf(),
crate_id: crate_id, crate_id: crate_id,
id: id, id: id,
should_link: should_link(i), should_link: should_link(i),
@ -237,7 +240,9 @@ fn visit_item(e: &Env, i: &ast::Item) {
if n.get().is_empty() { if n.get().is_empty() {
e.sess.span_err(m.span, "#[link(name = \"\")] given with empty name"); e.sess.span_err(m.span, "#[link(name = \"\")] given with empty name");
} else { } else {
e.sess.cstore.add_used_library(n.get().to_owned(), kind); e.sess
.cstore
.add_used_library(n.get().to_strbuf(), kind);
} }
} }
None => {} None => {}
@ -279,7 +284,7 @@ fn register_crate<'a>(e: &mut Env,
// Stash paths for top-most crate locally if necessary. // Stash paths for top-most crate locally if necessary.
let crate_paths = if root.is_none() { let crate_paths = if root.is_none() {
Some(CratePaths { Some(CratePaths {
ident: ident.to_owned(), ident: ident.to_strbuf(),
dylib: lib.dylib.clone(), dylib: lib.dylib.clone(),
rlib: lib.rlib.clone(), rlib: lib.rlib.clone(),
}) })
@ -294,7 +299,7 @@ fn register_crate<'a>(e: &mut Env,
let loader::Library{ dylib, rlib, metadata } = lib; let loader::Library{ dylib, rlib, metadata } = lib;
let cmeta = Rc::new( cstore::crate_metadata { let cmeta = Rc::new( cstore::crate_metadata {
name: crate_id.name.to_owned(), name: crate_id.name.to_strbuf(),
data: metadata, data: metadata,
cnum_map: cnum_map, cnum_map: cnum_map,
cnum: cnum, cnum: cnum,
@ -328,7 +333,7 @@ fn resolve_crate<'a>(e: &mut Env,
span: span, span: span,
ident: ident, ident: ident,
crate_id: crate_id, crate_id: crate_id,
id_hash: id_hash, id_hash: id_hash.as_slice(),
hash: hash.map(|a| &*a), hash: hash.map(|a| &*a),
filesearch: e.sess.target_filesearch(), filesearch: e.sess.target_filesearch(),
os: config::cfg_os_to_meta_os(e.sess.targ_cfg.os), os: config::cfg_os_to_meta_os(e.sess.targ_cfg.os),
@ -391,9 +396,9 @@ impl<'a> CrateLoader for Loader<'a> {
let mut load_ctxt = loader::Context { let mut load_ctxt = loader::Context {
sess: self.env.sess, sess: self.env.sess,
span: krate.span, span: krate.span,
ident: info.ident, ident: info.ident.as_slice(),
crate_id: &info.crate_id, crate_id: &info.crate_id,
id_hash: id_hash, id_hash: id_hash.as_slice(),
hash: None, hash: None,
filesearch: self.env.sess.host_filesearch(), filesearch: self.env.sess.host_filesearch(),
triple: driver::host_triple(), triple: driver::host_triple(),


@ -32,7 +32,7 @@ pub struct StaticMethodInfo {
pub vis: ast::Visibility, pub vis: ast::Visibility,
} }
pub fn get_symbol(cstore: &cstore::CStore, def: ast::DefId) -> ~str { pub fn get_symbol(cstore: &cstore::CStore, def: ast::DefId) -> StrBuf {
let cdata = cstore.get_crate_data(def.krate); let cdata = cstore.get_crate_data(def.krate);
decoder::get_symbol(cdata.data(), def.node) decoder::get_symbol(cdata.data(), def.node)
} }
@ -86,7 +86,8 @@ pub fn get_item_path(tcx: &ty::ctxt, def: ast::DefId) -> Vec<ast_map::PathElem>
// FIXME #1920: This path is not always correct if the crate is not linked // FIXME #1920: This path is not always correct if the crate is not linked
// into the root namespace. // into the root namespace.
(vec!(ast_map::PathMod(token::intern(cdata.name)))).append(path.as_slice()) (vec!(ast_map::PathMod(token::intern(cdata.name.as_slice())))).append(
path.as_slice())
} }
pub enum found_ast { pub enum found_ast {
@ -245,7 +246,7 @@ pub fn get_impl_vtables(tcx: &ty::ctxt,
pub fn get_native_libraries(cstore: &cstore::CStore, pub fn get_native_libraries(cstore: &cstore::CStore,
crate_num: ast::CrateNum) crate_num: ast::CrateNum)
-> Vec<(cstore::NativeLibaryKind, ~str)> { -> Vec<(cstore::NativeLibaryKind, StrBuf)> {
let cdata = cstore.get_crate_data(crate_num); let cdata = cstore.get_crate_data(crate_num);
decoder::get_native_libraries(&*cdata) decoder::get_native_libraries(&*cdata)
} }


@ -38,7 +38,7 @@ pub enum MetadataBlob {
} }
pub struct crate_metadata { pub struct crate_metadata {
pub name: ~str, pub name: StrBuf,
pub data: MetadataBlob, pub data: MetadataBlob,
pub cnum_map: cnum_map, pub cnum_map: cnum_map,
pub cnum: ast::CrateNum, pub cnum: ast::CrateNum,
@ -71,8 +71,8 @@ pub struct CStore {
metas: RefCell<HashMap<ast::CrateNum, Rc<crate_metadata>>>, metas: RefCell<HashMap<ast::CrateNum, Rc<crate_metadata>>>,
extern_mod_crate_map: RefCell<extern_mod_crate_map>, extern_mod_crate_map: RefCell<extern_mod_crate_map>,
used_crate_sources: RefCell<Vec<CrateSource>>, used_crate_sources: RefCell<Vec<CrateSource>>,
used_libraries: RefCell<Vec<(~str, NativeLibaryKind)>>, used_libraries: RefCell<Vec<(StrBuf, NativeLibaryKind)>>,
used_link_args: RefCell<Vec<~str>>, used_link_args: RefCell<Vec<StrBuf>>,
pub intr: Rc<IdentInterner>, pub intr: Rc<IdentInterner>,
} }
@ -178,23 +178,23 @@ impl CStore {
libs libs
} }
pub fn add_used_library(&self, lib: ~str, kind: NativeLibaryKind) { pub fn add_used_library(&self, lib: StrBuf, kind: NativeLibaryKind) {
assert!(!lib.is_empty()); assert!(!lib.is_empty());
self.used_libraries.borrow_mut().push((lib, kind)); self.used_libraries.borrow_mut().push((lib, kind));
} }
pub fn get_used_libraries<'a>(&'a self) pub fn get_used_libraries<'a>(&'a self)
-> &'a RefCell<Vec<(~str, NativeLibaryKind)> > { -> &'a RefCell<Vec<(StrBuf, NativeLibaryKind)> > {
&self.used_libraries &self.used_libraries
} }
pub fn add_used_link_args(&self, args: &str) { pub fn add_used_link_args(&self, args: &str) {
for s in args.split(' ') { for s in args.split(' ') {
self.used_link_args.borrow_mut().push(s.to_owned()); self.used_link_args.borrow_mut().push(s.to_strbuf());
} }
} }
pub fn get_used_link_args<'a>(&'a self) -> &'a RefCell<Vec<~str> > { pub fn get_used_link_args<'a>(&'a self) -> &'a RefCell<Vec<StrBuf> > {
&self.used_link_args &self.used_link_args
} }


@ -184,8 +184,8 @@ fn item_method_sort(item: ebml::Doc) -> char {
ret ret
} }
fn item_symbol(item: ebml::Doc) -> ~str { fn item_symbol(item: ebml::Doc) -> StrBuf {
reader::get_doc(item, tag_items_data_item_symbol).as_str() reader::get_doc(item, tag_items_data_item_symbol).as_str().to_strbuf()
} }
fn item_parent_item(d: ebml::Doc) -> Option<ast::DefId> { fn item_parent_item(d: ebml::Doc) -> Option<ast::DefId> {
@ -451,7 +451,7 @@ pub fn get_impl_vtables(cdata: Cmd,
} }
pub fn get_symbol(data: &[u8], id: ast::NodeId) -> ~str { pub fn get_symbol(data: &[u8], id: ast::NodeId) -> StrBuf {
return item_symbol(lookup_item(id, data)); return item_symbol(lookup_item(id, data));
} }
@ -1097,13 +1097,15 @@ pub fn get_crate_deps(data: &[u8]) -> Vec<CrateDep> {
let cratedoc = reader::Doc(data); let cratedoc = reader::Doc(data);
let depsdoc = reader::get_doc(cratedoc, tag_crate_deps); let depsdoc = reader::get_doc(cratedoc, tag_crate_deps);
let mut crate_num = 1; let mut crate_num = 1;
fn docstr(doc: ebml::Doc, tag_: uint) -> ~str { fn docstr(doc: ebml::Doc, tag_: uint) -> StrBuf {
let d = reader::get_doc(doc, tag_); let d = reader::get_doc(doc, tag_);
d.as_str_slice().to_str() d.as_str_slice().to_strbuf()
} }
reader::tagged_docs(depsdoc, tag_crate_dep, |depdoc| { reader::tagged_docs(depsdoc, tag_crate_dep, |depdoc| {
let crate_id = from_str(docstr(depdoc, tag_crate_dep_crateid)).unwrap(); let crate_id =
let hash = Svh::new(docstr(depdoc, tag_crate_dep_hash)); from_str(docstr(depdoc,
tag_crate_dep_crateid).as_slice()).unwrap();
let hash = Svh::new(docstr(depdoc, tag_crate_dep_hash).as_slice());
deps.push(CrateDep { deps.push(CrateDep {
cnum: crate_num, cnum: crate_num,
crate_id: crate_id, crate_id: crate_id,
@ -1144,10 +1146,10 @@ pub fn maybe_get_crate_id(data: &[u8]) -> Option<CrateId> {
}) })
} }
pub fn get_crate_triple(data: &[u8]) -> ~str { pub fn get_crate_triple(data: &[u8]) -> StrBuf {
let cratedoc = reader::Doc(data); let cratedoc = reader::Doc(data);
let triple_doc = reader::maybe_get_doc(cratedoc, tag_crate_triple); let triple_doc = reader::maybe_get_doc(cratedoc, tag_crate_triple);
triple_doc.expect("No triple in crate").as_str() triple_doc.expect("No triple in crate").as_str().to_strbuf()
} }
pub fn get_crate_id(data: &[u8]) -> CrateId { pub fn get_crate_id(data: &[u8]) -> CrateId {
@ -1239,7 +1241,8 @@ pub fn get_trait_of_method(cdata: Cmd, id: ast::NodeId, tcx: &ty::ctxt)
} }
pub fn get_native_libraries(cdata: Cmd) -> Vec<(cstore::NativeLibaryKind, ~str)> { pub fn get_native_libraries(cdata: Cmd)
-> Vec<(cstore::NativeLibaryKind, StrBuf)> {
let libraries = reader::get_doc(reader::Doc(cdata.data()), let libraries = reader::get_doc(reader::Doc(cdata.data()),
tag_native_libraries); tag_native_libraries);
let mut result = Vec::new(); let mut result = Vec::new();
@ -1248,7 +1251,7 @@ pub fn get_native_libraries(cdata: Cmd) -> Vec<(cstore::NativeLibaryKind, ~str)>
let name_doc = reader::get_doc(lib_doc, tag_native_libraries_name); let name_doc = reader::get_doc(lib_doc, tag_native_libraries_name);
let kind: cstore::NativeLibaryKind = let kind: cstore::NativeLibaryKind =
FromPrimitive::from_u32(reader::doc_as_u32(kind_doc)).unwrap(); FromPrimitive::from_u32(reader::doc_as_u32(kind_doc)).unwrap();
let name = name_doc.as_str(); let name = name_doc.as_str().to_strbuf();
result.push((kind, name)); result.push((kind, name));
true true
}); });
@ -1260,12 +1263,12 @@ pub fn get_macro_registrar_fn(data: &[u8]) -> Option<ast::NodeId> {
.map(|doc| FromPrimitive::from_u32(reader::doc_as_u32(doc)).unwrap()) .map(|doc| FromPrimitive::from_u32(reader::doc_as_u32(doc)).unwrap())
} }
pub fn get_exported_macros(data: &[u8]) -> Vec<~str> { pub fn get_exported_macros(data: &[u8]) -> Vec<StrBuf> {
let macros = reader::get_doc(reader::Doc(data), let macros = reader::get_doc(reader::Doc(data),
tag_exported_macros); tag_exported_macros);
let mut result = Vec::new(); let mut result = Vec::new();
reader::tagged_docs(macros, tag_macro_def, |macro_doc| { reader::tagged_docs(macros, tag_macro_def, |macro_doc| {
result.push(macro_doc.as_str()); result.push(macro_doc.as_str().to_strbuf());
true true
}); });
result result


@ -70,7 +70,7 @@ pub struct EncodeParams<'a> {
pub diag: &'a SpanHandler, pub diag: &'a SpanHandler,
pub tcx: &'a ty::ctxt, pub tcx: &'a ty::ctxt,
pub reexports2: &'a middle::resolve::ExportMap2, pub reexports2: &'a middle::resolve::ExportMap2,
pub item_symbols: &'a RefCell<NodeMap<~str>>, pub item_symbols: &'a RefCell<NodeMap<StrBuf>>,
pub non_inlineable_statics: &'a RefCell<NodeSet>, pub non_inlineable_statics: &'a RefCell<NodeSet>,
pub link_meta: &'a LinkMeta, pub link_meta: &'a LinkMeta,
pub cstore: &'a cstore::CStore, pub cstore: &'a cstore::CStore,
@ -81,7 +81,7 @@ pub struct EncodeContext<'a> {
pub diag: &'a SpanHandler, pub diag: &'a SpanHandler,
pub tcx: &'a ty::ctxt, pub tcx: &'a ty::ctxt,
pub reexports2: &'a middle::resolve::ExportMap2, pub reexports2: &'a middle::resolve::ExportMap2,
pub item_symbols: &'a RefCell<NodeMap<~str>>, pub item_symbols: &'a RefCell<NodeMap<StrBuf>>,
pub non_inlineable_statics: &'a RefCell<NodeSet>, pub non_inlineable_statics: &'a RefCell<NodeSet>,
pub link_meta: &'a LinkMeta, pub link_meta: &'a LinkMeta,
pub cstore: &'a cstore::CStore, pub cstore: &'a cstore::CStore,
@ -98,7 +98,7 @@ fn encode_impl_type_basename(ebml_w: &mut Encoder, name: Ident) {
} }
pub fn encode_def_id(ebml_w: &mut Encoder, id: DefId) { pub fn encode_def_id(ebml_w: &mut Encoder, id: DefId) {
ebml_w.wr_tagged_str(tag_def_id, def_to_str(id)); ebml_w.wr_tagged_str(tag_def_id, def_to_str(id).as_slice());
} }
#[deriving(Clone)] #[deriving(Clone)]
@ -139,8 +139,8 @@ fn encode_family(ebml_w: &mut Encoder, c: char) {
ebml_w.end_tag(); ebml_w.end_tag();
} }
pub fn def_to_str(did: DefId) -> ~str { pub fn def_to_str(did: DefId) -> StrBuf {
format!("{}:{}", did.krate, did.node) format_strbuf!("{}:{}", did.krate, did.node)
} }
fn encode_ty_type_param_defs(ebml_w: &mut Encoder, fn encode_ty_type_param_defs(ebml_w: &mut Encoder,
@ -170,7 +170,7 @@ fn encode_region_param_defs(ebml_w: &mut Encoder,
ebml_w.end_tag(); ebml_w.end_tag();
ebml_w.wr_tagged_str(tag_region_param_def_def_id, ebml_w.wr_tagged_str(tag_region_param_def_def_id,
def_to_str(param.def_id)); def_to_str(param.def_id).as_slice());
ebml_w.end_tag(); ebml_w.end_tag();
} }
@ -370,10 +370,12 @@ fn encode_reexported_static_method(ebml_w: &mut Encoder,
exp.name, token::get_ident(method_ident)); exp.name, token::get_ident(method_ident));
ebml_w.start_tag(tag_items_data_item_reexport); ebml_w.start_tag(tag_items_data_item_reexport);
ebml_w.start_tag(tag_items_data_item_reexport_def_id); ebml_w.start_tag(tag_items_data_item_reexport_def_id);
ebml_w.wr_str(def_to_str(method_def_id)); ebml_w.wr_str(def_to_str(method_def_id).as_slice());
ebml_w.end_tag(); ebml_w.end_tag();
ebml_w.start_tag(tag_items_data_item_reexport_name); ebml_w.start_tag(tag_items_data_item_reexport_name);
ebml_w.wr_str(format!("{}::{}", exp.name, token::get_ident(method_ident))); ebml_w.wr_str(format!("{}::{}",
exp.name,
token::get_ident(method_ident)));
ebml_w.end_tag(); ebml_w.end_tag();
ebml_w.end_tag(); ebml_w.end_tag();
} }
@ -447,7 +449,7 @@ fn encode_reexported_static_methods(ecx: &EncodeContext,
// encoded metadata for static methods relative to Bar, // encoded metadata for static methods relative to Bar,
// but not yet for Foo. // but not yet for Foo.
// //
if path_differs || original_name.get() != exp.name { if path_differs || original_name.get() != exp.name.as_slice() {
if !encode_reexported_static_base_methods(ecx, ebml_w, exp) { if !encode_reexported_static_base_methods(ecx, ebml_w, exp) {
if encode_reexported_static_trait_methods(ecx, ebml_w, exp) { if encode_reexported_static_trait_methods(ecx, ebml_w, exp) {
debug!("(encode reexported static methods) {} \ debug!("(encode reexported static methods) {} \
@ -515,10 +517,10 @@ fn encode_reexports(ecx: &EncodeContext,
id); id);
ebml_w.start_tag(tag_items_data_item_reexport); ebml_w.start_tag(tag_items_data_item_reexport);
ebml_w.start_tag(tag_items_data_item_reexport_def_id); ebml_w.start_tag(tag_items_data_item_reexport_def_id);
ebml_w.wr_str(def_to_str(exp.def_id)); ebml_w.wr_str(def_to_str(exp.def_id).as_slice());
ebml_w.end_tag(); ebml_w.end_tag();
ebml_w.start_tag(tag_items_data_item_reexport_name); ebml_w.start_tag(tag_items_data_item_reexport_name);
ebml_w.wr_str(exp.name); ebml_w.wr_str(exp.name.as_slice());
ebml_w.end_tag(); ebml_w.end_tag();
ebml_w.end_tag(); ebml_w.end_tag();
encode_reexported_static_methods(ecx, ebml_w, path.clone(), exp); encode_reexported_static_methods(ecx, ebml_w, path.clone(), exp);
@ -547,12 +549,13 @@ fn encode_info_for_mod(ecx: &EncodeContext,
// Encode info about all the module children. // Encode info about all the module children.
for item in md.items.iter() { for item in md.items.iter() {
ebml_w.start_tag(tag_mod_child); ebml_w.start_tag(tag_mod_child);
ebml_w.wr_str(def_to_str(local_def(item.id))); ebml_w.wr_str(def_to_str(local_def(item.id)).as_slice());
ebml_w.end_tag(); ebml_w.end_tag();
each_auxiliary_node_id(*item, |auxiliary_node_id| { each_auxiliary_node_id(*item, |auxiliary_node_id| {
ebml_w.start_tag(tag_mod_child); ebml_w.start_tag(tag_mod_child);
ebml_w.wr_str(def_to_str(local_def(auxiliary_node_id))); ebml_w.wr_str(def_to_str(local_def(
auxiliary_node_id)).as_slice());
ebml_w.end_tag(); ebml_w.end_tag();
true true
}); });
@ -566,7 +569,7 @@ fn encode_info_for_mod(ecx: &EncodeContext,
did, ecx.tcx.map.node_to_str(did)); did, ecx.tcx.map.node_to_str(did));
ebml_w.start_tag(tag_mod_impl); ebml_w.start_tag(tag_mod_impl);
ebml_w.wr_str(def_to_str(local_def(did))); ebml_w.wr_str(def_to_str(local_def(did)).as_slice());
ebml_w.end_tag(); ebml_w.end_tag();
} }
_ => {} _ => {}
@ -931,7 +934,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
// Encode all the items in this module. // Encode all the items in this module.
for foreign_item in fm.items.iter() { for foreign_item in fm.items.iter() {
ebml_w.start_tag(tag_mod_child); ebml_w.start_tag(tag_mod_child);
ebml_w.wr_str(def_to_str(local_def(foreign_item.id))); ebml_w.wr_str(def_to_str(local_def(foreign_item.id)).as_slice());
ebml_w.end_tag(); ebml_w.end_tag();
} }
encode_visibility(ebml_w, vis); encode_visibility(ebml_w, vis);
@ -1111,7 +1114,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
ebml_w.end_tag(); ebml_w.end_tag();
ebml_w.start_tag(tag_mod_child); ebml_w.start_tag(tag_mod_child);
ebml_w.wr_str(def_to_str(method_def_id)); ebml_w.wr_str(def_to_str(method_def_id).as_slice());
ebml_w.end_tag(); ebml_w.end_tag();
} }
encode_path(ebml_w, path.clone()); encode_path(ebml_w, path.clone());
@ -1647,12 +1650,13 @@ fn encode_misc_info(ecx: &EncodeContext,
ebml_w.start_tag(tag_misc_info_crate_items); ebml_w.start_tag(tag_misc_info_crate_items);
for &item in krate.module.items.iter() { for &item in krate.module.items.iter() {
ebml_w.start_tag(tag_mod_child); ebml_w.start_tag(tag_mod_child);
ebml_w.wr_str(def_to_str(local_def(item.id))); ebml_w.wr_str(def_to_str(local_def(item.id)).as_slice());
ebml_w.end_tag(); ebml_w.end_tag();
each_auxiliary_node_id(item, |auxiliary_node_id| { each_auxiliary_node_id(item, |auxiliary_node_id| {
ebml_w.start_tag(tag_mod_child); ebml_w.start_tag(tag_mod_child);
ebml_w.wr_str(def_to_str(local_def(auxiliary_node_id))); ebml_w.wr_str(def_to_str(local_def(
auxiliary_node_id)).as_slice());
ebml_w.end_tag(); ebml_w.end_tag();
true true
}); });
@ -1700,11 +1704,11 @@ fn encode_dylib_dependency_formats(ebml_w: &mut Encoder, ecx: &EncodeContext) {
match ecx.tcx.dependency_formats.borrow().find(&config::CrateTypeDylib) { match ecx.tcx.dependency_formats.borrow().find(&config::CrateTypeDylib) {
Some(arr) => { Some(arr) => {
let s = arr.iter().enumerate().filter_map(|(i, slot)| { let s = arr.iter().enumerate().filter_map(|(i, slot)| {
slot.map(|kind| format!("{}:{}", i + 1, match kind { slot.map(|kind| (format!("{}:{}", i + 1, match kind {
cstore::RequireDynamic => "d", cstore::RequireDynamic => "d",
cstore::RequireStatic => "s", cstore::RequireStatic => "s",
})) })).to_strbuf())
}).collect::<Vec<~str>>(); }).collect::<Vec<StrBuf>>();
ebml_w.writer.write(s.connect(",").as_bytes()); ebml_w.writer.write(s.connect(",").as_bytes());
} }
None => {} None => {}
@ -1781,7 +1785,12 @@ fn encode_metadata_inner(wr: &mut MemWriter, parms: EncodeParams, krate: &Crate)
let mut ebml_w = writer::Encoder(wr); let mut ebml_w = writer::Encoder(wr);
encode_crate_id(&mut ebml_w, &ecx.link_meta.crateid); encode_crate_id(&mut ebml_w, &ecx.link_meta.crateid);
encode_crate_triple(&mut ebml_w, tcx.sess.targ_cfg.target_strs.target_triple); encode_crate_triple(&mut ebml_w,
tcx.sess
.targ_cfg
.target_strs
.target_triple
.as_slice());
encode_hash(&mut ebml_w, &ecx.link_meta.crate_hash); encode_hash(&mut ebml_w, &ecx.link_meta.crate_hash);
encode_dylib_dependency_formats(&mut ebml_w, &ecx); encode_dylib_dependency_formats(&mut ebml_w, &ecx);
@ -1861,7 +1870,7 @@ fn encode_metadata_inner(wr: &mut MemWriter, parms: EncodeParams, krate: &Crate)
} }
// Get the encoded string for a type // Get the encoded string for a type
pub fn encoded_ty(tcx: &ty::ctxt, t: ty::t) -> ~str { pub fn encoded_ty(tcx: &ty::ctxt, t: ty::t) -> StrBuf {
let mut wr = MemWriter::new(); let mut wr = MemWriter::new();
tyencode::enc_ty(&mut wr, &tyencode::ctxt { tyencode::enc_ty(&mut wr, &tyencode::ctxt {
diag: tcx.sess.diagnostic(), diag: tcx.sess.diagnostic(),
@ -1869,5 +1878,5 @@ pub fn encoded_ty(tcx: &ty::ctxt, t: ty::t) -> ~str {
tcx: tcx, tcx: tcx,
abbrevs: &RefCell::new(HashMap::new()) abbrevs: &RefCell::new(HashMap::new())
}, t); }, t);
str::from_utf8_owned(wr.get_ref().to_owned()).unwrap() str::from_utf8_owned(wr.get_ref().to_owned()).unwrap().to_strbuf()
} }


@ -186,8 +186,8 @@ static PATH_ENTRY_SEPARATOR: &'static str = ";";
static PATH_ENTRY_SEPARATOR: &'static str = ":"; static PATH_ENTRY_SEPARATOR: &'static str = ":";
/// Returns RUST_PATH as a string, without default paths added /// Returns RUST_PATH as a string, without default paths added
pub fn get_rust_path() -> Option<~str> { pub fn get_rust_path() -> Option<StrBuf> {
os::getenv("RUST_PATH") os::getenv("RUST_PATH").map(|x| x.to_strbuf())
} }
/// Returns the value of RUST_PATH, as a list /// Returns the value of RUST_PATH, as a list
@ -199,7 +199,7 @@ pub fn rust_path() -> Vec<Path> {
let mut env_rust_path: Vec<Path> = match get_rust_path() { let mut env_rust_path: Vec<Path> = match get_rust_path() {
Some(env_path) => { Some(env_path) => {
let env_path_components = let env_path_components =
env_path.split_str(PATH_ENTRY_SEPARATOR); env_path.as_slice().split_str(PATH_ENTRY_SEPARATOR);
env_path_components.map(|s| Path::new(s)).collect() env_path_components.map(|s| Path::new(s)).collect()
} }
None => Vec::new() None => Vec::new()
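The two helpers above cooperate: get_rust_path now hands back the raw RUST_PATH value as an Option<StrBuf>, and rust_path splits it on the platform separator (";" on Windows, ":" elsewhere) into a Vec<Path> before the default entries are appended. A cut-down sketch of the splitting step, with a hard-coded separator standing in for PATH_ENTRY_SEPARATOR; it would be called on env_path.as_slice():

    fn split_rust_path(env_path: &str) -> Vec<Path> {
        // ":" stands in for PATH_ENTRY_SEPARATOR (";" on Windows).
        env_path.split_str(":").map(|s| Path::new(s)).collect()
    }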
@ -236,7 +236,7 @@ pub fn rust_path() -> Vec<Path> {
// The name of the directory rustc expects libraries to be located. // The name of the directory rustc expects libraries to be located.
// On Unix should be "lib", on windows "bin" // On Unix should be "lib", on windows "bin"
#[cfg(unix)] #[cfg(unix)]
fn find_libdir(sysroot: &Path) -> ~str { fn find_libdir(sysroot: &Path) -> StrBuf {
// FIXME: This is a quick hack to make the rustc binary able to locate // FIXME: This is a quick hack to make the rustc binary able to locate
// Rust libraries in Linux environments where libraries might be installed // Rust libraries in Linux environments where libraries might be installed
// to lib64/lib32. This would be more foolproof by basing the sysroot off // to lib64/lib32. This would be more foolproof by basing the sysroot off
@ -250,21 +250,27 @@ fn find_libdir(sysroot: &Path) -> ~str {
} }
#[cfg(target_word_size = "64")] #[cfg(target_word_size = "64")]
fn primary_libdir_name() -> ~str { "lib64".to_owned() } fn primary_libdir_name() -> StrBuf {
"lib64".to_strbuf()
}
#[cfg(target_word_size = "32")] #[cfg(target_word_size = "32")]
fn primary_libdir_name() -> ~str { "lib32".to_owned() } fn primary_libdir_name() -> StrBuf {
"lib32".to_strbuf()
}
fn secondary_libdir_name() -> ~str { "lib".to_owned() } fn secondary_libdir_name() -> StrBuf {
"lib".to_strbuf()
}
} }
#[cfg(windows)] #[cfg(windows)]
fn find_libdir(_sysroot: &Path) -> ~str { fn find_libdir(_sysroot: &Path) -> StrBuf {
"bin".to_owned() "bin".to_strbuf()
} }
// The name of rustc's own place to organize libraries. // The name of rustc's own place to organize libraries.
// Used to be "rustc", now the default is "rustlib" // Used to be "rustc", now the default is "rustlib"
pub fn rustlibdir() -> ~str { pub fn rustlibdir() -> StrBuf {
"rustlib".to_owned() "rustlib".to_strbuf()
} }
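Taken together, the helpers above say: on Unix the per-arch library directory is probed as lib64/lib32 first (the FIXME's multilib stopgap) with "lib" as the fallback, on Windows it is always "bin", and rustc's own per-target tree sits in a "rustlib" subdirectory. How filesearch joins them is not part of this hunk, but the intent is roughly the following sketch (the join order is an assumption, not taken from this diff):

    // e.g. <sysroot>/lib/rustlib on Linux, <sysroot>/bin/rustlib on Windows.
    fn rust_lib_root(sysroot: &Path) -> Path {
        sysroot.join(find_libdir(sysroot).as_slice())
               .join(rustlibdir().as_slice())
    }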


@ -61,7 +61,7 @@ pub enum Os {
pub struct CrateMismatch { pub struct CrateMismatch {
path: Path, path: Path,
got: ~str, got: StrBuf,
} }
pub struct Context<'a> { pub struct Context<'a> {
@ -92,7 +92,7 @@ pub struct ArchiveMetadata {
} }
pub struct CratePaths { pub struct CratePaths {
pub ident: ~str, pub ident: StrBuf,
pub dylib: Option<Path>, pub dylib: Option<Path>,
pub rlib: Option<Path> pub rlib: Option<Path>
} }
@ -305,7 +305,7 @@ impl<'a> Context<'a> {
// //
// If everything checks out, then `Some(hash)` is returned where `hash` is // If everything checks out, then `Some(hash)` is returned where `hash` is
// the listed hash in the filename itself. // the listed hash in the filename itself.
fn try_match(&self, file: &str, prefix: &str, suffix: &str) -> Option<~str>{ fn try_match(&self, file: &str, prefix: &str, suffix: &str) -> Option<StrBuf>{
let middle = file.slice(prefix.len(), file.len() - suffix.len()); let middle = file.slice(prefix.len(), file.len() - suffix.len());
debug!("matching -- {}, middle: {}", file, middle); debug!("matching -- {}, middle: {}", file, middle);
let mut parts = middle.splitn('-', 1); let mut parts = middle.splitn('-', 1);
@ -319,13 +319,13 @@ impl<'a> Context<'a> {
Some(..) => {} // check the hash Some(..) => {} // check the hash
// hash is irrelevant, no version specified // hash is irrelevant, no version specified
None => return Some(hash.to_owned()) None => return Some(hash.to_strbuf())
} }
debug!("matching -- {}, vers ok", file); debug!("matching -- {}, vers ok", file);
// hashes in filenames are prefixes of the "true hash" // hashes in filenames are prefixes of the "true hash"
if self.id_hash == hash.as_slice() { if self.id_hash == hash.as_slice() {
debug!("matching -- {}, hash ok", file); debug!("matching -- {}, hash ok", file);
Some(hash.to_owned()) Some(hash.to_strbuf())
} else { } else {
None None
} }
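try_match above strips the caller-supplied prefix and suffix from a candidate file name, splits the remainder once on '-' into the embedded hash and an optional version, and accepts the file only when that hash (a prefix of the full hash) equals self.id_hash. The two slicing steps, pulled out into standalone helpers (the file name, prefix and hash below are invented):

    fn middle_of<'a>(file: &'a str, prefix: &str, suffix: &str) -> &'a str {
        file.slice(prefix.len(), file.len() - suffix.len())
    }

    fn hash_and_version<'a>(middle: &'a str) -> (&'a str, Option<&'a str>) {
        let mut parts = middle.splitn('-', 1);
        (parts.next().unwrap(), parts.next()) // hash, then version if present
    }

For example, middle_of("libfoo-1a2b3c4d-0.11.rlib", "libfoo-", ".rlib") is "1a2b3c4d-0.11", and hash_and_version of that gives ("1a2b3c4d", Some("0.11")).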
@ -410,8 +410,10 @@ impl<'a> Context<'a> {
let triple = decoder::get_crate_triple(crate_data); let triple = decoder::get_crate_triple(crate_data);
if triple.as_slice() != self.triple { if triple.as_slice() != self.triple {
info!("Rejecting via crate triple: expected {} got {}", self.triple, triple); info!("Rejecting via crate triple: expected {} got {}", self.triple, triple);
self.rejected_via_triple.push(CrateMismatch{ path: libpath.clone(), self.rejected_via_triple.push(CrateMismatch {
got: triple.to_owned() }); path: libpath.clone(),
got: triple.to_strbuf()
});
return false; return false;
} }
@ -420,8 +422,10 @@ impl<'a> Context<'a> {
Some(myhash) => { Some(myhash) => {
if *myhash != hash { if *myhash != hash {
info!("Rejecting via hash: expected {} got {}", *myhash, hash); info!("Rejecting via hash: expected {} got {}", *myhash, hash);
self.rejected_via_hash.push(CrateMismatch{ path: libpath.clone(), self.rejected_via_hash.push(CrateMismatch {
got: myhash.as_str().to_owned() }); path: libpath.clone(),
got: myhash.as_str().to_strbuf()
});
false false
} else { } else {
true true
@ -481,7 +485,7 @@ impl ArchiveMetadata {
} }
// Just a small wrapper to time how long reading metadata takes. // Just a small wrapper to time how long reading metadata takes.
fn get_metadata_section(os: Os, filename: &Path) -> Result<MetadataBlob, ~str> { fn get_metadata_section(os: Os, filename: &Path) -> Result<MetadataBlob, StrBuf> {
let start = time::precise_time_ns(); let start = time::precise_time_ns();
let ret = get_metadata_section_imp(os, filename); let ret = get_metadata_section_imp(os, filename);
info!("reading {} => {}ms", filename.filename_display(), info!("reading {} => {}ms", filename.filename_display(),
@ -489,9 +493,9 @@ fn get_metadata_section(os: Os, filename: &Path) -> Result<MetadataBlob, ~str> {
return ret; return ret;
} }
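get_metadata_section above is purely a timing shim around get_metadata_section_imp. The same pattern, generalised into a hypothetical helper (not part of rustc; assumes the time import already used by this module) in the old closure syntax:

    fn timed<T>(what: &str, f: || -> T) -> T {
        let start = time::precise_time_ns();
        let ret = f();
        info!("{} => {}ms", what,
              (time::precise_time_ns() - start) / 1000000);
        ret
    }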
fn get_metadata_section_imp(os: Os, filename: &Path) -> Result<MetadataBlob, ~str> { fn get_metadata_section_imp(os: Os, filename: &Path) -> Result<MetadataBlob, StrBuf> {
if !filename.exists() { if !filename.exists() {
return Err(format!("no such file: '{}'", filename.display())); return Err(format_strbuf!("no such file: '{}'", filename.display()));
} }
if filename.filename_str().unwrap().ends_with(".rlib") { if filename.filename_str().unwrap().ends_with(".rlib") {
// Use ArchiveRO for speed here, it's backed by LLVM and uses mmap // Use ArchiveRO for speed here, it's backed by LLVM and uses mmap
@ -501,13 +505,17 @@ fn get_metadata_section_imp(os: Os, filename: &Path) -> Result<MetadataBlob, ~st
Some(ar) => ar, Some(ar) => ar,
None => { None => {
debug!("llvm didn't like `{}`", filename.display()); debug!("llvm didn't like `{}`", filename.display());
return Err(format!("failed to read rlib metadata: '{}'", return Err(format_strbuf!("failed to read rlib metadata: \
'{}'",
filename.display())); filename.display()));
} }
}; };
return match ArchiveMetadata::new(archive).map(|ar| MetadataArchive(ar)) { return match ArchiveMetadata::new(archive).map(|ar| MetadataArchive(ar)) {
None => return Err(format!("failed to read rlib metadata: '{}'", None => {
filename.display())), return Err((format_strbuf!("failed to read rlib metadata: \
'{}'",
filename.display())))
}
Some(blob) => return Ok(blob) Some(blob) => return Ok(blob)
} }
} }
@ -516,11 +524,16 @@ fn get_metadata_section_imp(os: Os, filename: &Path) -> Result<MetadataBlob, ~st
llvm::LLVMRustCreateMemoryBufferWithContentsOfFile(buf) llvm::LLVMRustCreateMemoryBufferWithContentsOfFile(buf)
}); });
if mb as int == 0 { if mb as int == 0 {
return Err(format!("error reading library: '{}'",filename.display())) return Err(format_strbuf!("error reading library: '{}'",
filename.display()))
} }
let of = match ObjectFile::new(mb) { let of = match ObjectFile::new(mb) {
Some(of) => of, Some(of) => of,
_ => return Err(format!("provided path not an object file: '{}'", filename.display())) _ => {
return Err((format_strbuf!("provided path not an object \
file: '{}'",
filename.display())))
}
}; };
let si = mk_section_iter(of.llof); let si = mk_section_iter(of.llof);
while llvm::LLVMIsSectionIteratorAtEnd(of.llof, si.llsi) == False { while llvm::LLVMIsSectionIteratorAtEnd(of.llof, si.llsi) == False {
@ -531,7 +544,9 @@ fn get_metadata_section_imp(os: Os, filename: &Path) -> Result<MetadataBlob, ~st
if read_meta_section_name(os) == name { if read_meta_section_name(os) == name {
let cbuf = llvm::LLVMGetSectionContents(si.llsi); let cbuf = llvm::LLVMGetSectionContents(si.llsi);
let csz = llvm::LLVMGetSectionSize(si.llsi) as uint; let csz = llvm::LLVMGetSectionSize(si.llsi) as uint;
let mut found = Err(format!("metadata not found: '{}'", filename.display())); let mut found =
Err(format_strbuf!("metadata not found: '{}'",
filename.display()));
let cvbuf: *u8 = mem::transmute(cbuf); let cvbuf: *u8 = mem::transmute(cbuf);
let vlen = encoder::metadata_encoding_version.len(); let vlen = encoder::metadata_encoding_version.len();
debug!("checking {} bytes of metadata-version stamp", debug!("checking {} bytes of metadata-version stamp",
@ -539,8 +554,11 @@ fn get_metadata_section_imp(os: Os, filename: &Path) -> Result<MetadataBlob, ~st
let minsz = cmp::min(vlen, csz); let minsz = cmp::min(vlen, csz);
let version_ok = slice::raw::buf_as_slice(cvbuf, minsz, let version_ok = slice::raw::buf_as_slice(cvbuf, minsz,
|buf0| buf0 == encoder::metadata_encoding_version); |buf0| buf0 == encoder::metadata_encoding_version);
if !version_ok { return Err(format!("incompatible metadata version found: '{}'", if !version_ok {
filename.display())); } return Err((format_strbuf!("incompatible metadata \
version found: '{}'",
filename.display())));
}
let cvbuf1 = cvbuf.offset(vlen as int); let cvbuf1 = cvbuf.offset(vlen as int);
debug!("inflating {} bytes of compressed metadata", debug!("inflating {} bytes of compressed metadata",
@ -548,9 +566,13 @@ fn get_metadata_section_imp(os: Os, filename: &Path) -> Result<MetadataBlob, ~st
slice::raw::buf_as_slice(cvbuf1, csz-vlen, |bytes| { slice::raw::buf_as_slice(cvbuf1, csz-vlen, |bytes| {
match flate::inflate_bytes(bytes) { match flate::inflate_bytes(bytes) {
Some(inflated) => found = Ok(MetadataVec(inflated)), Some(inflated) => found = Ok(MetadataVec(inflated)),
None => found = Err(format!("failed to decompress metadata for: '{}'", None => {
found =
Err(format_strbuf!("failed to decompress \
metadata for: '{}'",
filename.display())) filename.display()))
} }
}
}); });
if found.is_ok() { if found.is_ok() {
return found; return found;
@ -558,7 +580,8 @@ fn get_metadata_section_imp(os: Os, filename: &Path) -> Result<MetadataBlob, ~st
} }
llvm::LLVMMoveToNextSection(si.llsi); llvm::LLVMMoveToNextSection(si.llsi);
} }
return Err(format!("metadata not found: '{}'", filename.display())); return Err(format_strbuf!("metadata not found: '{}'",
filename.display()));
} }
} }


@ -201,7 +201,7 @@ fn parse_bound_region(st: &mut PState, conv: conv_did) -> ty::BoundRegion {
} }
'[' => { '[' => {
let def = parse_def(st, RegionParameter, |x,y| conv(x,y)); let def = parse_def(st, RegionParameter, |x,y| conv(x,y));
let ident = token::str_to_ident(parse_str(st, ']')); let ident = token::str_to_ident(parse_str(st, ']').as_slice());
ty::BrNamed(def, ident.name) ty::BrNamed(def, ident.name)
} }
'f' => { 'f' => {
@ -229,7 +229,7 @@ fn parse_region(st: &mut PState, conv: conv_did) -> ty::Region {
assert_eq!(next(st), '|'); assert_eq!(next(st), '|');
let index = parse_uint(st); let index = parse_uint(st);
assert_eq!(next(st), '|'); assert_eq!(next(st), '|');
let nm = token::str_to_ident(parse_str(st, ']')); let nm = token::str_to_ident(parse_str(st, ']').as_slice());
ty::ReEarlyBound(node_id, index, nm.name) ty::ReEarlyBound(node_id, index, nm.name)
} }
'f' => { 'f' => {
@ -264,7 +264,7 @@ fn parse_opt<T>(st: &mut PState, f: |&mut PState| -> T) -> Option<T> {
} }
} }
fn parse_str(st: &mut PState, term: char) -> ~str { fn parse_str(st: &mut PState, term: char) -> StrBuf {
let mut result = StrBuf::new(); let mut result = StrBuf::new();
while peek(st) != term { while peek(st) != term {
unsafe { unsafe {
@ -272,7 +272,7 @@ fn parse_str(st: &mut PState, term: char) -> ~str {
} }
} }
next(st); next(st);
return result.into_owned(); result
} }
fn parse_trait_ref(st: &mut PState, conv: conv_did) -> ty::TraitRef { fn parse_trait_ref(st: &mut PState, conv: conv_did) -> ty::TraitRef {
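parse_str above now returns its StrBuf accumulator directly instead of calling into_owned(): it pushes characters until it sees the terminator, then consumes the terminator. The same accumulate-until-terminator shape, written safely against a plain &str so it stands alone (the original's unsafe byte pushing is dropped):

    fn take_until(s: &str, term: char) -> StrBuf {
        let mut result = StrBuf::new();
        for c in s.chars() {
            if c == term { break }
            result.push_char(c);
        }
        result
    }

    // take_until("abc]rest", ']') yields a StrBuf holding "abc".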


@ -35,7 +35,7 @@ macro_rules! mywrite( ($wr:expr, $($arg:tt)*) => (
pub struct ctxt<'a> { pub struct ctxt<'a> {
pub diag: &'a SpanHandler, pub diag: &'a SpanHandler,
// Def -> str Callback: // Def -> str Callback:
pub ds: fn(DefId) -> ~str, pub ds: fn(DefId) -> StrBuf,
// The type context. // The type context.
pub tcx: &'a ty::ctxt, pub tcx: &'a ty::ctxt,
pub abbrevs: &'a abbrev_map pub abbrevs: &'a abbrev_map
@ -47,7 +47,7 @@ pub struct ctxt<'a> {
pub struct ty_abbrev { pub struct ty_abbrev {
pos: uint, pos: uint,
len: uint, len: uint,
s: ~str s: StrBuf
} }
pub type abbrev_map = RefCell<HashMap<ty::t, ty_abbrev>>; pub type abbrev_map = RefCell<HashMap<ty::t, ty_abbrev>>;
@ -77,7 +77,7 @@ pub fn enc_ty(w: &mut MemWriter, cx: &ctxt, t: ty::t) {
cx.abbrevs.borrow_mut().insert(t, ty_abbrev { cx.abbrevs.borrow_mut().insert(t, ty_abbrev {
pos: pos as uint, pos: pos as uint,
len: len as uint, len: len as uint,
s: format!("\\#{:x}:{:x}\\#", pos, len) s: format_strbuf!("\\#{:x}:{:x}\\#", pos, len)
}); });
} }
} }


@ -1152,12 +1152,12 @@ impl<'a> ebml_decoder_decoder_helpers for reader::Decoder<'a> {
Ok(ty) Ok(ty)
}).unwrap(); }).unwrap();
fn type_string(doc: ebml::Doc) -> ~str { fn type_string(doc: ebml::Doc) -> StrBuf {
let mut str = StrBuf::new(); let mut str = StrBuf::new();
for i in range(doc.start, doc.end) { for i in range(doc.start, doc.end) {
str.push_char(doc.data[i] as char); str.push_char(doc.data[i] as char);
} }
str.into_owned() str
} }
} }


@ -98,10 +98,10 @@ pub fn check_crate(tcx: &ty::ctxt,
make_stat(&bccx, bccx.stats.stable_paths.get())); make_stat(&bccx, bccx.stats.stable_paths.get()));
} }
fn make_stat(bccx: &BorrowckCtxt, stat: uint) -> ~str { fn make_stat(bccx: &BorrowckCtxt, stat: uint) -> StrBuf {
let stat_f = stat as f64; let stat_f = stat as f64;
let total = bccx.stats.guaranteed_paths.get() as f64; let total = bccx.stats.guaranteed_paths.get() as f64;
format!("{} ({:.0f}%)", stat , stat_f * 100.0 / total) format_strbuf!("{} ({:.0f}%)", stat , stat_f * 100.0 / total)
} }
} }
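make_stat above renders one borrow-check statistic as a count plus its share of all guaranteed paths; {:.0f} rounds the share to a whole percent. A self-contained version of the arithmetic, decoupled from BorrowckCtxt:

    fn make_stat(stat: uint, total_paths: uint) -> StrBuf {
        let stat_f = stat as f64;
        let total = total_paths as f64;
        format_strbuf!("{} ({:.0f}%)", stat, stat_f * 100.0 / total)
    }

    // make_stat(50, 200) formats as "50 (25%)".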
@ -303,8 +303,8 @@ impl BitAnd<RestrictionSet,RestrictionSet> for RestrictionSet {
} }
impl Repr for RestrictionSet { impl Repr for RestrictionSet {
fn repr(&self, _tcx: &ty::ctxt) -> ~str { fn repr(&self, _tcx: &ty::ctxt) -> StrBuf {
format!("RestrictionSet(0x{:x})", self.bits as uint) format_strbuf!("RestrictionSet(0x{:x})", self.bits as uint)
} }
} }
@ -447,7 +447,7 @@ impl<'a> BorrowckCtxt<'a> {
pub fn report(&self, err: BckError) { pub fn report(&self, err: BckError) {
self.span_err( self.span_err(
err.span, err.span,
self.bckerr_to_str(&err)); self.bckerr_to_str(&err).as_slice());
self.note_and_explain_bckerr(err); self.note_and_explain_bckerr(err);
} }
@ -572,28 +572,32 @@ impl<'a> BorrowckCtxt<'a> {
self.tcx.sess.span_end_note(s, m); self.tcx.sess.span_end_note(s, m);
} }
pub fn bckerr_to_str(&self, err: &BckError) -> ~str { pub fn bckerr_to_str(&self, err: &BckError) -> StrBuf {
match err.code { match err.code {
err_mutbl => { err_mutbl => {
let descr = match opt_loan_path(&err.cmt) { let descr = match opt_loan_path(&err.cmt) {
None => format!("{} {}", None => {
format_strbuf!("{} {}",
err.cmt.mutbl.to_user_str(), err.cmt.mutbl.to_user_str(),
self.cmt_to_str(&*err.cmt)), self.cmt_to_str(&*err.cmt))
Some(lp) => format!("{} {} `{}`", }
Some(lp) => {
format_strbuf!("{} {} `{}`",
err.cmt.mutbl.to_user_str(), err.cmt.mutbl.to_user_str(),
self.cmt_to_str(&*err.cmt), self.cmt_to_str(&*err.cmt),
self.loan_path_to_str(&*lp)), self.loan_path_to_str(&*lp))
}
}; };
match err.cause { match err.cause {
euv::ClosureCapture(_) => { euv::ClosureCapture(_) => {
format!("closure cannot assign to {}", descr) format_strbuf!("closure cannot assign to {}", descr)
} }
euv::OverloadedOperator | euv::OverloadedOperator |
euv::AddrOf | euv::AddrOf |
euv::RefBinding | euv::RefBinding |
euv::AutoRef => { euv::AutoRef => {
format!("cannot borrow {} as mutable", descr) format_strbuf!("cannot borrow {} as mutable", descr)
} }
euv::ClosureInvocation => { euv::ClosureInvocation => {
self.tcx.sess.span_bug(err.span, self.tcx.sess.span_bug(err.span,
@ -603,18 +607,22 @@ impl<'a> BorrowckCtxt<'a> {
} }
err_out_of_scope(..) => { err_out_of_scope(..) => {
let msg = match opt_loan_path(&err.cmt) { let msg = match opt_loan_path(&err.cmt) {
None => format!("borrowed value"), None => "borrowed value".to_strbuf(),
Some(lp) => format!("`{}`", self.loan_path_to_str(&*lp)), Some(lp) => {
format_strbuf!("`{}`", self.loan_path_to_str(&*lp))
}
}; };
format!("{} does not live long enough", msg) format_strbuf!("{} does not live long enough", msg)
} }
err_borrowed_pointer_too_short(..) => { err_borrowed_pointer_too_short(..) => {
let descr = match opt_loan_path(&err.cmt) { let descr = match opt_loan_path(&err.cmt) {
Some(lp) => format!("`{}`", self.loan_path_to_str(&*lp)), Some(lp) => {
format_strbuf!("`{}`", self.loan_path_to_str(&*lp))
}
None => self.cmt_to_str(&*err.cmt), None => self.cmt_to_str(&*err.cmt),
}; };
format!("lifetime of {} is too short to guarantee \ format_strbuf!("lifetime of {} is too short to guarantee \
its contents can be safely reborrowed", its contents can be safely reborrowed",
descr) descr)
} }
@ -655,7 +663,8 @@ impl<'a> BorrowckCtxt<'a> {
mc::AliasableOther => { mc::AliasableOther => {
self.tcx.sess.span_err( self.tcx.sess.span_err(
span, span,
format!("{} in an aliasable location", prefix)); format!("{} in an aliasable location",
prefix));
} }
mc::AliasableStatic(..) | mc::AliasableStatic(..) |
mc::AliasableStaticMut(..) => { mc::AliasableStaticMut(..) => {
@ -696,7 +705,9 @@ impl<'a> BorrowckCtxt<'a> {
err_borrowed_pointer_too_short(loan_scope, ptr_scope, _) => { err_borrowed_pointer_too_short(loan_scope, ptr_scope, _) => {
let descr = match opt_loan_path(&err.cmt) { let descr = match opt_loan_path(&err.cmt) {
Some(lp) => format!("`{}`", self.loan_path_to_str(&*lp)), Some(lp) => {
format_strbuf!("`{}`", self.loan_path_to_str(&*lp))
}
None => self.cmt_to_str(&*err.cmt), None => self.cmt_to_str(&*err.cmt),
}; };
note_and_explain_region( note_and_explain_region(
@ -764,13 +775,13 @@ impl<'a> BorrowckCtxt<'a> {
} }
} }
pub fn loan_path_to_str(&self, loan_path: &LoanPath) -> ~str { pub fn loan_path_to_str(&self, loan_path: &LoanPath) -> StrBuf {
let mut result = StrBuf::new(); let mut result = StrBuf::new();
self.append_loan_path_to_str(loan_path, &mut result); self.append_loan_path_to_str(loan_path, &mut result);
result.into_owned() result
} }
pub fn cmt_to_str(&self, cmt: &mc::cmt_) -> ~str { pub fn cmt_to_str(&self, cmt: &mc::cmt_) -> StrBuf {
self.mc().cmt_to_str(cmt) self.mc().cmt_to_str(cmt)
} }
} }
@ -788,38 +799,40 @@ impl DataFlowOperator for LoanDataFlowOperator {
} }
impl Repr for Loan { impl Repr for Loan {
fn repr(&self, tcx: &ty::ctxt) -> ~str { fn repr(&self, tcx: &ty::ctxt) -> StrBuf {
format!("Loan_{:?}({}, {:?}, {:?}-{:?}, {})", (format!("Loan_{:?}({}, {:?}, {:?}-{:?}, {})",
self.index, self.index,
self.loan_path.repr(tcx), self.loan_path.repr(tcx),
self.kind, self.kind,
self.gen_scope, self.gen_scope,
self.kill_scope, self.kill_scope,
self.restrictions.repr(tcx)) self.restrictions.repr(tcx))).to_strbuf()
} }
} }
impl Repr for Restriction { impl Repr for Restriction {
fn repr(&self, tcx: &ty::ctxt) -> ~str { fn repr(&self, tcx: &ty::ctxt) -> StrBuf {
format!("Restriction({}, {:x})", (format!("Restriction({}, {:x})",
self.loan_path.repr(tcx), self.loan_path.repr(tcx),
self.set.bits as uint) self.set.bits as uint)).to_strbuf()
} }
} }
impl Repr for LoanPath { impl Repr for LoanPath {
fn repr(&self, tcx: &ty::ctxt) -> ~str { fn repr(&self, tcx: &ty::ctxt) -> StrBuf {
match self { match self {
&LpVar(id) => { &LpVar(id) => {
format!("$({})", tcx.map.node_to_str(id)) (format!("$({})", tcx.map.node_to_str(id))).to_strbuf()
} }
&LpExtend(ref lp, _, LpDeref(_)) => { &LpExtend(ref lp, _, LpDeref(_)) => {
format!("{}.*", lp.repr(tcx)) (format!("{}.*", lp.repr(tcx))).to_strbuf()
} }
&LpExtend(ref lp, _, LpInterior(ref interior)) => { &LpExtend(ref lp, _, LpInterior(ref interior)) => {
format!("{}.{}", lp.repr(tcx), interior.repr(tcx)) (format!("{}.{}",
lp.repr(tcx),
interior.repr(tcx))).to_strbuf()
} }
} }
} }
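The Repr impls in the hunks above all switch from returning ~str to returning an owned StrBuf built via format! / format_strbuf!. A minimal sketch of that shape in present-day Rust, where String plays the role StrBuf played in 2014; LoanPath below is a simplified stand-in for illustration, not the real compiler type:

// Stand-in for the compiler's Repr trait: produce an owned, growable
// string rather than a boxed string slice.
trait Repr {
    fn repr(&self) -> String;
}

// Hypothetical, simplified loan-path type echoing LpVar/LpExtend above.
enum LoanPath {
    Var(u32),
    Deref(Box<LoanPath>),
    Interior(Box<LoanPath>, String),
}

impl Repr for LoanPath {
    fn repr(&self) -> String {
        match self {
            // format! already yields an owned String, so no trailing
            // .to_strbuf()/.to_owned() conversion is needed today.
            LoanPath::Var(id) => format!("$({})", id),
            LoanPath::Deref(lp) => format!("{}.*", lp.repr()),
            LoanPath::Interior(lp, field) => format!("{}.{}", lp.repr(), field),
        }
    }
}

fn main() {
    let lp = LoanPath::Interior(Box::new(LoanPath::Var(7)), "x".to_string());
    println!("{}", lp.repr()); // prints: $(7).x
}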

@ -102,9 +102,12 @@ fn check_expr(v: &mut CheckCrateVisitor, e: &Expr, is_const: bool) {
ExprCast(_, _) => { ExprCast(_, _) => {
let ety = ty::expr_ty(v.tcx, e); let ety = ty::expr_ty(v.tcx, e);
if !ty::type_is_numeric(ety) && !ty::type_is_unsafe_ptr(ety) { if !ty::type_is_numeric(ety) && !ty::type_is_unsafe_ptr(ety) {
v.tcx.sess.span_err(e.span, "can not cast to `".to_owned() + v.tcx
ppaux::ty_to_str(v.tcx, ety) + .sess
"` in a constant expression"); .span_err(e.span,
format!("can not cast to `{}` in a constant \
expression",
ppaux::ty_to_str(v.tcx, ety).as_slice()))
} }
} }
ExprPath(ref pth) => { ExprPath(ref pth) => {

@ -33,7 +33,7 @@ use syntax::visit;
use syntax::print::pprust; use syntax::print::pprust;
fn safe_type_for_static_mut(cx: &ty::ctxt, e: &ast::Expr) -> Option<~str> { fn safe_type_for_static_mut(cx: &ty::ctxt, e: &ast::Expr) -> Option<StrBuf> {
let node_ty = ty::node_id_to_type(cx, e.id); let node_ty = ty::node_id_to_type(cx, e.id);
let tcontents = ty::type_contents(cx, node_ty); let tcontents = ty::type_contents(cx, node_ty);
debug!("safe_type_for_static_mut(dtor={}, managed={}, owned={})", debug!("safe_type_for_static_mut(dtor={}, managed={}, owned={})",
@ -49,7 +49,8 @@ fn safe_type_for_static_mut(cx: &ty::ctxt, e: &ast::Expr) -> Option<~str> {
return None; return None;
}; };
Some(format!("mutable static items are not allowed to have {}", suffix)) Some(format_strbuf!("mutable static items are not allowed to have {}",
suffix))
} }
struct CheckStaticVisitor<'a> { struct CheckStaticVisitor<'a> {
@ -61,11 +62,11 @@ pub fn check_crate(tcx: &ty::ctxt, krate: &ast::Crate) {
} }
impl<'a> CheckStaticVisitor<'a> { impl<'a> CheckStaticVisitor<'a> {
fn report_error(&self, span: Span, result: Option<~str>) -> bool { fn report_error(&self, span: Span, result: Option<StrBuf>) -> bool {
match result { match result {
None => { false } None => { false }
Some(msg) => { Some(msg) => {
self.tcx.sess.span_err(span, msg); self.tcx.sess.span_err(span, msg.as_slice());
true true
} }
} }
@ -132,7 +133,8 @@ impl<'a> Visitor<bool> for CheckStaticVisitor<'a> {
ty::ty_enum(did, _) => { ty::ty_enum(did, _) => {
if ty::has_dtor(self.tcx, did) { if ty::has_dtor(self.tcx, did) {
self.report_error(e.span, self.report_error(e.span,
Some("static items are not allowed to have destructors".to_owned())); Some("static items are not allowed to have \
destructors".to_strbuf()));
return; return;
} }
} }

@ -295,21 +295,21 @@ pub enum const_val {
pub fn eval_const_expr(tcx: &ty::ctxt, e: &Expr) -> const_val { pub fn eval_const_expr(tcx: &ty::ctxt, e: &Expr) -> const_val {
match eval_const_expr_partial(tcx, e) { match eval_const_expr_partial(tcx, e) {
Ok(r) => r, Ok(r) => r,
Err(s) => tcx.sess.span_fatal(e.span, s) Err(s) => tcx.sess.span_fatal(e.span, s.as_slice())
} }
} }
pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr) pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
-> Result<const_val, ~str> { -> Result<const_val, StrBuf> {
fn fromb(b: bool) -> Result<const_val, ~str> { Ok(const_int(b as i64)) } fn fromb(b: bool) -> Result<const_val, StrBuf> { Ok(const_int(b as i64)) }
match e.node { match e.node {
ExprUnary(UnNeg, inner) => { ExprUnary(UnNeg, inner) => {
match eval_const_expr_partial(tcx, inner) { match eval_const_expr_partial(tcx, inner) {
Ok(const_float(f)) => Ok(const_float(-f)), Ok(const_float(f)) => Ok(const_float(-f)),
Ok(const_int(i)) => Ok(const_int(-i)), Ok(const_int(i)) => Ok(const_int(-i)),
Ok(const_uint(i)) => Ok(const_uint(-i)), Ok(const_uint(i)) => Ok(const_uint(-i)),
Ok(const_str(_)) => Err("negate on string".to_owned()), Ok(const_str(_)) => Err("negate on string".to_strbuf()),
Ok(const_bool(_)) => Err("negate on boolean".to_owned()), Ok(const_bool(_)) => Err("negate on boolean".to_strbuf()),
ref err => ((*err).clone()) ref err => ((*err).clone())
} }
} }
@ -318,7 +318,7 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
Ok(const_int(i)) => Ok(const_int(!i)), Ok(const_int(i)) => Ok(const_int(!i)),
Ok(const_uint(i)) => Ok(const_uint(!i)), Ok(const_uint(i)) => Ok(const_uint(!i)),
Ok(const_bool(b)) => Ok(const_bool(!b)), Ok(const_bool(b)) => Ok(const_bool(!b)),
_ => Err("not on float or string".to_owned()) _ => Err("not on float or string".to_strbuf())
} }
} }
ExprBinary(op, a, b) => { ExprBinary(op, a, b) => {
@ -337,7 +337,7 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
BiNe => fromb(a != b), BiNe => fromb(a != b),
BiGe => fromb(a >= b), BiGe => fromb(a >= b),
BiGt => fromb(a > b), BiGt => fromb(a > b),
_ => Err("can't do this op on floats".to_owned()) _ => Err("can't do this op on floats".to_strbuf())
} }
} }
(Ok(const_int(a)), Ok(const_int(b))) => { (Ok(const_int(a)), Ok(const_int(b))) => {
@ -345,9 +345,14 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
BiAdd => Ok(const_int(a + b)), BiAdd => Ok(const_int(a + b)),
BiSub => Ok(const_int(a - b)), BiSub => Ok(const_int(a - b)),
BiMul => Ok(const_int(a * b)), BiMul => Ok(const_int(a * b)),
BiDiv if b == 0 => Err("attempted to divide by zero".to_owned()), BiDiv if b == 0 => {
Err("attempted to divide by zero".to_strbuf())
}
BiDiv => Ok(const_int(a / b)), BiDiv => Ok(const_int(a / b)),
BiRem if b == 0 => Err("attempted remainder with a divisor of zero".to_owned()), BiRem if b == 0 => {
Err("attempted remainder with a divisor of \
zero".to_strbuf())
}
BiRem => Ok(const_int(a % b)), BiRem => Ok(const_int(a % b)),
BiAnd | BiBitAnd => Ok(const_int(a & b)), BiAnd | BiBitAnd => Ok(const_int(a & b)),
BiOr | BiBitOr => Ok(const_int(a | b)), BiOr | BiBitOr => Ok(const_int(a | b)),
@ -367,9 +372,14 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
BiAdd => Ok(const_uint(a + b)), BiAdd => Ok(const_uint(a + b)),
BiSub => Ok(const_uint(a - b)), BiSub => Ok(const_uint(a - b)),
BiMul => Ok(const_uint(a * b)), BiMul => Ok(const_uint(a * b)),
BiDiv if b == 0 => Err("attempted to divide by zero".to_owned()), BiDiv if b == 0 => {
Err("attempted to divide by zero".to_strbuf())
}
BiDiv => Ok(const_uint(a / b)), BiDiv => Ok(const_uint(a / b)),
BiRem if b == 0 => Err("attempted remainder with a divisor of zero".to_owned()), BiRem if b == 0 => {
Err("attempted remainder with a divisor of \
zero".to_strbuf())
}
BiRem => Ok(const_uint(a % b)), BiRem => Ok(const_uint(a % b)),
BiAnd | BiBitAnd => Ok(const_uint(a & b)), BiAnd | BiBitAnd => Ok(const_uint(a & b)),
BiOr | BiBitOr => Ok(const_uint(a | b)), BiOr | BiBitOr => Ok(const_uint(a | b)),
@ -389,14 +399,14 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
match op { match op {
BiShl => Ok(const_int(a << b)), BiShl => Ok(const_int(a << b)),
BiShr => Ok(const_int(a >> b)), BiShr => Ok(const_int(a >> b)),
_ => Err("can't do this op on an int and uint".to_owned()) _ => Err("can't do this op on an int and uint".to_strbuf())
} }
} }
(Ok(const_uint(a)), Ok(const_int(b))) => { (Ok(const_uint(a)), Ok(const_int(b))) => {
match op { match op {
BiShl => Ok(const_uint(a << b)), BiShl => Ok(const_uint(a << b)),
BiShr => Ok(const_uint(a >> b)), BiShr => Ok(const_uint(a >> b)),
_ => Err("can't do this op on a uint and int".to_owned()) _ => Err("can't do this op on a uint and int".to_strbuf())
} }
} }
(Ok(const_bool(a)), Ok(const_bool(b))) => { (Ok(const_bool(a)), Ok(const_bool(b))) => {
@ -408,10 +418,10 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
BiBitOr => a | b, BiBitOr => a | b,
BiEq => a == b, BiEq => a == b,
BiNe => a != b, BiNe => a != b,
_ => return Err("can't do this op on bools".to_owned()) _ => return Err("can't do this op on bools".to_strbuf())
})) }))
} }
_ => Err("bad operands for binary".to_owned()) _ => Err("bad operands for binary".to_strbuf())
} }
} }
ExprCast(base, target_ty) => { ExprCast(base, target_ty) => {
@ -435,7 +445,7 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
const_uint(u) => Ok(const_float(u as f64)), const_uint(u) => Ok(const_float(u as f64)),
const_int(i) => Ok(const_float(i as f64)), const_int(i) => Ok(const_float(i as f64)),
const_float(f) => Ok(const_float(f)), const_float(f) => Ok(const_float(f)),
_ => Err("can't cast float to str".to_owned()), _ => Err("can't cast float to str".to_strbuf()),
} }
} }
ty::ty_uint(_) => { ty::ty_uint(_) => {
@ -443,7 +453,7 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
const_uint(u) => Ok(const_uint(u)), const_uint(u) => Ok(const_uint(u)),
const_int(i) => Ok(const_uint(i as u64)), const_int(i) => Ok(const_uint(i as u64)),
const_float(f) => Ok(const_uint(f as u64)), const_float(f) => Ok(const_uint(f as u64)),
_ => Err("can't cast str to uint".to_owned()), _ => Err("can't cast str to uint".to_strbuf()),
} }
} }
ty::ty_int(_) | ty::ty_bool => { ty::ty_int(_) | ty::ty_bool => {
@ -451,10 +461,10 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
const_uint(u) => Ok(const_int(u as i64)), const_uint(u) => Ok(const_int(u as i64)),
const_int(i) => Ok(const_int(i)), const_int(i) => Ok(const_int(i)),
const_float(f) => Ok(const_int(f as i64)), const_float(f) => Ok(const_int(f as i64)),
_ => Err("can't cast str to int".to_owned()), _ => Err("can't cast str to int".to_strbuf()),
} }
} }
_ => Err("can't cast this type".to_owned()) _ => Err("can't cast this type".to_strbuf())
} }
} }
} }
@ -462,14 +472,14 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
ExprPath(_) => { ExprPath(_) => {
match lookup_const(tcx.ty_ctxt(), e) { match lookup_const(tcx.ty_ctxt(), e) {
Some(actual_e) => eval_const_expr_partial(tcx.ty_ctxt(), actual_e), Some(actual_e) => eval_const_expr_partial(tcx.ty_ctxt(), actual_e),
None => Err("non-constant path in constant expr".to_owned()) None => Err("non-constant path in constant expr".to_strbuf())
} }
} }
ExprLit(lit) => Ok(lit_to_const(lit)), ExprLit(lit) => Ok(lit_to_const(lit)),
// If we have a vstore, just keep going; it has to be a string // If we have a vstore, just keep going; it has to be a string
ExprVstore(e, _) => eval_const_expr_partial(tcx, e), ExprVstore(e, _) => eval_const_expr_partial(tcx, e),
ExprParen(e) => eval_const_expr_partial(tcx, e), ExprParen(e) => eval_const_expr_partial(tcx, e),
_ => Err("unsupported constant expr".to_owned()) _ => Err("unsupported constant expr".to_strbuf())
} }
} }
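The constant evaluator above now threads its error messages through Result<const_val, StrBuf> rather than Result<const_val, ~str>. A compressed sketch of the same pattern in current Rust, using Result<i64, String> over a toy expression type (the names here are illustrative, not the real AST):

// Toy constant folder in the style of eval_const_expr_partial above,
// covering only integer literals, negation, and division.
enum Expr {
    Int(i64),
    Neg(Box<Expr>),
    Div(Box<Expr>, Box<Expr>),
}

// Errors are owned Strings, the modern counterpart of StrBuf in the patch.
fn eval(e: &Expr) -> Result<i64, String> {
    match e {
        Expr::Int(i) => Ok(*i),
        Expr::Neg(inner) => Ok(-eval(inner)?),
        Expr::Div(a, b) => {
            let (a, b) = (eval(a)?, eval(b)?);
            if b == 0 {
                // Mirrors the `BiDiv if b == 0` arm above.
                Err("attempted to divide by zero".to_string())
            } else {
                Ok(a / b)
            }
        }
    }
}

fn main() {
    let e = Expr::Div(Box::new(Expr::Int(10)), Box::new(Expr::Int(0)));
    println!("{:?}", eval(&e)); // Err("attempted to divide by zero")
}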

@ -112,8 +112,11 @@ impl<'a, O:DataFlowOperator> pprust::PpAnn for DataFlowContext<'a, O> {
"".to_owned() "".to_owned()
}; };
try!(ps.synth_comment((format!("id {}: {}{}{}", id, entry_str, try!(ps.synth_comment(format_strbuf!("id {}: {}{}{}",
gens_str, kills_str)).to_strbuf())); id,
entry_str,
gens_str,
kills_str)));
try!(pp::space(&mut ps.s)); try!(pp::space(&mut ps.s));
} }
Ok(()) Ok(())
@ -824,11 +827,11 @@ impl<'a, 'b, O:DataFlowOperator> PropagationContext<'a, 'b, O> {
} }
} }
fn mut_bits_to_str(words: &mut [uint]) -> ~str { fn mut_bits_to_str(words: &mut [uint]) -> StrBuf {
bits_to_str(words) bits_to_str(words)
} }
fn bits_to_str(words: &[uint]) -> ~str { fn bits_to_str(words: &[uint]) -> StrBuf {
let mut result = StrBuf::new(); let mut result = StrBuf::new();
let mut sep = '['; let mut sep = '[';
@ -844,7 +847,7 @@ fn bits_to_str(words: &[uint]) -> ~str {
} }
} }
result.push_char(']'); result.push_char(']');
return result.into_owned(); return result
} }
fn copy_bits(in_vec: &[uint], out_vec: &mut [uint]) -> bool { fn copy_bits(in_vec: &[uint], out_vec: &mut [uint]) -> bool {
@ -884,8 +887,8 @@ fn set_bit(words: &mut [uint], bit: uint) -> bool {
oldv != newv oldv != newv
} }
fn bit_str(bit: uint) -> ~str { fn bit_str(bit: uint) -> StrBuf {
let byte = bit >> 8; let byte = bit >> 8;
let lobits = 1 << (bit & 0xFF); let lobits = 1 << (bit & 0xFF);
format!("[{}:{}-{:02x}]", bit, byte, lobits) format_strbuf!("[{}:{}-{:02x}]", bit, byte, lobits)
} }
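bits_to_str and bit_str above now accumulate into a StrBuf and return it directly, dropping the final into_owned() copy. The same buffer-and-return pattern with today's String, as a rough sketch rather than the real dataflow code:

// Render a list of bit-set words as "[1a,ff]"-style text by pushing into
// one growable buffer and returning it, as the patched bits_to_str does.
fn bits_to_str(words: &[usize]) -> String {
    let mut result = String::new();
    let mut sep = '[';
    for &word in words {
        result.push(sep);
        result.push_str(&format!("{:x}", word));
        sep = ',';
    }
    result.push(']');
    result // returned as-is; no .into_owned() step remains
}

fn main() {
    println!("{}", bits_to_str(&[0x1a, 0xff])); // [1a,ff]
}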

@ -352,10 +352,16 @@ impl<'a> DeadVisitor<'a> {
false false
} }
fn warn_dead_code(&mut self, id: ast::NodeId, fn warn_dead_code(&mut self,
span: codemap::Span, ident: ast::Ident) { id: ast::NodeId,
self.tcx.sess.add_lint(DeadCode, id, span, span: codemap::Span,
format!("code is never used: `{}`", ident: ast::Ident) {
self.tcx
.sess
.add_lint(DeadCode,
id,
span,
format_strbuf!("code is never used: `{}`",
token::get_ident(ident))); token::get_ident(ident)));
} }
} }

@ -562,7 +562,7 @@ pub fn check_cast_for_escaping_regions(
} }
// Ensure that `ty` has a statically known size (i.e., it has the `Sized` bound). // Ensure that `ty` has a statically known size (i.e., it has the `Sized` bound).
fn check_sized(tcx: &ty::ctxt, ty: ty::t, name: ~str, sp: Span) { fn check_sized(tcx: &ty::ctxt, ty: ty::t, name: StrBuf, sp: Span) {
if !ty::type_is_sized(tcx, ty) { if !ty::type_is_sized(tcx, ty) {
tcx.sess.span_err(sp, format!("variable `{}` has dynamically sized type `{}`", tcx.sess.span_err(sp, format!("variable `{}` has dynamically sized type `{}`",
name, ty_to_str(tcx, ty))); name, ty_to_str(tcx, ty)));
@ -572,8 +572,8 @@ fn check_sized(tcx: &ty::ctxt, ty: ty::t, name: ~str, sp: Span) {
// Check that any variables in a pattern have types with statically known size. // Check that any variables in a pattern have types with statically known size.
fn check_pat(cx: &mut Context, pat: &Pat) { fn check_pat(cx: &mut Context, pat: &Pat) {
let var_name = match pat.node { let var_name = match pat.node {
PatWild => Some("_".to_owned()), PatWild => Some("_".to_strbuf()),
PatIdent(_, ref path, _) => Some(path_to_str(path).to_owned()), PatIdent(_, ref path, _) => Some(path_to_str(path).to_strbuf()),
_ => None _ => None
}; };

@ -71,11 +71,11 @@ impl LanguageItems {
} }
} }
pub fn require(&self, it: LangItem) -> Result<ast::DefId, ~str> { pub fn require(&self, it: LangItem) -> Result<ast::DefId, StrBuf> {
match self.items.get(it as uint) { match self.items.get(it as uint) {
&Some(id) => Ok(id), &Some(id) => Ok(id),
&None => { &None => {
Err(format!("requires `{}` lang_item", Err(format_strbuf!("requires `{}` lang_item",
LanguageItems::item_name(it as uint))) LanguageItems::item_name(it as uint)))
} }
} }

@ -1830,7 +1830,7 @@ impl<'a> IdVisitingOperation for Context<'a> {
None => {} None => {}
Some(l) => { Some(l) => {
for (lint, span, msg) in l.move_iter() { for (lint, span, msg) in l.move_iter() {
self.span_lint(lint, span, msg) self.span_lint(lint, span, msg.as_slice())
} }
} }
} }

@ -150,13 +150,19 @@ enum LiveNodeKind {
ExitNode ExitNode
} }
fn live_node_kind_to_str(lnk: LiveNodeKind, cx: &ty::ctxt) -> ~str { fn live_node_kind_to_str(lnk: LiveNodeKind, cx: &ty::ctxt) -> StrBuf {
let cm = cx.sess.codemap(); let cm = cx.sess.codemap();
match lnk { match lnk {
FreeVarNode(s) => format!("Free var node [{}]", cm.span_to_str(s)), FreeVarNode(s) => {
ExprNode(s) => format!("Expr node [{}]", cm.span_to_str(s)), format_strbuf!("Free var node [{}]", cm.span_to_str(s))
VarDefNode(s) => format!("Var def node [{}]", cm.span_to_str(s)), }
ExitNode => "Exit node".to_owned() ExprNode(s) => {
format_strbuf!("Expr node [{}]", cm.span_to_str(s))
}
VarDefNode(s) => {
format_strbuf!("Var def node [{}]", cm.span_to_str(s))
}
ExitNode => "Exit node".to_strbuf(),
} }
} }
@ -308,18 +314,20 @@ impl<'a> IrMaps<'a> {
match self.variable_map.find(&node_id) { match self.variable_map.find(&node_id) {
Some(&var) => var, Some(&var) => var,
None => { None => {
self.tcx.sess.span_bug( self.tcx
span, format!("no variable registered for id {}", node_id)); .sess
.span_bug(span, format!("no variable registered for id {}",
node_id));
} }
} }
} }
fn variable_name(&self, var: Variable) -> ~str { fn variable_name(&self, var: Variable) -> StrBuf {
match self.var_kinds.get(var.get()) { match self.var_kinds.get(var.get()) {
&Local(LocalInfo { ident: nm, .. }) | &Arg(_, nm) => { &Local(LocalInfo { ident: nm, .. }) | &Arg(_, nm) => {
token::get_ident(nm).get().to_str() token::get_ident(nm).get().to_str().to_strbuf()
}, },
&ImplicitRet => "<implicit-ret>".to_owned() &ImplicitRet => "<implicit-ret>".to_strbuf()
} }
} }
@ -741,7 +749,7 @@ impl<'a> Liveness<'a> {
} }
#[allow(unused_must_use)] #[allow(unused_must_use)]
fn ln_str(&self, ln: LiveNode) -> ~str { fn ln_str(&self, ln: LiveNode) -> StrBuf {
let mut wr = io::MemWriter::new(); let mut wr = io::MemWriter::new();
{ {
let wr = &mut wr as &mut io::Writer; let wr = &mut wr as &mut io::Writer;
@ -751,7 +759,7 @@ impl<'a> Liveness<'a> {
self.write_vars(wr, ln, |idx| self.users.get(idx).writer); self.write_vars(wr, ln, |idx| self.users.get(idx).writer);
write!(wr, " precedes {}]", self.successors.get(ln.get()).to_str()); write!(wr, " precedes {}]", self.successors.get(ln.get()).to_str());
} }
str::from_utf8(wr.unwrap().as_slice()).unwrap().to_owned() str::from_utf8(wr.unwrap().as_slice()).unwrap().to_strbuf()
} }
fn init_empty(&mut self, ln: LiveNode, succ_ln: LiveNode) { fn init_empty(&mut self, ln: LiveNode, succ_ln: LiveNode) {
@ -1532,9 +1540,13 @@ impl<'a> Liveness<'a> {
} }
} }
fn should_warn(&self, var: Variable) -> Option<~str> { fn should_warn(&self, var: Variable) -> Option<StrBuf> {
let name = self.ir.variable_name(var); let name = self.ir.variable_name(var);
if name.len() == 0 || name[0] == ('_' as u8) { None } else { Some(name) } if name.len() == 0 || name.as_slice()[0] == ('_' as u8) {
None
} else {
Some(name)
}
} }
fn warn_about_unused_args(&self, decl: &FnDecl, entry_ln: LiveNode) { fn warn_about_unused_args(&self, decl: &FnDecl, entry_ln: LiveNode) {
@ -1581,11 +1593,12 @@ impl<'a> Liveness<'a> {
if is_assigned { if is_assigned {
self.ir.tcx.sess.add_lint(UnusedVariable, id, sp, self.ir.tcx.sess.add_lint(UnusedVariable, id, sp,
format!("variable `{}` is assigned to, \ format_strbuf!("variable `{}` is assigned to, \
but never used", *name)); but never used",
*name));
} else { } else {
self.ir.tcx.sess.add_lint(UnusedVariable, id, sp, self.ir.tcx.sess.add_lint(UnusedVariable, id, sp,
format!("unused variable: `{}`", *name)); format_strbuf!("unused variable: `{}`", *name));
} }
} }
true true
@ -1603,7 +1616,8 @@ impl<'a> Liveness<'a> {
let r = self.should_warn(var); let r = self.should_warn(var);
for name in r.iter() { for name in r.iter() {
self.ir.tcx.sess.add_lint(DeadAssignment, id, sp, self.ir.tcx.sess.add_lint(DeadAssignment, id, sp,
format!("value assigned to `{}` is never read", *name)); format_strbuf!("value assigned to `{}` is never read",
*name));
} }
} }
} }
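should_warn above keeps its logic (suppress the lint for empty or '_'-prefixed names) while changing the return type to Option<StrBuf>. A minimal stand-alone equivalent returning Option<String>:

// Return the variable name worth warning about, or None for names that
// start with '_', mirroring the patched should_warn above.
fn should_warn(name: &str) -> Option<String> {
    if name.is_empty() || name.starts_with('_') {
        None
    } else {
        Some(name.to_string())
    }
}

fn main() {
    assert_eq!(should_warn("_tmp"), None);
    assert_eq!(should_warn("count"), Some("count".to_string()));
    println!("ok");
}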

@ -1093,50 +1093,51 @@ impl<'t,TYPER:Typer> MemCategorizationContext<'t,TYPER> {
Ok(()) Ok(())
} }
pub fn cmt_to_str(&self, cmt: &cmt_) -> ~str { pub fn cmt_to_str(&self, cmt: &cmt_) -> StrBuf {
match cmt.cat { match cmt.cat {
cat_static_item => { cat_static_item => {
"static item".to_owned() "static item".to_strbuf()
} }
cat_copied_upvar(_) => { cat_copied_upvar(_) => {
"captured outer variable in a proc".to_owned() "captured outer variable in a proc".to_strbuf()
} }
cat_rvalue(..) => { cat_rvalue(..) => {
"non-lvalue".to_owned() "non-lvalue".to_strbuf()
} }
cat_local(_) => { cat_local(_) => {
"local variable".to_owned() "local variable".to_strbuf()
} }
cat_arg(..) => { cat_arg(..) => {
"argument".to_owned() "argument".to_strbuf()
} }
cat_deref(ref base, _, pk) => { cat_deref(ref base, _, pk) => {
match base.cat { match base.cat {
cat_upvar(..) => { cat_upvar(..) => {
format!("captured outer variable") "captured outer variable".to_strbuf()
} }
_ => { _ => {
format!("dereference of `{}`-pointer", ptr_sigil(pk)) format_strbuf!("dereference of `{}`-pointer",
ptr_sigil(pk))
} }
} }
} }
cat_interior(_, InteriorField(NamedField(_))) => { cat_interior(_, InteriorField(NamedField(_))) => {
"field".to_owned() "field".to_strbuf()
} }
cat_interior(_, InteriorField(PositionalField(_))) => { cat_interior(_, InteriorField(PositionalField(_))) => {
"anonymous field".to_owned() "anonymous field".to_strbuf()
} }
cat_interior(_, InteriorElement(VecElement)) => { cat_interior(_, InteriorElement(VecElement)) => {
"vec content".to_owned() "vec content".to_strbuf()
} }
cat_interior(_, InteriorElement(StrElement)) => { cat_interior(_, InteriorElement(StrElement)) => {
"str content".to_owned() "str content".to_strbuf()
} }
cat_interior(_, InteriorElement(OtherElement)) => { cat_interior(_, InteriorElement(OtherElement)) => {
"indexed content".to_owned() "indexed content".to_strbuf()
} }
cat_upvar(..) => { cat_upvar(..) => {
"captured outer variable".to_owned() "captured outer variable".to_strbuf()
} }
cat_discr(ref cmt, _) => { cat_discr(ref cmt, _) => {
self.cmt_to_str(&**cmt) self.cmt_to_str(&**cmt)
@ -1248,8 +1249,8 @@ impl cmt_ {
} }
impl Repr for cmt_ { impl Repr for cmt_ {
fn repr(&self, tcx: &ty::ctxt) -> ~str { fn repr(&self, tcx: &ty::ctxt) -> StrBuf {
format!("\\{{} id:{} m:{:?} ty:{}\\}", format_strbuf!("\\{{} id:{} m:{:?} ty:{}\\}",
self.cat.repr(tcx), self.cat.repr(tcx),
self.id, self.id,
self.mutbl, self.mutbl,
@ -1258,7 +1259,7 @@ impl Repr for cmt_ {
} }
impl Repr for categorization { impl Repr for categorization {
fn repr(&self, tcx: &ty::ctxt) -> ~str { fn repr(&self, tcx: &ty::ctxt) -> StrBuf {
match *self { match *self {
cat_static_item | cat_static_item |
cat_rvalue(..) | cat_rvalue(..) |
@ -1266,21 +1267,19 @@ impl Repr for categorization {
cat_local(..) | cat_local(..) |
cat_upvar(..) | cat_upvar(..) |
cat_arg(..) => { cat_arg(..) => {
format!("{:?}", *self) format_strbuf!("{:?}", *self)
} }
cat_deref(ref cmt, derefs, ptr) => { cat_deref(ref cmt, derefs, ptr) => {
format!("{}-{}{}->", format_strbuf!("{}-{}{}->",
cmt.cat.repr(tcx), cmt.cat.repr(tcx),
ptr_sigil(ptr), ptr_sigil(ptr),
derefs) derefs)
} }
cat_interior(ref cmt, interior) => { cat_interior(ref cmt, interior) => {
format!("{}.{}", format_strbuf!("{}.{}", cmt.cat.repr(tcx), interior.repr(tcx))
cmt.cat.repr(tcx),
interior.repr(tcx))
} }
cat_downcast(ref cmt) => { cat_downcast(ref cmt) => {
format!("{}->(enum)", cmt.cat.repr(tcx)) format_strbuf!("{}->(enum)", cmt.cat.repr(tcx))
} }
cat_discr(ref cmt, _) => { cat_discr(ref cmt, _) => {
cmt.cat.repr(tcx) cmt.cat.repr(tcx)
@ -1301,13 +1300,13 @@ pub fn ptr_sigil(ptr: PointerKind) -> &'static str {
} }
impl Repr for InteriorKind { impl Repr for InteriorKind {
fn repr(&self, _tcx: &ty::ctxt) -> ~str { fn repr(&self, _tcx: &ty::ctxt) -> StrBuf {
match *self { match *self {
InteriorField(NamedField(fld)) => { InteriorField(NamedField(fld)) => {
token::get_name(fld).get().to_str() token::get_name(fld).get().to_str().to_strbuf()
} }
InteriorField(PositionalField(i)) => format!("\\#{:?}", i), InteriorField(PositionalField(i)) => format_strbuf!("\\#{:?}", i),
InteriorElement(_) => "[]".to_owned(), InteriorElement(_) => "[]".to_strbuf(),
} }
} }
} }

@ -45,7 +45,7 @@ pub type PublicItems = NodeSet;
/// Result of a checking operation - None => no errors were found. Some => an /// Result of a checking operation - None => no errors were found. Some => an
/// error and contains the span and message for reporting that error and /// error and contains the span and message for reporting that error and
/// optionally the same for a note about the error. /// optionally the same for a note about the error.
type CheckResult = Option<(Span, ~str, Option<(Span, ~str)>)>; type CheckResult = Option<(Span, StrBuf, Option<(Span, StrBuf)>)>;
//////////////////////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////////////////////
/// The parent visitor, used to determine what's the parent of what (node-wise) /// The parent visitor, used to determine what's the parent of what (node-wise)
@ -356,8 +356,8 @@ enum FieldName {
impl<'a> PrivacyVisitor<'a> { impl<'a> PrivacyVisitor<'a> {
// used when debugging // used when debugging
fn nodestr(&self, id: ast::NodeId) -> ~str { fn nodestr(&self, id: ast::NodeId) -> StrBuf {
self.tcx.map.node_to_str(id).to_owned() self.tcx.map.node_to_str(id).to_strbuf()
} }
// Determines whether the given definition is public from the point of view // Determines whether the given definition is public from the point of view
@ -511,9 +511,11 @@ impl<'a> PrivacyVisitor<'a> {
match result { match result {
None => true, None => true,
Some((span, msg, note)) => { Some((span, msg, note)) => {
self.tcx.sess.span_err(span, msg); self.tcx.sess.span_err(span, msg.as_slice());
match note { match note {
Some((span, msg)) => self.tcx.sess.span_note(span, msg), Some((span, msg)) => {
self.tcx.sess.span_note(span, msg.as_slice())
}
None => {}, None => {},
} }
false false
@ -528,7 +530,9 @@ impl<'a> PrivacyVisitor<'a> {
source_did: Option<ast::DefId>, msg: &str) -> CheckResult { source_did: Option<ast::DefId>, msg: &str) -> CheckResult {
let id = match self.def_privacy(to_check) { let id = match self.def_privacy(to_check) {
ExternallyDenied => { ExternallyDenied => {
return Some((span, format!("{} is private", msg), None)) return Some((span,
format_strbuf!("{} is private", msg),
None))
} }
Allowable => return None, Allowable => return None,
DisallowedBy(id) => id, DisallowedBy(id) => id,
@ -539,9 +543,11 @@ impl<'a> PrivacyVisitor<'a> {
// because the item itself is private or because its parent is private // because the item itself is private or because its parent is private
// and its parent isn't in our ancestry. // and its parent isn't in our ancestry.
let (err_span, err_msg) = if id == source_did.unwrap_or(to_check).node { let (err_span, err_msg) = if id == source_did.unwrap_or(to_check).node {
return Some((span, format!("{} is private", msg), None)); return Some((span,
format_strbuf!("{} is private", msg),
None));
} else { } else {
(span, format!("{} is inaccessible", msg)) (span, format_strbuf!("{} is inaccessible", msg))
}; };
let item = match self.tcx.map.find(id) { let item = match self.tcx.map.find(id) {
Some(ast_map::NodeItem(item)) => { Some(ast_map::NodeItem(item)) => {
@ -577,7 +583,8 @@ impl<'a> PrivacyVisitor<'a> {
ast::ItemEnum(..) => "enum", ast::ItemEnum(..) => "enum",
_ => return Some((err_span, err_msg, None)) _ => return Some((err_span, err_msg, None))
}; };
let msg = format!("{} `{}` is private", desc, let msg = format_strbuf!("{} `{}` is private",
desc,
token::get_ident(item.ident)); token::get_ident(item.ident));
Some((err_span, err_msg, Some((span, msg)))) Some((err_span, err_msg, Some((span, msg))))
} }
@ -1364,9 +1371,11 @@ impl<'a> Visitor<()> for VisiblePrivateTypesVisitor<'a> {
match t.node { match t.node {
ast::TyPath(ref p, _, path_id) => { ast::TyPath(ref p, _, path_id) => {
if self.path_is_private_type(path_id) { if self.path_is_private_type(path_id) {
self.tcx.sess.add_lint(lint::VisiblePrivateTypes, self.tcx.sess.add_lint(
lint::VisiblePrivateTypes,
path_id, p.span, path_id, p.span,
"private type in exported type signature".to_owned()); "private type in exported type \
signature".to_strbuf());
} }
} }
_ => {} _ => {}

@ -57,7 +57,7 @@ pub type TraitMap = NodeMap<Vec<DefId> >;
pub type ExportMap2 = RefCell<NodeMap<Vec<Export2> >>; pub type ExportMap2 = RefCell<NodeMap<Vec<Export2> >>;
pub struct Export2 { pub struct Export2 {
pub name: ~str, // The name of the target. pub name: StrBuf, // The name of the target.
pub def_id: DefId, // The definition of the target. pub def_id: DefId, // The definition of the target.
} }
@ -2046,7 +2046,7 @@ impl<'a> Resolver<'a> {
} }
} }
fn idents_to_str(&mut self, idents: &[Ident]) -> ~str { fn idents_to_str(&mut self, idents: &[Ident]) -> StrBuf {
let mut first = true; let mut first = true;
let mut result = StrBuf::new(); let mut result = StrBuf::new();
for ident in idents.iter() { for ident in idents.iter() {
@ -2057,10 +2057,10 @@ impl<'a> Resolver<'a> {
} }
result.push_str(token::get_ident(*ident).get()); result.push_str(token::get_ident(*ident).get());
}; };
result.into_owned() result
} }
fn path_idents_to_str(&mut self, path: &Path) -> ~str { fn path_idents_to_str(&mut self, path: &Path) -> StrBuf {
let identifiers: Vec<ast::Ident> = path.segments let identifiers: Vec<ast::Ident> = path.segments
.iter() .iter()
.map(|seg| seg.identifier) .map(|seg| seg.identifier)
@ -2070,25 +2070,26 @@ impl<'a> Resolver<'a> {
fn import_directive_subclass_to_str(&mut self, fn import_directive_subclass_to_str(&mut self,
subclass: ImportDirectiveSubclass) subclass: ImportDirectiveSubclass)
-> ~str { -> StrBuf {
match subclass { match subclass {
SingleImport(_, source) => { SingleImport(_, source) => {
token::get_ident(source).get().to_str() token::get_ident(source).get().to_strbuf()
} }
GlobImport => "*".to_owned() GlobImport => "*".to_strbuf()
} }
} }
fn import_path_to_str(&mut self, fn import_path_to_str(&mut self,
idents: &[Ident], idents: &[Ident],
subclass: ImportDirectiveSubclass) subclass: ImportDirectiveSubclass)
-> ~str { -> StrBuf {
if idents.is_empty() { if idents.is_empty() {
self.import_directive_subclass_to_str(subclass) self.import_directive_subclass_to_str(subclass)
} else { } else {
(format!("{}::{}", (format!("{}::{}",
self.idents_to_str(idents), self.idents_to_str(idents),
self.import_directive_subclass_to_str(subclass))) self.import_directive_subclass_to_str(
subclass))).to_strbuf()
} }
} }
@ -2219,8 +2220,11 @@ impl<'a> Resolver<'a> {
let lp = match lp { let lp = match lp {
LastMod(lp) => lp, LastMod(lp) => lp,
LastImport{..} => self.session.span_bug(directive.span, LastImport {..} => {
"Not expecting Import here, must be LastMod"), self.session
.span_bug(directive.span,
"not expecting Import here, must be LastMod")
}
}; };
// We need to resolve both namespaces for this to succeed. // We need to resolve both namespaces for this to succeed.
@ -2614,7 +2618,7 @@ impl<'a> Resolver<'a> {
Failed => { Failed => {
let segment_name = token::get_ident(name); let segment_name = token::get_ident(name);
let module_name = self.module_to_str(&*search_module); let module_name = self.module_to_str(&*search_module);
if "???" == module_name { if "???" == module_name.as_slice() {
let span = Span { let span = Span {
lo: span.lo, lo: span.lo,
hi: span.lo + Pos::from_uint(segment_name.get().len()), hi: span.lo + Pos::from_uint(segment_name.get().len()),
@ -2732,14 +2736,18 @@ impl<'a> Resolver<'a> {
match module_prefix_result { match module_prefix_result {
Failed => { Failed => {
let mpath = self.idents_to_str(module_path); let mpath = self.idents_to_str(module_path);
match mpath.rfind(':') { match mpath.as_slice().rfind(':') {
Some(idx) => { Some(idx) => {
self.resolve_error(span, format!("unresolved import: could not find `{}` \ self.resolve_error(span,
in `{}`", format!("unresolved import: could \
// idx +- 1 to account for the colons not find `{}` in `{}`",
// on either side // idx +- 1 to account for
mpath.slice_from(idx + 1), // the colons on either
mpath.slice_to(idx - 1))); // side
mpath.as_slice()
.slice_from(idx + 1),
mpath.as_slice()
.slice_to(idx - 1)));
}, },
None => (), None => (),
}; };
@ -3283,7 +3291,7 @@ impl<'a> Resolver<'a> {
debug!("(computing exports) YES: export '{}' => {:?}", debug!("(computing exports) YES: export '{}' => {:?}",
name, def_id_of_def(d)); name, def_id_of_def(d));
exports2.push(Export2 { exports2.push(Export2 {
name: name.get().to_str(), name: name.get().to_strbuf(),
def_id: def_id_of_def(d) def_id: def_id_of_def(d)
}); });
} }
@ -4557,10 +4565,11 @@ impl<'a> Resolver<'a> {
let def = self.resolve_module_relative_path(path, namespace); let def = self.resolve_module_relative_path(path, namespace);
match (def, unqualified_def) { match (def, unqualified_def) {
(Some((d, _)), Some((ud, _))) if d == ud => { (Some((d, _)), Some((ud, _))) if d == ud => {
self.session.add_lint(UnnecessaryQualification, self.session
.add_lint(UnnecessaryQualification,
id, id,
path.span, path.span,
"unnecessary qualification".to_owned()); "unnecessary qualification".to_strbuf());
} }
_ => () _ => ()
} }
@ -4875,7 +4884,7 @@ impl<'a> Resolver<'a> {
} }
fn find_best_match_for_name(&mut self, name: &str, max_distance: uint) fn find_best_match_for_name(&mut self, name: &str, max_distance: uint)
-> Option<~str> { -> Option<StrBuf> {
let this = &mut *self; let this = &mut *self;
let mut maybes: Vec<token::InternedString> = Vec::new(); let mut maybes: Vec<token::InternedString> = Vec::new();
@ -4907,7 +4916,7 @@ impl<'a> Resolver<'a> {
*values.get(smallest) <= max_distance && *values.get(smallest) <= max_distance &&
name != maybes.get(smallest).get() { name != maybes.get(smallest).get() {
Some(maybes.get(smallest).get().to_str()) Some(maybes.get(smallest).get().to_strbuf())
} else { } else {
None None
@ -4977,17 +4986,20 @@ impl<'a> Resolver<'a> {
_ => _ =>
// limit search to 5 to reduce the number // limit search to 5 to reduce the number
// of stupid suggestions // of stupid suggestions
match self.find_best_match_for_name(wrong_name, 5) { match self.find_best_match_for_name(
wrong_name.as_slice(),
5) {
Some(m) => { Some(m) => {
self.resolve_error(expr.span, self.resolve_error(expr.span,
format!("unresolved name `{}`. \ format!("unresolved name `{}`. \
Did you mean `{}`?", Did you mean `{}`?",
wrong_name, m)); wrong_name,
m));
} }
None => { None => {
self.resolve_error(expr.span, self.resolve_error(expr.span,
format!("unresolved name `{}`.", format!("unresolved name `{}`.",
wrong_name)); wrong_name.as_slice()));
} }
} }
} }
@ -5240,8 +5252,11 @@ impl<'a> Resolver<'a> {
ViewPathGlob(_, id) => { ViewPathGlob(_, id) => {
if !self.used_imports.contains(&(id, TypeNS)) && if !self.used_imports.contains(&(id, TypeNS)) &&
!self.used_imports.contains(&(id, ValueNS)) { !self.used_imports.contains(&(id, ValueNS)) {
self.session.add_lint(UnusedImports, id, p.span, self.session
"unused import".to_owned()); .add_lint(UnusedImports,
id,
p.span,
"unused import".to_strbuf());
} }
}, },
} }
@ -5257,19 +5272,27 @@ impl<'a> Resolver<'a> {
// public or private item, we will check the correct thing, dependent on how the import // public or private item, we will check the correct thing, dependent on how the import
// is used. // is used.
fn finalize_import(&mut self, id: NodeId, span: Span) { fn finalize_import(&mut self, id: NodeId, span: Span) {
debug!("finalizing import uses for {}", self.session.codemap().span_to_snippet(span)); debug!("finalizing import uses for {}",
self.session.codemap().span_to_snippet(span));
if !self.used_imports.contains(&(id, TypeNS)) && if !self.used_imports.contains(&(id, TypeNS)) &&
!self.used_imports.contains(&(id, ValueNS)) { !self.used_imports.contains(&(id, ValueNS)) {
self.session.add_lint(UnusedImports, id, span, "unused import".to_owned()); self.session.add_lint(UnusedImports,
id,
span,
"unused import".to_strbuf());
} }
let (v_priv, t_priv) = match self.last_private.find(&id) { let (v_priv, t_priv) = match self.last_private.find(&id) {
Some(&LastImport{value_priv: v, Some(&LastImport {
value_priv: v,
value_used: _, value_used: _,
type_priv: t, type_priv: t,
type_used: _}) => (v, t), type_used: _
Some(_) => fail!("We should only have LastImport for `use` directives"), }) => (v, t),
Some(_) => {
fail!("we should only have LastImport for `use` directives")
}
_ => return, _ => return,
}; };
@ -5306,7 +5329,7 @@ impl<'a> Resolver<'a> {
// //
/// A somewhat inefficient routine to obtain the name of a module. /// A somewhat inefficient routine to obtain the name of a module.
fn module_to_str(&mut self, module: &Module) -> ~str { fn module_to_str(&mut self, module: &Module) -> StrBuf {
let mut idents = Vec::new(); let mut idents = Vec::new();
fn collect_mod(idents: &mut Vec<ast::Ident>, module: &Module) { fn collect_mod(idents: &mut Vec<ast::Ident>, module: &Module) {
@ -5325,7 +5348,7 @@ impl<'a> Resolver<'a> {
collect_mod(&mut idents, module); collect_mod(&mut idents, module);
if idents.len() == 0 { if idents.len() == 0 {
return "???".to_owned(); return "???".to_strbuf();
} }
self.idents_to_str(idents.move_iter().rev() self.idents_to_str(idents.move_iter().rev()
.collect::<Vec<ast::Ident>>() .collect::<Vec<ast::Ident>>()
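idents_to_str and module_to_str above build the path text in a single StrBuf, joining segments with "::" and falling back to "???" for unnameable modules. A hedged sketch of the same joining logic over plain &str segments:

// Join path segments with "::" into one owned buffer, in the style of the
// patched idents_to_str; "???" stands in for a module with no usable name.
fn idents_to_str(idents: &[&str]) -> String {
    if idents.is_empty() {
        return "???".to_string();
    }
    let mut result = String::new();
    let mut first = true;
    for ident in idents {
        if first {
            first = false;
        } else {
            result.push_str("::");
        }
        result.push_str(ident);
    }
    result
}

fn main() {
    println!("{}", idents_to_str(&["std", "collections", "HashMap"]));
    // prints: std::collections::HashMap
}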

@ -400,12 +400,12 @@ struct Match<'a, 'b> {
} }
impl<'a, 'b> Repr for Match<'a, 'b> { impl<'a, 'b> Repr for Match<'a, 'b> {
fn repr(&self, tcx: &ty::ctxt) -> ~str { fn repr(&self, tcx: &ty::ctxt) -> StrBuf {
if tcx.sess.verbose() { if tcx.sess.verbose() {
// for many programs, this just takes too long to serialize // for many programs, this just takes too long to serialize
self.pats.repr(tcx) self.pats.repr(tcx)
} else { } else {
format!("{} pats", self.pats.len()) format_strbuf!("{} pats", self.pats.len())
} }
} }
} }
@ -1851,11 +1851,14 @@ fn create_bindings_map(bcx: &Block, pat: @ast::Pat) -> BindingsMap {
// but during matching we need to store a *T as explained // but during matching we need to store a *T as explained
// above // above
llmatch = alloca(bcx, llvariable_ty.ptr_to(), "__llmatch"); llmatch = alloca(bcx, llvariable_ty.ptr_to(), "__llmatch");
trmode = TrByValue(alloca(bcx, llvariable_ty, trmode = TrByValue(alloca(bcx,
bcx.ident(ident))); llvariable_ty,
bcx.ident(ident).as_slice()));
} }
ast::BindByRef(_) => { ast::BindByRef(_) => {
llmatch = alloca(bcx, llvariable_ty, bcx.ident(ident)); llmatch = alloca(bcx,
llvariable_ty,
bcx.ident(ident).as_slice());
trmode = TrByRef; trmode = TrByRef;
} }
}; };
@ -2103,7 +2106,7 @@ fn mk_binding_alloca<'a,A>(bcx: &'a Block<'a>,
let ident = ast_util::path_to_ident(path); let ident = ast_util::path_to_ident(path);
// Allocate memory on stack for the binding. // Allocate memory on stack for the binding.
let llval = alloc_ty(bcx, var_ty, bcx.ident(ident)); let llval = alloc_ty(bcx, var_ty, bcx.ident(ident).as_slice());
// Subtle: be sure that we *populate* the memory *before* // Subtle: be sure that we *populate* the memory *before*
// we schedule the cleanup. // we schedule the cleanup.

@ -65,15 +65,13 @@ pub fn trans_inline_asm<'a>(bcx: &'a Block<'a>, ia: &ast::InlineAsm)
let mut constraints = let mut constraints =
StrBuf::from_str(constraints.iter() StrBuf::from_str(constraints.iter()
.map(|s| s.get().to_str()) .map(|s| s.get().to_strbuf())
.collect::<Vec<~str>>() .collect::<Vec<StrBuf>>()
.connect(",")); .connect(","));
let mut clobbers = StrBuf::from_str(getClobbers()); let mut clobbers = getClobbers();
if !ia.clobbers.get().is_empty() && !clobbers.is_empty() { if !ia.clobbers.get().is_empty() && !clobbers.is_empty() {
clobbers = StrBuf::from_owned_str(format!("{},{}", clobbers = format_strbuf!("{},{}", ia.clobbers.get(), clobbers);
ia.clobbers.get(),
clobbers));
} else { } else {
clobbers.push_str(ia.clobbers.get()); clobbers.push_str(ia.clobbers.get());
} }
@ -136,12 +134,12 @@ pub fn trans_inline_asm<'a>(bcx: &'a Block<'a>, ia: &ast::InlineAsm)
#[cfg(target_arch = "arm")] #[cfg(target_arch = "arm")]
#[cfg(target_arch = "mips")] #[cfg(target_arch = "mips")]
fn getClobbers() -> ~str { fn getClobbers() -> StrBuf {
"".to_owned() "".to_strbuf()
} }
#[cfg(target_arch = "x86")] #[cfg(target_arch = "x86")]
#[cfg(target_arch = "x86_64")] #[cfg(target_arch = "x86_64")]
fn getClobbers() -> ~str { fn getClobbers() -> StrBuf {
"~{dirflag},~{fpsr},~{flags}".to_owned() "~{dirflag},~{fpsr},~{flags}".to_strbuf()
} }
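getClobbers above selects its clobber list per target architecture via cfg attributes and now hands back a StrBuf. The same cfg-gated shape with String; the clobber strings are the ones visible in the hunk, and the extra fallback arm is only there so the sketch builds on any host:

// Architecture-dependent clobber list, modelled on the patched getClobbers.
#[cfg(any(target_arch = "arm", target_arch = "mips"))]
fn get_clobbers() -> String {
    String::new()
}

#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
fn get_clobbers() -> String {
    "~{dirflag},~{fpsr},~{flags}".to_string()
}

// Fallback for other architectures so the example always compiles.
#[cfg(not(any(target_arch = "arm", target_arch = "mips",
              target_arch = "x86", target_arch = "x86_64")))]
fn get_clobbers() -> String {
    String::new()
}

fn main() {
    println!("clobbers: {:?}", get_clobbers());
}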

@ -124,13 +124,13 @@ pub fn push_ctxt(s: &'static str) -> _InsnCtxt {
pub struct StatRecorder<'a> { pub struct StatRecorder<'a> {
ccx: &'a CrateContext, ccx: &'a CrateContext,
name: Option<~str>, name: Option<StrBuf>,
start: u64, start: u64,
istart: uint, istart: uint,
} }
impl<'a> StatRecorder<'a> { impl<'a> StatRecorder<'a> {
pub fn new(ccx: &'a CrateContext, name: ~str) -> StatRecorder<'a> { pub fn new(ccx: &'a CrateContext, name: StrBuf) -> StatRecorder<'a> {
let start = if ccx.sess().trans_stats() { let start = if ccx.sess().trans_stats() {
time::precise_time_ns() time::precise_time_ns()
} else { } else {
@ -206,15 +206,19 @@ pub fn decl_cdecl_fn(llmod: ModuleRef,
} }
// only use this for foreign function ABIs and glue, use `get_extern_rust_fn` for Rust functions // only use this for foreign function ABIs and glue, use `get_extern_rust_fn` for Rust functions
pub fn get_extern_fn(externs: &mut ExternMap, llmod: ModuleRef, pub fn get_extern_fn(externs: &mut ExternMap,
name: &str, cc: lib::llvm::CallConv, llmod: ModuleRef,
ty: Type, output: ty::t) -> ValueRef { name: &str,
cc: lib::llvm::CallConv,
ty: Type,
output: ty::t)
-> ValueRef {
match externs.find_equiv(&name) { match externs.find_equiv(&name) {
Some(n) => return *n, Some(n) => return *n,
None => {} None => {}
} }
let f = decl_fn(llmod, name, cc, ty, output); let f = decl_fn(llmod, name, cc, ty, output);
externs.insert(name.to_owned(), f); externs.insert(name.to_strbuf(), f);
f f
} }
@ -231,7 +235,7 @@ fn get_extern_rust_fn(ccx: &CrateContext, inputs: &[ty::t], output: ty::t,
.collect::<Vec<_>>().as_slice(), f) .collect::<Vec<_>>().as_slice(), f)
}); });
ccx.externs.borrow_mut().insert(name.to_owned(), f); ccx.externs.borrow_mut().insert(name.to_strbuf(), f);
f f
} }
@ -315,7 +319,7 @@ pub fn get_extern_const(externs: &mut ExternMap, llmod: ModuleRef,
let c = name.with_c_str(|buf| { let c = name.with_c_str(|buf| {
llvm::LLVMAddGlobal(llmod, ty.to_ref(), buf) llvm::LLVMAddGlobal(llmod, ty.to_ref(), buf)
}); });
externs.insert(name.to_owned(), c); externs.insert(name.to_strbuf(), c);
return c; return c;
} }
} }
@ -469,9 +473,9 @@ pub fn unset_split_stack(f: ValueRef) {
// Double-check that we never ask LLVM to declare the same symbol twice. It // Double-check that we never ask LLVM to declare the same symbol twice. It
// silently mangles such symbols, breaking our linkage model. // silently mangles such symbols, breaking our linkage model.
pub fn note_unique_llvm_symbol(ccx: &CrateContext, sym: ~str) { pub fn note_unique_llvm_symbol(ccx: &CrateContext, sym: StrBuf) {
if ccx.all_llvm_symbols.borrow().contains(&sym) { if ccx.all_llvm_symbols.borrow().contains(&sym) {
ccx.sess().bug("duplicate LLVM symbol: ".to_owned() + sym); ccx.sess().bug(format!("duplicate LLVM symbol: {}", sym));
} }
ccx.all_llvm_symbols.borrow_mut().insert(sym); ccx.all_llvm_symbols.borrow_mut().insert(sym);
} }
@ -505,8 +509,12 @@ pub fn get_res_dtor(ccx: &CrateContext,
ty::lookup_item_type(tcx, parent_id).ty); ty::lookup_item_type(tcx, parent_id).ty);
let llty = type_of_dtor(ccx, class_ty); let llty = type_of_dtor(ccx, class_ty);
get_extern_fn(&mut *ccx.externs.borrow_mut(), ccx.llmod, name, get_extern_fn(&mut *ccx.externs.borrow_mut(),
lib::llvm::CCallConv, llty, ty::mk_nil()) ccx.llmod,
name.as_slice(),
lib::llvm::CCallConv,
llty,
ty::mk_nil())
} }
} }
@ -829,8 +837,8 @@ pub fn fail_if_zero<'a>(
ICmp(cx, lib::llvm::IntEQ, rhs, zero) ICmp(cx, lib::llvm::IntEQ, rhs, zero)
} }
_ => { _ => {
cx.sess().bug("fail-if-zero on unexpected type: ".to_owned() + cx.sess().bug(format!("fail-if-zero on unexpected type: {}",
ty_to_str(cx.tcx(), rhs_t)); ty_to_str(cx.tcx(), rhs_t)));
} }
}; };
with_cond(cx, is_zero, |bcx| { with_cond(cx, is_zero, |bcx| {
@ -848,15 +856,19 @@ pub fn trans_external_path(ccx: &CrateContext, did: ast::DefId, t: ty::t) -> Val
get_extern_rust_fn(ccx, get_extern_rust_fn(ccx,
fn_ty.sig.inputs.as_slice(), fn_ty.sig.inputs.as_slice(),
fn_ty.sig.output, fn_ty.sig.output,
name, name.as_slice(),
did) did)
} }
Some(..) | None => { Some(..) | None => {
let c = foreign::llvm_calling_convention(ccx, fn_ty.abi); let c = foreign::llvm_calling_convention(ccx, fn_ty.abi);
let cconv = c.unwrap_or(lib::llvm::CCallConv); let cconv = c.unwrap_or(lib::llvm::CCallConv);
let llty = type_of_fn_from_ty(ccx, t); let llty = type_of_fn_from_ty(ccx, t);
get_extern_fn(&mut *ccx.externs.borrow_mut(), ccx.llmod, get_extern_fn(&mut *ccx.externs.borrow_mut(),
name, cconv, llty, fn_ty.sig.output) ccx.llmod,
name.as_slice(),
cconv,
llty,
fn_ty.sig.output)
} }
} }
} }
@ -864,12 +876,14 @@ pub fn trans_external_path(ccx: &CrateContext, did: ast::DefId, t: ty::t) -> Val
get_extern_rust_fn(ccx, get_extern_rust_fn(ccx,
f.sig.inputs.as_slice(), f.sig.inputs.as_slice(),
f.sig.output, f.sig.output,
name, name.as_slice(),
did) did)
} }
_ => { _ => {
let llty = type_of(ccx, t); let llty = type_of(ccx, t);
get_extern_const(&mut *ccx.externs.borrow_mut(), ccx.llmod, name, get_extern_const(&mut *ccx.externs.borrow_mut(),
ccx.llmod,
name.as_slice(),
llty) llty)
} }
} }
@ -1443,7 +1457,7 @@ pub fn trans_fn(ccx: &CrateContext,
param_substs: Option<&param_substs>, param_substs: Option<&param_substs>,
id: ast::NodeId, id: ast::NodeId,
attrs: &[ast::Attribute]) { attrs: &[ast::Attribute]) {
let _s = StatRecorder::new(ccx, ccx.tcx.map.path_to_str(id).to_owned()); let _s = StatRecorder::new(ccx, ccx.tcx.map.path_to_str(id).to_strbuf());
debug!("trans_fn(param_substs={})", param_substs.map(|s| s.repr(ccx.tcx()))); debug!("trans_fn(param_substs={})", param_substs.map(|s| s.repr(ccx.tcx())));
let _icx = push_ctxt("trans_fn"); let _icx = push_ctxt("trans_fn");
let output_type = ty::ty_fn_ret(ty::node_id_to_type(ccx.tcx(), id)); let output_type = ty::ty_fn_ret(ty::node_id_to_type(ccx.tcx(), id));
@ -1661,7 +1675,7 @@ pub fn trans_mod(ccx: &CrateContext, m: &ast::Mod) {
} }
} }
fn finish_register_fn(ccx: &CrateContext, sp: Span, sym: ~str, node_id: ast::NodeId, fn finish_register_fn(ccx: &CrateContext, sp: Span, sym: StrBuf, node_id: ast::NodeId,
llfn: ValueRef) { llfn: ValueRef) {
ccx.item_symbols.borrow_mut().insert(node_id, sym); ccx.item_symbols.borrow_mut().insert(node_id, sym);
@ -1676,7 +1690,7 @@ fn finish_register_fn(ccx: &CrateContext, sp: Span, sym: ~str, node_id: ast::Nod
fn register_fn(ccx: &CrateContext, fn register_fn(ccx: &CrateContext,
sp: Span, sp: Span,
sym: ~str, sym: StrBuf,
node_id: ast::NodeId, node_id: ast::NodeId,
node_type: ty::t) node_type: ty::t)
-> ValueRef { -> ValueRef {
@ -1692,7 +1706,7 @@ fn register_fn(ccx: &CrateContext,
false, false,
f.sig.inputs.as_slice(), f.sig.inputs.as_slice(),
f.sig.output, f.sig.output,
sym); sym.as_slice());
finish_register_fn(ccx, sp, sym, node_id, llfn); finish_register_fn(ccx, sp, sym, node_id, llfn);
llfn llfn
} }
@ -1700,14 +1714,14 @@ fn register_fn(ccx: &CrateContext,
// only use this for foreign function ABIs and glue, use `register_fn` for Rust functions // only use this for foreign function ABIs and glue, use `register_fn` for Rust functions
pub fn register_fn_llvmty(ccx: &CrateContext, pub fn register_fn_llvmty(ccx: &CrateContext,
sp: Span, sp: Span,
sym: ~str, sym: StrBuf,
node_id: ast::NodeId, node_id: ast::NodeId,
cc: lib::llvm::CallConv, cc: lib::llvm::CallConv,
fn_ty: Type, fn_ty: Type,
output: ty::t) -> ValueRef { output: ty::t) -> ValueRef {
debug!("register_fn_llvmty id={} sym={}", node_id, sym); debug!("register_fn_llvmty id={} sym={}", node_id, sym);
let llfn = decl_fn(ccx.llmod, sym, cc, fn_ty, output); let llfn = decl_fn(ccx.llmod, sym.as_slice(), cc, fn_ty, output);
finish_register_fn(ccx, sp, sym, node_id, llfn); finish_register_fn(ccx, sp, sym, node_id, llfn);
llfn llfn
} }
@ -1752,7 +1766,7 @@ pub fn create_entry_wrapper(ccx: &CrateContext,
let (start_fn, args) = if use_start_lang_item { let (start_fn, args) = if use_start_lang_item {
let start_def_id = match ccx.tcx.lang_items.require(StartFnLangItem) { let start_def_id = match ccx.tcx.lang_items.require(StartFnLangItem) {
Ok(id) => id, Ok(id) => id,
Err(s) => { ccx.sess().fatal(s); } Err(s) => { ccx.sess().fatal(s.as_slice()); }
}; };
let start_fn = if start_def_id.krate == ast::LOCAL_CRATE { let start_fn = if start_def_id.krate == ast::LOCAL_CRATE {
get_item_val(ccx, start_def_id.node) get_item_val(ccx, start_def_id.node)
@ -1796,15 +1810,15 @@ pub fn create_entry_wrapper(ccx: &CrateContext,
} }
fn exported_name(ccx: &CrateContext, id: ast::NodeId, fn exported_name(ccx: &CrateContext, id: ast::NodeId,
ty: ty::t, attrs: &[ast::Attribute]) -> ~str { ty: ty::t, attrs: &[ast::Attribute]) -> StrBuf {
match attr::first_attr_value_str_by_name(attrs, "export_name") { match attr::first_attr_value_str_by_name(attrs, "export_name") {
// Use provided name // Use provided name
Some(name) => name.get().to_owned(), Some(name) => name.get().to_strbuf(),
_ => ccx.tcx.map.with_path(id, |mut path| { _ => ccx.tcx.map.with_path(id, |mut path| {
if attr::contains_name(attrs, "no_mangle") { if attr::contains_name(attrs, "no_mangle") {
// Don't mangle // Don't mangle
path.last().unwrap().to_str() path.last().unwrap().to_str().to_strbuf()
} else { } else {
// Usual name mangling // Usual name mangling
mangle_exported_name(ccx, path, ty, id) mangle_exported_name(ccx, path, ty, id)
@ -1854,7 +1868,7 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef {
unsafe { unsafe {
let llty = llvm::LLVMTypeOf(v); let llty = llvm::LLVMTypeOf(v);
let g = sym.with_c_str(|buf| { let g = sym.as_slice().with_c_str(|buf| {
llvm::LLVMAddGlobal(ccx.llmod, llty, buf) llvm::LLVMAddGlobal(ccx.llmod, llty, buf)
}); });
@ -2096,7 +2110,12 @@ pub fn write_metadata(cx: &CrateContext, krate: &ast::Crate) -> Vec<u8> {
}); });
unsafe { unsafe {
llvm::LLVMSetInitializer(llglobal, llconst); llvm::LLVMSetInitializer(llglobal, llconst);
cx.sess().targ_cfg.target_strs.meta_sect_name.with_c_str(|buf| { cx.sess()
.targ_cfg
.target_strs
.meta_sect_name
.as_slice()
.with_c_str(|buf| {
llvm::LLVMSetSection(llglobal, buf) llvm::LLVMSetSection(llglobal, buf)
}); });
} }
@ -2126,7 +2145,8 @@ pub fn trans_crate(krate: ast::Crate,
} }
} }
let link_meta = link::build_link_meta(&krate, output.out_filestem); let link_meta = link::build_link_meta(&krate,
output.out_filestem.as_slice());
// Append ".rs" to crate name as LLVM module identifier. // Append ".rs" to crate name as LLVM module identifier.
// //
@ -2186,8 +2206,8 @@ pub fn trans_crate(krate: ast::Crate,
let link_meta = ccx.link_meta.clone(); let link_meta = ccx.link_meta.clone();
let llmod = ccx.llmod; let llmod = ccx.llmod;
let mut reachable: Vec<~str> = ccx.reachable.iter().filter_map(|id| { let mut reachable: Vec<StrBuf> = ccx.reachable.iter().filter_map(|id| {
ccx.item_symbols.borrow().find(id).map(|s| s.to_owned()) ccx.item_symbols.borrow().find(id).map(|s| s.to_strbuf())
}).collect(); }).collect();
// Make sure that some other crucial symbols are not eliminated from the // Make sure that some other crucial symbols are not eliminated from the
@ -2196,12 +2216,13 @@ pub fn trans_crate(krate: ast::Crate,
// symbol. This symbol is required for use by the libmorestack library that // symbol. This symbol is required for use by the libmorestack library that
// we link in, so we must ensure that this symbol is not internalized (if // we link in, so we must ensure that this symbol is not internalized (if
// defined in the crate). // defined in the crate).
reachable.push("main".to_owned()); reachable.push("main".to_strbuf());
reachable.push("rust_stack_exhausted".to_owned()); reachable.push("rust_stack_exhausted".to_strbuf());
// referenced from .eh_frame section on some platforms // referenced from .eh_frame section on some platforms
reachable.push("rust_eh_personality".to_owned()); reachable.push("rust_eh_personality".to_strbuf());
reachable.push("rust_eh_personality_catch".to_owned()); // referenced from rt/rust_try.ll // referenced from rt/rust_try.ll
reachable.push("rust_eh_personality_catch".to_strbuf());
let metadata_module = ccx.metadata_llmod; let metadata_module = ccx.metadata_llmod;
let formats = ccx.tcx.dependency_formats.borrow().clone(); let formats = ccx.tcx.dependency_formats.borrow().clone();

@ -122,7 +122,7 @@ pub fn Invoke(cx: &Block,
terminate(cx, "Invoke"); terminate(cx, "Invoke");
debug!("Invoke({} with arguments ({}))", debug!("Invoke({} with arguments ({}))",
cx.val_to_str(fn_), cx.val_to_str(fn_),
args.iter().map(|a| cx.val_to_str(*a)).collect::<Vec<~str>>().connect(", ")); args.iter().map(|a| cx.val_to_str(*a)).collect::<Vec<StrBuf>>().connect(", "));
B(cx).invoke(fn_, args, then, catch, attributes) B(cx).invoke(fn_, args, then, catch, attributes)
} }

@ -81,8 +81,7 @@ impl<'a> Builder<'a> {
s.push_char('/'); s.push_char('/');
s.push_str(category); s.push_str(category);
let s = s.into_owned(); let n = match h.find(&s) {
let n = match h.find_equiv(&s) {
Some(&n) => n, Some(&n) => n,
_ => 0u _ => 0u
}; };
@ -805,7 +804,7 @@ impl<'a> Builder<'a> {
self.ccx.tn.val_to_str(llfn), self.ccx.tn.val_to_str(llfn),
args.iter() args.iter()
.map(|&v| self.ccx.tn.val_to_str(v)) .map(|&v| self.ccx.tn.val_to_str(v))
.collect::<Vec<~str>>() .collect::<Vec<StrBuf>>()
.connect(", ")); .connect(", "));
unsafe { unsafe {

@ -599,7 +599,9 @@ impl<'a> CleanupHelperMethods<'a> for FunctionContext<'a> {
{ {
let name = scope.block_name("clean"); let name = scope.block_name("clean");
debug!("generating cleanups for {}", name); debug!("generating cleanups for {}", name);
let bcx_in = self.new_block(label.is_unwind(), name, None); let bcx_in = self.new_block(label.is_unwind(),
name.as_slice(),
None);
let mut bcx_out = bcx_in; let mut bcx_out = bcx_in;
for cleanup in scope.cleanups.iter().rev() { for cleanup in scope.cleanups.iter().rev() {
if cleanup_is_suitable_for(*cleanup, label) { if cleanup_is_suitable_for(*cleanup, label) {
@ -649,7 +651,7 @@ impl<'a> CleanupHelperMethods<'a> for FunctionContext<'a> {
Some(llbb) => { return llbb; } Some(llbb) => { return llbb; }
None => { None => {
let name = last_scope.block_name("unwind"); let name = last_scope.block_name("unwind");
pad_bcx = self.new_block(true, name, None); pad_bcx = self.new_block(true, name.as_slice(), None);
last_scope.cached_landing_pad = Some(pad_bcx.llbb); last_scope.cached_landing_pad = Some(pad_bcx.llbb);
} }
} }
@ -731,16 +733,16 @@ impl<'a> CleanupScope<'a> {
self.cleanups.iter().any(|c| c.clean_on_unwind()) self.cleanups.iter().any(|c| c.clean_on_unwind())
} }
fn block_name(&self, prefix: &str) -> ~str { fn block_name(&self, prefix: &str) -> StrBuf {
/*! /*!
* Returns a suitable name to use for the basic block that * Returns a suitable name to use for the basic block that
* handles this cleanup scope * handles this cleanup scope
*/ */
match self.kind { match self.kind {
CustomScopeKind => format!("{}_custom_", prefix), CustomScopeKind => format_strbuf!("{}_custom_", prefix),
AstScopeKind(id) => format!("{}_ast_{}_", prefix, id), AstScopeKind(id) => format_strbuf!("{}_ast_{}_", prefix, id),
LoopScopeKind(id, _) => format!("{}_loop_{}_", prefix, id), LoopScopeKind(id, _) => format_strbuf!("{}_loop_{}_", prefix, id),
} }
} }
} }
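block_name above formats a label for the cleanup block, one arm per scope kind, now as a StrBuf. A small stand-in in current Rust; ScopeKind is a simplified, hypothetical version of the cleanup scope kinds:

// Simplified scope kinds echoing CustomScopeKind/AstScopeKind/LoopScopeKind.
enum ScopeKind {
    Custom,
    Ast(u32),
    Loop(u32),
}

impl ScopeKind {
    // Produce a label such as "clean_ast_42_" for the block that handles
    // this cleanup scope.
    fn block_name(&self, prefix: &str) -> String {
        match self {
            ScopeKind::Custom => format!("{}_custom_", prefix),
            ScopeKind::Ast(id) => format!("{}_ast_{}_", prefix, id),
            ScopeKind::Loop(id) => format!("{}_loop_{}_", prefix, id),
        }
    }
}

fn main() {
    println!("{}", ScopeKind::Ast(42).block_name("clean")); // clean_ast_42_
}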

@ -104,8 +104,8 @@ pub struct EnvValue {
} }
impl EnvValue { impl EnvValue {
pub fn to_str(&self, ccx: &CrateContext) -> ~str { pub fn to_str(&self, ccx: &CrateContext) -> StrBuf {
format!("{}({})", self.action, self.datum.to_str(ccx)) format_strbuf!("{}({})", self.action, self.datum.to_str(ccx))
} }
} }
@ -354,7 +354,7 @@ pub fn trans_expr_fn<'a>(
true, true,
f.sig.inputs.as_slice(), f.sig.inputs.as_slice(),
f.sig.output, f.sig.output,
s); s.as_slice());
// set an inline hint for all closures // set an inline hint for all closures
set_inline_hint(llfn); set_inline_hint(llfn);
@ -417,9 +417,13 @@ pub fn get_wrapper_for_bare_fn(ccx: &CrateContext,
true, true,
f.sig.inputs.as_slice(), f.sig.inputs.as_slice(),
f.sig.output, f.sig.output,
name) name.as_slice())
} else { } else {
decl_rust_fn(ccx, true, f.sig.inputs.as_slice(), f.sig.output, name) decl_rust_fn(ccx,
true,
f.sig.inputs.as_slice(),
f.sig.output,
name.as_slice())
}; };
ccx.closure_bare_wrapper_cache.borrow_mut().insert(fn_ptr, llfn); ccx.closure_bare_wrapper_cache.borrow_mut().insert(fn_ptr, llfn);

@ -173,7 +173,7 @@ pub fn BuilderRef_res(b: BuilderRef) -> BuilderRef_res {
} }
} }
pub type ExternMap = HashMap<~str, ValueRef>; pub type ExternMap = HashMap<StrBuf, ValueRef>;
// Here `self_ty` is the real type of the self parameter to this method. It // Here `self_ty` is the real type of the self parameter to this method. It
// will only be set in the case of default methods. // will only be set in the case of default methods.
@ -194,12 +194,12 @@ impl param_substs {
} }
} }
fn param_substs_to_str(this: &param_substs, tcx: &ty::ctxt) -> ~str { fn param_substs_to_str(this: &param_substs, tcx: &ty::ctxt) -> StrBuf {
format!("param_substs({})", this.substs.repr(tcx)) format_strbuf!("param_substs({})", this.substs.repr(tcx))
} }
impl Repr for param_substs { impl Repr for param_substs {
fn repr(&self, tcx: &ty::ctxt) -> ~str { fn repr(&self, tcx: &ty::ctxt) -> StrBuf {
param_substs_to_str(self, tcx) param_substs_to_str(self, tcx)
} }
} }
@ -442,15 +442,15 @@ impl<'a> Block<'a> {
} }
pub fn sess(&self) -> &'a Session { self.fcx.ccx.sess() } pub fn sess(&self) -> &'a Session { self.fcx.ccx.sess() }
pub fn ident(&self, ident: Ident) -> ~str { pub fn ident(&self, ident: Ident) -> StrBuf {
token::get_ident(ident).get().to_str() token::get_ident(ident).get().to_strbuf()
} }
pub fn node_id_to_str(&self, id: ast::NodeId) -> ~str { pub fn node_id_to_str(&self, id: ast::NodeId) -> StrBuf {
self.tcx().map.node_to_str(id).to_owned() self.tcx().map.node_to_str(id).to_strbuf()
} }
pub fn expr_to_str(&self, e: &ast::Expr) -> ~str { pub fn expr_to_str(&self, e: &ast::Expr) -> StrBuf {
e.repr(self.tcx()) e.repr(self.tcx())
} }
@ -464,21 +464,21 @@ impl<'a> Block<'a> {
} }
} }
pub fn val_to_str(&self, val: ValueRef) -> ~str { pub fn val_to_str(&self, val: ValueRef) -> StrBuf {
self.ccx().tn.val_to_str(val) self.ccx().tn.val_to_str(val)
} }
pub fn llty_str(&self, ty: Type) -> ~str { pub fn llty_str(&self, ty: Type) -> StrBuf {
self.ccx().tn.type_to_str(ty) self.ccx().tn.type_to_str(ty)
} }
pub fn ty_to_str(&self, t: ty::t) -> ~str { pub fn ty_to_str(&self, t: ty::t) -> StrBuf {
t.repr(self.tcx()) t.repr(self.tcx())
} }
pub fn to_str(&self) -> ~str { pub fn to_str(&self) -> StrBuf {
let blk: *Block = self; let blk: *Block = self;
format!("[block {}]", blk) format_strbuf!("[block {}]", blk)
} }
} }

View file

@ -45,9 +45,9 @@ pub struct Stats {
pub n_inlines: Cell<uint>, pub n_inlines: Cell<uint>,
pub n_closures: Cell<uint>, pub n_closures: Cell<uint>,
pub n_llvm_insns: Cell<uint>, pub n_llvm_insns: Cell<uint>,
pub llvm_insns: RefCell<HashMap<~str, uint>>, pub llvm_insns: RefCell<HashMap<StrBuf, uint>>,
// (ident, time-in-ms, llvm-instructions) // (ident, time-in-ms, llvm-instructions)
pub fn_stats: RefCell<Vec<(~str, uint, uint)> >, pub fn_stats: RefCell<Vec<(StrBuf, uint, uint)> >,
} }
pub struct CrateContext { pub struct CrateContext {
@ -60,7 +60,7 @@ pub struct CrateContext {
pub item_vals: RefCell<NodeMap<ValueRef>>, pub item_vals: RefCell<NodeMap<ValueRef>>,
pub exp_map2: resolve::ExportMap2, pub exp_map2: resolve::ExportMap2,
pub reachable: NodeSet, pub reachable: NodeSet,
pub item_symbols: RefCell<NodeMap<~str>>, pub item_symbols: RefCell<NodeMap<StrBuf>>,
pub link_meta: LinkMeta, pub link_meta: LinkMeta,
pub drop_glues: RefCell<HashMap<ty::t, ValueRef>>, pub drop_glues: RefCell<HashMap<ty::t, ValueRef>>,
pub tydescs: RefCell<HashMap<ty::t, Rc<tydesc_info>>>, pub tydescs: RefCell<HashMap<ty::t, Rc<tydesc_info>>>,
@ -109,8 +109,8 @@ pub struct CrateContext {
pub llsizingtypes: RefCell<HashMap<ty::t, Type>>, pub llsizingtypes: RefCell<HashMap<ty::t, Type>>,
pub adt_reprs: RefCell<HashMap<ty::t, Rc<adt::Repr>>>, pub adt_reprs: RefCell<HashMap<ty::t, Rc<adt::Repr>>>,
pub symbol_hasher: RefCell<Sha256>, pub symbol_hasher: RefCell<Sha256>,
pub type_hashcodes: RefCell<HashMap<ty::t, ~str>>, pub type_hashcodes: RefCell<HashMap<ty::t, StrBuf>>,
pub all_llvm_symbols: RefCell<HashSet<~str>>, pub all_llvm_symbols: RefCell<HashSet<StrBuf>>,
pub tcx: ty::ctxt, pub tcx: ty::ctxt,
pub stats: Stats, pub stats: Stats,
pub int_type: Type, pub int_type: Type,
@ -141,16 +141,30 @@ impl CrateContext {
let metadata_llmod = format!("{}_metadata", name).with_c_str(|buf| { let metadata_llmod = format!("{}_metadata", name).with_c_str(|buf| {
llvm::LLVMModuleCreateWithNameInContext(buf, llcx) llvm::LLVMModuleCreateWithNameInContext(buf, llcx)
}); });
tcx.sess.targ_cfg.target_strs.data_layout.with_c_str(|buf| { tcx.sess
.targ_cfg
.target_strs
.data_layout
.as_slice()
.with_c_str(|buf| {
llvm::LLVMSetDataLayout(llmod, buf); llvm::LLVMSetDataLayout(llmod, buf);
llvm::LLVMSetDataLayout(metadata_llmod, buf); llvm::LLVMSetDataLayout(metadata_llmod, buf);
}); });
tcx.sess.targ_cfg.target_strs.target_triple.with_c_str(|buf| { tcx.sess
.targ_cfg
.target_strs
.target_triple
.as_slice()
.with_c_str(|buf| {
llvm::LLVMRustSetNormalizedTarget(llmod, buf); llvm::LLVMRustSetNormalizedTarget(llmod, buf);
llvm::LLVMRustSetNormalizedTarget(metadata_llmod, buf); llvm::LLVMRustSetNormalizedTarget(metadata_llmod, buf);
}); });
let td = mk_target_data(tcx.sess.targ_cfg.target_strs.data_layout); let td = mk_target_data(tcx.sess
.targ_cfg
.target_strs
.data_layout
.as_slice());
let dbg_cx = if tcx.sess.opts.debuginfo != NoDebugInfo { let dbg_cx = if tcx.sess.opts.debuginfo != NoDebugInfo {
Some(debuginfo::CrateDebugContext::new(llmod)) Some(debuginfo::CrateDebugContext::new(llmod))
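
Because CrateContext and the session's target_strs now hold owned StrBuf values, the hunks above add an explicit as_slice() step before every with_c_str hand-off to the LLVM bindings. A hedged, standalone sketch of that storage-versus-borrowing split follows; TargetInfo, takes_slice, and use_target_info are illustrative names, not items from the patch.

    // Owned StrBuf fields live in the long-lived struct; call sites that
    // still expect a slice borrow a &str view, mirroring the added
    // .as_slice() calls before with_c_str above.
    struct TargetInfo {
        target_triple: StrBuf,
        data_layout: StrBuf,
    }

    fn takes_slice(s: &str) -> uint {
        s.len()
    }

    fn use_target_info(info: &TargetInfo) -> uint {
        takes_slice(info.target_triple.as_slice()) +
            takes_slice(info.data_layout.as_slice())
    }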

View file

@ -40,7 +40,7 @@ pub fn trans_stmt<'a>(cx: &'a Block<'a>,
debug!("trans_stmt({})", s.repr(cx.tcx())); debug!("trans_stmt({})", s.repr(cx.tcx()));
if cx.sess().asm_comments() { if cx.sess().asm_comments() {
add_span_comment(cx, s.span, s.repr(cx.tcx())); add_span_comment(cx, s.span, s.repr(cx.tcx()).as_slice());
} }
let mut bcx = cx; let mut bcx = cx;

View file

@ -624,8 +624,8 @@ impl<K:KindOps> Datum<K> {
} }
#[allow(dead_code)] // useful for debugging #[allow(dead_code)] // useful for debugging
pub fn to_str(&self, ccx: &CrateContext) -> ~str { pub fn to_str(&self, ccx: &CrateContext) -> StrBuf {
format!("Datum({}, {}, {:?})", format_strbuf!("Datum({}, {}, {:?})",
ccx.tn.val_to_str(self.val), ccx.tn.val_to_str(self.val),
ty_to_str(ccx.tcx(), self.ty), ty_to_str(ccx.tcx(), self.ty),
self.kind) self.kind)

View file

@ -178,7 +178,7 @@ pub struct CrateDebugContext {
llcontext: ContextRef, llcontext: ContextRef,
builder: DIBuilderRef, builder: DIBuilderRef,
current_debug_location: Cell<DebugLocation>, current_debug_location: Cell<DebugLocation>,
created_files: RefCell<HashMap<~str, DIFile>>, created_files: RefCell<HashMap<StrBuf, DIFile>>,
created_types: RefCell<HashMap<uint, DIType>>, created_types: RefCell<HashMap<uint, DIType>>,
created_enum_disr_types: RefCell<HashMap<ast::DefId, DIType>>, created_enum_disr_types: RefCell<HashMap<ast::DefId, DIType>>,
namespace_map: RefCell<HashMap<Vec<ast::Name>, Rc<NamespaceTreeNode>>>, namespace_map: RefCell<HashMap<Vec<ast::Name>, Rc<NamespaceTreeNode>>>,
@ -343,8 +343,8 @@ pub fn create_global_var_metadata(cx: &CrateContext,
let linkage_name = namespace_node.mangled_name_of_contained_item(var_name); let linkage_name = namespace_node.mangled_name_of_contained_item(var_name);
let var_scope = namespace_node.scope; let var_scope = namespace_node.scope;
var_name.with_c_str(|var_name| { var_name.as_slice().with_c_str(|var_name| {
linkage_name.with_c_str(|linkage_name| { linkage_name.as_slice().with_c_str(|linkage_name| {
unsafe { unsafe {
llvm::LLVMDIBuilderCreateStaticVariable(DIB(cx), llvm::LLVMDIBuilderCreateStaticVariable(DIB(cx),
var_scope, var_scope,
@ -725,7 +725,7 @@ pub fn create_function_debug_context(cx: &CrateContext,
let containing_scope = namespace_node.scope; let containing_scope = namespace_node.scope;
(linkage_name, containing_scope) (linkage_name, containing_scope)
} else { } else {
(function_name.as_slice().to_owned(), file_metadata) (function_name.as_slice().to_strbuf(), file_metadata)
}; };
// Clang sets this parameter to the opening brace of the function's block, so let's do this too. // Clang sets this parameter to the opening brace of the function's block, so let's do this too.
@ -734,7 +734,7 @@ pub fn create_function_debug_context(cx: &CrateContext,
let is_local_to_unit = is_node_local_to_unit(cx, fn_ast_id); let is_local_to_unit = is_node_local_to_unit(cx, fn_ast_id);
let fn_metadata = function_name.as_slice().with_c_str(|function_name| { let fn_metadata = function_name.as_slice().with_c_str(|function_name| {
linkage_name.with_c_str(|linkage_name| { linkage_name.as_slice().with_c_str(|linkage_name| {
unsafe { unsafe {
llvm::LLVMDIBuilderCreateFunction( llvm::LLVMDIBuilderCreateFunction(
DIB(cx), DIB(cx),
@ -838,7 +838,8 @@ pub fn create_function_debug_context(cx: &CrateContext,
let actual_self_type = self_type.unwrap(); let actual_self_type = self_type.unwrap();
// Add self type name to <...> clause of function name // Add self type name to <...> clause of function name
let actual_self_type_name = ppaux::ty_to_str(cx.tcx(), actual_self_type); let actual_self_type_name = ppaux::ty_to_str(cx.tcx(), actual_self_type);
name_to_append_suffix_to.push_str(actual_self_type_name); name_to_append_suffix_to.push_str(
actual_self_type_name.as_slice());
if generics.is_type_parameterized() { if generics.is_type_parameterized() {
name_to_append_suffix_to.push_str(","); name_to_append_suffix_to.push_str(",");
@ -882,7 +883,7 @@ pub fn create_function_debug_context(cx: &CrateContext,
let actual_type = *actual_types.get(index); let actual_type = *actual_types.get(index);
// Add actual type name to <...> clause of function name // Add actual type name to <...> clause of function name
let actual_type_name = ppaux::ty_to_str(cx.tcx(), actual_type); let actual_type_name = ppaux::ty_to_str(cx.tcx(), actual_type);
name_to_append_suffix_to.push_str(actual_type_name); name_to_append_suffix_to.push_str(actual_type_name.as_slice());
if index != generics.ty_params.len() - 1 { if index != generics.ty_params.len() - 1 {
name_to_append_suffix_to.push_str(","); name_to_append_suffix_to.push_str(",");
@ -1107,7 +1108,7 @@ fn file_metadata(cx: &CrateContext, full_path: &str) -> DIFile {
}); });
let mut created_files = debug_context(cx).created_files.borrow_mut(); let mut created_files = debug_context(cx).created_files.borrow_mut();
created_files.insert(full_path.to_owned(), file_metadata); created_files.insert(full_path.to_strbuf(), file_metadata);
return file_metadata; return file_metadata;
} }
@ -1182,7 +1183,7 @@ fn pointer_type_metadata(cx: &CrateContext,
let pointer_llvm_type = type_of::type_of(cx, pointer_type); let pointer_llvm_type = type_of::type_of(cx, pointer_type);
let (pointer_size, pointer_align) = size_and_align_of(cx, pointer_llvm_type); let (pointer_size, pointer_align) = size_and_align_of(cx, pointer_llvm_type);
let name = ppaux::ty_to_str(cx.tcx(), pointer_type); let name = ppaux::ty_to_str(cx.tcx(), pointer_type);
let ptr_metadata = name.with_c_str(|name| { let ptr_metadata = name.as_slice().with_c_str(|name| {
unsafe { unsafe {
llvm::LLVMDIBuilderCreatePointerType( llvm::LLVMDIBuilderCreatePointerType(
DIB(cx), DIB(cx),
@ -1232,9 +1233,9 @@ impl StructMemberDescriptionFactory {
-> Vec<MemberDescription> { -> Vec<MemberDescription> {
self.fields.iter().map(|field| { self.fields.iter().map(|field| {
let name = if field.ident.name == special_idents::unnamed_field.name { let name = if field.ident.name == special_idents::unnamed_field.name {
"".to_owned() "".to_strbuf()
} else { } else {
token::get_ident(field.ident).get().to_str() token::get_ident(field.ident).get().to_strbuf()
}; };
MemberDescription { MemberDescription {
@ -1263,7 +1264,7 @@ fn prepare_struct_metadata(cx: &CrateContext,
let struct_metadata_stub = create_struct_stub(cx, let struct_metadata_stub = create_struct_stub(cx,
struct_llvm_type, struct_llvm_type,
struct_name, struct_name.as_slice(),
containing_scope, containing_scope,
file_metadata, file_metadata,
definition_span); definition_span);
@ -1335,7 +1336,7 @@ impl TupleMemberDescriptionFactory {
-> Vec<MemberDescription> { -> Vec<MemberDescription> {
self.component_types.iter().map(|&component_type| { self.component_types.iter().map(|&component_type| {
MemberDescription { MemberDescription {
name: "".to_owned(), name: "".to_strbuf(),
llvm_type: type_of::type_of(cx, component_type), llvm_type: type_of::type_of(cx, component_type),
type_metadata: type_metadata(cx, component_type, self.span), type_metadata: type_metadata(cx, component_type, self.span),
offset: ComputedMemberOffset, offset: ComputedMemberOffset,
@ -1359,7 +1360,7 @@ fn prepare_tuple_metadata(cx: &CrateContext,
cache_id: cache_id_for_type(tuple_type), cache_id: cache_id_for_type(tuple_type),
metadata_stub: create_struct_stub(cx, metadata_stub: create_struct_stub(cx,
tuple_llvm_type, tuple_llvm_type,
tuple_name, tuple_name.as_slice(),
file_metadata, file_metadata,
file_metadata, file_metadata,
span), span),
@ -1413,7 +1414,7 @@ impl GeneralMemberDescriptionFactory {
self.file_metadata, self.file_metadata,
codemap::DUMMY_SP); codemap::DUMMY_SP);
MemberDescription { MemberDescription {
name: "".to_owned(), name: "".to_strbuf(),
llvm_type: variant_llvm_type, llvm_type: variant_llvm_type,
type_metadata: variant_type_metadata, type_metadata: variant_type_metadata,
offset: FixedMemberOffset { bytes: 0 }, offset: FixedMemberOffset { bytes: 0 },
@ -1423,7 +1424,7 @@ impl GeneralMemberDescriptionFactory {
} }
struct EnumVariantMemberDescriptionFactory { struct EnumVariantMemberDescriptionFactory {
args: Vec<(~str, ty::t)> , args: Vec<(StrBuf, ty::t)> ,
discriminant_type_metadata: Option<DIType>, discriminant_type_metadata: Option<DIType>,
span: Span, span: Span,
} }
@ -1433,7 +1434,7 @@ impl EnumVariantMemberDescriptionFactory {
-> Vec<MemberDescription> { -> Vec<MemberDescription> {
self.args.iter().enumerate().map(|(i, &(ref name, ty))| { self.args.iter().enumerate().map(|(i, &(ref name, ty))| {
MemberDescription { MemberDescription {
name: name.to_str(), name: name.to_strbuf(),
llvm_type: type_of::type_of(cx, ty), llvm_type: type_of::type_of(cx, ty),
type_metadata: match self.discriminant_type_metadata { type_metadata: match self.discriminant_type_metadata {
Some(metadata) if i == 0 => metadata, Some(metadata) if i == 0 => metadata,
@ -1491,9 +1492,9 @@ fn describe_enum_variant(cx: &CrateContext,
} }
// Build an array of (field name, field type) pairs to be captured in the factory closure. // Build an array of (field name, field type) pairs to be captured in the factory closure.
let args: Vec<(~str, ty::t)> = arg_names.iter() let args: Vec<(StrBuf, ty::t)> = arg_names.iter()
.zip(struct_def.fields.iter()) .zip(struct_def.fields.iter())
.map(|(s, &t)| (s.to_str(), t)) .map(|(s, &t)| (s.to_strbuf(), t))
.collect(); .collect();
let member_description_factory = let member_description_factory =
@ -1520,9 +1521,10 @@ fn prepare_enum_metadata(cx: &CrateContext,
// For empty enums there is an early exit. Just describe it as an empty struct with the // For empty enums there is an early exit. Just describe it as an empty struct with the
// appropriate type name // appropriate type name
if ty::type_is_empty(cx.tcx(), enum_type) { if ty::type_is_empty(cx.tcx(), enum_type) {
let empty_type_metadata = composite_type_metadata(cx, let empty_type_metadata = composite_type_metadata(
cx,
Type::nil(cx), Type::nil(cx),
enum_name, enum_name.as_slice(),
[], [],
containing_scope, containing_scope,
file_metadata, file_metadata,
@ -1621,8 +1623,8 @@ fn prepare_enum_metadata(cx: &CrateContext,
let (enum_type_size, enum_type_align) = size_and_align_of(cx, enum_llvm_type); let (enum_type_size, enum_type_align) = size_and_align_of(cx, enum_llvm_type);
let unique_id = generate_unique_type_id("DI_ENUM_"); let unique_id = generate_unique_type_id("DI_ENUM_");
let enum_metadata = enum_name.with_c_str(|enum_name| { let enum_metadata = enum_name.as_slice().with_c_str(|enum_name| {
unique_id.with_c_str(|unique_id| { unique_id.as_slice().with_c_str(|unique_id| {
unsafe { unsafe {
llvm::LLVMDIBuilderCreateUnionType( llvm::LLVMDIBuilderCreateUnionType(
DIB(cx), DIB(cx),
@ -1694,7 +1696,7 @@ enum MemberOffset {
} }
struct MemberDescription { struct MemberDescription {
name: ~str, name: StrBuf,
llvm_type: Type, llvm_type: Type,
type_metadata: DIType, type_metadata: DIType,
offset: MemberOffset, offset: MemberOffset,
@ -1764,7 +1766,7 @@ fn set_members_of_composite_type(cx: &CrateContext,
ComputedMemberOffset => machine::llelement_offset(cx, composite_llvm_type, i) ComputedMemberOffset => machine::llelement_offset(cx, composite_llvm_type, i)
}; };
member_description.name.with_c_str(|member_name| { member_description.name.as_slice().with_c_str(|member_name| {
unsafe { unsafe {
llvm::LLVMDIBuilderCreateMemberType( llvm::LLVMDIBuilderCreateMemberType(
DIB(cx), DIB(cx),
@ -1806,7 +1808,7 @@ fn create_struct_stub(cx: &CrateContext,
return unsafe { return unsafe {
struct_type_name.with_c_str(|name| { struct_type_name.with_c_str(|name| {
unique_id.with_c_str(|unique_id| { unique_id.as_slice().with_c_str(|unique_id| {
// LLVMDIBuilderCreateStructType() wants an empty array. A null pointer will lead to // LLVMDIBuilderCreateStructType() wants an empty array. A null pointer will lead to
// hard to trace and debug LLVM assertions later on in llvm/lib/IR/Value.cpp // hard to trace and debug LLVM assertions later on in llvm/lib/IR/Value.cpp
let empty_array = create_DIArray(DIB(cx), []); let empty_array = create_DIArray(DIB(cx), []);
@ -1853,31 +1855,31 @@ fn boxed_type_metadata(cx: &CrateContext,
let member_descriptions = [ let member_descriptions = [
MemberDescription { MemberDescription {
name: "refcnt".to_owned(), name: "refcnt".to_strbuf(),
llvm_type: *member_llvm_types.get(0), llvm_type: *member_llvm_types.get(0),
type_metadata: type_metadata(cx, int_type, codemap::DUMMY_SP), type_metadata: type_metadata(cx, int_type, codemap::DUMMY_SP),
offset: ComputedMemberOffset, offset: ComputedMemberOffset,
}, },
MemberDescription { MemberDescription {
name: "drop_glue".to_owned(), name: "drop_glue".to_strbuf(),
llvm_type: *member_llvm_types.get(1), llvm_type: *member_llvm_types.get(1),
type_metadata: nil_pointer_type_metadata, type_metadata: nil_pointer_type_metadata,
offset: ComputedMemberOffset, offset: ComputedMemberOffset,
}, },
MemberDescription { MemberDescription {
name: "prev".to_owned(), name: "prev".to_strbuf(),
llvm_type: *member_llvm_types.get(2), llvm_type: *member_llvm_types.get(2),
type_metadata: nil_pointer_type_metadata, type_metadata: nil_pointer_type_metadata,
offset: ComputedMemberOffset, offset: ComputedMemberOffset,
}, },
MemberDescription { MemberDescription {
name: "next".to_owned(), name: "next".to_strbuf(),
llvm_type: *member_llvm_types.get(3), llvm_type: *member_llvm_types.get(3),
type_metadata: nil_pointer_type_metadata, type_metadata: nil_pointer_type_metadata,
offset: ComputedMemberOffset, offset: ComputedMemberOffset,
}, },
MemberDescription { MemberDescription {
name: "val".to_owned(), name: "val".to_strbuf(),
llvm_type: *member_llvm_types.get(4), llvm_type: *member_llvm_types.get(4),
type_metadata: content_type_metadata, type_metadata: content_type_metadata,
offset: ComputedMemberOffset, offset: ComputedMemberOffset,
@ -1964,19 +1966,19 @@ fn vec_metadata(cx: &CrateContext,
let member_descriptions = [ let member_descriptions = [
MemberDescription { MemberDescription {
name: "fill".to_owned(), name: "fill".to_strbuf(),
llvm_type: *member_llvm_types.get(0), llvm_type: *member_llvm_types.get(0),
type_metadata: int_type_metadata, type_metadata: int_type_metadata,
offset: ComputedMemberOffset, offset: ComputedMemberOffset,
}, },
MemberDescription { MemberDescription {
name: "alloc".to_owned(), name: "alloc".to_strbuf(),
llvm_type: *member_llvm_types.get(1), llvm_type: *member_llvm_types.get(1),
type_metadata: int_type_metadata, type_metadata: int_type_metadata,
offset: ComputedMemberOffset, offset: ComputedMemberOffset,
}, },
MemberDescription { MemberDescription {
name: "elements".to_owned(), name: "elements".to_strbuf(),
llvm_type: *member_llvm_types.get(2), llvm_type: *member_llvm_types.get(2),
type_metadata: array_type_metadata, type_metadata: array_type_metadata,
offset: ComputedMemberOffset, offset: ComputedMemberOffset,
@ -2021,13 +2023,13 @@ fn vec_slice_metadata(cx: &CrateContext,
let member_descriptions = [ let member_descriptions = [
MemberDescription { MemberDescription {
name: "data_ptr".to_owned(), name: "data_ptr".to_strbuf(),
llvm_type: *member_llvm_types.get(0), llvm_type: *member_llvm_types.get(0),
type_metadata: type_metadata(cx, data_ptr_type, span), type_metadata: type_metadata(cx, data_ptr_type, span),
offset: ComputedMemberOffset, offset: ComputedMemberOffset,
}, },
MemberDescription { MemberDescription {
name: "length".to_owned(), name: "length".to_strbuf(),
llvm_type: *member_llvm_types.get(1), llvm_type: *member_llvm_types.get(1),
type_metadata: type_metadata(cx, ty::mk_uint(), span), type_metadata: type_metadata(cx, ty::mk_uint(), span),
offset: ComputedMemberOffset, offset: ComputedMemberOffset,
@ -2042,7 +2044,7 @@ fn vec_slice_metadata(cx: &CrateContext,
return composite_type_metadata( return composite_type_metadata(
cx, cx,
slice_llvm_type, slice_llvm_type,
slice_type_name, slice_type_name.as_slice(),
member_descriptions, member_descriptions,
file_metadata, file_metadata,
file_metadata, file_metadata,
@ -2099,11 +2101,15 @@ fn trait_metadata(cx: &CrateContext,
// the trait's methods. // the trait's methods.
let last = ty::with_path(cx.tcx(), def_id, |mut path| path.last().unwrap()); let last = ty::with_path(cx.tcx(), def_id, |mut path| path.last().unwrap());
let ident_string = token::get_name(last.name()); let ident_string = token::get_name(last.name());
let name = ppaux::trait_store_to_str(cx.tcx(), trait_store) + let mut name = ppaux::trait_store_to_str(cx.tcx(), trait_store);
ident_string.get(); name.push_str(ident_string.get());
// Add type and region parameters // Add type and region parameters
let name = ppaux::parameterized(cx.tcx(), name, &substs.regions, let name = ppaux::parameterized(cx.tcx(),
substs.tps.as_slice(), def_id, true); name.as_slice(),
&substs.regions,
substs.tps.as_slice(),
def_id,
true);
let (containing_scope, definition_span) = get_namespace_and_span_for_item(cx, def_id); let (containing_scope, definition_span) = get_namespace_and_span_for_item(cx, def_id);
@ -2114,7 +2120,7 @@ fn trait_metadata(cx: &CrateContext,
composite_type_metadata(cx, composite_type_metadata(cx,
trait_llvm_type, trait_llvm_type,
name, name.as_slice(),
[], [],
containing_scope, containing_scope,
file_metadata, file_metadata,
@ -2136,7 +2142,8 @@ fn type_metadata(cx: &CrateContext,
pointer_type: ty::t, pointer_type: ty::t,
type_in_box: ty::t) type_in_box: ty::t)
-> DIType { -> DIType {
let content_type_name: &str = ppaux::ty_to_str(cx.tcx(), type_in_box); let content_type_name = ppaux::ty_to_str(cx.tcx(), type_in_box);
let content_type_name = content_type_name.as_slice();
let content_llvm_type = type_of::type_of(cx, type_in_box); let content_llvm_type = type_of::type_of(cx, type_in_box);
let content_type_metadata = type_metadata( let content_type_metadata = type_metadata(
cx, cx,
@ -2296,10 +2303,12 @@ fn cache_id_for_type(t: ty::t) -> uint {
// Used to avoid LLVM metadata uniquing problems. See `create_struct_stub()` and // Used to avoid LLVM metadata uniquing problems. See `create_struct_stub()` and
// `prepare_enum_metadata()`. // `prepare_enum_metadata()`.
fn generate_unique_type_id(prefix: &'static str) -> ~str { fn generate_unique_type_id(prefix: &'static str) -> StrBuf {
unsafe { unsafe {
static mut unique_id_counter: atomics::AtomicUint = atomics::INIT_ATOMIC_UINT; static mut unique_id_counter: atomics::AtomicUint = atomics::INIT_ATOMIC_UINT;
format!("{}{}", prefix, unique_id_counter.fetch_add(1, atomics::SeqCst)) format_strbuf!("{}{}",
prefix,
unique_id_counter.fetch_add(1, atomics::SeqCst))
} }
} }
@ -2796,7 +2805,7 @@ fn populate_scope_map(cx: &CrateContext,
ast::ExprInlineAsm(ast::InlineAsm { inputs: ref inputs, ast::ExprInlineAsm(ast::InlineAsm { inputs: ref inputs,
outputs: ref outputs, outputs: ref outputs,
.. }) => { .. }) => {
// inputs, outputs: ~[(~str, @expr)] // inputs, outputs: ~[(StrBuf, @expr)]
for &(_, exp) in inputs.iter() { for &(_, exp) in inputs.iter() {
walk_expr(cx, exp, scope_stack, scope_map); walk_expr(cx, exp, scope_stack, scope_map);
} }
@ -2821,7 +2830,7 @@ struct NamespaceTreeNode {
} }
impl NamespaceTreeNode { impl NamespaceTreeNode {
fn mangled_name_of_contained_item(&self, item_name: &str) -> ~str { fn mangled_name_of_contained_item(&self, item_name: &str) -> StrBuf {
fn fill_nested(node: &NamespaceTreeNode, output: &mut StrBuf) { fn fill_nested(node: &NamespaceTreeNode, output: &mut StrBuf) {
match node.parent { match node.parent {
Some(ref parent) => fill_nested(&*parent.upgrade().unwrap(), output), Some(ref parent) => fill_nested(&*parent.upgrade().unwrap(), output),
@ -2837,7 +2846,7 @@ impl NamespaceTreeNode {
name.push_str(format!("{}", item_name.len())); name.push_str(format!("{}", item_name.len()));
name.push_str(item_name); name.push_str(item_name);
name.push_char('E'); name.push_char('E');
name.into_owned() name
} }
} }
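
The last hunk above now returns the accumulated StrBuf directly instead of calling into_owned(). The sketch below shows the same build-then-return pattern with push_str and push_char; mangled_name is a simplified, illustrative stand-in for mangled_name_of_contained_item that skips the namespace walk done by fill_nested.

    // Build the name incrementally in a StrBuf and return it as-is.
    fn mangled_name(item_name: &str) -> StrBuf {
        let mut name = "_ZN".to_strbuf();
        name.push_str(format_strbuf!("{}", item_name.len()).as_slice());
        name.push_str(item_name);
        name.push_char('E');
        name
    }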

View file

@ -86,10 +86,10 @@ pub enum Dest {
} }
impl Dest { impl Dest {
pub fn to_str(&self, ccx: &CrateContext) -> ~str { pub fn to_str(&self, ccx: &CrateContext) -> StrBuf {
match *self { match *self {
SaveIn(v) => format!("SaveIn({})", ccx.tn.val_to_str(v)), SaveIn(v) => format_strbuf!("SaveIn({})", ccx.tn.val_to_str(v)),
Ignore => "Ignore".to_owned() Ignore => "Ignore".to_strbuf()
} }
} }
} }
@ -545,7 +545,7 @@ fn trans_def<'a>(bcx: &'a Block<'a>,
let symbol = csearch::get_symbol( let symbol = csearch::get_symbol(
&bcx.ccx().sess().cstore, &bcx.ccx().sess().cstore,
did); did);
let llval = symbol.with_c_str(|buf| { let llval = symbol.as_slice().with_c_str(|buf| {
llvm::LLVMAddGlobal(bcx.ccx().llmod, llvm::LLVMAddGlobal(bcx.ccx().llmod,
llty.to_ref(), llty.to_ref(),
buf) buf)

View file

@ -445,7 +445,7 @@ pub fn trans_foreign_mod(ccx: &CrateContext, foreign_mod: &ast::ForeignMod) {
let lname = link_name(foreign_item); let lname = link_name(foreign_item);
ccx.item_symbols.borrow_mut().insert(foreign_item.id, ccx.item_symbols.borrow_mut().insert(foreign_item.id,
lname.get().to_owned()); lname.get().to_strbuf());
} }
} }
@ -476,7 +476,7 @@ pub fn trans_foreign_mod(ccx: &CrateContext, foreign_mod: &ast::ForeignMod) {
pub fn register_rust_fn_with_foreign_abi(ccx: &CrateContext, pub fn register_rust_fn_with_foreign_abi(ccx: &CrateContext,
sp: Span, sp: Span,
sym: ~str, sym: StrBuf,
node_id: ast::NodeId) node_id: ast::NodeId)
-> ValueRef { -> ValueRef {
let _icx = push_ctxt("foreign::register_foreign_fn"); let _icx = push_ctxt("foreign::register_foreign_fn");
@ -553,7 +553,7 @@ pub fn trans_rust_fn_with_foreign_abi(ccx: &CrateContext,
false, false,
f.sig.inputs.as_slice(), f.sig.inputs.as_slice(),
f.sig.output, f.sig.output,
ps); ps.as_slice());
base::set_llvm_fn_attrs(attrs, llfn); base::set_llvm_fn_attrs(attrs, llfn);
base::trans_fn(ccx, decl, body, llfn, None, id, []); base::trans_fn(ccx, decl, body, llfn, None, id, []);
llfn llfn

View file

@ -203,7 +203,7 @@ fn make_visit_glue<'a>(bcx: &'a Block<'a>, v: ValueRef, t: ty::t)
ty::ReStatic) { ty::ReStatic) {
Ok(pair) => pair, Ok(pair) => pair,
Err(s) => { Err(s) => {
bcx.tcx().sess.fatal(s); bcx.tcx().sess.fatal(s.as_slice());
} }
}; };
let v = PointerCast(bcx, v, type_of(bcx.ccx(), object_ty).ptr_to()); let v = PointerCast(bcx, v, type_of(bcx.ccx(), object_ty).ptr_to());
@ -413,14 +413,15 @@ pub fn declare_tydesc(ccx: &CrateContext, t: ty::t) -> tydesc_info {
let llalign = llalign_of(ccx, llty); let llalign = llalign_of(ccx, llty);
let name = mangle_internal_name_by_type_and_seq(ccx, t, "tydesc"); let name = mangle_internal_name_by_type_and_seq(ccx, t, "tydesc");
debug!("+++ declare_tydesc {} {}", ppaux::ty_to_str(ccx.tcx(), t), name); debug!("+++ declare_tydesc {} {}", ppaux::ty_to_str(ccx.tcx(), t), name);
let gvar = name.with_c_str(|buf| { let gvar = name.as_slice().with_c_str(|buf| {
unsafe { unsafe {
llvm::LLVMAddGlobal(ccx.llmod, ccx.tydesc_type().to_ref(), buf) llvm::LLVMAddGlobal(ccx.llmod, ccx.tydesc_type().to_ref(), buf)
} }
}); });
note_unique_llvm_symbol(ccx, name); note_unique_llvm_symbol(ccx, name);
let ty_name = token::intern_and_get_ident(ppaux::ty_to_str(ccx.tcx(), t)); let ty_name = token::intern_and_get_ident(
ppaux::ty_to_str(ccx.tcx(), t).as_slice());
let ty_name = C_str_slice(ccx, ty_name); let ty_name = C_str_slice(ccx, ty_name);
debug!("--- declare_tydesc {}", ppaux::ty_to_str(ccx.tcx(), t)); debug!("--- declare_tydesc {}", ppaux::ty_to_str(ccx.tcx(), t));
@ -439,7 +440,10 @@ fn declare_generic_glue(ccx: &CrateContext, t: ty::t, llfnty: Type,
let _icx = push_ctxt("declare_generic_glue"); let _icx = push_ctxt("declare_generic_glue");
let fn_nm = mangle_internal_name_by_type_and_seq(ccx, t, "glue_".to_owned() + name); let fn_nm = mangle_internal_name_by_type_and_seq(ccx, t, "glue_".to_owned() + name);
debug!("{} is for type {}", fn_nm, ppaux::ty_to_str(ccx.tcx(), t)); debug!("{} is for type {}", fn_nm, ppaux::ty_to_str(ccx.tcx(), t));
let llfn = decl_cdecl_fn(ccx.llmod, fn_nm, llfnty, ty::mk_nil()); let llfn = decl_cdecl_fn(ccx.llmod,
fn_nm.as_slice(),
llfnty,
ty::mk_nil());
note_unique_llvm_symbol(ccx, fn_nm); note_unique_llvm_symbol(ccx, fn_nm);
return llfn; return llfn;
} }
@ -452,7 +456,9 @@ fn make_generic_glue(ccx: &CrateContext,
name: &str) name: &str)
-> ValueRef { -> ValueRef {
let _icx = push_ctxt("make_generic_glue"); let _icx = push_ctxt("make_generic_glue");
let glue_name = format!("glue {} {}", name, ty_to_short_str(ccx.tcx(), t)); let glue_name = format_strbuf!("glue {} {}",
name,
ty_to_short_str(ccx.tcx(), t));
let _s = StatRecorder::new(ccx, glue_name); let _s = StatRecorder::new(ccx, glue_name);
let arena = TypedArena::new(); let arena = TypedArena::new();

View file

@ -13,24 +13,24 @@ use middle::trans::type_::Type;
use lib::llvm::ValueRef; use lib::llvm::ValueRef;
pub trait LlvmRepr { pub trait LlvmRepr {
fn llrepr(&self, ccx: &CrateContext) -> ~str; fn llrepr(&self, ccx: &CrateContext) -> StrBuf;
} }
impl<'a, T:LlvmRepr> LlvmRepr for &'a [T] { impl<'a, T:LlvmRepr> LlvmRepr for &'a [T] {
fn llrepr(&self, ccx: &CrateContext) -> ~str { fn llrepr(&self, ccx: &CrateContext) -> StrBuf {
let reprs: Vec<~str> = self.iter().map(|t| t.llrepr(ccx)).collect(); let reprs: Vec<StrBuf> = self.iter().map(|t| t.llrepr(ccx)).collect();
format!("[{}]", reprs.connect(",")) format_strbuf!("[{}]", reprs.connect(","))
} }
} }
impl LlvmRepr for Type { impl LlvmRepr for Type {
fn llrepr(&self, ccx: &CrateContext) -> ~str { fn llrepr(&self, ccx: &CrateContext) -> StrBuf {
ccx.tn.type_to_str(*self) ccx.tn.type_to_str(*self)
} }
} }
impl LlvmRepr for ValueRef { impl LlvmRepr for ValueRef {
fn llrepr(&self, ccx: &CrateContext) -> ~str { fn llrepr(&self, ccx: &CrateContext) -> StrBuf {
ccx.tn.val_to_str(*self) ccx.tn.val_to_str(*self)
} }
} }
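
The LlvmRepr change above is the trait-level version of the same migration: every implementation switches its return type from ~str to StrBuf, and collected pieces become Vec<StrBuf> before being joined. An illustrative sketch on a made-up trait, using only the constructs that appear in the hunk (format_strbuf!, Vec<StrBuf>, connect):

    // Hypothetical trait mirroring the LlvmRepr shape, not from the patch.
    trait Describe {
        fn describe(&self) -> StrBuf;
    }

    impl Describe for uint {
        fn describe(&self) -> StrBuf {
            format_strbuf!("uint({})", *self)
        }
    }

    impl<'a, T: Describe> Describe for &'a [T] {
        fn describe(&self) -> StrBuf {
            let parts: Vec<StrBuf> = self.iter().map(|t| t.describe()).collect();
            format_strbuf!("[{}]", parts.connect(","))
        }
    }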

View file

@ -109,9 +109,10 @@ pub fn monomorphic_fn(ccx: &CrateContext,
ccx.sess(), ccx.sess(),
ccx.tcx.map.find(fn_id.node), ccx.tcx.map.find(fn_id.node),
|| { || {
(format!("while monomorphizing {:?}, couldn't find it in the \ format_strbuf!("while monomorphizing {:?}, couldn't find it in \
item map (may have attempted to monomorphize an item \ the item map (may have attempted to monomorphize \
defined in a different crate?)", fn_id)).to_strbuf() an item defined in a different crate?)",
fn_id)
}); });
match map_node { match map_node {
@ -212,9 +213,11 @@ pub fn monomorphic_fn(ccx: &CrateContext,
// This shouldn't need to option dance. // This shouldn't need to option dance.
let mut hash_id = Some(hash_id); let mut hash_id = Some(hash_id);
let mk_lldecl = || { let mk_lldecl = || {
let lldecl = decl_internal_rust_fn(ccx, false, let lldecl = decl_internal_rust_fn(ccx,
false,
f.sig.inputs.as_slice(), f.sig.inputs.as_slice(),
f.sig.output, s); f.sig.output,
s.as_slice());
ccx.monomorphized.borrow_mut().insert(hash_id.take_unwrap(), lldecl); ccx.monomorphized.borrow_mut().insert(hash_id.take_unwrap(), lldecl);
lldecl lldecl
}; };

View file

@ -254,8 +254,9 @@ impl<'a, 'b> Reflector<'a, 'b> {
} }
let extra = (vec!( let extra = (vec!(
self.c_slice(token::intern_and_get_ident(ty_to_str(tcx, self.c_slice(
t))), token::intern_and_get_ident(ty_to_str(tcx,
t).as_slice())),
self.c_bool(named_fields), self.c_bool(named_fields),
self.c_uint(fields.len()) self.c_uint(fields.len())
)).append(self.c_size_and_align(t).as_slice()); )).append(self.c_size_and_align(t).as_slice());
@ -288,7 +289,11 @@ impl<'a, 'b> Reflector<'a, 'b> {
let sym = mangle_internal_name_by_path_and_seq( let sym = mangle_internal_name_by_path_and_seq(
ast_map::Values([].iter()).chain(None), "get_disr"); ast_map::Values([].iter()).chain(None), "get_disr");
let llfdecl = decl_internal_rust_fn(ccx, false, [opaqueptrty], ty::mk_u64(), sym); let llfdecl = decl_internal_rust_fn(ccx,
false,
[opaqueptrty],
ty::mk_u64(),
sym.as_slice());
let arena = TypedArena::new(); let arena = TypedArena::new();
let fcx = new_fn_ctxt(ccx, llfdecl, -1, false, let fcx = new_fn_ctxt(ccx, llfdecl, -1, false,
ty::mk_u64(), None, None, &arena); ty::mk_u64(), None, None, &arena);
@ -344,7 +349,8 @@ impl<'a, 'b> Reflector<'a, 'b> {
ty::ty_trait(..) => { ty::ty_trait(..) => {
let extra = [ let extra = [
self.c_slice(token::intern_and_get_ident(ty_to_str(tcx, t))) self.c_slice(token::intern_and_get_ident(
ty_to_str(tcx, t).as_slice()))
]; ];
self.visit("trait", extra); self.visit("trait", extra);
} }

View file

@ -73,9 +73,9 @@ pub struct VecTypes {
} }
impl VecTypes { impl VecTypes {
pub fn to_str(&self, ccx: &CrateContext) -> ~str { pub fn to_str(&self, ccx: &CrateContext) -> StrBuf {
format!("VecTypes \\{unit_ty={}, llunit_ty={}, llunit_size={}, \ format_strbuf!("VecTypes \\{unit_ty={}, llunit_ty={}, \
llunit_alloc_size={}\\}", llunit_size={}, llunit_alloc_size={}\\}",
ty_to_str(ccx.tcx(), self.unit_ty), ty_to_str(ccx.tcx(), self.unit_ty),
ccx.tn.type_to_str(self.llunit_ty), ccx.tn.type_to_str(self.llunit_ty),
ccx.tn.val_to_str(self.llunit_size), ccx.tn.val_to_str(self.llunit_size),

View file

@ -206,7 +206,7 @@ pub fn type_of(cx: &CrateContext, t: ty::t) -> Type {
// of the enum's variants refers to the enum itself. // of the enum's variants refers to the enum itself.
let repr = adt::represent_type(cx, t); let repr = adt::represent_type(cx, t);
let name = llvm_type_name(cx, an_enum, did, substs.tps.as_slice()); let name = llvm_type_name(cx, an_enum, did, substs.tps.as_slice());
adt::incomplete_type_of(cx, &*repr, name) adt::incomplete_type_of(cx, &*repr, name.as_slice())
} }
ty::ty_box(typ) => { ty::ty_box(typ) => {
Type::at_box(cx, type_of(cx, typ)).ptr_to() Type::at_box(cx, type_of(cx, typ)).ptr_to()
@ -264,7 +264,7 @@ pub fn type_of(cx: &CrateContext, t: ty::t) -> Type {
a_struct, a_struct,
did, did,
substs.tps.as_slice()); substs.tps.as_slice());
adt::incomplete_type_of(cx, &*repr, name) adt::incomplete_type_of(cx, &*repr, name.as_slice())
} }
} }
@ -301,18 +301,24 @@ pub enum named_ty { a_struct, an_enum }
pub fn llvm_type_name(cx: &CrateContext, pub fn llvm_type_name(cx: &CrateContext,
what: named_ty, what: named_ty,
did: ast::DefId, did: ast::DefId,
tps: &[ty::t]) -> ~str { tps: &[ty::t])
-> StrBuf {
let name = match what { let name = match what {
a_struct => { "struct" } a_struct => { "struct" }
an_enum => { "enum" } an_enum => { "enum" }
}; };
let tstr = ppaux::parameterized(cx.tcx(), ty::item_path_str(cx.tcx(), did), let tstr = ppaux::parameterized(cx.tcx(),
&ty::NonerasedRegions(OwnedSlice::empty()), ty::item_path_str(cx.tcx(),
tps, did, false); did).as_slice(),
&ty::NonerasedRegions(
OwnedSlice::empty()),
tps,
did,
false);
if did.krate == 0 { if did.krate == 0 {
format!("{}.{}", name, tstr) format_strbuf!("{}.{}", name, tstr)
} else { } else {
format!("{}.{}[\\#{}]", name, tstr, did.krate) format_strbuf!("{}.{}[\\#{}]", name, tstr, did.krate)
} }
} }

View file

@ -278,7 +278,7 @@ pub struct ctxt {
pub freevars: RefCell<freevars::freevar_map>, pub freevars: RefCell<freevars::freevar_map>,
pub tcache: type_cache, pub tcache: type_cache,
pub rcache: creader_cache, pub rcache: creader_cache,
pub short_names_cache: RefCell<HashMap<t, ~str>>, pub short_names_cache: RefCell<HashMap<t, StrBuf>>,
pub needs_unwind_cleanup_cache: RefCell<HashMap<t, bool>>, pub needs_unwind_cleanup_cache: RefCell<HashMap<t, bool>>,
pub tc_cache: RefCell<HashMap<uint, TypeContents>>, pub tc_cache: RefCell<HashMap<uint, TypeContents>>,
pub ast_ty_to_ty_cache: RefCell<NodeMap<ast_ty_to_ty_cache_entry>>, pub ast_ty_to_ty_cache: RefCell<NodeMap<ast_ty_to_ty_cache_entry>>,
@ -1538,7 +1538,7 @@ pub fn substs_is_noop(substs: &substs) -> bool {
substs.self_ty.is_none() substs.self_ty.is_none()
} }
pub fn substs_to_str(cx: &ctxt, substs: &substs) -> ~str { pub fn substs_to_str(cx: &ctxt, substs: &substs) -> StrBuf {
substs.repr(cx) substs.repr(cx)
} }
@ -3189,7 +3189,9 @@ pub fn field_idx_strict(tcx: &ctxt, name: ast::Name, fields: &[field])
tcx.sess.bug(format!( tcx.sess.bug(format!(
"no field named `{}` found in the list of fields `{:?}`", "no field named `{}` found in the list of fields `{:?}`",
token::get_name(name), token::get_name(name),
fields.iter().map(|f| token::get_ident(f.ident).get().to_str()).collect::<Vec<~str>>())); fields.iter()
.map(|f| token::get_ident(f.ident).get().to_strbuf())
.collect::<Vec<StrBuf>>()));
} }
pub fn method_idx(id: ast::Ident, meths: &[Rc<Method>]) -> Option<uint> { pub fn method_idx(id: ast::Ident, meths: &[Rc<Method>]) -> Option<uint> {
@ -3212,34 +3214,38 @@ pub fn param_tys_in_type(ty: t) -> Vec<param_ty> {
rslt rslt
} }
pub fn ty_sort_str(cx: &ctxt, t: t) -> ~str { pub fn ty_sort_str(cx: &ctxt, t: t) -> StrBuf {
match get(t).sty { match get(t).sty {
ty_nil | ty_bot | ty_bool | ty_char | ty_int(_) | ty_nil | ty_bot | ty_bool | ty_char | ty_int(_) |
ty_uint(_) | ty_float(_) | ty_str => { ty_uint(_) | ty_float(_) | ty_str => {
::util::ppaux::ty_to_str(cx, t) ::util::ppaux::ty_to_str(cx, t)
} }
ty_enum(id, _) => format!("enum {}", item_path_str(cx, id)), ty_enum(id, _) => format_strbuf!("enum {}", item_path_str(cx, id)),
ty_box(_) => "@-ptr".to_owned(), ty_box(_) => "@-ptr".to_strbuf(),
ty_uniq(_) => "box".to_owned(), ty_uniq(_) => "box".to_strbuf(),
ty_vec(_, _) => "vector".to_owned(), ty_vec(_, _) => "vector".to_strbuf(),
ty_ptr(_) => "*-ptr".to_owned(), ty_ptr(_) => "*-ptr".to_strbuf(),
ty_rptr(_, _) => "&-ptr".to_owned(), ty_rptr(_, _) => "&-ptr".to_strbuf(),
ty_bare_fn(_) => "extern fn".to_owned(), ty_bare_fn(_) => "extern fn".to_strbuf(),
ty_closure(_) => "fn".to_owned(), ty_closure(_) => "fn".to_strbuf(),
ty_trait(ref inner) => format!("trait {}", item_path_str(cx, inner.def_id)), ty_trait(ref inner) => {
ty_struct(id, _) => format!("struct {}", item_path_str(cx, id)), format_strbuf!("trait {}", item_path_str(cx, inner.def_id))
ty_tup(_) => "tuple".to_owned(), }
ty_infer(TyVar(_)) => "inferred type".to_owned(), ty_struct(id, _) => {
ty_infer(IntVar(_)) => "integral variable".to_owned(), format_strbuf!("struct {}", item_path_str(cx, id))
ty_infer(FloatVar(_)) => "floating-point variable".to_owned(), }
ty_param(_) => "type parameter".to_owned(), ty_tup(_) => "tuple".to_strbuf(),
ty_self(_) => "self".to_owned(), ty_infer(TyVar(_)) => "inferred type".to_strbuf(),
ty_err => "type error".to_owned() ty_infer(IntVar(_)) => "integral variable".to_strbuf(),
ty_infer(FloatVar(_)) => "floating-point variable".to_strbuf(),
ty_param(_) => "type parameter".to_strbuf(),
ty_self(_) => "self".to_strbuf(),
ty_err => "type error".to_strbuf(),
} }
} }
pub fn type_err_to_str(cx: &ctxt, err: &type_err) -> ~str { pub fn type_err_to_str(cx: &ctxt, err: &type_err) -> StrBuf {
/*! /*!
* *
* Explains the source of a type err in a short, * Explains the source of a type err in a short,
@ -3249,126 +3255,145 @@ pub fn type_err_to_str(cx: &ctxt, err: &type_err) -> ~str {
* to present additional details, particularly when * to present additional details, particularly when
* it comes to lifetime-related errors. */ * it comes to lifetime-related errors. */
fn tstore_to_closure(s: &TraitStore) -> ~str { fn tstore_to_closure(s: &TraitStore) -> StrBuf {
match s { match s {
&UniqTraitStore => "proc".to_owned(), &UniqTraitStore => "proc".to_strbuf(),
&RegionTraitStore(..) => "closure".to_owned() &RegionTraitStore(..) => "closure".to_strbuf()
} }
} }
match *err { match *err {
terr_mismatch => "types differ".to_owned(), terr_mismatch => "types differ".to_strbuf(),
terr_fn_style_mismatch(values) => { terr_fn_style_mismatch(values) => {
format!("expected {} fn but found {} fn", format_strbuf!("expected {} fn but found {} fn",
values.expected.to_str(), values.found.to_str()) values.expected.to_str(),
values.found.to_str())
} }
terr_abi_mismatch(values) => { terr_abi_mismatch(values) => {
format!("expected {} fn but found {} fn", format_strbuf!("expected {} fn but found {} fn",
values.expected.to_str(), values.found.to_str()) values.expected.to_str(),
values.found.to_str())
} }
terr_onceness_mismatch(values) => { terr_onceness_mismatch(values) => {
format!("expected {} fn but found {} fn", format_strbuf!("expected {} fn but found {} fn",
values.expected.to_str(), values.found.to_str()) values.expected.to_str(),
values.found.to_str())
} }
terr_sigil_mismatch(values) => { terr_sigil_mismatch(values) => {
format!("expected {}, found {}", format_strbuf!("expected {}, found {}",
tstore_to_closure(&values.expected), tstore_to_closure(&values.expected),
tstore_to_closure(&values.found)) tstore_to_closure(&values.found))
} }
terr_mutability => "values differ in mutability".to_owned(), terr_mutability => "values differ in mutability".to_strbuf(),
terr_box_mutability => "boxed values differ in mutability".to_owned(), terr_box_mutability => {
terr_vec_mutability => "vectors differ in mutability".to_owned(), "boxed values differ in mutability".to_strbuf()
terr_ptr_mutability => "pointers differ in mutability".to_owned(), }
terr_ref_mutability => "references differ in mutability".to_owned(), terr_vec_mutability => "vectors differ in mutability".to_strbuf(),
terr_ptr_mutability => "pointers differ in mutability".to_strbuf(),
terr_ref_mutability => "references differ in mutability".to_strbuf(),
terr_ty_param_size(values) => { terr_ty_param_size(values) => {
format!("expected a type with {} type params \ format_strbuf!("expected a type with {} type params \
but found one with {} type params", but found one with {} type params",
values.expected, values.found) values.expected,
values.found)
} }
terr_tuple_size(values) => { terr_tuple_size(values) => {
format!("expected a tuple with {} elements \ format_strbuf!("expected a tuple with {} elements \
but found one with {} elements", but found one with {} elements",
values.expected, values.found) values.expected,
values.found)
} }
terr_record_size(values) => { terr_record_size(values) => {
format!("expected a record with {} fields \ format_strbuf!("expected a record with {} fields \
but found one with {} fields", but found one with {} fields",
values.expected, values.found) values.expected,
values.found)
} }
terr_record_mutability => { terr_record_mutability => {
"record elements differ in mutability".to_owned() "record elements differ in mutability".to_strbuf()
} }
terr_record_fields(values) => { terr_record_fields(values) => {
format!("expected a record with field `{}` but found one with field \ format_strbuf!("expected a record with field `{}` but found one \
`{}`", with field `{}`",
token::get_ident(values.expected), token::get_ident(values.expected),
token::get_ident(values.found)) token::get_ident(values.found))
} }
terr_arg_count => "incorrect number of function parameters".to_owned(), terr_arg_count => {
"incorrect number of function parameters".to_strbuf()
}
terr_regions_does_not_outlive(..) => { terr_regions_does_not_outlive(..) => {
format!("lifetime mismatch") "lifetime mismatch".to_strbuf()
} }
terr_regions_not_same(..) => { terr_regions_not_same(..) => {
format!("lifetimes are not the same") "lifetimes are not the same".to_strbuf()
} }
terr_regions_no_overlap(..) => { terr_regions_no_overlap(..) => {
format!("lifetimes do not intersect") "lifetimes do not intersect".to_strbuf()
} }
terr_regions_insufficiently_polymorphic(br, _) => { terr_regions_insufficiently_polymorphic(br, _) => {
format!("expected bound lifetime parameter {}, \ format_strbuf!("expected bound lifetime parameter {}, \
but found concrete lifetime", but found concrete lifetime",
bound_region_ptr_to_str(cx, br)) bound_region_ptr_to_str(cx, br))
} }
terr_regions_overly_polymorphic(br, _) => { terr_regions_overly_polymorphic(br, _) => {
format!("expected concrete lifetime, \ format_strbuf!("expected concrete lifetime, \
but found bound lifetime parameter {}", but found bound lifetime parameter {}",
bound_region_ptr_to_str(cx, br)) bound_region_ptr_to_str(cx, br))
} }
terr_trait_stores_differ(_, ref values) => { terr_trait_stores_differ(_, ref values) => {
format!("trait storage differs: expected `{}` but found `{}`", format_strbuf!("trait storage differs: expected `{}` but found \
`{}`",
trait_store_to_str(cx, (*values).expected), trait_store_to_str(cx, (*values).expected),
trait_store_to_str(cx, (*values).found)) trait_store_to_str(cx, (*values).found))
} }
terr_sorts(values) => { terr_sorts(values) => {
format!("expected {} but found {}", format_strbuf!("expected {} but found {}",
ty_sort_str(cx, values.expected), ty_sort_str(cx, values.expected),
ty_sort_str(cx, values.found)) ty_sort_str(cx, values.found))
} }
terr_traits(values) => { terr_traits(values) => {
format!("expected trait `{}` but found trait `{}`", format_strbuf!("expected trait `{}` but found trait `{}`",
item_path_str(cx, values.expected), item_path_str(cx, values.expected),
item_path_str(cx, values.found)) item_path_str(cx, values.found))
} }
terr_builtin_bounds(values) => { terr_builtin_bounds(values) => {
if values.expected.is_empty() { if values.expected.is_empty() {
format!("expected no bounds but found `{}`", format_strbuf!("expected no bounds but found `{}`",
values.found.user_string(cx)) values.found.user_string(cx))
} else if values.found.is_empty() { } else if values.found.is_empty() {
format!("expected bounds `{}` but found no bounds", format_strbuf!("expected bounds `{}` but found no bounds",
values.expected.user_string(cx)) values.expected.user_string(cx))
} else { } else {
format!("expected bounds `{}` but found bounds `{}`", format_strbuf!("expected bounds `{}` but found bounds `{}`",
values.expected.user_string(cx), values.expected.user_string(cx),
values.found.user_string(cx)) values.found.user_string(cx))
} }
} }
terr_integer_as_char => { terr_integer_as_char => {
format!("expected an integral type but found `char`") "expected an integral type but found `char`".to_strbuf()
} }
terr_int_mismatch(ref values) => { terr_int_mismatch(ref values) => {
format!("expected `{}` but found `{}`", format_strbuf!("expected `{}` but found `{}`",
values.expected.to_str(), values.expected.to_str(),
values.found.to_str()) values.found.to_str())
} }
terr_float_mismatch(ref values) => { terr_float_mismatch(ref values) => {
format!("expected `{}` but found `{}`", format_strbuf!("expected `{}` but found `{}`",
values.expected.to_str(), values.expected.to_str(),
values.found.to_str()) values.found.to_str())
} }
terr_variadic_mismatch(ref values) => { terr_variadic_mismatch(ref values) => {
format!("expected {} fn but found {} function", format_strbuf!("expected {} fn but found {} function",
if values.expected { "variadic" } else { "non-variadic" }, if values.expected {
if values.found { "variadic" } else { "non-variadic" }) "variadic"
} else {
"non-variadic"
},
if values.found {
"variadic"
} else {
"non-variadic"
})
} }
} }
} }
@ -3665,8 +3690,8 @@ pub fn substd_enum_variants(cx: &ctxt,
}).collect() }).collect()
} }
pub fn item_path_str(cx: &ctxt, id: ast::DefId) -> ~str { pub fn item_path_str(cx: &ctxt, id: ast::DefId) -> StrBuf {
with_path(cx, id, |path| ast_map::path_to_str(path)).to_owned() with_path(cx, id, |path| ast_map::path_to_str(path)).to_strbuf()
} }
pub enum DtorKind { pub enum DtorKind {
@ -4231,14 +4256,14 @@ pub fn each_bound_trait_and_supertraits(tcx: &ctxt,
return true; return true;
} }
pub fn get_tydesc_ty(tcx: &ctxt) -> Result<t, ~str> { pub fn get_tydesc_ty(tcx: &ctxt) -> Result<t, StrBuf> {
tcx.lang_items.require(TyDescStructLangItem).map(|tydesc_lang_item| { tcx.lang_items.require(TyDescStructLangItem).map(|tydesc_lang_item| {
tcx.intrinsic_defs.borrow().find_copy(&tydesc_lang_item) tcx.intrinsic_defs.borrow().find_copy(&tydesc_lang_item)
.expect("Failed to resolve TyDesc") .expect("Failed to resolve TyDesc")
}) })
} }
pub fn get_opaque_ty(tcx: &ctxt) -> Result<t, ~str> { pub fn get_opaque_ty(tcx: &ctxt) -> Result<t, StrBuf> {
tcx.lang_items.require(OpaqueStructLangItem).map(|opaque_lang_item| { tcx.lang_items.require(OpaqueStructLangItem).map(|opaque_lang_item| {
tcx.intrinsic_defs.borrow().find_copy(&opaque_lang_item) tcx.intrinsic_defs.borrow().find_copy(&opaque_lang_item)
.expect("Failed to resolve Opaque") .expect("Failed to resolve Opaque")
@ -4246,7 +4271,7 @@ pub fn get_opaque_ty(tcx: &ctxt) -> Result<t, ~str> {
} }
pub fn visitor_object_ty(tcx: &ctxt, pub fn visitor_object_ty(tcx: &ctxt,
region: ty::Region) -> Result<(Rc<TraitRef>, t), ~str> { region: ty::Region) -> Result<(Rc<TraitRef>, t), StrBuf> {
let trait_lang_item = match tcx.lang_items.require(TyVisitorTraitLangItem) { let trait_lang_item = match tcx.lang_items.require(TyVisitorTraitLangItem) {
Ok(id) => id, Ok(id) => id,
Err(s) => { return Err(s); } Err(s) => { return Err(s); }
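
In the ty.rs hunks above, diagnostic text is now built as StrBuf end to end: lang-item lookups return Result<t, StrBuf>, literal match arms call to_strbuf(), and formatted arms use format_strbuf!. A small illustrative sketch of that arm-by-arm shape on a made-up error type (SizeErr is not part of the patch):

    // Hypothetical error type; the match mirrors the type_err_to_str rewrite.
    enum SizeErr {
        Mismatch,
        WrongLen(uint, uint),
    }

    fn size_err_to_str(err: &SizeErr) -> StrBuf {
        match *err {
            Mismatch => "sizes differ".to_strbuf(),
            WrongLen(expected, found) => {
                format_strbuf!("expected {} elements but found {}",
                               expected,
                               found)
            }
        }
    }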

View file

@ -166,10 +166,14 @@ pub fn check_pat_variant(pcx: &pat_ctxt, pat: &ast::Pat, path: &ast::Path,
// See [Note-Type-error-reporting] in middle/typeck/infer/mod.rs // See [Note-Type-error-reporting] in middle/typeck/infer/mod.rs
fcx.infcx().type_error_message_str_with_expected(pat.span, fcx.infcx().type_error_message_str_with_expected(pat.span,
|expected, actual| { |expected, actual| {
expected.map_or("".to_owned(), |e| { expected.map_or("".to_strbuf(), |e| {
format!("mismatched types: expected `{}` but found {}", format_strbuf!("mismatched types: expected `{}` but \
e, actual)})}, found {}",
Some(expected), "a structure pattern".to_owned(), e,
actual)
})},
Some(expected),
"a structure pattern".to_strbuf(),
None); None);
fcx.write_error(pat.id); fcx.write_error(pat.id);
kind_name = "[error]"; kind_name = "[error]";
@ -217,10 +221,16 @@ pub fn check_pat_variant(pcx: &pat_ctxt, pat: &ast::Pat, path: &ast::Path,
// See [Note-Type-error-reporting] in middle/typeck/infer/mod.rs // See [Note-Type-error-reporting] in middle/typeck/infer/mod.rs
fcx.infcx().type_error_message_str_with_expected(pat.span, fcx.infcx().type_error_message_str_with_expected(pat.span,
|expected, actual| { |expected, actual| {
expected.map_or("".to_owned(), |e| { expected.map_or("".to_strbuf(),
format!("mismatched types: expected `{}` but found {}", |e| {
e, actual)})}, format_strbuf!("mismatched types: expected `{}` but \
Some(expected), "an enum or structure pattern".to_owned(), found {}",
e,
actual)
})
},
Some(expected),
"an enum or structure pattern".to_strbuf(),
None); None);
fcx.write_error(pat.id); fcx.write_error(pat.id);
kind_name = "[error]"; kind_name = "[error]";
@ -446,7 +456,7 @@ pub fn check_pat(pcx: &pat_ctxt, pat: &ast::Pat, expected: ty::t) {
debug!("pat_range ending type: {:?}", e_ty); debug!("pat_range ending type: {:?}", e_ty);
if !require_same_types( if !require_same_types(
tcx, Some(fcx.infcx()), false, pat.span, b_ty, e_ty, tcx, Some(fcx.infcx()), false, pat.span, b_ty, e_ty,
|| "mismatched types in range".to_owned()) || "mismatched types in range".to_strbuf())
{ {
// no-op // no-op
} else if !ty::type_is_numeric(b_ty) && !ty::type_is_char(b_ty) { } else if !ty::type_is_numeric(b_ty) && !ty::type_is_char(b_ty) {
@ -540,10 +550,15 @@ pub fn check_pat(pcx: &pat_ctxt, pat: &ast::Pat, expected: ty::t) {
// See [Note-Type-error-reporting] in middle/typeck/infer/mod.rs // See [Note-Type-error-reporting] in middle/typeck/infer/mod.rs
fcx.infcx().type_error_message_str_with_expected(pat.span, fcx.infcx().type_error_message_str_with_expected(pat.span,
|expected, actual| { |expected, actual| {
expected.map_or("".to_owned(), |e| { expected.map_or("".to_strbuf(),
format!("mismatched types: expected `{}` but found {}", |e| {
e, actual)})}, format_strbuf!("mismatched types: expected \
Some(expected), "a structure pattern".to_owned(), `{}` but found {}",
e,
actual)
})},
Some(expected),
"a structure pattern".to_strbuf(),
None); None);
match tcx.def_map.borrow().find(&pat.id) { match tcx.def_map.borrow().find(&pat.id) {
Some(&ast::DefStruct(supplied_def_id)) => { Some(&ast::DefStruct(supplied_def_id)) => {
@ -590,17 +605,28 @@ pub fn check_pat(pcx: &pat_ctxt, pat: &ast::Pat, expected: ty::t) {
} }
// use terr_tuple_size if both types are tuples // use terr_tuple_size if both types are tuples
let type_error = match *s { let type_error = match *s {
ty::ty_tup(ref ex_elts) => ty::ty_tup(ref ex_elts) => {
ty::terr_tuple_size(ty::expected_found{expected: ex_elts.len(), ty::terr_tuple_size(ty::expected_found {
found: e_count}), expected: ex_elts.len(),
found: e_count
})
}
_ => ty::terr_mismatch _ => ty::terr_mismatch
}; };
// See [Note-Type-error-reporting] in middle/typeck/infer/mod.rs // See [Note-Type-error-reporting] in middle/typeck/infer/mod.rs
fcx.infcx().type_error_message_str_with_expected(pat.span, |expected, actual| { fcx.infcx().type_error_message_str_with_expected(pat.span,
expected.map_or("".to_owned(), |e| { |expected,
format!("mismatched types: expected `{}` but found {}", actual| {
e, actual)})}, expected.map_or("".to_strbuf(), |e| {
Some(expected), "tuple".to_owned(), Some(&type_error)); format_strbuf!("mismatched types: expected `{}` \
but found {}",
e,
actual)
}
)},
Some(expected),
"tuple".to_strbuf(),
Some(&type_error));
fcx.write_error(pat.id); fcx.write_error(pat.id);
} }
} }
@ -630,11 +656,16 @@ pub fn check_pat(pcx: &pat_ctxt, pat: &ast::Pat, expected: ty::t) {
fcx.infcx().type_error_message_str_with_expected( fcx.infcx().type_error_message_str_with_expected(
pat.span, pat.span,
|expected, actual| { |expected, actual| {
expected.map_or("".to_owned(), |e| { expected.map_or("".to_strbuf(),
format!("mismatched types: expected `{}` but found {}", |e| {
e, actual)})}, format_strbuf!("mismatched types: expected `{}` but \
found {}",
e,
actual)
})
},
Some(expected), Some(expected),
"a vector pattern".to_owned(), "a vector pattern".to_strbuf(),
None); None);
fcx.write_error(pat.id); fcx.write_error(pat.id);
}; };
@ -648,7 +679,7 @@ pub fn check_pat(pcx: &pat_ctxt, pat: &ast::Pat, expected: ty::t) {
fcx.type_error_message(pat.span, fcx.type_error_message(pat.span,
|_| { |_| {
"unique vector patterns are no \ "unique vector patterns are no \
longer supported".to_owned() longer supported".to_strbuf()
}, },
expected, expected,
None); None);
@ -716,13 +747,17 @@ pub fn check_pointer_pat(pcx: &pat_ctxt,
fcx.infcx().type_error_message_str_with_expected( fcx.infcx().type_error_message_str_with_expected(
span, span,
|expected, actual| { |expected, actual| {
expected.map_or("".to_owned(), |e| { expected.map_or("".to_strbuf(), |e| {
format!("mismatched types: expected `{}` but found {}", format_strbuf!("mismatched types: expected `{}` but \
e, actual)})}, found {}",
e,
actual)
})
},
Some(expected), Some(expected),
format!("{} pattern", match pointer_kind { format_strbuf!("{} pattern", match pointer_kind {
Send => "a box", Send => "a box",
Borrowed => "an `&`-pointer" Borrowed => "an `&`-pointer",
}), }),
None); None);
fcx.write_error(pat_id); fcx.write_error(pat_id);
@ -731,4 +766,8 @@ pub fn check_pointer_pat(pcx: &pat_ctxt,
} }
#[deriving(Eq)] #[deriving(Eq)]
pub enum PointerKind { Send, Borrowed } pub enum PointerKind {
Send,
Borrowed,
}
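
The pattern-checking hunks above keep reworking the same diagnostic closure: an Option of the expected type is mapped to either an empty StrBuf or a formatted message. A hedged, standalone sketch of that closure's logic; mismatch_msg is an illustrative name, and the Option<StrBuf> parameter stands in for the value the real callback receives.

    // Mirrors the expected.map_or(...) closure passed to
    // type_error_message_str_with_expected in the hunks above.
    fn mismatch_msg(expected: Option<StrBuf>, actual: &str) -> StrBuf {
        expected.map_or("".to_strbuf(), |e| {
            format_strbuf!("mismatched types: expected `{}` but found {}",
                           e,
                           actual)
        })
    }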

View file

@ -1498,11 +1498,11 @@ impl<'a> LookupContext<'a> {
self.fcx.tcx() self.fcx.tcx()
} }
fn ty_to_str(&self, t: ty::t) -> ~str { fn ty_to_str(&self, t: ty::t) -> StrBuf {
self.fcx.infcx().ty_to_str(t) self.fcx.infcx().ty_to_str(t)
} }
fn did_to_str(&self, did: DefId) -> ~str { fn did_to_str(&self, did: DefId) -> StrBuf {
ty::item_path_str(self.tcx(), did) ty::item_path_str(self.tcx(), did)
} }
@ -1512,8 +1512,9 @@ impl<'a> LookupContext<'a> {
} }
impl Repr for Candidate { impl Repr for Candidate {
fn repr(&self, tcx: &ty::ctxt) -> ~str { fn repr(&self, tcx: &ty::ctxt) -> StrBuf {
format!("Candidate(rcvr_ty={}, rcvr_substs={}, method_ty={}, origin={:?})", format_strbuf!("Candidate(rcvr_ty={}, rcvr_substs={}, method_ty={}, \
origin={:?})",
self.rcvr_match_condition.repr(tcx), self.rcvr_match_condition.repr(tcx),
self.rcvr_substs.repr(tcx), self.rcvr_substs.repr(tcx),
self.method_ty.repr(tcx), self.method_ty.repr(tcx),
@ -1522,13 +1523,13 @@ impl Repr for Candidate {
} }
impl Repr for RcvrMatchCondition { impl Repr for RcvrMatchCondition {
fn repr(&self, tcx: &ty::ctxt) -> ~str { fn repr(&self, tcx: &ty::ctxt) -> StrBuf {
match *self { match *self {
RcvrMatchesIfObject(d) => { RcvrMatchesIfObject(d) => {
format!("RcvrMatchesIfObject({})", d.repr(tcx)) format_strbuf!("RcvrMatchesIfObject({})", d.repr(tcx))
} }
RcvrMatchesIfSubtype(t) => { RcvrMatchesIfSubtype(t) => {
format!("RcvrMatchesIfSubtype({})", t.repr(tcx)) format_strbuf!("RcvrMatchesIfSubtype({})", t.repr(tcx))
} }
} }
} }

View file

@ -468,7 +468,7 @@ fn check_fn<'a>(ccx: &'a CrateCtxt<'a>,
let ret_ty = fn_sig.output; let ret_ty = fn_sig.output;
debug!("check_fn(arg_tys={:?}, ret_ty={:?})", debug!("check_fn(arg_tys={:?}, ret_ty={:?})",
arg_tys.iter().map(|&a| ppaux::ty_to_str(tcx, a)).collect::<Vec<~str>>(), arg_tys.iter().map(|&a| ppaux::ty_to_str(tcx, a)).collect::<Vec<StrBuf>>(),
ppaux::ty_to_str(tcx, ret_ty)); ppaux::ty_to_str(tcx, ret_ty));
// Create the function context. This is either derived from scratch or, // Create the function context. This is either derived from scratch or,
@ -1089,8 +1089,8 @@ impl<'a> RegionScope for infer::InferCtxt<'a> {
} }
impl<'a> FnCtxt<'a> { impl<'a> FnCtxt<'a> {
pub fn tag(&self) -> ~str { pub fn tag(&self) -> StrBuf {
format!("{}", self as *FnCtxt) format_strbuf!("{}", self as *FnCtxt)
} }
pub fn local_ty(&self, span: Span, nid: ast::NodeId) -> ty::t { pub fn local_ty(&self, span: Span, nid: ast::NodeId) -> ty::t {
@ -1164,7 +1164,7 @@ impl<'a> FnCtxt<'a> {
ast_ty_to_ty(self, self.infcx(), ast_t) ast_ty_to_ty(self, self.infcx(), ast_t)
} }
pub fn pat_to_str(&self, pat: &ast::Pat) -> ~str { pub fn pat_to_str(&self, pat: &ast::Pat) -> StrBuf {
pat.repr(self.tcx()) pat.repr(self.tcx())
} }
@ -1271,7 +1271,7 @@ impl<'a> FnCtxt<'a> {
pub fn type_error_message(&self, pub fn type_error_message(&self,
sp: Span, sp: Span,
mk_msg: |~str| -> ~str, mk_msg: |StrBuf| -> StrBuf,
actual_ty: ty::t, actual_ty: ty::t,
err: Option<&ty::type_err>) { err: Option<&ty::type_err>) {
self.infcx().type_error_message(sp, mk_msg, actual_ty, err); self.infcx().type_error_message(sp, mk_msg, actual_ty, err);
@ -1787,7 +1787,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
}; };
debug!("check_argument_types: formal_tys={:?}", debug!("check_argument_types: formal_tys={:?}",
formal_tys.iter().map(|t| fcx.infcx().ty_to_str(*t)).collect::<Vec<~str>>()); formal_tys.iter().map(|t| fcx.infcx().ty_to_str(*t)).collect::<Vec<StrBuf>>());
// Check the arguments. // Check the arguments.
// We do this in a pretty awful way: first we typecheck any arguments // We do this in a pretty awful way: first we typecheck any arguments
@ -1863,18 +1863,24 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
match ty::get(arg_ty).sty { match ty::get(arg_ty).sty {
ty::ty_float(ast::TyF32) => { ty::ty_float(ast::TyF32) => {
fcx.type_error_message(arg.span, fcx.type_error_message(arg.span,
|t| format!("can't pass an {} to variadic function, \ |t| {
cast to c_double", t), arg_ty, None); format_strbuf!("can't pass an {} to variadic \
function, cast to c_double", t)
}, arg_ty, None);
} }
ty::ty_int(ast::TyI8) | ty::ty_int(ast::TyI16) | ty::ty_bool => { ty::ty_int(ast::TyI8) | ty::ty_int(ast::TyI16) | ty::ty_bool => {
fcx.type_error_message(arg.span, fcx.type_error_message(arg.span, |t| {
|t| format!("can't pass {} to variadic function, cast to c_int", format_strbuf!("can't pass {} to variadic \
t), arg_ty, None); function, cast to c_int",
t)
}, arg_ty, None);
} }
ty::ty_uint(ast::TyU8) | ty::ty_uint(ast::TyU16) => { ty::ty_uint(ast::TyU8) | ty::ty_uint(ast::TyU16) => {
fcx.type_error_message(arg.span, fcx.type_error_message(arg.span, |t| {
|t| format!("can't pass {} to variadic function, cast to c_uint", format_strbuf!("can't pass {} to variadic \
t), arg_ty, None); function, cast to c_uint",
t)
}, arg_ty, None);
} }
_ => {} _ => {}
} }
@ -1920,8 +1926,8 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
ty::ty_closure(box ty::ClosureTy {sig: ref sig, ..}) => sig, ty::ty_closure(box ty::ClosureTy {sig: ref sig, ..}) => sig,
_ => { _ => {
fcx.type_error_message(call_expr.span, |actual| { fcx.type_error_message(call_expr.span, |actual| {
format!("expected function but \ format_strbuf!("expected function but found `{}`", actual)
found `{}`", actual) }, fn_ty, None); }, fn_ty, None);
&error_fn_sig &error_fn_sig
} }
}; };
@ -1974,8 +1980,10 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
fcx.type_error_message(method_name.span, fcx.type_error_message(method_name.span,
|actual| { |actual| {
format!("type `{}` does not implement any method in scope named `{}`", format_strbuf!("type `{}` does not implement any \
actual, token::get_ident(method_name.node)) method in scope named `{}`",
actual,
token::get_ident(method_name.node))
}, },
expr_t, expr_t,
None); None);
@ -1984,12 +1992,16 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
fcx.write_error(expr.id); fcx.write_error(expr.id);
// Check for potential static matches (missing self parameters) // Check for potential static matches (missing self parameters)
method::lookup(fcx, expr, rcvr, method::lookup(fcx,
expr,
rcvr,
method_name.node.name, method_name.node.name,
expr_t, tps.as_slice(), expr_t,
tps.as_slice(),
DontDerefArgs, DontDerefArgs,
CheckTraitsAndInherentMethods, CheckTraitsAndInherentMethods,
DontAutoderefReceiver, ReportStaticMethods); DontAutoderefReceiver,
ReportStaticMethods);
ty::mk_err() ty::mk_err()
} }
@ -2123,9 +2135,12 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
if ty::type_is_fp(ty::simd_type(tcx, lhs_t)) { if ty::type_is_fp(ty::simd_type(tcx, lhs_t)) {
fcx.type_error_message(expr.span, fcx.type_error_message(expr.span,
|actual| { |actual| {
format!("binary comparison operation `{}` not supported \ format_strbuf!("binary comparison \
for floating point SIMD vector `{}`", operation `{}` not \
ast_util::binop_to_str(op), actual) supported for floating \
point SIMD vector `{}`",
ast_util::binop_to_str(op),
actual)
}, },
lhs_t, lhs_t,
None None
@ -2150,12 +2165,15 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
// type // type
fcx.write_error(expr.id); fcx.write_error(expr.id);
fcx.write_error(rhs.id); fcx.write_error(rhs.id);
fcx.type_error_message(expr.span, |actual| { fcx.type_error_message(expr.span,
format!("binary operation `{}` cannot be applied \ |actual| {
format_strbuf!("binary operation `{}` cannot be applied \
to type `{}`", to type `{}`",
ast_util::binop_to_str(op), actual)}, ast_util::binop_to_str(op),
lhs_t, None) actual)
},
lhs_t,
None)
} }
// Check for overloaded operators if not an assignment. // Check for overloaded operators if not an assignment.
@ -2164,8 +2182,10 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
} else { } else {
fcx.type_error_message(expr.span, fcx.type_error_message(expr.span,
|actual| { |actual| {
format!("binary assignment operation \ format_strbuf!("binary assignment \
`{}=` cannot be applied to type `{}`", operation `{}=` \
cannot be applied to \
type `{}`",
ast_util::binop_to_str(op), ast_util::binop_to_str(op),
actual) actual)
}, },
@ -2214,8 +2234,10 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
lookup_op_method(fcx, ex, lhs_resolved_t, token::intern(name), lookup_op_method(fcx, ex, lhs_resolved_t, token::intern(name),
trait_did, [lhs_expr, rhs], DontAutoderefReceiver, || { trait_did, [lhs_expr, rhs], DontAutoderefReceiver, || {
fcx.type_error_message(ex.span, |actual| { fcx.type_error_message(ex.span, |actual| {
format!("binary operation `{}` cannot be applied to type `{}`", format_strbuf!("binary operation `{}` cannot be applied to \
ast_util::binop_to_str(op), actual) type `{}`",
ast_util::binop_to_str(op),
actual)
}, lhs_resolved_t, None) }, lhs_resolved_t, None)
}) })
} }
@ -2230,7 +2252,10 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
lookup_op_method(fcx, ex, rhs_t, token::intern(mname), lookup_op_method(fcx, ex, rhs_t, token::intern(mname),
trait_did, [rhs_expr], DontAutoderefReceiver, || { trait_did, [rhs_expr], DontAutoderefReceiver, || {
fcx.type_error_message(ex.span, |actual| { fcx.type_error_message(ex.span, |actual| {
format!("cannot apply unary operator `{}` to type `{}`", op_str, actual) format_strbuf!("cannot apply unary operator `{}` to type \
`{}`",
op_str,
actual)
}, rhs_t, None); }, rhs_t, None);
}) })
} }
@ -2389,8 +2414,10 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
fcx.type_error_message( fcx.type_error_message(
expr.span, expr.span,
|actual| { |actual| {
format!("attempted to take value of method `{}` on type `{}`", format_strbuf!("attempted to take value of method \
token::get_name(field), actual) `{}` on type `{}`",
token::get_name(field),
actual)
}, },
expr_t, None); expr_t, None);
@ -2402,9 +2429,11 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
fcx.type_error_message( fcx.type_error_message(
expr.span, expr.span,
|actual| { |actual| {
format!("attempted access of field `{}` on type `{}`, \ format_strbuf!("attempted access of field `{}` on \
but no field with that name was found", type `{}`, but no field with that \
token::get_name(field), actual) name was found",
token::get_name(field),
actual)
}, },
expr_t, None); expr_t, None);
} }
@ -2442,9 +2471,13 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
fcx.type_error_message( fcx.type_error_message(
field.ident.span, field.ident.span,
|actual| { |actual| {
format!("structure `{}` has no field named `{}`", format_strbuf!("structure `{}` has no field named \
actual, token::get_ident(field.ident.node)) `{}`",
}, struct_ty, None); actual,
token::get_ident(field.ident.node))
},
struct_ty,
None);
error_happened = true; error_happened = true;
} }
Some((_, true)) => { Some((_, true)) => {
@ -2701,7 +2734,8 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
.require(GcLangItem) { .require(GcLangItem) {
Ok(id) => id, Ok(id) => id,
Err(msg) => { Err(msg) => {
tcx.sess.span_err(expr.span, msg); tcx.sess.span_err(expr.span,
msg.as_slice());
ast::DefId { ast::DefId {
krate: ast::CRATE_NODE_ID, krate: ast::CRATE_NODE_ID,
node: ast::DUMMY_NODE_ID, node: ast::DUMMY_NODE_ID,
@ -2825,7 +2859,9 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
no longer be dereferenced"); no longer be dereferenced");
} else { } else {
fcx.type_error_message(expr.span, |actual| { fcx.type_error_message(expr.span, |actual| {
format!("type `{}` cannot be dereferenced", actual) format_strbuf!("type `{}` cannot be \
dereferenced",
actual)
}, oprnd_t, None); }, oprnd_t, None);
} }
ty::mk_err() ty::mk_err()
@ -3066,12 +3102,14 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
_ => { _ => {
if ty::type_is_nil(t_e) { if ty::type_is_nil(t_e) {
fcx.type_error_message(expr.span, |actual| { fcx.type_error_message(expr.span, |actual| {
format!("cast from nil: `{}` as `{}`", actual, format_strbuf!("cast from nil: `{}` as `{}`",
actual,
fcx.infcx().ty_to_str(t_1)) fcx.infcx().ty_to_str(t_1))
}, t_e, None); }, t_e, None);
} else if ty::type_is_nil(t_1) { } else if ty::type_is_nil(t_1) {
fcx.type_error_message(expr.span, |actual| { fcx.type_error_message(expr.span, |actual| {
format!("cast to nil: `{}` as `{}`", actual, format_strbuf!("cast to nil: `{}` as `{}`",
actual,
fcx.infcx().ty_to_str(t_1)) fcx.infcx().ty_to_str(t_1))
}, t_e, None); }, t_e, None);
} }
@ -3092,12 +3130,17 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
let te = fcx.infcx().resolve_type_vars_if_possible(te); let te = fcx.infcx().resolve_type_vars_if_possible(te);
if ty::get(te).sty != ty::ty_uint(ast::TyU8) { if ty::get(te).sty != ty::ty_uint(ast::TyU8) {
fcx.type_error_message(expr.span, |actual| { fcx.type_error_message(expr.span, |actual| {
format!("only `u8` can be cast as `char`, not `{}`", actual) format_strbuf!("only `u8` can be cast as \
`char`, not `{}`",
actual)
}, t_e, None); }, t_e, None);
} }
} else if ty::get(t1).sty == ty::ty_bool { } else if ty::get(t1).sty == ty::ty_bool {
fcx.tcx().sess.span_err(expr.span, fcx.tcx()
"cannot cast as `bool`, compare with zero instead"); .sess
.span_err(expr.span,
"cannot cast as `bool`, compare with \
zero instead");
} else if type_is_region_ptr(fcx, expr.span, t_e) && } else if type_is_region_ptr(fcx, expr.span, t_e) &&
type_is_unsafe_ptr(fcx, expr.span, t_1) { type_is_unsafe_ptr(fcx, expr.span, t_1) {
@ -3151,7 +3194,8 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
record the issue number in this comment. record the issue number in this comment.
*/ */
fcx.type_error_message(expr.span, |actual| { fcx.type_error_message(expr.span, |actual| {
format!("non-scalar cast: `{}` as `{}`", actual, format_strbuf!("non-scalar cast: `{}` as `{}`",
actual,
fcx.infcx().ty_to_str(t_1)) fcx.infcx().ty_to_str(t_1))
}, t_e, None); }, t_e, None);
} }
@ -3271,8 +3315,11 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
|| { || {
fcx.type_error_message(expr.span, fcx.type_error_message(expr.span,
|actual| { |actual| {
format!("cannot index a value \ format_strbuf!("cannot \
of type `{}`", index a \
value of \
type \
`{}`",
actual) actual)
}, },
base_t, base_t,
@ -3291,7 +3338,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
ppaux::ty_to_str(tcx, fcx.expr_ty(expr)), ppaux::ty_to_str(tcx, fcx.expr_ty(expr)),
match expected { match expected {
Some(t) => ppaux::ty_to_str(tcx, t), Some(t) => ppaux::ty_to_str(tcx, t),
_ => "empty".to_owned() _ => "empty".to_strbuf()
}); });
unifier(); unifier();
@ -3300,7 +3347,8 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
pub fn require_uint(fcx: &FnCtxt, sp: Span, t: ty::t) { pub fn require_uint(fcx: &FnCtxt, sp: Span, t: ty::t) {
if !type_is_uint(fcx, sp, t) { if !type_is_uint(fcx, sp, t) {
fcx.type_error_message(sp, |actual| { fcx.type_error_message(sp, |actual| {
format!("mismatched types: expected `uint` type but found `{}`", format_strbuf!("mismatched types: expected `uint` type but found \
`{}`",
actual) actual)
}, t, None); }, t, None);
} }
@ -3309,7 +3357,8 @@ pub fn require_uint(fcx: &FnCtxt, sp: Span, t: ty::t) {
pub fn require_integral(fcx: &FnCtxt, sp: Span, t: ty::t) { pub fn require_integral(fcx: &FnCtxt, sp: Span, t: ty::t) {
if !type_is_integral(fcx, sp, t) { if !type_is_integral(fcx, sp, t) {
fcx.type_error_message(sp, |actual| { fcx.type_error_message(sp, |actual| {
format!("mismatched types: expected integral type but found `{}`", format_strbuf!("mismatched types: expected integral type but \
found `{}`",
actual) actual)
}, t, None); }, t, None);
} }
@ -3439,8 +3488,13 @@ pub fn check_block_with_expected(fcx: &FnCtxt,
ast::StmtExpr(_, _) | ast::StmtSemi(_, _) => true, ast::StmtExpr(_, _) | ast::StmtSemi(_, _) => true,
_ => false _ => false
} { } {
fcx.ccx.tcx.sess.add_lint(UnreachableCode, s_id, s.span, fcx.ccx
"unreachable statement".to_owned()); .tcx
.sess
.add_lint(UnreachableCode,
s_id,
s.span,
"unreachable statement".to_strbuf());
warned = true; warned = true;
} }
if ty::type_is_bot(s_ty) { if ty::type_is_bot(s_ty) {
@ -3461,8 +3515,13 @@ pub fn check_block_with_expected(fcx: &FnCtxt,
}, },
Some(e) => { Some(e) => {
if any_bot && !warned { if any_bot && !warned {
fcx.ccx.tcx.sess.add_lint(UnreachableCode, e.id, e.span, fcx.ccx
"unreachable expression".to_owned()); .tcx
.sess
.add_lint(UnreachableCode,
e.id,
e.span,
"unreachable expression".to_strbuf());
} }
check_expr_with_opt_hint(fcx, e, expected); check_expr_with_opt_hint(fcx, e, expected);
let ety = fcx.expr_ty(e); let ety = fcx.expr_ty(e);
@ -3979,7 +4038,8 @@ pub fn structurally_resolved_type(fcx: &FnCtxt, sp: Span, tp: ty::t) -> ty::t {
Ok(t_s) if !ty::type_is_ty_var(t_s) => t_s, Ok(t_s) if !ty::type_is_ty_var(t_s) => t_s,
_ => { _ => {
fcx.type_error_message(sp, |_actual| { fcx.type_error_message(sp, |_actual| {
"the type of this value must be known in this context".to_owned() "the type of this value must be known in this \
context".to_strbuf()
}, tp, None); }, tp, None);
demand::suptype(fcx, sp, ty::mk_err(), tp); demand::suptype(fcx, sp, ty::mk_err(), tp);
tp tp
@ -4183,7 +4243,7 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &ast::ForeignItem) {
"get_tydesc" => { "get_tydesc" => {
let tydesc_ty = match ty::get_tydesc_ty(ccx.tcx) { let tydesc_ty = match ty::get_tydesc_ty(ccx.tcx) {
Ok(t) => t, Ok(t) => t,
Err(s) => { tcx.sess.span_fatal(it.span, s); } Err(s) => { tcx.sess.span_fatal(it.span, s.as_slice()); }
}; };
let td_ptr = ty::mk_ptr(ccx.tcx, ty::mt { let td_ptr = ty::mk_ptr(ccx.tcx, ty::mt {
ty: tydesc_ty, ty: tydesc_ty,
@ -4199,18 +4259,20 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &ast::ForeignItem) {
tps: Vec::new(), tps: Vec::new(),
regions: ty::NonerasedRegions(OwnedSlice::empty()) regions: ty::NonerasedRegions(OwnedSlice::empty())
}) ), }) ),
Err(msg) => { tcx.sess.span_fatal(it.span, msg); } Err(msg) => {
tcx.sess.span_fatal(it.span, msg.as_slice());
}
} }
}, },
"visit_tydesc" => { "visit_tydesc" => {
let tydesc_ty = match ty::get_tydesc_ty(ccx.tcx) { let tydesc_ty = match ty::get_tydesc_ty(ccx.tcx) {
Ok(t) => t, Ok(t) => t,
Err(s) => { tcx.sess.span_fatal(it.span, s); } Err(s) => { tcx.sess.span_fatal(it.span, s.as_slice()); }
}; };
let region = ty::ReLateBound(it.id, ty::BrAnon(0)); let region = ty::ReLateBound(it.id, ty::BrAnon(0));
let visitor_object_ty = match ty::visitor_object_ty(tcx, region) { let visitor_object_ty = match ty::visitor_object_ty(tcx, region) {
Ok((_, vot)) => vot, Ok((_, vot)) => vot,
Err(s) => { tcx.sess.span_fatal(it.span, s); } Err(s) => { tcx.sess.span_fatal(it.span, s.as_slice()); }
}; };
let td_ptr = ty::mk_ptr(ccx.tcx, ty::mt { let td_ptr = ty::mk_ptr(ccx.tcx, ty::mt {
@ -4387,10 +4449,12 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &ast::ForeignItem) {
let fty = ty::mk_bare_fn(tcx, ty::BareFnTy { let fty = ty::mk_bare_fn(tcx, ty::BareFnTy {
fn_style: ast::UnsafeFn, fn_style: ast::UnsafeFn,
abi: abi::RustIntrinsic, abi: abi::RustIntrinsic,
sig: FnSig {binder_id: it.id, sig: FnSig {
binder_id: it.id,
inputs: inputs, inputs: inputs,
output: output, output: output,
variadic: false} variadic: false,
}
}); });
let i_ty = ty::lookup_item_type(ccx.tcx, local_def(it.id)); let i_ty = ty::lookup_item_type(ccx.tcx, local_def(it.id));
let i_n_tps = i_ty.generics.type_param_defs().len(); let i_n_tps = i_ty.generics.type_param_defs().len();
@ -4399,10 +4463,15 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &ast::ForeignItem) {
of type parameters: found {}, \ of type parameters: found {}, \
expected {}", i_n_tps, n_tps)); expected {}", i_n_tps, n_tps));
} else { } else {
require_same_types( require_same_types(tcx,
tcx, None, false, it.span, i_ty.ty, fty, None,
|| format!("intrinsic has wrong type: \ false,
expected `{}`", it.span,
ppaux::ty_to_str(ccx.tcx, fty))); i_ty.ty,
fty,
|| {
format_strbuf!("intrinsic has wrong type: expected `{}`",
ppaux::ty_to_str(ccx.tcx, fty))
});
} }
} }
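A pattern that repeats throughout the check/mod.rs hunks above: wherever an API still takes `&str` (`span_err`, `span_fatal`, `add_lint`), the now-owned message is borrowed back with `.as_slice()`. A sketch of that boundary in today's terms, where `.as_str()` or plain deref coercion does the same job; the `span_err` below is a stand-in, not the real session API:

```rust
// Stand-in for a diagnostics sink that still wants a borrowed string.
fn span_err(msg: &str) {
    eprintln!("error: {}", msg);
}

fn main() {
    let msg: String = format!("cast from nil: `{}` as `{}`", "()", "uint");
    span_err(msg.as_str()); // explicit borrow, like `.as_slice()` in the diff
    span_err(&msg);         // or let deref coercion turn &String into &str
}
```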

View file

@ -421,7 +421,9 @@ impl<'a> CoherenceChecker<'a> {
let crate_store = &self.crate_context.tcx.sess.cstore; let crate_store = &self.crate_context.tcx.sess.cstore;
let cdata = crate_store.get_crate_data(impl_b.krate); let cdata = crate_store.get_crate_data(impl_b.krate);
session.note( session.note(
"conflicting implementation in crate `" + cdata.name + "`"); format!("conflicting implementation in crate \
`{}`",
cdata.name));
} }
} }
} }
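The coherence hunk drops `~str` concatenation with `+` in favour of a single formatting call. A minimal modern equivalent; the crate name is made up:

```rust
fn main() {
    let crate_name = "collections";

    // Old shape:  "conflicting implementation in crate `" + crate_name + "`"
    // New shape:  one `format!` call that allocates the whole note at once.
    let note = format!("conflicting implementation in crate `{}`", crate_name);
    assert_eq!(note, "conflicting implementation in crate `collections`");
}
```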

View file

@ -438,8 +438,10 @@ pub fn ensure_supertraits(ccx: &CrateCtxt,
} }
if sized == ast::StaticSize { if sized == ast::StaticSize {
match tcx.lang_items.require(SizedTraitLangItem) { match tcx.lang_items.require(SizedTraitLangItem) {
Ok(def_id) => { ty::try_add_builtin_trait(tcx, def_id, &mut bounds); }, Ok(def_id) => {
Err(s) => tcx.sess.err(s), ty::try_add_builtin_trait(tcx, def_id, &mut bounds);
}
Err(s) => tcx.sess.err(s.as_slice()),
}; };
} }
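In `ensure_supertraits`, the lang-item lookup now yields its error message as an owned buffer, so the caller borrows it only at the reporting call (`tcx.sess.err(s.as_slice())`). A rough sketch of that `Result` shape with invented names:

```rust
// Hypothetical lang-item lookup: the error carries an owned message.
fn require_sized_lang_item(present: bool) -> Result<u32, String> {
    if present {
        Ok(42) // pretend 42 is the id of the `Sized` lang item
    } else {
        Err("language item `Sized` is missing".to_string())
    }
}

fn err(msg: &str) {
    eprintln!("error: {}", msg);
}

fn main() {
    match require_sized_lang_item(false) {
        Ok(def_id) => println!("found lang item {}", def_id),
        Err(s) => err(s.as_str()), // mirrors `tcx.sess.err(s.as_slice())`
    }
}
```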

View file

@ -71,7 +71,7 @@ use syntax::abi;
pub trait Combine { pub trait Combine {
fn infcx<'a>(&'a self) -> &'a InferCtxt<'a>; fn infcx<'a>(&'a self) -> &'a InferCtxt<'a>;
fn tag(&self) -> ~str; fn tag(&self) -> StrBuf;
fn a_is_expected(&self) -> bool; fn a_is_expected(&self) -> bool;
fn trace(&self) -> TypeTrace; fn trace(&self) -> TypeTrace;

View file

@ -103,12 +103,12 @@ pub trait ErrorReporting {
trace: TypeTrace, trace: TypeTrace,
terr: &ty::type_err); terr: &ty::type_err);
fn values_str(&self, values: &ValuePairs) -> Option<~str>; fn values_str(&self, values: &ValuePairs) -> Option<StrBuf>;
fn expected_found_str<T:UserString+Resolvable>( fn expected_found_str<T:UserString+Resolvable>(
&self, &self,
exp_found: &ty::expected_found<T>) exp_found: &ty::expected_found<T>)
-> Option<~str>; -> Option<StrBuf>;
fn report_concrete_failure(&self, fn report_concrete_failure(&self,
origin: SubregionOrigin, origin: SubregionOrigin,
@ -365,7 +365,7 @@ impl<'a> ErrorReporting for InferCtxt<'a> {
ty::note_and_explain_type_err(self.tcx, terr); ty::note_and_explain_type_err(self.tcx, terr);
} }
fn values_str(&self, values: &ValuePairs) -> Option<~str> { fn values_str(&self, values: &ValuePairs) -> Option<StrBuf> {
/*! /*!
* Returns a string of the form "expected `{}` but found `{}`", * Returns a string of the form "expected `{}` but found `{}`",
* or None if this is a derived error. * or None if this is a derived error.
@ -383,7 +383,7 @@ impl<'a> ErrorReporting for InferCtxt<'a> {
fn expected_found_str<T:UserString+Resolvable>( fn expected_found_str<T:UserString+Resolvable>(
&self, &self,
exp_found: &ty::expected_found<T>) exp_found: &ty::expected_found<T>)
-> Option<~str> -> Option<StrBuf>
{ {
let expected = exp_found.expected.resolve(self); let expected = exp_found.expected.resolve(self);
if expected.contains_error() { if expected.contains_error() {
@ -395,7 +395,7 @@ impl<'a> ErrorReporting for InferCtxt<'a> {
return None; return None;
} }
Some(format!("expected `{}` but found `{}`", Some(format_strbuf!("expected `{}` but found `{}`",
expected.user_string(self.tcx), expected.user_string(self.tcx),
found.user_string(self.tcx))) found.user_string(self.tcx)))
} }
@ -1449,7 +1449,7 @@ fn lifetimes_in_scope(tcx: &ty::ctxt,
// LifeGiver is responsible for generating fresh lifetime names // LifeGiver is responsible for generating fresh lifetime names
struct LifeGiver { struct LifeGiver {
taken: HashSet<~str>, taken: HashSet<StrBuf>,
counter: Cell<uint>, counter: Cell<uint>,
generated: RefCell<Vec<ast::Lifetime>>, generated: RefCell<Vec<ast::Lifetime>>,
} }
@ -1458,7 +1458,7 @@ impl LifeGiver {
fn with_taken(taken: &[ast::Lifetime]) -> LifeGiver { fn with_taken(taken: &[ast::Lifetime]) -> LifeGiver {
let mut taken_ = HashSet::new(); let mut taken_ = HashSet::new();
for lt in taken.iter() { for lt in taken.iter() {
let lt_name = token::get_name(lt.name).get().to_owned(); let lt_name = token::get_name(lt.name).get().to_strbuf();
taken_.insert(lt_name); taken_.insert(lt_name);
} }
LifeGiver { LifeGiver {
@ -1489,14 +1489,14 @@ impl LifeGiver {
return lifetime; return lifetime;
// 0 .. 25 generates a .. z, 26 .. 51 generates aa .. zz, and so on // 0 .. 25 generates a .. z, 26 .. 51 generates aa .. zz, and so on
fn num_to_str(counter: uint) -> ~str { fn num_to_str(counter: uint) -> StrBuf {
let mut s = StrBuf::new(); let mut s = StrBuf::new();
let (n, r) = (counter/26 + 1, counter % 26); let (n, r) = (counter/26 + 1, counter % 26);
let letter: char = from_u32((r+97) as u32).unwrap(); let letter: char = from_u32((r+97) as u32).unwrap();
for _ in range(0, n) { for _ in range(0, n) {
s.push_char(letter); s.push_char(letter);
} }
s.into_owned() s
} }
} }
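`LifeGiver::num_to_str` now returns the buffer it builds directly; the trailing `s.into_owned()` is gone. The generator is small enough to restate as a runnable sketch in today's Rust:

```rust
// 0..25 generates a..z, 26..51 generates aa..zz, and so on, as in the diff.
fn num_to_str(counter: usize) -> String {
    let (n, r) = (counter / 26 + 1, counter % 26);
    let letter = char::from_u32((r + 97) as u32).unwrap();
    let mut s = String::new();
    for _ in 0..n {
        s.push(letter);
    }
    s // the owned buffer is returned as-is
}

fn main() {
    assert_eq!(num_to_str(0), "a");
    assert_eq!(num_to_str(25), "z");
    assert_eq!(num_to_str(26), "aa");
}
```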

View file

@ -36,7 +36,7 @@ impl<'f> Glb<'f> {
impl<'f> Combine for Glb<'f> { impl<'f> Combine for Glb<'f> {
fn infcx<'a>(&'a self) -> &'a InferCtxt<'a> { self.get_ref().infcx } fn infcx<'a>(&'a self) -> &'a InferCtxt<'a> { self.get_ref().infcx }
fn tag(&self) -> ~str { "glb".to_owned() } fn tag(&self) -> StrBuf { "glb".to_strbuf() }
fn a_is_expected(&self) -> bool { self.get_ref().a_is_expected } fn a_is_expected(&self) -> bool { self.get_ref().a_is_expected }
fn trace(&self) -> TypeTrace { self.get_ref().trace.clone() } fn trace(&self) -> TypeTrace { self.get_ref().trace.clone() }

View file

@ -35,7 +35,7 @@ impl<'f> Lub<'f> {
impl<'f> Combine for Lub<'f> { impl<'f> Combine for Lub<'f> {
fn infcx<'a>(&'a self) -> &'a InferCtxt<'a> { self.get_ref().infcx } fn infcx<'a>(&'a self) -> &'a InferCtxt<'a> { self.get_ref().infcx }
fn tag(&self) -> ~str { "lub".to_owned() } fn tag(&self) -> StrBuf { "lub".to_strbuf() }
fn a_is_expected(&self) -> bool { self.get_ref().a_is_expected } fn a_is_expected(&self) -> bool { self.get_ref().a_is_expected }
fn trace(&self) -> TypeTrace { self.get_ref().trace.clone() } fn trace(&self) -> TypeTrace { self.get_ref().trace.clone() }

View file

@ -246,15 +246,17 @@ pub enum fixup_err {
region_var_bound_by_region_var(RegionVid, RegionVid) region_var_bound_by_region_var(RegionVid, RegionVid)
} }
pub fn fixup_err_to_str(f: fixup_err) -> ~str { pub fn fixup_err_to_str(f: fixup_err) -> StrBuf {
match f { match f {
unresolved_int_ty(_) => "unconstrained integral type".to_owned(), unresolved_int_ty(_) => "unconstrained integral type".to_strbuf(),
unresolved_ty(_) => "unconstrained type".to_owned(), unresolved_ty(_) => "unconstrained type".to_strbuf(),
cyclic_ty(_) => "cyclic type of infinite size".to_owned(), cyclic_ty(_) => "cyclic type of infinite size".to_strbuf(),
unresolved_region(_) => "unconstrained region".to_owned(), unresolved_region(_) => "unconstrained region".to_strbuf(),
region_var_bound_by_region_var(r1, r2) => { region_var_bound_by_region_var(r1, r2) => {
format!("region var {:?} bound by another region var {:?}; this is \ format_strbuf!("region var {:?} bound by another region var {:?}; \
a bug in rustc", r1, r2) this is a bug in rustc",
r1,
r2)
} }
} }
} }
@ -649,17 +651,17 @@ impl<'a> InferCtxt<'a> {
self.report_region_errors(&errors); // see error_reporting.rs self.report_region_errors(&errors); // see error_reporting.rs
} }
pub fn ty_to_str(&self, t: ty::t) -> ~str { pub fn ty_to_str(&self, t: ty::t) -> StrBuf {
ty_to_str(self.tcx, ty_to_str(self.tcx,
self.resolve_type_vars_if_possible(t)) self.resolve_type_vars_if_possible(t))
} }
pub fn tys_to_str(&self, ts: &[ty::t]) -> ~str { pub fn tys_to_str(&self, ts: &[ty::t]) -> StrBuf {
let tstrs: Vec<~str> = ts.iter().map(|t| self.ty_to_str(*t)).collect(); let tstrs: Vec<StrBuf> = ts.iter().map(|t| self.ty_to_str(*t)).collect();
format!("({})", tstrs.connect(", ")) format_strbuf!("({})", tstrs.connect(", "))
} }
pub fn trait_ref_to_str(&self, t: &ty::TraitRef) -> ~str { pub fn trait_ref_to_str(&self, t: &ty::TraitRef) -> StrBuf {
let t = self.resolve_type_vars_in_trait_ref_if_possible(t); let t = self.resolve_type_vars_in_trait_ref_if_possible(t);
trait_ref_to_str(self.tcx, &t) trait_ref_to_str(self.tcx, &t)
} }
@ -712,19 +714,19 @@ impl<'a> InferCtxt<'a> {
// errors. // errors.
pub fn type_error_message_str(&self, pub fn type_error_message_str(&self,
sp: Span, sp: Span,
mk_msg: |Option<~str>, ~str| -> ~str, mk_msg: |Option<StrBuf>, StrBuf| -> StrBuf,
actual_ty: ~str, actual_ty: StrBuf,
err: Option<&ty::type_err>) { err: Option<&ty::type_err>) {
self.type_error_message_str_with_expected(sp, mk_msg, None, actual_ty, err) self.type_error_message_str_with_expected(sp, mk_msg, None, actual_ty, err)
} }
pub fn type_error_message_str_with_expected(&self, pub fn type_error_message_str_with_expected(&self,
sp: Span, sp: Span,
mk_msg: |Option<~str>, mk_msg: |Option<StrBuf>,
~str| StrBuf|
-> ~str, -> StrBuf,
expected_ty: Option<ty::t>, expected_ty: Option<ty::t>,
actual_ty: ~str, actual_ty: StrBuf,
err: Option<&ty::type_err>) { err: Option<&ty::type_err>) {
debug!("hi! expected_ty = {:?}, actual_ty = {}", expected_ty, actual_ty); debug!("hi! expected_ty = {:?}, actual_ty = {}", expected_ty, actual_ty);
@ -751,7 +753,7 @@ impl<'a> InferCtxt<'a> {
pub fn type_error_message(&self, pub fn type_error_message(&self,
sp: Span, sp: Span,
mk_msg: |~str| -> ~str, mk_msg: |StrBuf| -> StrBuf,
actual_ty: ty::t, actual_ty: ty::t,
err: Option<&ty::type_err>) { err: Option<&ty::type_err>) {
let actual_ty = self.resolve_type_vars_if_possible(actual_ty); let actual_ty = self.resolve_type_vars_if_possible(actual_ty);
@ -775,10 +777,12 @@ impl<'a> InferCtxt<'a> {
// Don't report an error if expected is ty_err // Don't report an error if expected is ty_err
ty::ty_err => return, ty::ty_err => return,
_ => { _ => {
// if I leave out : ~str, it infers &str and complains // if I leave out : StrBuf, it infers &str and complains
|actual: ~str| { |actual: StrBuf| {
format!("mismatched types: expected `{}` but found `{}`", format_strbuf!("mismatched types: expected `{}` but \
self.ty_to_str(resolved_expected), actual) found `{}`",
self.ty_to_str(resolved_expected),
actual)
} }
} }
}; };
@ -818,8 +822,8 @@ impl TypeTrace {
} }
impl Repr for TypeTrace { impl Repr for TypeTrace {
fn repr(&self, tcx: &ty::ctxt) -> ~str { fn repr(&self, tcx: &ty::ctxt) -> StrBuf {
format!("TypeTrace({})", self.origin.repr(tcx)) format_strbuf!("TypeTrace({})", self.origin.repr(tcx))
} }
} }
@ -838,15 +842,27 @@ impl TypeOrigin {
} }
impl Repr for TypeOrigin { impl Repr for TypeOrigin {
fn repr(&self, tcx: &ty::ctxt) -> ~str { fn repr(&self, tcx: &ty::ctxt) -> StrBuf {
match *self { match *self {
MethodCompatCheck(a) => format!("MethodCompatCheck({})", a.repr(tcx)), MethodCompatCheck(a) => {
ExprAssignable(a) => format!("ExprAssignable({})", a.repr(tcx)), format_strbuf!("MethodCompatCheck({})", a.repr(tcx))
Misc(a) => format!("Misc({})", a.repr(tcx)), }
RelateTraitRefs(a) => format!("RelateTraitRefs({})", a.repr(tcx)), ExprAssignable(a) => {
RelateSelfType(a) => format!("RelateSelfType({})", a.repr(tcx)), format_strbuf!("ExprAssignable({})", a.repr(tcx))
MatchExpression(a) => format!("MatchExpression({})", a.repr(tcx)), }
IfExpression(a) => format!("IfExpression({})", a.repr(tcx)), Misc(a) => format_strbuf!("Misc({})", a.repr(tcx)),
RelateTraitRefs(a) => {
format_strbuf!("RelateTraitRefs({})", a.repr(tcx))
}
RelateSelfType(a) => {
format_strbuf!("RelateSelfType({})", a.repr(tcx))
}
MatchExpression(a) => {
format_strbuf!("MatchExpression({})", a.repr(tcx))
}
IfExpression(a) => {
format_strbuf!("IfExpression({})", a.repr(tcx))
}
} }
} }
} }
@ -875,26 +891,44 @@ impl SubregionOrigin {
} }
impl Repr for SubregionOrigin { impl Repr for SubregionOrigin {
fn repr(&self, tcx: &ty::ctxt) -> ~str { fn repr(&self, tcx: &ty::ctxt) -> StrBuf {
match *self { match *self {
Subtype(ref a) => format!("Subtype({})", a.repr(tcx)), Subtype(ref a) => {
InfStackClosure(a) => format!("InfStackClosure({})", a.repr(tcx)), format_strbuf!("Subtype({})", a.repr(tcx))
InvokeClosure(a) => format!("InvokeClosure({})", a.repr(tcx)), }
DerefPointer(a) => format!("DerefPointer({})", a.repr(tcx)), InfStackClosure(a) => {
FreeVariable(a, b) => format!("FreeVariable({}, {})", a.repr(tcx), b), format_strbuf!("InfStackClosure({})", a.repr(tcx))
IndexSlice(a) => format!("IndexSlice({})", a.repr(tcx)), }
RelateObjectBound(a) => format!("RelateObjectBound({})", a.repr(tcx)), InvokeClosure(a) => {
Reborrow(a) => format!("Reborrow({})", a.repr(tcx)), format_strbuf!("InvokeClosure({})", a.repr(tcx))
ReborrowUpvar(a, b) => format!("ReborrowUpvar({},{:?})", a.repr(tcx), b), }
ReferenceOutlivesReferent(_, a) => DerefPointer(a) => {
format!("ReferenceOutlivesReferent({})", a.repr(tcx)), format_strbuf!("DerefPointer({})", a.repr(tcx))
BindingTypeIsNotValidAtDecl(a) => }
format!("BindingTypeIsNotValidAtDecl({})", a.repr(tcx)), FreeVariable(a, b) => {
CallRcvr(a) => format!("CallRcvr({})", a.repr(tcx)), format_strbuf!("FreeVariable({}, {})", a.repr(tcx), b)
CallArg(a) => format!("CallArg({})", a.repr(tcx)), }
CallReturn(a) => format!("CallReturn({})", a.repr(tcx)), IndexSlice(a) => {
AddrOf(a) => format!("AddrOf({})", a.repr(tcx)), format_strbuf!("IndexSlice({})", a.repr(tcx))
AutoBorrow(a) => format!("AutoBorrow({})", a.repr(tcx)), }
RelateObjectBound(a) => {
format_strbuf!("RelateObjectBound({})", a.repr(tcx))
}
Reborrow(a) => format_strbuf!("Reborrow({})", a.repr(tcx)),
ReborrowUpvar(a, b) => {
format_strbuf!("ReborrowUpvar({},{:?})", a.repr(tcx), b)
}
ReferenceOutlivesReferent(_, a) => {
format_strbuf!("ReferenceOutlivesReferent({})", a.repr(tcx))
}
BindingTypeIsNotValidAtDecl(a) => {
format_strbuf!("BindingTypeIsNotValidAtDecl({})", a.repr(tcx))
}
CallRcvr(a) => format_strbuf!("CallRcvr({})", a.repr(tcx)),
CallArg(a) => format_strbuf!("CallArg({})", a.repr(tcx)),
CallReturn(a) => format_strbuf!("CallReturn({})", a.repr(tcx)),
AddrOf(a) => format_strbuf!("AddrOf({})", a.repr(tcx)),
AutoBorrow(a) => format_strbuf!("AutoBorrow({})", a.repr(tcx)),
} }
} }
} }
@ -918,25 +952,43 @@ impl RegionVariableOrigin {
} }
impl Repr for RegionVariableOrigin { impl Repr for RegionVariableOrigin {
fn repr(&self, tcx: &ty::ctxt) -> ~str { fn repr(&self, tcx: &ty::ctxt) -> StrBuf {
match *self { match *self {
MiscVariable(a) => format!("MiscVariable({})", a.repr(tcx)), MiscVariable(a) => {
PatternRegion(a) => format!("PatternRegion({})", a.repr(tcx)), format_strbuf!("MiscVariable({})", a.repr(tcx))
AddrOfRegion(a) => format!("AddrOfRegion({})", a.repr(tcx)), }
AddrOfSlice(a) => format!("AddrOfSlice({})", a.repr(tcx)), PatternRegion(a) => {
Autoref(a) => format!("Autoref({})", a.repr(tcx)), format_strbuf!("PatternRegion({})", a.repr(tcx))
Coercion(ref a) => format!("Coercion({})", a.repr(tcx)), }
EarlyBoundRegion(a, b) => format!("EarlyBoundRegion({},{})", AddrOfRegion(a) => {
a.repr(tcx), b.repr(tcx)), format_strbuf!("AddrOfRegion({})", a.repr(tcx))
LateBoundRegion(a, b) => format!("LateBoundRegion({},{})", }
a.repr(tcx), b.repr(tcx)), AddrOfSlice(a) => format_strbuf!("AddrOfSlice({})", a.repr(tcx)),
BoundRegionInFnType(a, b) => format!("bound_regionInFnType({},{})", Autoref(a) => format_strbuf!("Autoref({})", a.repr(tcx)),
a.repr(tcx), b.repr(tcx)), Coercion(ref a) => format_strbuf!("Coercion({})", a.repr(tcx)),
BoundRegionInCoherence(a) => format!("bound_regionInCoherence({})", EarlyBoundRegion(a, b) => {
a.repr(tcx)), format_strbuf!("EarlyBoundRegion({},{})",
UpvarRegion(a, b) => format!("UpvarRegion({}, {})",
a.repr(tcx), a.repr(tcx),
b.repr(tcx)), b.repr(tcx))
}
LateBoundRegion(a, b) => {
format_strbuf!("LateBoundRegion({},{})",
a.repr(tcx),
b.repr(tcx))
}
BoundRegionInFnType(a, b) => {
format_strbuf!("bound_regionInFnType({},{})",
a.repr(tcx),
b.repr(tcx))
}
BoundRegionInCoherence(a) => {
format_strbuf!("bound_regionInCoherence({})", a.repr(tcx))
}
UpvarRegion(a, b) => {
format_strbuf!("UpvarRegion({}, {})",
a.repr(tcx),
b.repr(tcx))
}
} }
} }
} }
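`tys_to_str` above collects each rendered type into a `Vec<StrBuf>` and joins the pieces with `connect` (roughly today's `join`). A self-contained sketch with placeholder type names instead of real `ty::t` values:

```rust
fn tys_to_str(ts: &[&str]) -> String {
    // In the diff each element comes from `self.ty_to_str(*t)`; here the
    // rendered names are supplied directly.
    let tstrs: Vec<String> = ts.iter().map(|t| t.to_string()).collect();
    format!("({})", tstrs.join(", "))
}

fn main() {
    assert_eq!(tys_to_str(&["uint", "&str", "bool"]), "(uint, &str, bool)");
}
```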

View file

@ -1332,16 +1332,28 @@ impl<'a> RegionVarBindings<'a> {
} }
impl Repr for Constraint { impl Repr for Constraint {
fn repr(&self, tcx: &ty::ctxt) -> ~str { fn repr(&self, tcx: &ty::ctxt) -> StrBuf {
match *self { match *self {
ConstrainVarSubVar(a, b) => format!("ConstrainVarSubVar({}, {})", ConstrainVarSubVar(a, b) => {
a.repr(tcx), b.repr(tcx)), format_strbuf!("ConstrainVarSubVar({}, {})",
ConstrainRegSubVar(a, b) => format!("ConstrainRegSubVar({}, {})", a.repr(tcx),
a.repr(tcx), b.repr(tcx)), b.repr(tcx))
ConstrainVarSubReg(a, b) => format!("ConstrainVarSubReg({}, {})", }
a.repr(tcx), b.repr(tcx)), ConstrainRegSubVar(a, b) => {
ConstrainRegSubReg(a, b) => format!("ConstrainRegSubReg({}, {})", format_strbuf!("ConstrainRegSubVar({}, {})",
a.repr(tcx), b.repr(tcx)), a.repr(tcx),
b.repr(tcx))
}
ConstrainVarSubReg(a, b) => {
format_strbuf!("ConstrainVarSubReg({}, {})",
a.repr(tcx),
b.repr(tcx))
}
ConstrainRegSubReg(a, b) => {
format_strbuf!("ConstrainRegSubReg({}, {})",
a.repr(tcx),
b.repr(tcx))
}
} }
} }
} }

View file

@ -35,7 +35,7 @@ impl<'f> Sub<'f> {
impl<'f> Combine for Sub<'f> { impl<'f> Combine for Sub<'f> {
fn infcx<'a>(&'a self) -> &'a InferCtxt<'a> { self.get_ref().infcx } fn infcx<'a>(&'a self) -> &'a InferCtxt<'a> { self.get_ref().infcx }
fn tag(&self) -> ~str { "sub".to_owned() } fn tag(&self) -> StrBuf { "sub".to_strbuf() }
fn a_is_expected(&self) -> bool { self.get_ref().a_is_expected } fn a_is_expected(&self) -> bool { self.get_ref().a_is_expected }
fn trace(&self) -> TypeTrace { self.get_ref().trace.clone() } fn trace(&self) -> TypeTrace { self.get_ref().trace.clone() }
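`glb`, `lub`, and `sub` all receive the same one-line change: `tag` now returns an owned buffer built from a string literal. The trait shape, restated with today's `String` and dummy combiner types (rustc's `Combine` has many more methods):

```rust
// Illustrative trait; only the `tag` method from the diff is modeled.
trait Combine {
    fn tag(&self) -> String;
}

struct Sub;
struct Lub;
struct Glb;

impl Combine for Sub { fn tag(&self) -> String { "sub".to_string() } }
impl Combine for Lub { fn tag(&self) -> String { "lub".to_string() } }
impl Combine for Glb { fn tag(&self) -> String { "glb".to_string() } }

fn main() {
    let combiners: Vec<Box<dyn Combine>> = vec![Box::new(Sub), Box::new(Lub), Box::new(Glb)];
    for c in &combiners {
        println!("{}", c.tag());
    }
}
```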

View file

@ -34,7 +34,7 @@ struct Env {
krate: @ast::Crate, krate: @ast::Crate,
tcx: ty::ctxt, tcx: ty::ctxt,
infcx: infer::infer_ctxt, infcx: infer::infer_ctxt,
err_messages: @DVec<~str> err_messages: @DVec<StrBuf>
} }
struct RH { struct RH {
@ -93,7 +93,7 @@ impl Env {
sub: &[]}]}); sub: &[]}]});
} }
pub fn lookup_item(&self, names: &[~str]) -> ast::node_id { pub fn lookup_item(&self, names: &[StrBuf]) -> ast::node_id {
return match search_mod(self, &self.krate.node.module, 0, names) { return match search_mod(self, &self.krate.node.module, 0, names) {
Some(id) => id, Some(id) => id,
None => { None => {
@ -104,7 +104,7 @@ impl Env {
fn search_mod(self: &Env, fn search_mod(self: &Env,
m: &ast::Mod, m: &ast::Mod,
idx: uint, idx: uint,
names: &[~str]) -> Option<ast::node_id> { names: &[StrBuf]) -> Option<ast::node_id> {
assert!(idx < names.len()); assert!(idx < names.len());
for item in m.items.iter() { for item in m.items.iter() {
if self.tcx.sess.str_of(item.ident) == names[idx] { if self.tcx.sess.str_of(item.ident) == names[idx] {
@ -117,7 +117,7 @@ impl Env {
fn search(self: &Env, fn search(self: &Env,
it: @ast::Item, it: @ast::Item,
idx: uint, idx: uint,
names: &[~str]) -> Option<ast::node_id> { names: &[StrBuf]) -> Option<ast::node_id> {
if idx == names.len() { if idx == names.len() {
return Some(it.id); return Some(it.id);
} }
@ -174,7 +174,7 @@ impl Env {
self.assert_subtype(b, a); self.assert_subtype(b, a);
} }
pub fn ty_to_str(&self, a: ty::t) -> ~str { pub fn ty_to_str(&self, a: ty::t) -> StrBuf {
ty_to_str(self.tcx, a) ty_to_str(self.tcx, a)
} }
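The inference test harness now takes item paths as `&[StrBuf]`. A stripped-down version of the `search_mod` comparison, using a flat list of made-up items instead of an AST:

```rust
// One level of the name search: compare an item's name against `names[idx]`.
fn search(items: &[(&str, u32)], names: &[String], idx: usize) -> Option<u32> {
    let target = names.get(idx)?;
    for &(name, id) in items {
        if name == target.as_str() {
            return Some(id);
        }
    }
    None
}

fn main() {
    let items = [("foo", 1), ("bar", 2)];
    let names = vec!["bar".to_string()];
    assert_eq!(search(&items, &names, 0), Some(2));
}
```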

View file

@ -20,75 +20,80 @@ use util::ppaux::{mt_to_str, ty_to_str, trait_ref_to_str};
use syntax::ast; use syntax::ast;
pub trait InferStr { pub trait InferStr {
fn inf_str(&self, cx: &InferCtxt) -> ~str; fn inf_str(&self, cx: &InferCtxt) -> StrBuf;
} }
impl InferStr for ty::t { impl InferStr for ty::t {
fn inf_str(&self, cx: &InferCtxt) -> ~str { fn inf_str(&self, cx: &InferCtxt) -> StrBuf {
ty_to_str(cx.tcx, *self) ty_to_str(cx.tcx, *self)
} }
} }
impl InferStr for FnSig { impl InferStr for FnSig {
fn inf_str(&self, cx: &InferCtxt) -> ~str { fn inf_str(&self, cx: &InferCtxt) -> StrBuf {
format!("({}) -> {}", format_strbuf!("({}) -> {}",
self.inputs.iter().map(|a| a.inf_str(cx)).collect::<Vec<~str>>().connect(", "), self.inputs
.iter()
.map(|a| a.inf_str(cx))
.collect::<Vec<StrBuf>>().connect(", "),
self.output.inf_str(cx)) self.output.inf_str(cx))
} }
} }
impl InferStr for ty::mt { impl InferStr for ty::mt {
fn inf_str(&self, cx: &InferCtxt) -> ~str { fn inf_str(&self, cx: &InferCtxt) -> StrBuf {
mt_to_str(cx.tcx, self) mt_to_str(cx.tcx, self)
} }
} }
impl InferStr for ty::Region { impl InferStr for ty::Region {
fn inf_str(&self, _cx: &InferCtxt) -> ~str { fn inf_str(&self, _cx: &InferCtxt) -> StrBuf {
format!("{:?}", *self) format_strbuf!("{:?}", *self)
} }
} }
impl<V:InferStr> InferStr for Bound<V> { impl<V:InferStr> InferStr for Bound<V> {
fn inf_str(&self, cx: &InferCtxt) -> ~str { fn inf_str(&self, cx: &InferCtxt) -> StrBuf {
match *self { match *self {
Some(ref v) => v.inf_str(cx), Some(ref v) => v.inf_str(cx),
None => "none".to_owned() None => "none".to_strbuf()
} }
} }
} }
impl<T:InferStr> InferStr for Bounds<T> { impl<T:InferStr> InferStr for Bounds<T> {
fn inf_str(&self, cx: &InferCtxt) -> ~str { fn inf_str(&self, cx: &InferCtxt) -> StrBuf {
format!("\\{{} <: {}\\}", format_strbuf!("\\{{} <: {}\\}",
self.lb.inf_str(cx), self.lb.inf_str(cx),
self.ub.inf_str(cx)) self.ub.inf_str(cx))
} }
} }
impl<V:Vid + ToStr,T:InferStr> InferStr for VarValue<V, T> { impl<V:Vid + ToStr,T:InferStr> InferStr for VarValue<V, T> {
fn inf_str(&self, cx: &InferCtxt) -> ~str { fn inf_str(&self, cx: &InferCtxt) -> StrBuf {
match *self { match *self {
Redirect(ref vid) => format!("Redirect({})", vid.to_str()), Redirect(ref vid) => format_strbuf!("Redirect({})", vid.to_str()),
Root(ref pt, rk) => format!("Root({}, {})", pt.inf_str(cx), rk) Root(ref pt, rk) => {
format_strbuf!("Root({}, {})", pt.inf_str(cx), rk)
}
} }
} }
} }
impl InferStr for IntVarValue { impl InferStr for IntVarValue {
fn inf_str(&self, _cx: &InferCtxt) -> ~str { fn inf_str(&self, _cx: &InferCtxt) -> StrBuf {
self.to_str() self.to_str().to_strbuf()
} }
} }
impl InferStr for ast::FloatTy { impl InferStr for ast::FloatTy {
fn inf_str(&self, _cx: &InferCtxt) -> ~str { fn inf_str(&self, _cx: &InferCtxt) -> StrBuf {
self.to_str() self.to_str().to_strbuf()
} }
} }
impl InferStr for ty::TraitRef { impl InferStr for ty::TraitRef {
fn inf_str(&self, cx: &InferCtxt) -> ~str { fn inf_str(&self, cx: &InferCtxt) -> StrBuf {
trait_ref_to_str(cx.tcx, self) trait_ref_to_str(cx.tcx, self)
} }
} }
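Several `InferStr` impls above go through `to_str()` and then convert the result (`self.to_str().to_strbuf()`). In current Rust the single call `to_string()` covers both steps for any `Display` type; a sketch with a made-up `FloatTy` rather than the real `ast::FloatTy`:

```rust
use std::fmt;

#[derive(Clone, Copy)]
enum FloatTy {
    F32,
    F64,
}

impl fmt::Display for FloatTy {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            FloatTy::F32 => write!(f, "f32"),
            FloatTy::F64 => write!(f, "f64"),
        }
    }
}

fn inf_str(t: FloatTy) -> String {
    t.to_string() // ToString comes for free from the Display impl
}

fn main() {
    assert_eq!(inf_str(FloatTy::F32), "f32");
    assert_eq!(inf_str(FloatTy::F64), "f64");
}
```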

View file

@ -198,10 +198,10 @@ pub enum vtable_origin {
} }
impl Repr for vtable_origin { impl Repr for vtable_origin {
fn repr(&self, tcx: &ty::ctxt) -> ~str { fn repr(&self, tcx: &ty::ctxt) -> StrBuf {
match *self { match *self {
vtable_static(def_id, ref tys, ref vtable_res) => { vtable_static(def_id, ref tys, ref vtable_res) => {
format!("vtable_static({:?}:{}, {}, {})", format_strbuf!("vtable_static({:?}:{}, {}, {})",
def_id, def_id,
ty::item_path_str(tcx, def_id), ty::item_path_str(tcx, def_id),
tys.repr(tcx), tys.repr(tcx),
@ -209,7 +209,7 @@ impl Repr for vtable_origin {
} }
vtable_param(x, y) => { vtable_param(x, y) => {
format!("vtable_param({:?}, {:?})", x, y) format_strbuf!("vtable_param({:?}, {:?})", x, y)
} }
} }
} }
@ -230,8 +230,8 @@ pub struct impl_res {
} }
impl Repr for impl_res { impl Repr for impl_res {
fn repr(&self, tcx: &ty::ctxt) -> ~str { fn repr(&self, tcx: &ty::ctxt) -> StrBuf {
format!("impl_res \\{trait_vtables={}, self_vtables={}\\}", format_strbuf!("impl_res \\{trait_vtables={}, self_vtables={}\\}",
self.trait_vtables.repr(tcx), self.trait_vtables.repr(tcx),
self.self_vtables.repr(tcx)) self.self_vtables.repr(tcx))
} }
@ -293,7 +293,7 @@ pub fn require_same_types(tcx: &ty::ctxt,
span: Span, span: Span,
t1: ty::t, t1: ty::t,
t2: ty::t, t2: ty::t,
msg: || -> ~str) msg: || -> StrBuf)
-> bool { -> bool {
let result = match maybe_infcx { let result = match maybe_infcx {
None => { None => {
@ -308,8 +308,10 @@ pub fn require_same_types(tcx: &ty::ctxt,
match result { match result {
Ok(_) => true, Ok(_) => true,
Err(ref terr) => { Err(ref terr) => {
tcx.sess.span_err(span, msg() + ": " + tcx.sess.span_err(span,
ty::type_err_to_str(tcx, terr)); format!("{}: {}",
msg(),
ty::type_err_to_str(tcx, terr)));
ty::note_and_explain_type_err(tcx, terr); ty::note_and_explain_type_err(tcx, terr);
false false
} }
@ -350,8 +352,10 @@ fn check_main_fn_ty(ccx: &CrateCtxt,
}); });
require_same_types(tcx, None, false, main_span, main_t, se_ty, require_same_types(tcx, None, false, main_span, main_t, se_ty,
|| format!("main function expects type: `{}`", || {
ppaux::ty_to_str(ccx.tcx, se_ty))); format_strbuf!("main function expects type: `{}`",
ppaux::ty_to_str(ccx.tcx, se_ty))
});
} }
_ => { _ => {
tcx.sess.span_bug(main_span, tcx.sess.span_bug(main_span,
@ -399,7 +403,10 @@ fn check_start_fn_ty(ccx: &CrateCtxt,
}); });
require_same_types(tcx, None, false, start_span, start_t, se_ty, require_same_types(tcx, None, false, start_span, start_t, se_ty,
|| format!("start function expects type: `{}`", ppaux::ty_to_str(ccx.tcx, se_ty))); || {
format_strbuf!("start function expects type: `{}`",
ppaux::ty_to_str(ccx.tcx, se_ty))
});
} }
_ => { _ => {
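`require_same_types` keeps its lazily evaluated message closure, but the closure now returns the owned buffer type. A simplified sketch of the "build the message only on failure" shape; the function below is a toy comparison, not the typeck entry point:

```rust
use std::fmt::Debug;

// Toy version: compare two values and only render the message if they differ.
fn require_same_types<T, F>(t1: &T, t2: &T, msg: F) -> bool
where
    T: PartialEq + Debug,
    F: FnOnce() -> String,
{
    if t1 == t2 {
        true
    } else {
        eprintln!("{}: found {:?}, expected {:?}", msg(), t1, t2);
        false
    }
}

fn main() {
    let ok = require_same_types(&"fn()", &"fn(int)", || {
        format!("main function expects type: `{}`", "fn(int)")
    });
    assert!(!ok);
}
```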

View file

@ -1001,7 +1001,7 @@ impl<'a> SolveContext<'a> {
// attribute and report an error with various results if found. // attribute and report an error with various results if found.
if ty::has_attr(tcx, item_def_id, "rustc_variance") { if ty::has_attr(tcx, item_def_id, "rustc_variance") {
let found = item_variances.repr(tcx); let found = item_variances.repr(tcx);
tcx.sess.span_err(tcx.map.span(item_id), found); tcx.sess.span_err(tcx.map.span(item_id), found.as_slice());
} }
let newly_added = tcx.item_variance_map.borrow_mut() let newly_added = tcx.item_variance_map.borrow_mut()

File diff suppressed because it is too large

View file

@ -257,9 +257,9 @@ pub trait Digest {
} }
/// Convenience function that retrieves the result of a digest as a /// Convenience function that retrieves the result of a digest as a
/// ~str in hexadecimal format. /// StrBuf in hexadecimal format.
fn result_str(&mut self) -> ~str { fn result_str(&mut self) -> StrBuf {
self.result_bytes().as_slice().to_hex() self.result_bytes().as_slice().to_hex().to_strbuf()
} }
} }
@ -543,15 +543,15 @@ mod tests {
} }
struct Test { struct Test {
input: ~str, input: StrBuf,
output_str: ~str, output_str: StrBuf,
} }
fn test_hash<D: Digest>(sh: &mut D, tests: &[Test]) { fn test_hash<D: Digest>(sh: &mut D, tests: &[Test]) {
// Test that it works when accepting the message all at once // Test that it works when accepting the message all at once
for t in tests.iter() { for t in tests.iter() {
sh.reset(); sh.reset();
sh.input_str(t.input); sh.input_str(t.input.as_slice());
let out_str = sh.result_str(); let out_str = sh.result_str();
assert!(out_str == t.output_str); assert!(out_str == t.output_str);
} }
@ -563,7 +563,9 @@ mod tests {
let mut left = len; let mut left = len;
while left > 0u { while left > 0u {
let take = (left + 1u) / 2u; let take = (left + 1u) / 2u;
sh.input_str(t.input.slice(len - left, take + len - left)); sh.input_str(t.input
.as_slice()
.slice(len - left, take + len - left));
left = left - take; left = left - take;
} }
let out_str = sh.result_str(); let out_str = sh.result_str();
@ -576,19 +578,21 @@ mod tests {
// Examples from wikipedia // Examples from wikipedia
let wikipedia_tests = vec!( let wikipedia_tests = vec!(
Test { Test {
input: "".to_owned(), input: "".to_strbuf(),
output_str: "e3b0c44298fc1c149afb\ output_str: "e3b0c44298fc1c149afb\
f4c8996fb92427ae41e4649b934ca495991b7852b855".to_owned() f4c8996fb92427ae41e4649b934ca495991b7852b855".to_strbuf()
}, },
Test { Test {
input: "The quick brown fox jumps over the lazy dog".to_owned(), input: "The quick brown fox jumps over the lazy \
dog".to_strbuf(),
output_str: "d7a8fbb307d7809469ca\ output_str: "d7a8fbb307d7809469ca\
9abcb0082e4f8d5651e46d3cdb762d02d0bf37c9e592".to_owned() 9abcb0082e4f8d5651e46d3cdb762d02d0bf37c9e592".to_strbuf()
}, },
Test { Test {
input: "The quick brown fox jumps over the lazy dog.".to_owned(), input: "The quick brown fox jumps over the lazy \
dog.".to_strbuf(),
output_str: "ef537f25c895bfa78252\ output_str: "ef537f25c895bfa78252\
6529a9b63d97aa631564d5d789c2b765448c8635fb6c".to_owned() 6529a9b63d97aa631564d5d789c2b765448c8635fb6c".to_strbuf()
}); });
let tests = wikipedia_tests; let tests = wikipedia_tests;

View file

@ -90,7 +90,7 @@ impl<'a> Clean<Crate> for visit_ast::RustdocVisitor<'a> {
self.attrs.as_slice(), self.attrs.as_slice(),
cx.sess()); cx.sess());
let id = link::find_crate_id(self.attrs.as_slice(), let id = link::find_crate_id(self.attrs.as_slice(),
t_outputs.out_filestem); t_outputs.out_filestem.as_slice());
Crate { Crate {
name: id.name.to_owned(), name: id.name.to_owned(),
module: Some(self.module.clean()), module: Some(self.module.clean()),

View file

@ -100,7 +100,7 @@ pub fn run(input: &str,
fn runtest(test: &str, cratename: &str, libs: HashSet<Path>, should_fail: bool, fn runtest(test: &str, cratename: &str, libs: HashSet<Path>, should_fail: bool,
no_run: bool, loose_feature_gating: bool) { no_run: bool, loose_feature_gating: bool) {
let test = maketest(test, cratename, loose_feature_gating); let test = maketest(test, cratename, loose_feature_gating);
let input = driver::StrInput(test); let input = driver::StrInput(test.to_strbuf());
let sessopts = config::Options { let sessopts = config::Options {
maybe_sysroot: Some(os::self_exe_path().unwrap().dir_path()), maybe_sysroot: Some(os::self_exe_path().unwrap().dir_path()),

View file

@ -504,6 +504,7 @@ use slice::{Vector, ImmutableVector};
use slice; use slice;
use str::{StrSlice, StrAllocating, UTF16Item, ScalarValue, LoneSurrogate}; use str::{StrSlice, StrAllocating, UTF16Item, ScalarValue, LoneSurrogate};
use str; use str;
use strbuf::StrBuf;
pub use self::num::radix; pub use self::num::radix;
pub use self::num::Radix; pub use self::num::Radix;
@ -788,6 +789,11 @@ pub fn format(args: &Arguments) -> ~str {
unsafe { format_unsafe(args.fmt, args.args) } unsafe { format_unsafe(args.fmt, args.args) }
} }
/// Temporary transitionary thing.
pub fn format_strbuf(args: &Arguments) -> StrBuf {
unsafe { format_unsafe_strbuf(args.fmt, args.args) }
}
/// The unsafe version of the formatting function. /// The unsafe version of the formatting function.
/// ///
/// This is currently an unsafe function because the types of all arguments /// This is currently an unsafe function because the types of all arguments
@ -815,6 +821,14 @@ pub unsafe fn format_unsafe(fmt: &[rt::Piece], args: &[Argument]) -> ~str {
return str::from_utf8(output.unwrap().as_slice()).unwrap().to_owned(); return str::from_utf8(output.unwrap().as_slice()).unwrap().to_owned();
} }
/// Temporary transitionary thing.
pub unsafe fn format_unsafe_strbuf(fmt: &[rt::Piece], args: &[Argument])
-> StrBuf {
let mut output = MemWriter::new();
write_unsafe(&mut output as &mut io::Writer, fmt, args).unwrap();
return str::from_utf8(output.unwrap().as_slice()).unwrap().into_strbuf();
}
impl<'a> Formatter<'a> { impl<'a> Formatter<'a> {
// First up is the collection of functions used to execute a format string // First up is the collection of functions used to execute a format string
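The new `format_strbuf`/`format_unsafe_strbuf` pair formats into an in-memory writer and converts the bytes into a `StrBuf` at the end. The safe modern analogue formats straight into a `String` through `fmt::Write`; a minimal sketch:

```rust
use std::fmt::Write as _;

fn format_into_string(name: &str, value: u32) -> String {
    let mut output = String::new();
    // Counterpart of formatting into MemWriter and re-validating as UTF-8,
    // except the buffer is already a string so no conversion step is needed.
    write!(output, "{} = {}", name, value).unwrap();
    output
}

fn main() {
    assert_eq!(format_into_string("answer", 42), "answer = 42");
}
```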

View file

@ -229,6 +229,14 @@ macro_rules! format(
) )
) )
/// Temporary transitionary thing.
#[macro_export]
macro_rules! format_strbuf(
($($arg:tt)*) => (
format_args!(::std::fmt::format_strbuf, $($arg)*)
)
)
/// Use the `format!` syntax to write data into a buffer of type `&mut Writer`. /// Use the `format!` syntax to write data into a buffer of type `&mut Writer`.
/// See `std::fmt` for more information. /// See `std::fmt` for more information.
/// ///
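The transitional `format_strbuf!` macro above simply forwards its arguments, via `format_args!`, to the `StrBuf`-returning formatter. A runnable sketch of the same delegation in today's Rust, where `std::fmt::format` already yields a `String`, so the macro is purely illustrative:

```rust
// Delegating macro in the same shape as `format_strbuf!` in the diff.
macro_rules! format_strbuf {
    ($($arg:tt)*) => {
        ::std::fmt::format(::std::format_args!($($arg)*))
    };
}

fn main() {
    let s: String = format_strbuf!("expected `{}` but found `{}`", "uint", "int");
    assert_eq!(s, "expected `uint` but found `int`");
}
```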