
libstd: Remove ~str from all libstd modules except fmt and str.

Patrick Walton 2014-05-16 10:45:16 -07:00
parent e402e75f4e
commit 36195eb91f
204 changed files with 2102 additions and 1496 deletions
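
Almost every hunk below applies the same mechanical rewrite: functions that returned ~str now return StrBuf, owned strings are built with to_strbuf() or format_strbuf! instead of to_owned() and format!, and .as_slice() is inserted wherever an API still expects &str. A condensed sketch of that pattern follows; it is written in the pre-1.0 Rust dialect this commit targets (it will not build on a modern toolchain), and describe/warn are hypothetical helpers invented for illustration, not code from the patch.

// Illustration of the recurring rewrite; 2014-era Rust only, hypothetical names.
//
// Before this commit, owned strings were `~str`, so format! output flowed
// straight into string-returning functions:
//
//     fn describe(name: &str) -> ~str {
//         format!("crate `{}`", name)
//     }
//
// After, owned strings are `StrBuf`, built with format_strbuf! or to_strbuf(),
// and `.as_slice()` borrows them wherever &str is still expected.
fn describe(name: &str) -> StrBuf {
    if name.is_empty() {
        return "unknown".to_strbuf();                   // was "unknown".to_owned()
    }
    format_strbuf!("crate `{}`", name)                  // was a plain format!(...)
}

fn warn(msg: &str) {
    println!("warning: {}", msg);
}

fn main() {
    warn(describe("std").as_slice());
    warn(format!("{} files changed", 204u).as_slice()); // format!(...).as_slice()
}
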


@ -96,7 +96,7 @@ pub fn parse_config(args: Vec<StrBuf> ) -> Config {
let args_ = args.tail(); let args_ = args.tail();
if args.get(1).as_slice() == "-h" || args.get(1).as_slice() == "--help" { if args.get(1).as_slice() == "-h" || args.get(1).as_slice() == "--help" {
let message = format!("Usage: {} [OPTIONS] [TESTNAME...]", argv0); let message = format!("Usage: {} [OPTIONS] [TESTNAME...]", argv0);
println!("{}", getopts::usage(message, groups.as_slice())); println!("{}", getopts::usage(message.as_slice(), groups.as_slice()));
println!(""); println!("");
fail!() fail!()
} }
@ -109,7 +109,7 @@ pub fn parse_config(args: Vec<StrBuf> ) -> Config {
if matches.opt_present("h") || matches.opt_present("help") { if matches.opt_present("h") || matches.opt_present("help") {
let message = format!("Usage: {} [OPTIONS] [TESTNAME...]", argv0); let message = format!("Usage: {} [OPTIONS] [TESTNAME...]", argv0);
println!("{}", getopts::usage(message, groups.as_slice())); println!("{}", getopts::usage(message.as_slice(), groups.as_slice()));
println!(""); println!("");
fail!() fail!()
} }


@ -157,9 +157,14 @@ fn iter_header(testfile: &Path, it: |&str| -> bool) -> bool {
// module or function. This doesn't seem to be an optimization // module or function. This doesn't seem to be an optimization
// with a warm page cache. Maybe with a cold one. // with a warm page cache. Maybe with a cold one.
let ln = ln.unwrap(); let ln = ln.unwrap();
if ln.starts_with("fn") || ln.starts_with("mod") { if ln.as_slice().starts_with("fn") ||
ln.as_slice().starts_with("mod") {
return true; return true;
} else { if !(it(ln.trim())) { return false; } } } else {
if !(it(ln.as_slice().trim())) {
return false;
}
}
} }
return true; return true;
} }


@ -538,7 +538,8 @@ fn run_debuginfo_lldb_test(config: &Config, props: &TestProps, testfile: &Path)
// Set breakpoints on every line that contains the string "#break" // Set breakpoints on every line that contains the string "#break"
for line in breakpoint_lines.iter() { for line in breakpoint_lines.iter() {
script_str.push_str(format!("breakpoint set --line {}\n", line)); script_str.push_str(format!("breakpoint set --line {}\n",
line).as_slice());
} }
// Append the other commands // Append the other commands
@ -620,18 +621,18 @@ fn parse_debugger_commands(file_path: &Path, debugger_prefix: &str)
for line in reader.lines() { for line in reader.lines() {
match line { match line {
Ok(line) => { Ok(line) => {
if line.contains("#break") { if line.as_slice().contains("#break") {
breakpoint_lines.push(counter); breakpoint_lines.push(counter);
} }
header::parse_name_value_directive( header::parse_name_value_directive(
line, line.as_slice(),
command_directive.to_strbuf()).map(|cmd| { command_directive.to_strbuf()).map(|cmd| {
commands.push(cmd) commands.push(cmd)
}); });
header::parse_name_value_directive( header::parse_name_value_directive(
line, line.as_slice(),
check_directive.to_strbuf()).map(|cmd| { check_directive.to_strbuf()).map(|cmd| {
check_lines.push(cmd) check_lines.push(cmd)
}); });


@ -274,12 +274,13 @@ impl<'a> Parser<'a> {
self.cur.next(); self.cur.next();
} }
Some((_, other)) => { Some((_, other)) => {
self.err( self.err(format!("expected `{}` but found `{}`",
format!("expected `{}` but found `{}`", c, other)); c,
other).as_slice());
} }
None => { None => {
self.err( self.err(format!("expected `{}` but string was terminated",
format!("expected `{}` but string was terminated", c)); c).as_slice());
} }
} }
} }
@ -307,7 +308,8 @@ impl<'a> Parser<'a> {
Some((_, c @ '#')) | Some((_, c @ '{')) | Some((_, c @ '#')) | Some((_, c @ '{')) |
Some((_, c @ '\\')) | Some((_, c @ '}')) => { c } Some((_, c @ '\\')) | Some((_, c @ '}')) => { c }
Some((_, c)) => { Some((_, c)) => {
self.err(format!("invalid escape character `{}`", c)); self.err(format!("invalid escape character `{}`",
c).as_slice());
c c
} }
None => { None => {
@ -459,7 +461,7 @@ impl<'a> Parser<'a> {
return None; return None;
} }
method => { method => {
self.err(format!("unknown method: `{}`", method)); self.err(format!("unknown method: `{}`", method).as_slice());
return None; return None;
} }
} }
@ -526,7 +528,7 @@ impl<'a> Parser<'a> {
let word = self.word(); let word = self.word();
if word != "offset" { if word != "offset" {
self.err(format!("expected `offset`, found `{}`", self.err(format!("expected `offset`, found `{}`",
word)); word).as_slice());
} else { } else {
self.must_consume(':'); self.must_consume(':');
match self.integer() { match self.integer() {
@ -566,7 +568,7 @@ impl<'a> Parser<'a> {
"many" => Keyword(Many), "many" => Keyword(Many),
word => { word => {
self.err(format!("unexpected plural selector `{}`", self.err(format!("unexpected plural selector `{}`",
word)); word).as_slice());
if word == "" { if word == "" {
break break
} else { } else {


@ -46,7 +46,7 @@ macro_rules! rtassert (
macro_rules! rtabort ( macro_rules! rtabort (
($($arg:tt)*) => ( { ($($arg:tt)*) => ( {
::macros::abort(format!($($arg)*)); ::macros::abort(format!($($arg)*).as_slice());
} ) } )
) )


@ -147,7 +147,10 @@ pub fn expand_syntax_ext(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
Some((err_pos, err_str)) => { Some((err_pos, err_str)) => {
let pos = expr.span.lo + syntax::codemap::Pos::from_uint(err_pos + 1); let pos = expr.span.lo + syntax::codemap::Pos::from_uint(err_pos + 1);
let span = syntax::codemap::mk_sp(pos,pos); let span = syntax::codemap::mk_sp(pos,pos);
cx.span_err(span, format!("invalid hex float literal in hexfloat!: {}", err_str)); cx.span_err(span,
format!("invalid hex float literal in hexfloat!: \
{}",
err_str).as_slice());
return base::DummyResult::expr(sp); return base::DummyResult::expr(sp);
} }
_ => () _ => ()


@ -302,8 +302,10 @@ pub fn mod_enabled(level: u32, module: &str) -> bool {
enabled(level, module, unsafe { (*DIRECTIVES).iter() }) enabled(level, module, unsafe { (*DIRECTIVES).iter() })
} }
fn enabled(level: u32, module: &str, fn enabled(level: u32,
iter: slice::Items<directive::LogDirective>) -> bool { module: &str,
iter: slice::Items<directive::LogDirective>)
-> bool {
// Search for the longest match, the vector is assumed to be pre-sorted. // Search for the longest match, the vector is assumed to be pre-sorted.
for directive in iter.rev() { for directive in iter.rev() {
match directive.name { match directive.name {
@ -322,7 +324,7 @@ fn enabled(level: u32, module: &str,
/// `Once` primitive (and this function is called from that primitive). /// `Once` primitive (and this function is called from that primitive).
fn init() { fn init() {
let mut directives = match os::getenv("RUST_LOG") { let mut directives = match os::getenv("RUST_LOG") {
Some(spec) => directive::parse_logging_spec(spec), Some(spec) => directive::parse_logging_spec(spec.as_slice()),
None => Vec::new(), None => Vec::new(),
}; };


@ -104,9 +104,10 @@ fn get_error(_: c_int) -> IoError {
#[cfg(not(windows))] #[cfg(not(windows))]
fn get_error(s: c_int) -> IoError { fn get_error(s: c_int) -> IoError {
use std::io; use std::io;
use std::str::raw::from_c_str;
let err_str = unsafe { from_c_str(gai_strerror(s)) }; let err_str = unsafe {
CString::new(gai_strerror(s), false).as_str().unwrap().to_strbuf()
};
IoError { IoError {
kind: io::OtherIoError, kind: io::OtherIoError,
desc: "unable to resolve host", desc: "unable to resolve host",


@ -604,7 +604,7 @@ impl_to_biguint!(u32, FromPrimitive::from_u32)
impl_to_biguint!(u64, FromPrimitive::from_u64) impl_to_biguint!(u64, FromPrimitive::from_u64)
impl ToStrRadix for BigUint { impl ToStrRadix for BigUint {
fn to_str_radix(&self, radix: uint) -> ~str { fn to_str_radix(&self, radix: uint) -> StrBuf {
assert!(1 < radix && radix <= 16); assert!(1 < radix && radix <= 16);
let (base, max_len) = get_radix_base(radix); let (base, max_len) = get_radix_base(radix);
if base == BigDigit::base { if base == BigDigit::base {
@ -627,15 +627,17 @@ impl ToStrRadix for BigUint {
return result; return result;
} }
fn fill_concat(v: &[BigDigit], radix: uint, l: uint) -> ~str { fn fill_concat(v: &[BigDigit], radix: uint, l: uint) -> StrBuf {
if v.is_empty() { return "0".to_owned() } if v.is_empty() {
return "0".to_strbuf()
}
let mut s = StrBuf::with_capacity(v.len() * l); let mut s = StrBuf::with_capacity(v.len() * l);
for n in v.iter().rev() { for n in v.iter().rev() {
let ss = (*n as uint).to_str_radix(radix); let ss = (*n as uint).to_str_radix(radix);
s.push_str("0".repeat(l - ss.len())); s.push_str("0".repeat(l - ss.len()));
s.push_str(ss); s.push_str(ss.as_slice());
} }
s.as_slice().trim_left_chars('0').to_owned() s.as_slice().trim_left_chars('0').to_strbuf()
} }
} }
} }
@ -1209,11 +1211,11 @@ impl_to_bigint!(u64, FromPrimitive::from_u64)
impl ToStrRadix for BigInt { impl ToStrRadix for BigInt {
#[inline] #[inline]
fn to_str_radix(&self, radix: uint) -> ~str { fn to_str_radix(&self, radix: uint) -> StrBuf {
match self.sign { match self.sign {
Plus => self.data.to_str_radix(radix), Plus => self.data.to_str_radix(radix),
Zero => "0".to_owned(), Zero => "0".to_strbuf(),
Minus => "-".to_owned() + self.data.to_str_radix(radix) Minus => format_strbuf!("-{}", self.data.to_str_radix(radix)),
} }
} }
} }


@ -175,11 +175,15 @@ impl<T: fmt::Show + Num + Ord> fmt::Show for Complex<T> {
} }
impl<T: ToStrRadix + Num + Ord> ToStrRadix for Complex<T> { impl<T: ToStrRadix + Num + Ord> ToStrRadix for Complex<T> {
fn to_str_radix(&self, radix: uint) -> ~str { fn to_str_radix(&self, radix: uint) -> StrBuf {
if self.im < Zero::zero() { if self.im < Zero::zero() {
format!("{}-{}i", self.re.to_str_radix(radix), (-self.im).to_str_radix(radix)) format_strbuf!("{}-{}i",
self.re.to_str_radix(radix),
(-self.im).to_str_radix(radix))
} else { } else {
format!("{}+{}i", self.re.to_str_radix(radix), self.im.to_str_radix(radix)) format_strbuf!("{}+{}i",
self.re.to_str_radix(radix),
self.im.to_str_radix(radix))
} }
} }
} }


@ -281,8 +281,10 @@ impl<T: fmt::Show> fmt::Show for Ratio<T> {
} }
impl<T: ToStrRadix> ToStrRadix for Ratio<T> { impl<T: ToStrRadix> ToStrRadix for Ratio<T> {
/// Renders as `numer/denom` where the numbers are in base `radix`. /// Renders as `numer/denom` where the numbers are in base `radix`.
fn to_str_radix(&self, radix: uint) -> ~str { fn to_str_radix(&self, radix: uint) -> StrBuf {
format!("{}/{}", self.numer.to_str_radix(radix), self.denom.to_str_radix(radix)) format_strbuf!("{}/{}",
self.numer.to_str_radix(radix),
self.denom.to_str_radix(radix))
} }
} }


@ -278,7 +278,10 @@ impl<'a> Parser<'a> {
fn noteof(&mut self, expected: &str) -> Result<(), Error> { fn noteof(&mut self, expected: &str) -> Result<(), Error> {
match self.next_char() { match self.next_char() {
true => Ok(()), true => Ok(()),
false => self.err(format!("Expected {} but got EOF.", expected)), false => {
self.err(format!("Expected {} but got EOF.",
expected).as_slice())
}
} }
} }
@ -286,8 +289,11 @@ impl<'a> Parser<'a> {
match self.next_char() { match self.next_char() {
true if self.cur() == expected => Ok(()), true if self.cur() == expected => Ok(()),
true => self.err(format!("Expected '{}' but got '{}'.", true => self.err(format!("Expected '{}' but got '{}'.",
expected, self.cur())), expected, self.cur()).as_slice()),
false => self.err(format!("Expected '{}' but got EOF.", expected)), false => {
self.err(format!("Expected '{}' but got EOF.",
expected).as_slice())
}
} }
} }
@ -429,8 +435,10 @@ impl<'a> Parser<'a> {
try!(self.noteof("not a ']'")) try!(self.noteof("not a ']'"))
let c2 = self.cur(); let c2 = self.cur();
if c2 < c { if c2 < c {
return self.err(format!( return self.err(format!("Invalid character class \
"Invalid character class range '{}-{}'", c, c2)) range '{}-{}'",
c,
c2).as_slice())
} }
ranges.push((c, self.cur())) ranges.push((c, self.cur()))
} else { } else {
@ -491,9 +499,12 @@ impl<'a> Parser<'a> {
let closer = let closer =
match self.pos('}') { match self.pos('}') {
Some(i) => i, Some(i) => i,
None => return self.err(format!( None => {
"No closing brace for counted repetition starting at \ return self.err(format!("No closing brace for counted \
position {}.", start)), repetition starting at position \
{}.",
start).as_slice())
}
}; };
self.chari = closer; self.chari = closer;
let greed = try!(self.get_next_greedy()); let greed = try!(self.get_next_greedy());
@ -525,19 +536,19 @@ impl<'a> Parser<'a> {
if min > MAX_REPEAT { if min > MAX_REPEAT {
return self.err(format!( return self.err(format!(
"{} exceeds maximum allowed repetitions ({})", "{} exceeds maximum allowed repetitions ({})",
min, MAX_REPEAT)); min, MAX_REPEAT).as_slice());
} }
if max.is_some() { if max.is_some() {
let m = max.unwrap(); let m = max.unwrap();
if m > MAX_REPEAT { if m > MAX_REPEAT {
return self.err(format!( return self.err(format!(
"{} exceeds maximum allowed repetitions ({})", "{} exceeds maximum allowed repetitions ({})",
m, MAX_REPEAT)); m, MAX_REPEAT).as_slice());
} }
if m < min { if m < min {
return self.err(format!( return self.err(format!(
"Max repetitions ({}) cannot be smaller than min \ "Max repetitions ({}) cannot be smaller than min \
repetitions ({}).", m, min)); repetitions ({}).", m, min).as_slice());
} }
} }
@ -600,7 +611,10 @@ impl<'a> Parser<'a> {
if c.is_uppercase() { flags |= FLAG_NEGATED } if c.is_uppercase() { flags |= FLAG_NEGATED }
Ok(Class(ranges, flags)) Ok(Class(ranges, flags))
} }
_ => self.err(format!("Invalid escape sequence '\\\\{}'", c)), _ => {
self.err(format!("Invalid escape sequence '\\\\{}'",
c).as_slice())
}
} }
} }
@ -619,7 +633,7 @@ impl<'a> Parser<'a> {
Some(i) => i, Some(i) => i,
None => return self.err(format!( None => return self.err(format!(
"Missing '\\}' for unclosed '\\{' at position {}", "Missing '\\}' for unclosed '\\{' at position {}",
self.chari)), self.chari).as_slice()),
}; };
if closer - self.chari + 1 == 0 { if closer - self.chari + 1 == 0 {
return self.err("No Unicode class name found.") return self.err("No Unicode class name found.")
@ -634,8 +648,10 @@ impl<'a> Parser<'a> {
self.chari += 1; self.chari += 1;
} }
match find_class(UNICODE_CLASSES, name.as_slice()) { match find_class(UNICODE_CLASSES, name.as_slice()) {
None => return self.err(format!( None => {
"Could not find Unicode class '{}'", name)), return self.err(format!("Could not find Unicode class '{}'",
name).as_slice())
}
Some(ranges) => { Some(ranges) => {
Ok(Class(ranges, negated | (self.flags & FLAG_NOCASE))) Ok(Class(ranges, negated | (self.flags & FLAG_NOCASE)))
} }
@ -659,8 +675,10 @@ impl<'a> Parser<'a> {
let s = self.slice(start, end); let s = self.slice(start, end);
match num::from_str_radix::<u32>(s.as_slice(), 8) { match num::from_str_radix::<u32>(s.as_slice(), 8) {
Some(n) => Ok(Literal(try!(self.char_from_u32(n)), FLAG_EMPTY)), Some(n) => Ok(Literal(try!(self.char_from_u32(n)), FLAG_EMPTY)),
None => self.err(format!( None => {
"Could not parse '{}' as octal number.", s)), self.err(format!("Could not parse '{}' as octal number.",
s).as_slice())
}
} }
} }
@ -674,8 +692,11 @@ impl<'a> Parser<'a> {
let start = self.chari + 2; let start = self.chari + 2;
let closer = let closer =
match self.pos('}') { match self.pos('}') {
None => return self.err(format!( None => {
"Missing '\\}' for unclosed '\\{' at position {}", start)), return self.err(format!("Missing '\\}' for unclosed \
'\\{' at position {}",
start).as_slice())
}
Some(i) => i, Some(i) => i,
}; };
self.chari = closer; self.chari = closer;
@ -689,7 +710,8 @@ impl<'a> Parser<'a> {
fn parse_hex_two(&mut self) -> Result<Ast, Error> { fn parse_hex_two(&mut self) -> Result<Ast, Error> {
let (start, end) = (self.chari, self.chari + 2); let (start, end) = (self.chari, self.chari + 2);
let bad = self.slice(start - 2, self.chars.len()); let bad = self.slice(start - 2, self.chars.len());
try!(self.noteof(format!("Invalid hex escape sequence '{}'", bad))) try!(self.noteof(format!("Invalid hex escape sequence '{}'",
bad).as_slice()))
self.parse_hex_digits(self.slice(start, end).as_slice()) self.parse_hex_digits(self.slice(start, end).as_slice())
} }
@ -697,8 +719,10 @@ impl<'a> Parser<'a> {
fn parse_hex_digits(&self, s: &str) -> Result<Ast, Error> { fn parse_hex_digits(&self, s: &str) -> Result<Ast, Error> {
match num::from_str_radix::<u32>(s, 16) { match num::from_str_radix::<u32>(s, 16) {
Some(n) => Ok(Literal(try!(self.char_from_u32(n)), FLAG_EMPTY)), Some(n) => Ok(Literal(try!(self.char_from_u32(n)), FLAG_EMPTY)),
None => self.err(format!( None => {
"Could not parse '{}' as hex number.", s)), self.err(format!("Could not parse '{}' as hex number.",
s).as_slice())
}
} }
} }
@ -722,7 +746,8 @@ impl<'a> Parser<'a> {
"Capture names can only have underscores, letters and digits.") "Capture names can only have underscores, letters and digits.")
} }
if self.names.contains(&name) { if self.names.contains(&name) {
return self.err(format!("Duplicate capture group name '{}'.", name)) return self.err(format!("Duplicate capture group name '{}'.",
name).as_slice())
} }
self.names.push(name.clone()); self.names.push(name.clone());
self.chari = closer; self.chari = closer;
@ -754,7 +779,7 @@ impl<'a> Parser<'a> {
if sign < 0 { if sign < 0 {
return self.err(format!( return self.err(format!(
"Cannot negate flags twice in '{}'.", "Cannot negate flags twice in '{}'.",
self.slice(start, self.chari + 1))) self.slice(start, self.chari + 1)).as_slice())
} }
sign = -1; sign = -1;
saw_flag = false; saw_flag = false;
@ -765,7 +790,7 @@ impl<'a> Parser<'a> {
if !saw_flag { if !saw_flag {
return self.err(format!( return self.err(format!(
"A valid flag does not follow negation in '{}'", "A valid flag does not follow negation in '{}'",
self.slice(start, self.chari + 1))) self.slice(start, self.chari + 1)).as_slice())
} }
flags = flags ^ flags; flags = flags ^ flags;
} }
@ -777,7 +802,7 @@ impl<'a> Parser<'a> {
return Ok(()) return Ok(())
} }
_ => return self.err(format!( _ => return self.err(format!(
"Unrecognized flag '{}'.", self.cur())), "Unrecognized flag '{}'.", self.cur()).as_slice()),
} }
} }
} }
@ -871,16 +896,21 @@ impl<'a> Parser<'a> {
fn parse_uint(&self, s: &str) -> Result<uint, Error> { fn parse_uint(&self, s: &str) -> Result<uint, Error> {
match from_str::<uint>(s) { match from_str::<uint>(s) {
Some(i) => Ok(i), Some(i) => Ok(i),
None => self.err(format!( None => {
"Expected an unsigned integer but got '{}'.", s)), self.err(format!("Expected an unsigned integer but got '{}'.",
s).as_slice())
}
} }
} }
fn char_from_u32(&self, n: u32) -> Result<char, Error> { fn char_from_u32(&self, n: u32) -> Result<char, Error> {
match char::from_u32(n) { match char::from_u32(n) {
Some(c) => Ok(c), Some(c) => Ok(c),
None => self.err(format!( None => {
"Could not decode '{}' to unicode character.", n)), self.err(format!("Could not decode '{}' to unicode \
character.",
n).as_slice())
}
} }
} }


@ -85,7 +85,7 @@ fn native(cx: &mut ExtCtxt, sp: codemap::Span, tts: &[ast::TokenTree])
let re = match Regex::new(regex.to_owned()) { let re = match Regex::new(regex.to_owned()) {
Ok(re) => re, Ok(re) => re,
Err(err) => { Err(err) => {
cx.span_err(sp, err.to_str()); cx.span_err(sp, err.to_str().as_slice());
return DummyResult::any(sp) return DummyResult::any(sp)
} }
}; };
@ -612,7 +612,7 @@ fn parse(cx: &mut ExtCtxt, tts: &[ast::TokenTree]) -> Option<StrBuf> {
_ => { _ => {
cx.span_err(entry.span, format!( cx.span_err(entry.span, format!(
"expected string literal but got `{}`", "expected string literal but got `{}`",
pprust::lit_to_str(lit))); pprust::lit_to_str(lit)).as_slice());
return None return None
} }
} }
@ -620,7 +620,7 @@ fn parse(cx: &mut ExtCtxt, tts: &[ast::TokenTree]) -> Option<StrBuf> {
_ => { _ => {
cx.span_err(entry.span, format!( cx.span_err(entry.span, format!(
"expected string literal but got `{}`", "expected string literal but got `{}`",
pprust::expr_to_str(entry))); pprust::expr_to_str(entry)).as_slice());
return None return None
} }
}; };


@ -56,17 +56,24 @@ fn run_ar(sess: &Session, args: &str, cwd: Option<&Path>,
Ok(prog) => { Ok(prog) => {
let o = prog.wait_with_output().unwrap(); let o = prog.wait_with_output().unwrap();
if !o.status.success() { if !o.status.success() {
sess.err(format!("{} failed with: {}", cmd, o.status)); sess.err(format!("{} failed with: {}",
cmd,
o.status).as_slice());
sess.note(format!("stdout ---\n{}", sess.note(format!("stdout ---\n{}",
str::from_utf8(o.output.as_slice()).unwrap())); str::from_utf8(o.output
.as_slice()).unwrap())
.as_slice());
sess.note(format!("stderr ---\n{}", sess.note(format!("stderr ---\n{}",
str::from_utf8(o.error.as_slice()).unwrap())); str::from_utf8(o.error
.as_slice()).unwrap())
.as_slice());
sess.abort_if_errors(); sess.abort_if_errors();
} }
o o
}, },
Err(e) => { Err(e) => {
sess.err(format!("could not exec `{}`: {}", ar.as_slice(), e)); sess.err(format!("could not exec `{}`: {}", ar.as_slice(),
e).as_slice());
sess.abort_if_errors(); sess.abort_if_errors();
fail!("rustc::back::archive::run_ar() should not reach this point"); fail!("rustc::back::archive::run_ar() should not reach this point");
} }
@ -158,7 +165,7 @@ impl<'a> Archive<'a> {
if skip.iter().any(|s| *s == filename) { continue } if skip.iter().any(|s| *s == filename) { continue }
if filename.contains(".SYMDEF") { continue } if filename.contains(".SYMDEF") { continue }
let filename = format!("r-{}-{}", name, filename); let filename = format_strbuf!("r-{}-{}", name, filename);
let new_filename = file.with_filename(filename); let new_filename = file.with_filename(filename);
try!(fs::rename(file, &new_filename)); try!(fs::rename(file, &new_filename));
inputs.push(new_filename); inputs.push(new_filename);
@ -178,8 +185,8 @@ impl<'a> Archive<'a> {
}; };
// On Windows, static libraries sometimes show up as libfoo.a and other // On Windows, static libraries sometimes show up as libfoo.a and other
// times show up as foo.lib // times show up as foo.lib
let oslibname = format!("{}{}.{}", osprefix, name, osext); let oslibname = format_strbuf!("{}{}.{}", osprefix, name, osext);
let unixlibname = format!("lib{}.a", name); let unixlibname = format_strbuf!("lib{}.a", name);
let mut rustpath = filesearch::rust_path(); let mut rustpath = filesearch::rust_path();
rustpath.push(self.sess.target_filesearch().get_lib_path()); rustpath.push(self.sess.target_filesearch().get_lib_path());
@ -194,7 +201,8 @@ impl<'a> Archive<'a> {
} }
} }
self.sess.fatal(format!("could not find native static library `{}`, \ self.sess.fatal(format!("could not find native static library `{}`, \
perhaps an -L flag is missing?", name)); perhaps an -L flag is missing?",
name).as_slice());
} }
} }


@ -167,7 +167,9 @@ pub mod write {
"dynamic-no-pic" => lib::llvm::RelocDynamicNoPic, "dynamic-no-pic" => lib::llvm::RelocDynamicNoPic,
_ => { _ => {
sess.err(format!("{} is not a valid relocation mode", sess.err(format!("{} is not a valid relocation mode",
sess.opts.cg.relocation_model)); sess.opts
.cg
.relocation_model).as_slice());
sess.abort_if_errors(); sess.abort_if_errors();
return; return;
} }
@ -219,7 +221,8 @@ pub mod write {
for pass in sess.opts.cg.passes.iter() { for pass in sess.opts.cg.passes.iter() {
pass.as_slice().with_c_str(|s| { pass.as_slice().with_c_str(|s| {
if !llvm::LLVMRustAddPass(mpm, s) { if !llvm::LLVMRustAddPass(mpm, s) {
sess.warn(format!("unknown pass {}, ignoring", *pass)); sess.warn(format!("unknown pass {}, ignoring",
*pass).as_slice());
} }
}) })
} }
@ -360,8 +363,10 @@ pub mod write {
match cmd.output() { match cmd.output() {
Ok(prog) => { Ok(prog) => {
if !prog.status.success() { if !prog.status.success() {
sess.err(format!("linking with `{}` failed: {}", pname, prog.status)); sess.err(format!("linking with `{}` failed: {}",
sess.note(format!("{}", &cmd)); pname,
prog.status).as_slice());
sess.note(format!("{}", &cmd).as_slice());
let mut note = prog.error.clone(); let mut note = prog.error.clone();
note.push_all(prog.output.as_slice()); note.push_all(prog.output.as_slice());
sess.note(str::from_utf8(note.as_slice()).unwrap().to_owned()); sess.note(str::from_utf8(note.as_slice()).unwrap().to_owned());
@ -369,7 +374,9 @@ pub mod write {
} }
}, },
Err(e) => { Err(e) => {
sess.err(format!("could not exec the linker `{}`: {}", pname, e)); sess.err(format!("could not exec the linker `{}`: {}",
pname,
e).as_slice());
sess.abort_if_errors(); sess.abort_if_errors();
} }
} }
@ -666,7 +673,7 @@ pub fn mangle<PI: Iterator<PathElem>>(mut path: PI,
fn push(n: &mut StrBuf, s: &str) { fn push(n: &mut StrBuf, s: &str) {
let sani = sanitize(s); let sani = sanitize(s);
n.push_str(format!("{}{}", sani.len(), sani)); n.push_str(format!("{}{}", sani.len(), sani).as_slice());
} }
// First, connect each component with <len, name> pairs. // First, connect each component with <len, name> pairs.
@ -774,7 +781,9 @@ fn remove(sess: &Session, path: &Path) {
match fs::unlink(path) { match fs::unlink(path) {
Ok(..) => {} Ok(..) => {}
Err(e) => { Err(e) => {
sess.err(format!("failed to remove {}: {}", path.display(), e)); sess.err(format!("failed to remove {}: {}",
path.display(),
e).as_slice());
} }
} }
} }
@ -815,7 +824,7 @@ pub fn filename_for_input(sess: &Session, crate_type: config::CrateType,
let libname = output_lib_filename(id); let libname = output_lib_filename(id);
match crate_type { match crate_type {
config::CrateTypeRlib => { config::CrateTypeRlib => {
out_filename.with_filename(format!("lib{}.rlib", libname)) out_filename.with_filename(format_strbuf!("lib{}.rlib", libname))
} }
config::CrateTypeDylib => { config::CrateTypeDylib => {
let (prefix, suffix) = match sess.targ_cfg.os { let (prefix, suffix) = match sess.targ_cfg.os {
@ -825,10 +834,13 @@ pub fn filename_for_input(sess: &Session, crate_type: config::CrateType,
abi::OsAndroid => (loader::ANDROID_DLL_PREFIX, loader::ANDROID_DLL_SUFFIX), abi::OsAndroid => (loader::ANDROID_DLL_PREFIX, loader::ANDROID_DLL_SUFFIX),
abi::OsFreebsd => (loader::FREEBSD_DLL_PREFIX, loader::FREEBSD_DLL_SUFFIX), abi::OsFreebsd => (loader::FREEBSD_DLL_PREFIX, loader::FREEBSD_DLL_SUFFIX),
}; };
out_filename.with_filename(format!("{}{}{}", prefix, libname, suffix)) out_filename.with_filename(format_strbuf!("{}{}{}",
prefix,
libname,
suffix))
} }
config::CrateTypeStaticlib => { config::CrateTypeStaticlib => {
out_filename.with_filename(format!("lib{}.a", libname)) out_filename.with_filename(format_strbuf!("lib{}.a", libname))
} }
config::CrateTypeExecutable => out_filename.clone(), config::CrateTypeExecutable => out_filename.clone(),
} }
@ -855,12 +867,14 @@ fn link_binary_output(sess: &Session,
let obj_is_writeable = is_writeable(&obj_filename); let obj_is_writeable = is_writeable(&obj_filename);
let out_is_writeable = is_writeable(&out_filename); let out_is_writeable = is_writeable(&out_filename);
if !out_is_writeable { if !out_is_writeable {
sess.fatal(format!("output file {} is not writeable -- check its permissions.", sess.fatal(format!("output file {} is not writeable -- check its \
out_filename.display())); permissions.",
out_filename.display()).as_slice());
} }
else if !obj_is_writeable { else if !obj_is_writeable {
sess.fatal(format!("object file {} is not writeable -- check its permissions.", sess.fatal(format!("object file {} is not writeable -- check its \
obj_filename.display())); permissions.",
obj_filename.display()).as_slice());
} }
match crate_type { match crate_type {
@ -936,7 +950,8 @@ fn link_rlib<'a>(sess: &'a Session,
Ok(..) => {} Ok(..) => {}
Err(e) => { Err(e) => {
sess.err(format!("failed to write {}: {}", sess.err(format!("failed to write {}: {}",
metadata.display(), e)); metadata.display(),
e).as_slice());
sess.abort_if_errors(); sess.abort_if_errors();
} }
} }
@ -956,7 +971,9 @@ fn link_rlib<'a>(sess: &'a Session,
}) { }) {
Ok(()) => {} Ok(()) => {}
Err(e) => { Err(e) => {
sess.err(format!("failed to write compressed bytecode: {}", e)); sess.err(format!("failed to write compressed bytecode: \
{}",
e).as_slice());
sess.abort_if_errors() sess.abort_if_errors()
} }
} }
@ -1003,7 +1020,8 @@ fn link_staticlib(sess: &Session, obj_filename: &Path, out_filename: &Path) {
let name = sess.cstore.get_crate_data(cnum).name.clone(); let name = sess.cstore.get_crate_data(cnum).name.clone();
let p = match *path { let p = match *path {
Some(ref p) => p.clone(), None => { Some(ref p) => p.clone(), None => {
sess.err(format!("could not find rlib for: `{}`", name)); sess.err(format!("could not find rlib for: `{}`",
name).as_slice());
continue continue
} }
}; };
@ -1015,7 +1033,9 @@ fn link_staticlib(sess: &Session, obj_filename: &Path, out_filename: &Path) {
cstore::NativeUnknown => "library", cstore::NativeUnknown => "library",
cstore::NativeFramework => "framework", cstore::NativeFramework => "framework",
}; };
sess.warn(format!("unlinked native {}: {}", name, *lib)); sess.warn(format!("unlinked native {}: {}",
name,
*lib).as_slice());
} }
} }
} }
@ -1049,8 +1069,10 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool,
match prog { match prog {
Ok(prog) => { Ok(prog) => {
if !prog.status.success() { if !prog.status.success() {
sess.err(format!("linking with `{}` failed: {}", pname, prog.status)); sess.err(format!("linking with `{}` failed: {}",
sess.note(format!("{}", &cmd)); pname,
prog.status).as_slice());
sess.note(format!("{}", &cmd).as_slice());
let mut output = prog.error.clone(); let mut output = prog.error.clone();
output.push_all(prog.output.as_slice()); output.push_all(prog.output.as_slice());
sess.note(str::from_utf8(output.as_slice()).unwrap().to_owned()); sess.note(str::from_utf8(output.as_slice()).unwrap().to_owned());
@ -1058,7 +1080,9 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool,
} }
}, },
Err(e) => { Err(e) => {
sess.err(format!("could not exec the linker `{}`: {}", pname, e)); sess.err(format!("could not exec the linker `{}`: {}",
pname,
e).as_slice());
sess.abort_if_errors(); sess.abort_if_errors();
} }
} }
@ -1070,7 +1094,7 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool,
match Command::new("dsymutil").arg(out_filename).status() { match Command::new("dsymutil").arg(out_filename).status() {
Ok(..) => {} Ok(..) => {}
Err(e) => { Err(e) => {
sess.err(format!("failed to run dsymutil: {}", e)); sess.err(format!("failed to run dsymutil: {}", e).as_slice());
sess.abort_if_errors(); sess.abort_if_errors();
} }
} }
@ -1409,7 +1433,8 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session,
// against the archive. // against the archive.
if sess.lto() { if sess.lto() {
let name = sess.cstore.get_crate_data(cnum).name.clone(); let name = sess.cstore.get_crate_data(cnum).name.clone();
time(sess.time_passes(), format!("altering {}.rlib", name), time(sess.time_passes(),
format!("altering {}.rlib", name).as_slice(),
(), |()| { (), |()| {
let dst = tmpdir.join(cratepath.filename().unwrap()); let dst = tmpdir.join(cratepath.filename().unwrap());
match fs::copy(&cratepath, &dst) { match fs::copy(&cratepath, &dst) {
@ -1418,12 +1443,12 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session,
sess.err(format!("failed to copy {} to {}: {}", sess.err(format!("failed to copy {} to {}: {}",
cratepath.display(), cratepath.display(),
dst.display(), dst.display(),
e)); e).as_slice());
sess.abort_if_errors(); sess.abort_if_errors();
} }
} }
let mut archive = Archive::open(sess, dst.clone()); let mut archive = Archive::open(sess, dst.clone());
archive.remove_file(format!("{}.o", name)); archive.remove_file(format!("{}.o", name).as_slice());
let files = archive.files(); let files = archive.files();
if files.iter().any(|s| s.as_slice().ends_with(".o")) { if files.iter().any(|s| s.as_slice().ends_with(".o")) {
cmd.arg(dst); cmd.arg(dst);


@ -47,29 +47,46 @@ pub fn run(sess: &session::Session, llmod: ModuleRef,
let path = match path { let path = match path {
Some(p) => p, Some(p) => p,
None => { None => {
sess.fatal(format!("could not find rlib for: `{}`", name)); sess.fatal(format!("could not find rlib for: `{}`",
name).as_slice());
} }
}; };
let archive = ArchiveRO::open(&path).expect("wanted an rlib"); let archive = ArchiveRO::open(&path).expect("wanted an rlib");
debug!("reading {}", name); debug!("reading {}", name);
let bc = time(sess.time_passes(), format!("read {}.bc.deflate", name), (), |_| let bc = time(sess.time_passes(),
archive.read(format!("{}.bc.deflate", name))); format!("read {}.bc.deflate", name).as_slice(),
(),
|_| {
archive.read(format!("{}.bc.deflate",
name).as_slice())
});
let bc = bc.expect("missing compressed bytecode in archive!"); let bc = bc.expect("missing compressed bytecode in archive!");
let bc = time(sess.time_passes(), format!("inflate {}.bc", name), (), |_| let bc = time(sess.time_passes(),
format!("inflate {}.bc", name).as_slice(),
(),
|_| {
match flate::inflate_bytes(bc) { match flate::inflate_bytes(bc) {
Some(bc) => bc, Some(bc) => bc,
None => sess.fatal(format!("failed to decompress bc of `{}`", name)) None => {
sess.fatal(format!("failed to decompress \
bc of `{}`",
name).as_slice())
}
}
}); });
let ptr = bc.as_slice().as_ptr(); let ptr = bc.as_slice().as_ptr();
debug!("linking {}", name); debug!("linking {}", name);
time(sess.time_passes(), format!("ll link {}", name), (), |()| unsafe { time(sess.time_passes(),
format!("ll link {}", name).as_slice(),
(),
|()| unsafe {
if !llvm::LLVMRustLinkInExternalBitcode(llmod, if !llvm::LLVMRustLinkInExternalBitcode(llmod,
ptr as *libc::c_char, ptr as *libc::c_char,
bc.len() as libc::size_t) { bc.len() as libc::size_t) {
link::llvm_err(sess, link::llvm_err(sess,
(format_strbuf!("failed to load bc of `{}`", format_strbuf!("failed to load bc of `{}`",
name))); name.as_slice()));
} }
}); });
} }


@ -328,18 +328,23 @@ pub fn build_codegen_options(matches: &getopts::Matches) -> CodegenOptions
if option_to_lookup.as_slice() != candidate { continue } if option_to_lookup.as_slice() != candidate { continue }
if !setter(&mut cg, value) { if !setter(&mut cg, value) {
match value { match value {
Some(..) => early_error(format!("codegen option `{}` takes \ Some(..) => {
no value", key)), early_error(format!("codegen option `{}` takes no \
None => early_error(format!("codegen option `{0}` requires \ value", key).as_slice())
}
None => {
early_error(format!("codegen option `{0}` requires \
a value (-C {0}=<value>)", a value (-C {0}=<value>)",
key)) key).as_slice())
}
} }
} }
found = true; found = true;
break; break;
} }
if !found { if !found {
early_error(format!("unknown codegen option: `{}`", key)); early_error(format!("unknown codegen option: `{}`",
key).as_slice());
} }
} }
return cg; return cg;
@ -570,7 +575,10 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
"staticlib" => CrateTypeStaticlib, "staticlib" => CrateTypeStaticlib,
"dylib" => CrateTypeDylib, "dylib" => CrateTypeDylib,
"bin" => CrateTypeExecutable, "bin" => CrateTypeExecutable,
_ => early_error(format!("unknown crate type: `{}`", part)) _ => {
early_error(format!("unknown crate type: `{}`",
part).as_slice())
}
}; };
crate_types.push(new_part) crate_types.push(new_part)
} }
@ -589,14 +597,17 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
let level_short = level_name.slice_chars(0, 1); let level_short = level_name.slice_chars(0, 1);
let level_short = level_short.to_ascii().to_upper().into_str(); let level_short = level_short.to_ascii().to_upper().into_str();
let flags = matches.opt_strs(level_short).move_iter().collect::<Vec<_>>().append( let flags = matches.opt_strs(level_short.as_slice())
matches.opt_strs(level_name).as_slice()); .move_iter()
.collect::<Vec<_>>()
.append(matches.opt_strs(level_name).as_slice());
for lint_name in flags.iter() { for lint_name in flags.iter() {
let lint_name = lint_name.replace("-", "_"); let lint_name = lint_name.replace("-", "_").into_strbuf();
match lint_dict.find_equiv(&lint_name) { match lint_dict.find_equiv(&lint_name) {
None => { None => {
early_error(format!("unknown {} flag: {}", early_error(format!("unknown {} flag: {}",
level_name, lint_name)); level_name,
lint_name).as_slice());
} }
Some(lint) => { Some(lint) => {
lint_opts.push((lint.lint, *level)); lint_opts.push((lint.lint, *level));
@ -618,7 +629,8 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
} }
} }
if this_bit == 0 { if this_bit == 0 {
early_error(format!("unknown debug flag: {}", *debug_flag)) early_error(format!("unknown debug flag: {}",
*debug_flag).as_slice())
} }
debugging_opts |= this_bit; debugging_opts |= this_bit;
} }
@ -638,7 +650,10 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
"bc" => link::OutputTypeBitcode, "bc" => link::OutputTypeBitcode,
"obj" => link::OutputTypeObject, "obj" => link::OutputTypeObject,
"link" => link::OutputTypeExe, "link" => link::OutputTypeExe,
_ => early_error(format!("unknown emission type: `{}`", part)) _ => {
early_error(format!("unknown emission type: `{}`",
part).as_slice())
}
}; };
output_types.push(output_type) output_types.push(output_type)
} }
@ -671,8 +686,9 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
Some("2") => Default, Some("2") => Default,
Some("3") => Aggressive, Some("3") => Aggressive,
Some(arg) => { Some(arg) => {
early_error(format!("optimization level needs to be between 0-3 \ early_error(format!("optimization level needs to be \
(instead was `{}`)", arg)); between 0-3 (instead was `{}`)",
arg).as_slice());
} }
} }
} else { } else {
@ -692,8 +708,9 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
None | None |
Some("2") => FullDebugInfo, Some("2") => FullDebugInfo,
Some(arg) => { Some(arg) => {
early_error(format!("optimization level needs to be between 0-3 \ early_error(format!("optimization level needs to be between \
(instead was `{}`)", arg)); 0-3 (instead was `{}`)",
arg).as_slice());
} }
} }
} else { } else {
@ -725,9 +742,11 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
None => Auto, None => Auto,
Some(arg) => early_error(format!( Some(arg) => {
"argument for --color must be auto, always or never (instead was `{}`)", early_error(format!("argument for --color must be auto, always \
arg)) or never (instead was `{}`)",
arg).as_slice())
}
}; };
Options { Options {


@ -511,7 +511,7 @@ fn write_out_deps(sess: &Session,
Ok(()) => {} Ok(()) => {}
Err(e) => { Err(e) => {
sess.fatal(format!("error writing dependencies to `{}`: {}", sess.fatal(format!("error writing dependencies to `{}`: {}",
deps_filename.display(), e)); deps_filename.display(), e).as_slice());
} }
} }
} }
@ -705,7 +705,8 @@ fn print_flowgraph<W:io::Writer>(analysis: CrateAnalysis,
let m = "graphviz::render failed"; let m = "graphviz::render failed";
io::IoError { io::IoError {
detail: Some(match orig_detail { detail: Some(match orig_detail {
None => m.into_owned(), Some(d) => format!("{}: {}", m, d) None => m.into_strbuf(),
Some(d) => format_strbuf!("{}: {}", m, d)
}), }),
..ioerr ..ioerr
} }


@ -120,7 +120,8 @@ Additional help:
-C help Print codegen options -C help Print codegen options
-W help Print 'lint' options and default settings -W help Print 'lint' options and default settings
-Z help Print internal options for debugging rustc\n", -Z help Print internal options for debugging rustc\n",
getopts::usage(message, config::optgroups().as_slice())); getopts::usage(message.as_slice(),
config::optgroups().as_slice()));
} }
fn describe_warnings() { fn describe_warnings() {
@ -305,16 +306,18 @@ pub fn parse_pretty(sess: &Session, name: &str) -> PpMode {
(arg, "flowgraph") => { (arg, "flowgraph") => {
match arg.and_then(from_str) { match arg.and_then(from_str) {
Some(id) => PpmFlowGraph(id), Some(id) => PpmFlowGraph(id),
None => sess.fatal(format_strbuf!("`pretty flowgraph=<nodeid>` needs \ None => {
sess.fatal(format!("`pretty flowgraph=<nodeid>` needs \
an integer <nodeid>; got {}", an integer <nodeid>; got {}",
arg.unwrap_or("nothing")).as_slice()) arg.unwrap_or("nothing")).as_slice())
} }
} }
}
_ => { _ => {
sess.fatal(format!( sess.fatal(format!(
"argument to `pretty` must be one of `normal`, \ "argument to `pretty` must be one of `normal`, \
`expanded`, `flowgraph=<nodeid>`, `typed`, `identified`, \ `expanded`, `flowgraph=<nodeid>`, `typed`, `identified`, \
or `expanded,identified`; got {}", name)); or `expanded,identified`; got {}", name).as_slice());
} }
} }
} }
@ -406,9 +409,13 @@ fn monitor(f: proc():Send) {
match r.read_to_str() { match r.read_to_str() {
Ok(s) => println!("{}", s), Ok(s) => println!("{}", s),
Err(e) => emitter.emit(None, Err(e) => {
format!("failed to read internal stderr: {}", e), emitter.emit(None,
diagnostic::Error), format!("failed to read internal \
stderr: {}",
e).as_slice(),
diagnostic::Error)
}
} }
} }


@ -141,7 +141,8 @@ impl Session {
// This exists to help with refactoring to eliminate impossible // This exists to help with refactoring to eliminate impossible
// cases later on // cases later on
pub fn impossible_case(&self, sp: Span, msg: &str) -> ! { pub fn impossible_case(&self, sp: Span, msg: &str) -> ! {
self.span_bug(sp, format!("impossible case reached: {}", msg)); self.span_bug(sp,
format!("impossible case reached: {}", msg).as_slice());
} }
pub fn verbose(&self) -> bool { self.debugging_opt(config::VERBOSE) } pub fn verbose(&self) -> bool { self.debugging_opt(config::VERBOSE) }
pub fn time_passes(&self) -> bool { self.debugging_opt(config::TIME_PASSES) } pub fn time_passes(&self) -> bool { self.debugging_opt(config::TIME_PASSES) }


@ -109,7 +109,7 @@ impl<'a> Context<'a> {
self.sess.span_err(span, explain); self.sess.span_err(span, explain);
self.sess.span_note(span, format!("add \\#![feature({})] to the \ self.sess.span_note(span, format!("add \\#![feature({})] to the \
crate attributes to enable", crate attributes to enable",
feature)); feature).as_slice());
} }
} }


@ -65,7 +65,7 @@ pub fn with_version(krate: &str) -> Option<(InternedString, ast::StrStyle)> {
_ => { _ => {
Some((token::intern_and_get_ident(format!("{}\\#{}", Some((token::intern_and_get_ident(format!("{}\\#{}",
krate, krate,
VERSION)), VERSION).as_slice()),
ast::CookedStr)) ast::CookedStr))
} }
} }


@ -327,7 +327,7 @@ fn mk_test_module(cx: &TestCtxt) -> @ast::Item {
pub fn main() { pub fn main() {
#![main] #![main]
use std::slice::Vector; use std::slice::Vector;
test::test_main_static_x(::std::os::args().as_slice(), TESTS); test::test_main_static(::std::os::args().as_slice(), TESTS);
} }
)).unwrap(); )).unwrap();


@ -91,7 +91,8 @@ fn warn_if_multiple_versions(diag: &SpanHandler, cstore: &CStore) {
for ((name, _), dupes) in map.move_iter() { for ((name, _), dupes) in map.move_iter() {
if dupes.len() == 1 { continue } if dupes.len() == 1 { continue }
diag.handler().warn( diag.handler().warn(
format!("using multiple versions of crate `{}`", name)); format!("using multiple versions of crate `{}`",
name).as_slice());
for dupe in dupes.move_iter() { for dupe in dupes.move_iter() {
let data = cstore.get_crate_data(dupe); let data = cstore.get_crate_data(dupe);
diag.span_note(data.span, "used here"); diag.span_note(data.span, "used here");
@ -161,7 +162,7 @@ fn extract_crate_info(e: &Env, i: &ast::ViewItem) -> Option<CrateInfo> {
Some(id) => id Some(id) => id
} }
} }
None => from_str(ident.get().to_str()).unwrap() None => from_str(ident.get().to_str().as_slice()).unwrap()
}; };
Some(CrateInfo { Some(CrateInfo {
ident: ident.get().to_strbuf(), ident: ident.get().to_strbuf(),
@ -224,7 +225,8 @@ fn visit_item(e: &Env, i: &ast::Item) {
cstore::NativeUnknown cstore::NativeUnknown
} else { } else {
e.sess.span_err(m.span, e.sess.span_err(m.span,
format!("unknown kind: `{}`", k)); format!("unknown kind: `{}`",
k).as_slice());
cstore::NativeUnknown cstore::NativeUnknown
} }
} }
@ -243,7 +245,9 @@ fn visit_item(e: &Env, i: &ast::Item) {
} }
}; };
if n.get().is_empty() { if n.get().is_empty() {
e.sess.span_err(m.span, "#[link(name = \"\")] given with empty name"); e.sess.span_err(m.span,
"#[link(name = \"\")] given with \
empty name");
} else { } else {
e.sess e.sess
.cstore .cstore
@ -425,7 +429,7 @@ impl<'a> CrateLoader for Loader<'a> {
let message = format!("crate `{}` contains a macro_registrar fn but \ let message = format!("crate `{}` contains a macro_registrar fn but \
only a version for triple `{}` could be found (need {})", only a version for triple `{}` could be found (need {})",
info.ident, target_triple, driver::host_triple()); info.ident, target_triple, driver::host_triple());
self.env.sess.span_err(krate.span, message); self.env.sess.span_err(krate.span, message.as_slice());
// need to abort now because the syntax expansion // need to abort now because the syntax expansion
// code will shortly attempt to load and execute // code will shortly attempt to load and execute
// code from the found library. // code from the found library.


@ -248,7 +248,7 @@ fn encode_symbol(ecx: &EncodeContext,
} }
None => { None => {
ecx.diag.handler().bug( ecx.diag.handler().bug(
format!("encode_symbol: id not found {}", id)); format!("encode_symbol: id not found {}", id).as_slice());
} }
} }
ebml_w.end_tag(); ebml_w.end_tag();
@ -375,7 +375,7 @@ fn encode_reexported_static_method(ebml_w: &mut Encoder,
ebml_w.start_tag(tag_items_data_item_reexport_name); ebml_w.start_tag(tag_items_data_item_reexport_name);
ebml_w.wr_str(format!("{}::{}", ebml_w.wr_str(format!("{}::{}",
exp.name, exp.name,
token::get_ident(method_ident))); token::get_ident(method_ident)).as_slice());
ebml_w.end_tag(); ebml_w.end_tag();
ebml_w.end_tag(); ebml_w.end_tag();
} }
@ -1439,7 +1439,10 @@ fn synthesize_crate_attrs(ecx: &EncodeContext,
attr::mk_attr_inner( attr::mk_attr_inner(
attr::mk_name_value_item_str( attr::mk_name_value_item_str(
InternedString::new("crate_id"), InternedString::new("crate_id"),
token::intern_and_get_ident(ecx.link_meta.crateid.to_str()))) token::intern_and_get_ident(ecx.link_meta
.crateid
.to_str()
.as_slice())))
} }
let mut attrs = Vec::new(); let mut attrs = Vec::new();


@ -137,15 +137,17 @@ impl<'a> Context<'a> {
&Some(ref r) => format!("{} which `{}` depends on", &Some(ref r) => format!("{} which `{}` depends on",
message, r.ident) message, r.ident)
}; };
self.sess.span_err(self.span, message); self.sess.span_err(self.span, message.as_slice());
let mismatches = self.rejected_via_triple.iter(); let mismatches = self.rejected_via_triple.iter();
if self.rejected_via_triple.len() > 0 { if self.rejected_via_triple.len() > 0 {
self.sess.span_note(self.span, format!("expected triple of {}", self.triple)); self.sess.span_note(self.span,
format!("expected triple of {}",
self.triple).as_slice());
for (i, &CrateMismatch{ ref path, ref got }) in mismatches.enumerate() { for (i, &CrateMismatch{ ref path, ref got }) in mismatches.enumerate() {
self.sess.fileline_note(self.span, self.sess.fileline_note(self.span,
format!("crate `{}` path \\#{}, triple {}: {}", format!("crate `{}` path \\#{}, triple {}: {}",
self.ident, i+1, got, path.display())); self.ident, i+1, got, path.display()).as_slice());
} }
} }
if self.rejected_via_hash.len() > 0 { if self.rejected_via_hash.len() > 0 {
@ -155,7 +157,7 @@ impl<'a> Context<'a> {
for (i, &CrateMismatch{ ref path, .. }) in mismatches.enumerate() { for (i, &CrateMismatch{ ref path, .. }) in mismatches.enumerate() {
self.sess.fileline_note(self.span, self.sess.fileline_note(self.span,
format!("crate `{}` path \\#{}: {}", format!("crate `{}` path \\#{}: {}",
self.ident, i+1, path.display())); self.ident, i+1, path.display()).as_slice());
} }
match self.root { match self.root {
&None => {} &None => {}
@ -163,7 +165,7 @@ impl<'a> Context<'a> {
for (i, path) in r.paths().iter().enumerate() { for (i, path) in r.paths().iter().enumerate() {
self.sess.fileline_note(self.span, self.sess.fileline_note(self.span,
format!("crate `{}` path \\#{}: {}", format!("crate `{}` path \\#{}: {}",
r.ident, i+1, path.display())); r.ident, i+1, path.display()).as_slice());
} }
} }
} }
@ -198,9 +200,10 @@ impl<'a> Context<'a> {
None => return FileDoesntMatch, None => return FileDoesntMatch,
Some(file) => file, Some(file) => file,
}; };
if file.starts_with(rlib_prefix) && file.ends_with(".rlib") { if file.starts_with(rlib_prefix.as_slice()) &&
file.ends_with(".rlib") {
info!("rlib candidate: {}", path.display()); info!("rlib candidate: {}", path.display());
match self.try_match(file, rlib_prefix, ".rlib") { match self.try_match(file, rlib_prefix.as_slice(), ".rlib") {
Some(hash) => { Some(hash) => {
info!("rlib accepted, hash: {}", hash); info!("rlib accepted, hash: {}", hash);
let slot = candidates.find_or_insert_with(hash, |_| { let slot = candidates.find_or_insert_with(hash, |_| {
@ -215,9 +218,12 @@ impl<'a> Context<'a> {
FileDoesntMatch FileDoesntMatch
} }
} }
} else if file.starts_with(dylib_prefix) && file.ends_with(dysuffix){ } else if file.starts_with(dylib_prefix.as_slice()) &&
file.ends_with(dysuffix){
info!("dylib candidate: {}", path.display()); info!("dylib candidate: {}", path.display());
match self.try_match(file, dylib_prefix, dysuffix) { match self.try_match(file,
dylib_prefix.as_slice(),
dysuffix) {
Some(hash) => { Some(hash) => {
info!("dylib accepted, hash: {}", hash); info!("dylib accepted, hash: {}", hash);
let slot = candidates.find_or_insert_with(hash, |_| { let slot = candidates.find_or_insert_with(hash, |_| {
@ -271,18 +277,20 @@ impl<'a> Context<'a> {
_ => { _ => {
self.sess.span_err(self.span, self.sess.span_err(self.span,
format!("multiple matching crates for `{}`", format!("multiple matching crates for `{}`",
self.crate_id.name)); self.crate_id.name).as_slice());
self.sess.note("candidates:"); self.sess.note("candidates:");
for lib in libraries.iter() { for lib in libraries.iter() {
match lib.dylib { match lib.dylib {
Some(ref p) => { Some(ref p) => {
self.sess.note(format!("path: {}", p.display())); self.sess.note(format!("path: {}",
p.display()).as_slice());
} }
None => {} None => {}
} }
match lib.rlib { match lib.rlib {
Some(ref p) => { Some(ref p) => {
self.sess.note(format!("path: {}", p.display())); self.sess.note(format!("path: {}",
p.display()).as_slice());
} }
None => {} None => {}
} }
@ -375,10 +383,13 @@ impl<'a> Context<'a> {
if ret.is_some() { if ret.is_some() {
self.sess.span_err(self.span, self.sess.span_err(self.span,
format!("multiple {} candidates for `{}` \ format!("multiple {} candidates for `{}` \
found", flavor, self.crate_id.name)); found",
flavor,
self.crate_id.name).as_slice());
self.sess.span_note(self.span, self.sess.span_note(self.span,
format!(r"candidate \#1: {}", format!(r"candidate \#1: {}",
ret.get_ref().display())); ret.get_ref()
.display()).as_slice());
error = 1; error = 1;
ret = None; ret = None;
} }
@ -386,7 +397,7 @@ impl<'a> Context<'a> {
error += 1; error += 1;
self.sess.span_note(self.span, self.sess.span_note(self.span,
format!(r"candidate \#{}: {}", error, format!(r"candidate \#{}: {}", error,
lib.display())); lib.display()).as_slice());
continue continue
} }
*slot = Some(metadata); *slot = Some(metadata);
@ -450,7 +461,7 @@ impl<'a> Context<'a> {
} }
pub fn note_crateid_attr(diag: &SpanHandler, crateid: &CrateId) { pub fn note_crateid_attr(diag: &SpanHandler, crateid: &CrateId) {
diag.handler().note(format!("crate_id: {}", crateid.to_str())); diag.handler().note(format!("crate_id: {}", crateid.to_str()).as_slice());
} }
impl ArchiveMetadata { impl ArchiveMetadata {


@ -155,7 +155,10 @@ fn parse_trait_store(st: &mut PState, conv: conv_did) -> ty::TraitStore {
match next(st) { match next(st) {
'~' => ty::UniqTraitStore, '~' => ty::UniqTraitStore,
'&' => ty::RegionTraitStore(parse_region(st, conv), parse_mutability(st)), '&' => ty::RegionTraitStore(parse_region(st, conv), parse_mutability(st)),
c => st.tcx.sess.bug(format!("parse_trait_store(): bad input '{}'", c)) c => {
st.tcx.sess.bug(format!("parse_trait_store(): bad input '{}'",
c).as_slice())
}
} }
} }


@ -1312,7 +1312,8 @@ fn decode_side_tables(xcx: &ExtendedDecodeContext,
match c::astencode_tag::from_uint(tag) { match c::astencode_tag::from_uint(tag) {
None => { None => {
xcx.dcx.tcx.sess.bug( xcx.dcx.tcx.sess.bug(
format!("unknown tag found in side tables: {:x}", tag)); format!("unknown tag found in side tables: {:x}",
tag).as_slice());
} }
Some(value) => { Some(value) => {
let val_doc = entry_doc.get(c::tag_table_val as uint); let val_doc = entry_doc.get(c::tag_table_val as uint);
@ -1376,7 +1377,8 @@ fn decode_side_tables(xcx: &ExtendedDecodeContext,
} }
_ => { _ => {
xcx.dcx.tcx.sess.bug( xcx.dcx.tcx.sess.bug(
format!("unknown tag found in side tables: {:x}", tag)); format!("unknown tag found in side tables: {:x}",
tag).as_slice());
} }
} }
} }


@ -243,7 +243,7 @@ impl<'a> CheckLoanCtxt<'a> {
if restr.loan_path != loan2.loan_path { continue; } if restr.loan_path != loan2.loan_path { continue; }
let old_pronoun = if new_loan.loan_path == old_loan.loan_path { let old_pronoun = if new_loan.loan_path == old_loan.loan_path {
"it".to_owned() "it".to_strbuf()
} else { } else {
format!("`{}`", format!("`{}`",
self.bccx.loan_path_to_str(&*old_loan.loan_path)) self.bccx.loan_path_to_str(&*old_loan.loan_path))
@ -255,7 +255,8 @@ impl<'a> CheckLoanCtxt<'a> {
new_loan.span, new_loan.span,
format!("cannot borrow `{}` as mutable \ format!("cannot borrow `{}` as mutable \
more than once at a time", more than once at a time",
self.bccx.loan_path_to_str(&*new_loan.loan_path))); self.bccx.loan_path_to_str(
&*new_loan.loan_path)).as_slice());
} }
(ty::UniqueImmBorrow, _) => { (ty::UniqueImmBorrow, _) => {
@ -264,7 +265,7 @@ impl<'a> CheckLoanCtxt<'a> {
format!("closure requires unique access to `{}` \ format!("closure requires unique access to `{}` \
but {} is already borrowed", but {} is already borrowed",
self.bccx.loan_path_to_str(&*new_loan.loan_path), self.bccx.loan_path_to_str(&*new_loan.loan_path),
old_pronoun)); old_pronoun).as_slice());
} }
(_, ty::UniqueImmBorrow) => { (_, ty::UniqueImmBorrow) => {
@ -273,7 +274,7 @@ impl<'a> CheckLoanCtxt<'a> {
format!("cannot borrow `{}` as {} because \ format!("cannot borrow `{}` as {} because \
previous closure requires unique access", previous closure requires unique access",
self.bccx.loan_path_to_str(&*new_loan.loan_path), self.bccx.loan_path_to_str(&*new_loan.loan_path),
new_loan.kind.to_user_str())); new_loan.kind.to_user_str()).as_slice());
} }
(_, _) => { (_, _) => {
@ -284,7 +285,7 @@ impl<'a> CheckLoanCtxt<'a> {
self.bccx.loan_path_to_str(&*new_loan.loan_path), self.bccx.loan_path_to_str(&*new_loan.loan_path),
new_loan.kind.to_user_str(), new_loan.kind.to_user_str(),
old_pronoun, old_pronoun,
old_loan.kind.to_user_str())); old_loan.kind.to_user_str()).as_slice());
} }
} }
@ -293,7 +294,8 @@ impl<'a> CheckLoanCtxt<'a> {
self.bccx.span_note( self.bccx.span_note(
span, span,
format!("borrow occurs due to use of `{}` in closure", format!("borrow occurs due to use of `{}` in closure",
self.bccx.loan_path_to_str(&*new_loan.loan_path))); self.bccx.loan_path_to_str(
&*new_loan.loan_path)).as_slice());
} }
_ => { } _ => { }
} }
@ -303,7 +305,8 @@ impl<'a> CheckLoanCtxt<'a> {
format!("the mutable borrow prevents subsequent \ format!("the mutable borrow prevents subsequent \
moves, borrows, or modification of `{0}` \ moves, borrows, or modification of `{0}` \
until the borrow ends", until the borrow ends",
self.bccx.loan_path_to_str(&*old_loan.loan_path)) self.bccx.loan_path_to_str(
&*old_loan.loan_path))
} }
ty::ImmBorrow => { ty::ImmBorrow => {
@ -340,7 +343,7 @@ impl<'a> CheckLoanCtxt<'a> {
self.bccx.span_note( self.bccx.span_note(
old_loan.span, old_loan.span,
format!("{}; {}", borrow_summary, rule_summary)); format!("{}; {}", borrow_summary, rule_summary).as_slice());
let old_loan_span = self.tcx().map.span(old_loan.kill_scope); let old_loan_span = self.tcx().map.span(old_loan.kill_scope);
self.bccx.span_end_note(old_loan_span, self.bccx.span_end_note(old_loan_span,
@ -428,14 +431,14 @@ impl<'a> CheckLoanCtxt<'a> {
format!("cannot assign to {} {} `{}`", format!("cannot assign to {} {} `{}`",
cmt.mutbl.to_user_str(), cmt.mutbl.to_user_str(),
self.bccx.cmt_to_str(&*cmt), self.bccx.cmt_to_str(&*cmt),
self.bccx.loan_path_to_str(&*lp))); self.bccx.loan_path_to_str(&*lp)).as_slice());
} }
None => { None => {
self.bccx.span_err( self.bccx.span_err(
expr.span, expr.span,
format!("cannot assign to {} {}", format!("cannot assign to {} {}",
cmt.mutbl.to_user_str(), cmt.mutbl.to_user_str(),
self.bccx.cmt_to_str(&*cmt))); self.bccx.cmt_to_str(&*cmt)).as_slice());
} }
} }
return; return;
@ -672,11 +675,11 @@ impl<'a> CheckLoanCtxt<'a> {
self.bccx.span_err( self.bccx.span_err(
expr.span, expr.span,
format!("cannot assign to `{}` because it is borrowed", format!("cannot assign to `{}` because it is borrowed",
self.bccx.loan_path_to_str(loan_path))); self.bccx.loan_path_to_str(loan_path)).as_slice());
self.bccx.span_note( self.bccx.span_note(
loan.span, loan.span,
format!("borrow of `{}` occurs here", format!("borrow of `{}` occurs here",
self.bccx.loan_path_to_str(loan_path))); self.bccx.loan_path_to_str(loan_path)).as_slice());
} }
fn check_move_out_from_expr(&self, expr: &ast::Expr) { fn check_move_out_from_expr(&self, expr: &ast::Expr) {
@ -702,11 +705,13 @@ impl<'a> CheckLoanCtxt<'a> {
span, span,
format!("cannot move out of `{}` \ format!("cannot move out of `{}` \
because it is borrowed", because it is borrowed",
self.bccx.loan_path_to_str(move_path))); self.bccx.loan_path_to_str(
move_path)).as_slice());
self.bccx.span_note( self.bccx.span_note(
loan_span, loan_span,
format!("borrow of `{}` occurs here", format!("borrow of `{}` occurs here",
self.bccx.loan_path_to_str(&*loan_path))); self.bccx.loan_path_to_str(
&*loan_path)).as_slice());
} }
} }
true true
@ -745,11 +750,13 @@ impl<'a> CheckLoanCtxt<'a> {
freevar.span, freevar.span,
format!("cannot move `{}` into closure \ format!("cannot move `{}` into closure \
because it is borrowed", because it is borrowed",
this.bccx.loan_path_to_str(move_path))); this.bccx.loan_path_to_str(
move_path)).as_slice());
this.bccx.span_note( this.bccx.span_note(
loan_span, loan_span,
format!("borrow of `{}` occurs here", format!("borrow of `{}` occurs here",
this.bccx.loan_path_to_str(&*loan_path))); this.bccx.loan_path_to_str(
&*loan_path)).as_slice());
} }
} }
} }

@ -174,8 +174,9 @@ fn check_aliasability(bccx: &BorrowckCtxt,
// static item resides in immutable memory and mutating it would // static item resides in immutable memory and mutating it would
// cause segfaults. // cause segfaults.
bccx.tcx.sess.span_err(borrow_span, bccx.tcx.sess.span_err(borrow_span,
format!("borrow of immutable static items with \ "borrow of immutable static items \
unsafe interior is not allowed")); with unsafe interior is not \
allowed");
Err(()) Err(())
} }
mc::InteriorSafe => { mc::InteriorSafe => {
@ -290,7 +291,8 @@ impl<'a> GatherLoanCtxt<'a> {
ty::ReInfer(..) => { ty::ReInfer(..) => {
self.tcx().sess.span_bug( self.tcx().sess.span_bug(
cmt.span, cmt.span,
format!("invalid borrow lifetime: {:?}", loan_region)); format!("invalid borrow lifetime: {:?}",
loan_region).as_slice());
} }
}; };
debug!("loan_scope = {:?}", loan_scope); debug!("loan_scope = {:?}", loan_scope);

@ -131,7 +131,7 @@ fn report_cannot_move_out_of(bccx: &BorrowckCtxt, move_from: mc::cmt) {
bccx.span_err( bccx.span_err(
move_from.span, move_from.span,
format!("cannot move out of {}", format!("cannot move out of {}",
bccx.cmt_to_str(&*move_from))); bccx.cmt_to_str(&*move_from)).as_slice());
} }
mc::cat_downcast(ref b) | mc::cat_downcast(ref b) |
@ -143,7 +143,7 @@ fn report_cannot_move_out_of(bccx: &BorrowckCtxt, move_from: mc::cmt) {
move_from.span, move_from.span,
format!("cannot move out of type `{}`, \ format!("cannot move out of type `{}`, \
which defines the `Drop` trait", which defines the `Drop` trait",
b.ty.user_string(bccx.tcx))); b.ty.user_string(bccx.tcx)).as_slice());
}, },
_ => fail!("this path should not cause illegal move") _ => fail!("this path should not cause illegal move")
} }
@ -163,10 +163,10 @@ fn note_move_destination(bccx: &BorrowckCtxt,
format!("attempting to move value to here (to prevent the move, \ format!("attempting to move value to here (to prevent the move, \
use `ref {0}` or `ref mut {0}` to capture value by \ use `ref {0}` or `ref mut {0}` to capture value by \
reference)", reference)",
pat_name)); pat_name).as_slice());
} else { } else {
bccx.span_note(move_to_span, bccx.span_note(move_to_span,
format!("and here (use `ref {0}` or `ref mut {0}`)", format!("and here (use `ref {0}` or `ref mut {0}`)",
pat_name)); pat_name).as_slice());
} }
} }

@ -461,7 +461,7 @@ impl<'a> BorrowckCtxt<'a> {
use_span, use_span,
format!("{} of possibly uninitialized variable: `{}`", format!("{} of possibly uninitialized variable: `{}`",
verb, verb,
self.loan_path_to_str(lp))); self.loan_path_to_str(lp)).as_slice());
} }
_ => { _ => {
let partially = if lp == moved_lp {""} else {"partially "}; let partially = if lp == moved_lp {""} else {"partially "};
@ -470,7 +470,7 @@ impl<'a> BorrowckCtxt<'a> {
format!("{} of {}moved value: `{}`", format!("{} of {}moved value: `{}`",
verb, verb,
partially, partially,
self.loan_path_to_str(lp))); self.loan_path_to_str(lp)).as_slice());
} }
} }
@ -482,8 +482,12 @@ impl<'a> BorrowckCtxt<'a> {
Some(ast_map::NodeExpr(expr)) => { Some(ast_map::NodeExpr(expr)) => {
(ty::expr_ty_adjusted(self.tcx, expr), expr.span) (ty::expr_ty_adjusted(self.tcx, expr), expr.span)
} }
r => self.tcx.sess.bug(format!("MoveExpr({:?}) maps to {:?}, not Expr", r => {
move.id, r)) self.tcx.sess.bug(format!("MoveExpr({:?}) maps to \
{:?}, not Expr",
move.id,
r).as_slice())
}
}; };
let suggestion = move_suggestion(self.tcx, expr_ty, let suggestion = move_suggestion(self.tcx, expr_ty,
"moved by default (use `copy` to override)"); "moved by default (use `copy` to override)");
@ -491,16 +495,18 @@ impl<'a> BorrowckCtxt<'a> {
expr_span, expr_span,
format!("`{}` moved here because it has type `{}`, which is {}", format!("`{}` moved here because it has type `{}`, which is {}",
self.loan_path_to_str(moved_lp), self.loan_path_to_str(moved_lp),
expr_ty.user_string(self.tcx), suggestion)); expr_ty.user_string(self.tcx),
suggestion).as_slice());
} }
move_data::MovePat => { move_data::MovePat => {
let pat_ty = ty::node_id_to_type(self.tcx, move.id); let pat_ty = ty::node_id_to_type(self.tcx, move.id);
self.tcx.sess.span_note(self.tcx.map.span(move.id), self.tcx.sess.span_note(self.tcx.map.span(move.id),
format!("`{}` moved here because it has type `{}`, \ format!("`{}` moved here because it has type `{}`, \
which is moved by default (use `ref` to override)", which is moved by default (use `ref` to \
override)",
self.loan_path_to_str(moved_lp), self.loan_path_to_str(moved_lp),
pat_ty.user_string(self.tcx))); pat_ty.user_string(self.tcx)).as_slice());
} }
move_data::Captured => { move_data::Captured => {
@ -508,8 +514,12 @@ impl<'a> BorrowckCtxt<'a> {
Some(ast_map::NodeExpr(expr)) => { Some(ast_map::NodeExpr(expr)) => {
(ty::expr_ty_adjusted(self.tcx, expr), expr.span) (ty::expr_ty_adjusted(self.tcx, expr), expr.span)
} }
r => self.tcx.sess.bug(format!("Captured({:?}) maps to {:?}, not Expr", r => {
move.id, r)) self.tcx.sess.bug(format!("Captured({:?}) maps to \
{:?}, not Expr",
move.id,
r).as_slice())
}
}; };
let suggestion = move_suggestion(self.tcx, expr_ty, let suggestion = move_suggestion(self.tcx, expr_ty,
"moved by default (make a copy and \ "moved by default (make a copy and \
@ -519,7 +529,8 @@ impl<'a> BorrowckCtxt<'a> {
format!("`{}` moved into closure environment here because it \ format!("`{}` moved into closure environment here because it \
has type `{}`, which is {}", has type `{}`, which is {}",
self.loan_path_to_str(moved_lp), self.loan_path_to_str(moved_lp),
expr_ty.user_string(self.tcx), suggestion)); expr_ty.user_string(self.tcx),
suggestion).as_slice());
} }
} }
@ -547,10 +558,8 @@ impl<'a> BorrowckCtxt<'a> {
self.tcx.sess.span_err( self.tcx.sess.span_err(
span, span,
format!("re-assignment of immutable variable `{}`", format!("re-assignment of immutable variable `{}`",
self.loan_path_to_str(lp))); self.loan_path_to_str(lp)).as_slice());
self.tcx.sess.span_note( self.tcx.sess.span_note(assign.span, "prior assignment occurs here");
assign.span,
format!("prior assignment occurs here"));
} }
pub fn span_err(&self, s: Span, m: &str) { pub fn span_err(&self, s: Span, m: &str) {
@ -657,23 +666,23 @@ impl<'a> BorrowckCtxt<'a> {
self.tcx.sess.span_err( self.tcx.sess.span_err(
span, span,
format!("{} in an aliasable location", format!("{} in an aliasable location",
prefix)); prefix).as_slice());
} }
mc::AliasableStatic(..) | mc::AliasableStatic(..) |
mc::AliasableStaticMut(..) => { mc::AliasableStaticMut(..) => {
self.tcx.sess.span_err( self.tcx.sess.span_err(
span, span,
format!("{} in a static location", prefix)); format!("{} in a static location", prefix).as_slice());
} }
mc::AliasableManaged => { mc::AliasableManaged => {
self.tcx.sess.span_err( self.tcx.sess.span_err(
span, span,
format!("{} in a `@` pointer", prefix)); format!("{} in a `@` pointer", prefix).as_slice());
} }
mc::AliasableBorrowed => { mc::AliasableBorrowed => {
self.tcx.sess.span_err( self.tcx.sess.span_err(
span, span,
format!("{} in a `&` reference", prefix)); format!("{} in a `&` reference", prefix).as_slice());
} }
} }
} }
@ -710,12 +719,13 @@ impl<'a> BorrowckCtxt<'a> {
}; };
note_and_explain_region( note_and_explain_region(
self.tcx, self.tcx,
format!("{} would have to be valid for ", descr), format!("{} would have to be valid for ",
descr).as_slice(),
loan_scope, loan_scope,
"..."); "...");
note_and_explain_region( note_and_explain_region(
self.tcx, self.tcx,
format!("...but {} is only valid for ", descr), format!("...but {} is only valid for ", descr).as_slice(),
ptr_scope, ptr_scope,
""); "");
} }
@ -739,7 +749,7 @@ impl<'a> BorrowckCtxt<'a> {
} }
mc::PositionalField(idx) => { mc::PositionalField(idx) => {
out.push_char('#'); // invent a notation here out.push_char('#'); // invent a notation here
out.push_str(idx.to_str()); out.push_str(idx.to_str().as_slice());
} }
} }
} }

@ -508,7 +508,9 @@ impl<'a> CFGBuilder<'a> {
fn add_returning_edge(&mut self, fn add_returning_edge(&mut self,
_from_expr: @ast::Expr, _from_expr: @ast::Expr,
from_index: CFGIndex) { from_index: CFGIndex) {
let mut data = CFGEdgeData {exiting_scopes: vec!() }; let mut data = CFGEdgeData {
exiting_scopes: vec!(),
};
for &LoopScope { loop_id: id, .. } in self.loop_scopes.iter().rev() { for &LoopScope { loop_id: id, .. } in self.loop_scopes.iter().rev() {
data.exiting_scopes.push(id); data.exiting_scopes.push(id);
} }
@ -533,13 +535,15 @@ impl<'a> CFGBuilder<'a> {
} }
self.tcx.sess.span_bug( self.tcx.sess.span_bug(
expr.span, expr.span,
format!("no loop scope for id {:?}", loop_id)); format!("no loop scope for id {:?}",
loop_id).as_slice());
} }
r => { r => {
self.tcx.sess.span_bug( self.tcx.sess.span_bug(
expr.span, expr.span,
format!("bad entry `{:?}` in def_map for label", r)); format!("bad entry `{:?}` in def_map for label",
r).as_slice());
} }
} }
} }

@ -82,7 +82,9 @@ impl<'a> dot::Labeller<'a, Node<'a>, Edge<'a>> for LabelledCFG<'a> {
let s = self.ast_map.node_to_str(node_id); let s = self.ast_map.node_to_str(node_id);
// left-aligns the lines // left-aligns the lines
let s = replace_newline_with_backslash_l(s); let s = replace_newline_with_backslash_l(s);
label = label.append(format!("exiting scope_{} {}", i, s.as_slice())); label = label.append(format!("exiting scope_{} {}",
i,
s.as_slice()).as_slice());
} }
dot::EscStr(label.into_maybe_owned()) dot::EscStr(label.into_maybe_owned())
} }

@ -107,7 +107,7 @@ fn check_expr(v: &mut CheckCrateVisitor, e: &Expr, is_const: bool) {
.span_err(e.span, .span_err(e.span,
format!("can not cast to `{}` in a constant \ format!("can not cast to `{}` in a constant \
expression", expression",
ppaux::ty_to_str(v.tcx, ety).as_slice())) ppaux::ty_to_str(v.tcx, ety)).as_slice())
} }
} }
ExprPath(ref pth) => { ExprPath(ref pth) => {

@ -57,10 +57,14 @@ impl<'a> CheckLoopVisitor<'a> {
match cx { match cx {
Loop => {} Loop => {}
Closure => { Closure => {
self.sess.span_err(span, format!("`{}` inside of a closure", name)); self.sess.span_err(span,
format!("`{}` inside of a closure",
name).as_slice());
} }
Normal => { Normal => {
self.sess.span_err(span, format!("`{}` outside of loop", name)); self.sess.span_err(span,
format!("`{}` outside of loop",
name).as_slice());
} }
} }
} }

@ -74,7 +74,7 @@ fn check_expr(cx: &mut MatchCheckCtxt, ex: &Expr) {
// We know the type is inhabited, so this must be wrong // We know the type is inhabited, so this must be wrong
cx.tcx.sess.span_err(ex.span, format!("non-exhaustive patterns: \ cx.tcx.sess.span_err(ex.span, format!("non-exhaustive patterns: \
type {} is non-empty", type {} is non-empty",
ty_to_str(cx.tcx, pat_ty))); ty_to_str(cx.tcx, pat_ty)).as_slice());
} }
// If the type *is* empty, it's vacuously exhaustive // If the type *is* empty, it's vacuously exhaustive
return; return;
@ -164,8 +164,8 @@ fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, pats: Vec<@Pat> ) {
match ty::get(ty).sty { match ty::get(ty).sty {
ty::ty_bool => { ty::ty_bool => {
match *ctor { match *ctor {
val(const_bool(true)) => Some("true".to_owned()), val(const_bool(true)) => Some("true".to_strbuf()),
val(const_bool(false)) => Some("false".to_owned()), val(const_bool(false)) => Some("false".to_strbuf()),
_ => None _ => None
} }
} }
@ -177,7 +177,11 @@ fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, pats: Vec<@Pat> ) {
let variants = ty::enum_variants(cx.tcx, id); let variants = ty::enum_variants(cx.tcx, id);
match variants.iter().find(|v| v.id == vid) { match variants.iter().find(|v| v.id == vid) {
Some(v) => Some(token::get_ident(v.name).get().to_str()), Some(v) => {
Some(token::get_ident(v.name).get()
.to_str()
.into_strbuf())
}
None => { None => {
fail!("check_exhaustive: bad variant in ctor") fail!("check_exhaustive: bad variant in ctor")
} }
@ -185,7 +189,9 @@ fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, pats: Vec<@Pat> ) {
} }
ty::ty_vec(..) | ty::ty_rptr(..) => { ty::ty_vec(..) | ty::ty_rptr(..) => {
match *ctor { match *ctor {
vec(n) => Some(format!("vectors of length {}", n)), vec(n) => {
Some(format_strbuf!("vectors of length {}", n))
}
_ => None _ => None
} }
} }
@ -193,11 +199,11 @@ fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, pats: Vec<@Pat> ) {
} }
} }
}; };
let msg = "non-exhaustive patterns".to_owned() + match ext { let msg = format_strbuf!("non-exhaustive patterns{}", match ext {
Some(ref s) => format!(": {} not covered", *s), Some(ref s) => format_strbuf!(": {} not covered", *s),
None => "".to_owned() None => "".to_strbuf()
}; });
cx.tcx.sess.span_err(sp, msg); cx.tcx.sess.span_err(sp, msg.as_slice());
} }
type matrix = Vec<Vec<@Pat> > ; type matrix = Vec<Vec<@Pat> > ;
@ -739,7 +745,8 @@ fn specialize(cx: &MatchCheckCtxt,
pat_span, pat_span,
format!("struct pattern resolved to {}, \ format!("struct pattern resolved to {}, \
not a struct", not a struct",
ty_to_str(cx.tcx, left_ty))); ty_to_str(cx.tcx,
left_ty)).as_slice());
} }
} }
let args = class_fields.iter().map(|class_field| { let args = class_fields.iter().map(|class_field| {
@ -980,9 +987,10 @@ fn check_legality_of_move_bindings(cx: &MatchCheckCtxt,
_ => { _ => {
cx.tcx.sess.span_bug( cx.tcx.sess.span_bug(
p.span, p.span,
format!("binding pattern {} is \ format!("binding pattern {} is not an \
not an identifier: {:?}", identifier: {:?}",
p.id, p.node)); p.id,
p.node).as_slice());
} }
} }
} }

@ -436,10 +436,11 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
// (#5900). Fall back to doing a limited lookup to get past it. // (#5900). Fall back to doing a limited lookup to get past it.
let ety = ty::expr_ty_opt(tcx.ty_ctxt(), e) let ety = ty::expr_ty_opt(tcx.ty_ctxt(), e)
.or_else(|| astconv::ast_ty_to_prim_ty(tcx.ty_ctxt(), target_ty)) .or_else(|| astconv::ast_ty_to_prim_ty(tcx.ty_ctxt(), target_ty))
.unwrap_or_else(|| tcx.ty_ctxt().sess.span_fatal( .unwrap_or_else(|| {
target_ty.span, tcx.ty_ctxt().sess.span_fatal(target_ty.span,
format!("target type not found for const cast") "target type not found for \
)); const cast")
});
let base = eval_const_expr_partial(tcx, base); let base = eval_const_expr_partial(tcx, base);
match base { match base {

@ -102,14 +102,14 @@ impl<'a, O:DataFlowOperator> pprust::PpAnn for DataFlowContext<'a, O> {
let gens_str = if gens.iter().any(|&u| u != 0) { let gens_str = if gens.iter().any(|&u| u != 0) {
format!(" gen: {}", bits_to_str(gens)) format!(" gen: {}", bits_to_str(gens))
} else { } else {
"".to_owned() "".to_strbuf()
}; };
let kills = self.kills.slice(start, end); let kills = self.kills.slice(start, end);
let kills_str = if kills.iter().any(|&u| u != 0) { let kills_str = if kills.iter().any(|&u| u != 0) {
format!(" kill: {}", bits_to_str(kills)) format!(" kill: {}", bits_to_str(kills))
} else { } else {
"".to_owned() "".to_strbuf()
}; };
try!(ps.synth_comment(format_strbuf!("id {}: {}{}{}", try!(ps.synth_comment(format_strbuf!("id {}: {}{}{}",
@ -653,7 +653,8 @@ impl<'a, 'b, O:DataFlowOperator> PropagationContext<'a, 'b, O> {
from_expr.span, from_expr.span,
format!("pop_scopes(from_expr={}, to_scope={:?}) \ format!("pop_scopes(from_expr={}, to_scope={:?}) \
to_scope does not enclose from_expr", to_scope does not enclose from_expr",
from_expr.repr(tcx), to_scope.loop_id)); from_expr.repr(tcx),
to_scope.loop_id).as_slice());
} }
} }
} }
@ -765,7 +766,8 @@ impl<'a, 'b, O:DataFlowOperator> PropagationContext<'a, 'b, O> {
None => { None => {
self.tcx().sess.span_bug( self.tcx().sess.span_bug(
expr.span, expr.span,
format!("no loop scope for id {:?}", loop_id)); format!("no loop scope for id {:?}",
loop_id).as_slice());
} }
} }
} }
@ -773,7 +775,8 @@ impl<'a, 'b, O:DataFlowOperator> PropagationContext<'a, 'b, O> {
r => { r => {
self.tcx().sess.span_bug( self.tcx().sess.span_bug(
expr.span, expr.span,
format!("bad entry `{:?}` in def_map for label", r)); format!("bad entry `{:?}` in def_map for label",
r).as_slice());
} }
} }
} }
@ -789,7 +792,9 @@ impl<'a, 'b, O:DataFlowOperator> PropagationContext<'a, 'b, O> {
fn reset(&mut self, bits: &mut [uint]) { fn reset(&mut self, bits: &mut [uint]) {
let e = if self.dfcx.oper.initial_value() {uint::MAX} else {0}; let e = if self.dfcx.oper.initial_value() {uint::MAX} else {0};
for b in bits.mut_iter() { *b = e; } for b in bits.mut_iter() {
*b = e;
}
} }
fn add_to_entry_set(&mut self, id: ast::NodeId, pred_bits: &[uint]) { fn add_to_entry_set(&mut self, id: ast::NodeId, pred_bits: &[uint]) {
@ -841,7 +846,7 @@ fn bits_to_str(words: &[uint]) -> StrBuf {
let mut v = word; let mut v = word;
for _ in range(0u, uint::BYTES) { for _ in range(0u, uint::BYTES) {
result.push_char(sep); result.push_char(sep);
result.push_str(format!("{:02x}", v & 0xFF)); result.push_str(format!("{:02x}", v & 0xFF).as_slice());
v >>= 8; v >>= 8;
sep = '-'; sep = '-';
} }

@ -118,7 +118,7 @@ fn calculate_type(sess: &session::Session,
let src = sess.cstore.get_used_crate_source(cnum).unwrap(); let src = sess.cstore.get_used_crate_source(cnum).unwrap();
if src.rlib.is_some() { return } if src.rlib.is_some() { return }
sess.err(format!("dependency `{}` not found in rlib format", sess.err(format!("dependency `{}` not found in rlib format",
data.name)); data.name).as_slice());
}); });
return Vec::new(); return Vec::new();
} }
@ -187,7 +187,7 @@ fn calculate_type(sess: &session::Session,
match kind { match kind {
cstore::RequireStatic => "rlib", cstore::RequireStatic => "rlib",
cstore::RequireDynamic => "dylib", cstore::RequireDynamic => "dylib",
})); }).as_slice());
} }
} }
} }
@ -211,7 +211,8 @@ fn add_library(sess: &session::Session,
if link2 != link || link == cstore::RequireStatic { if link2 != link || link == cstore::RequireStatic {
let data = sess.cstore.get_crate_data(cnum); let data = sess.cstore.get_crate_data(cnum);
sess.err(format!("cannot satisfy dependencies so `{}` only \ sess.err(format!("cannot satisfy dependencies so `{}` only \
shows up once", data.name)); shows up once",
data.name).as_slice());
sess.note("having upstream crates all available in one format \ sess.note("having upstream crates all available in one format \
will likely make this go away"); will likely make this go away");
} }

@ -48,8 +48,9 @@ impl<'a> EffectCheckVisitor<'a> {
SafeContext => { SafeContext => {
// Report an error. // Report an error.
self.tcx.sess.span_err(span, self.tcx.sess.span_err(span,
format!("{} requires unsafe function or block", format!("{} requires unsafe function or \
description)) block",
description).as_slice())
} }
UnsafeBlock(block_id) => { UnsafeBlock(block_id) => {
// OK, but record this. // OK, but record this.

@ -422,7 +422,7 @@ impl<'d,'t,TYPER:mc::Typer> ExprUseVisitor<'d,'t,TYPER> {
self.tcx().sess.span_bug( self.tcx().sess.span_bug(
callee.span, callee.span,
format!("unxpected callee type {}", format!("unxpected callee type {}",
callee_ty.repr(self.tcx()))); callee_ty.repr(self.tcx())).as_slice());
} }
} }
} }
@ -448,9 +448,7 @@ impl<'d,'t,TYPER:mc::Typer> ExprUseVisitor<'d,'t,TYPER> {
} }
ast::StmtMac(..) => { ast::StmtMac(..) => {
self.tcx().sess.span_bug( self.tcx().sess.span_bug(stmt.span, "unexpanded stmt macro");
stmt.span,
format!("unexpanded stmt macro"));
} }
} }
} }
@ -518,7 +516,7 @@ impl<'d,'t,TYPER:mc::Typer> ExprUseVisitor<'d,'t,TYPER> {
_ => { _ => {
self.tcx().sess.span_bug( self.tcx().sess.span_bug(
with_expr.span, with_expr.span,
format!("with expression doesn't evaluate to a struct")); "with expression doesn't evaluate to a struct");
} }
}; };
@ -601,7 +599,7 @@ impl<'d,'t,TYPER:mc::Typer> ExprUseVisitor<'d,'t,TYPER> {
ty::ty_rptr(r, ref m) => (m.mutbl, r), ty::ty_rptr(r, ref m) => (m.mutbl, r),
_ => self.tcx().sess.span_bug(expr.span, _ => self.tcx().sess.span_bug(expr.span,
format!("bad overloaded deref type {}", format!("bad overloaded deref type {}",
method_ty.repr(self.tcx()))) method_ty.repr(self.tcx())).as_slice())
}; };
let bk = ty::BorrowKind::from_mutbl(m); let bk = ty::BorrowKind::from_mutbl(m);
self.delegate.borrow(expr.id, expr.span, cmt, self.delegate.borrow(expr.id, expr.span, cmt,

@ -127,10 +127,12 @@ fn check_impl_of_trait(cx: &mut Context, it: &Item, trait_ref: &TraitRef, self_t
check_builtin_bounds(cx, self_ty, trait_def.bounds, |missing| { check_builtin_bounds(cx, self_ty, trait_def.bounds, |missing| {
cx.tcx.sess.span_err(self_type.span, cx.tcx.sess.span_err(self_type.span,
format!("the type `{}', which does not fulfill `{}`, cannot implement this \ format!("the type `{}', which does not fulfill `{}`, cannot implement this \
trait", ty_to_str(cx.tcx, self_ty), missing.user_string(cx.tcx))); trait",
ty_to_str(cx.tcx, self_ty),
missing.user_string(cx.tcx)).as_slice());
cx.tcx.sess.span_note(self_type.span, cx.tcx.sess.span_note(self_type.span,
format!("types implementing this trait must fulfill `{}`", format!("types implementing this trait must fulfill `{}`",
trait_def.bounds.user_string(cx.tcx))); trait_def.bounds.user_string(cx.tcx)).as_slice());
}); });
// If this is a destructor, check kinds. // If this is a destructor, check kinds.
@ -210,8 +212,9 @@ fn with_appropriate_checker(cx: &Context,
b(check_for_bare) b(check_for_bare)
} }
ref s => { ref s => {
cx.tcx.sess.bug( cx.tcx.sess.bug(format!("expect fn type in kind checker, not \
format!("expect fn type in kind checker, not {:?}", s)); {:?}",
s).as_slice());
} }
} }
} }
@ -392,7 +395,7 @@ pub fn check_typaram_bounds(cx: &Context,
format!("instantiating a type parameter with an incompatible type \ format!("instantiating a type parameter with an incompatible type \
`{}`, which does not fulfill `{}`", `{}`, which does not fulfill `{}`",
ty_to_str(cx.tcx, ty), ty_to_str(cx.tcx, ty),
missing.user_string(cx.tcx))); missing.user_string(cx.tcx)).as_slice());
}); });
} }
@ -403,19 +406,26 @@ pub fn check_freevar_bounds(cx: &Context, sp: Span, ty: ty::t,
// Will be Some if the freevar is implicitly borrowed (stack closure). // Will be Some if the freevar is implicitly borrowed (stack closure).
// Emit a less mysterious error message in this case. // Emit a less mysterious error message in this case.
match referenced_ty { match referenced_ty {
Some(rty) => cx.tcx.sess.span_err(sp, Some(rty) => {
format!("cannot implicitly borrow variable of type `{}` in a bounded \ cx.tcx.sess.span_err(sp,
stack closure (implicit reference does not fulfill `{}`)", format!("cannot implicitly borrow variable of type `{}` in a \
ty_to_str(cx.tcx, rty), missing.user_string(cx.tcx))), bounded stack closure (implicit reference does not \
None => cx.tcx.sess.span_err(sp, fulfill `{}`)",
ty_to_str(cx.tcx, rty),
missing.user_string(cx.tcx)).as_slice())
}
None => {
cx.tcx.sess.span_err(sp,
format!("cannot capture variable of type `{}`, which does \ format!("cannot capture variable of type `{}`, which does \
not fulfill `{}`, in a bounded closure", not fulfill `{}`, in a bounded closure",
ty_to_str(cx.tcx, ty), missing.user_string(cx.tcx))), ty_to_str(cx.tcx, ty),
missing.user_string(cx.tcx)).as_slice())
}
} }
cx.tcx.sess.span_note( cx.tcx.sess.span_note(
sp, sp,
format!("this closure's environment must satisfy `{}`", format!("this closure's environment must satisfy `{}`",
bounds.user_string(cx.tcx))); bounds.user_string(cx.tcx)).as_slice());
}); });
} }
@ -426,7 +436,7 @@ pub fn check_trait_cast_bounds(cx: &Context, sp: Span, ty: ty::t,
format!("cannot pack type `{}`, which does not fulfill \ format!("cannot pack type `{}`, which does not fulfill \
`{}`, as a trait bounded by {}", `{}`, as a trait bounded by {}",
ty_to_str(cx.tcx, ty), missing.user_string(cx.tcx), ty_to_str(cx.tcx, ty), missing.user_string(cx.tcx),
bounds.user_string(cx.tcx))); bounds.user_string(cx.tcx)).as_slice());
}); });
} }
@ -436,9 +446,10 @@ fn check_copy(cx: &Context, ty: ty::t, sp: Span, reason: &str) {
ty::type_contents(cx.tcx, ty).to_str()); ty::type_contents(cx.tcx, ty).to_str());
if ty::type_moves_by_default(cx.tcx, ty) { if ty::type_moves_by_default(cx.tcx, ty) {
cx.tcx.sess.span_err( cx.tcx.sess.span_err(
sp, format!("copying a value of non-copyable type `{}`", sp,
ty_to_str(cx.tcx, ty))); format!("copying a value of non-copyable type `{}`",
cx.tcx.sess.span_note(sp, format!("{}", reason)); ty_to_str(cx.tcx, ty)).as_slice());
cx.tcx.sess.span_note(sp, format!("{}", reason).as_slice());
} }
} }
@ -448,7 +459,8 @@ pub fn check_static(tcx: &ty::ctxt, ty: ty::t, sp: Span) -> bool {
ty::ty_param(..) => { ty::ty_param(..) => {
tcx.sess.span_err(sp, tcx.sess.span_err(sp,
format!("value may contain references; \ format!("value may contain references; \
add `'static` bound to `{}`", ty_to_str(tcx, ty))); add `'static` bound to `{}`",
ty_to_str(tcx, ty)).as_slice());
} }
_ => { _ => {
tcx.sess.span_err(sp, "value may contain references"); tcx.sess.span_err(sp, "value may contain references");
@ -564,8 +576,11 @@ pub fn check_cast_for_escaping_regions(
// Ensure that `ty` has a statically known size (i.e., it has the `Sized` bound). // Ensure that `ty` has a statically known size (i.e., it has the `Sized` bound).
fn check_sized(tcx: &ty::ctxt, ty: ty::t, name: StrBuf, sp: Span) { fn check_sized(tcx: &ty::ctxt, ty: ty::t, name: StrBuf, sp: Span) {
if !ty::type_is_sized(tcx, ty) { if !ty::type_is_sized(tcx, ty) {
tcx.sess.span_err(sp, format!("variable `{}` has dynamically sized type `{}`", tcx.sess.span_err(sp,
name, ty_to_str(tcx, ty))); format!("variable `{}` has dynamically sized type \
`{}`",
name,
ty_to_str(tcx, ty)).as_slice());
} }
} }

@ -152,7 +152,8 @@ impl<'a> LanguageItemCollector<'a> {
match self.items.items.get(item_index) { match self.items.items.get(item_index) {
&Some(original_def_id) if original_def_id != item_def_id => { &Some(original_def_id) if original_def_id != item_def_id => {
self.session.err(format!("duplicate entry for `{}`", self.session.err(format!("duplicate entry for `{}`",
LanguageItems::item_name(item_index))); LanguageItems::item_name(
item_index)).as_slice());
} }
&Some(_) | &None => { &Some(_) | &None => {
// OK. // OK.

@ -506,8 +506,10 @@ impl<'a> Context<'a> {
let mut note = None; let mut note = None;
let msg = match src { let msg = match src {
Default => { Default => {
format!("{}, \\#[{}({})] on by default", msg, format_strbuf!("{}, \\#[{}({})] on by default",
level_to_str(level), self.lint_to_str(lint)) msg,
level_to_str(level),
self.lint_to_str(lint))
}, },
CommandLine => { CommandLine => {
format!("{} [-{} {}]", msg, format!("{} [-{} {}]", msg,
@ -522,8 +524,8 @@ impl<'a> Context<'a> {
} }
}; };
match level { match level {
Warn => { self.tcx.sess.span_warn(span, msg); } Warn => self.tcx.sess.span_warn(span, msg.as_slice()),
Deny | Forbid => { self.tcx.sess.span_err(span, msg); } Deny | Forbid => self.tcx.sess.span_err(span, msg.as_slice()),
Allow => fail!(), Allow => fail!(),
} }
@ -552,7 +554,7 @@ impl<'a> Context<'a> {
UnrecognizedLint, UnrecognizedLint,
meta.span, meta.span,
format!("unknown `{}` attribute: `{}`", format!("unknown `{}` attribute: `{}`",
level_to_str(level), lintname)); level_to_str(level), lintname).as_slice());
} }
Some(lint) => { Some(lint) => {
let lint = lint.lint; let lint = lint.lint;
@ -561,7 +563,8 @@ impl<'a> Context<'a> {
self.tcx.sess.span_err(meta.span, self.tcx.sess.span_err(meta.span,
format!("{}({}) overruled by outer forbid({})", format!("{}({}) overruled by outer forbid({})",
level_to_str(level), level_to_str(level),
lintname, lintname)); lintname,
lintname).as_slice());
} else if now != level { } else if now != level {
let src = self.get_source(lint); let src = self.get_source(lint);
self.lint_stack.push((lint, now, src)); self.lint_stack.push((lint, now, src));
@ -965,13 +968,13 @@ fn check_heap_type(cx: &Context, span: Span, ty: ty::t) {
if n_uniq > 0 && lint != ManagedHeapMemory { if n_uniq > 0 && lint != ManagedHeapMemory {
let s = ty_to_str(cx.tcx, ty); let s = ty_to_str(cx.tcx, ty);
let m = format!("type uses owned (Box type) pointers: {}", s); let m = format!("type uses owned (Box type) pointers: {}", s);
cx.span_lint(lint, span, m); cx.span_lint(lint, span, m.as_slice());
} }
if n_box > 0 && lint != OwnedHeapMemory { if n_box > 0 && lint != OwnedHeapMemory {
let s = ty_to_str(cx.tcx, ty); let s = ty_to_str(cx.tcx, ty);
let m = format!("type uses managed (@ type) pointers: {}", s); let m = format!("type uses managed (@ type) pointers: {}", s);
cx.span_lint(lint, span, m); cx.span_lint(lint, span, m.as_slice());
} }
} }
} }
@ -1122,7 +1125,8 @@ fn check_attrs_usage(cx: &Context, attrs: &[ast::Attribute]) {
for &(obs_attr, obs_alter) in obsolete_attrs.iter() { for &(obs_attr, obs_alter) in obsolete_attrs.iter() {
if name.equiv(&obs_attr) { if name.equiv(&obs_attr) {
cx.span_lint(AttributeUsage, attr.span, cx.span_lint(AttributeUsage, attr.span,
format!("obsolete attribute: {:s}", obs_alter)); format!("obsolete attribute: {:s}",
obs_alter).as_slice());
return; return;
} }
} }
@ -1233,7 +1237,7 @@ fn check_item_non_camel_case_types(cx: &Context, it: &ast::Item) {
cx.span_lint( cx.span_lint(
NonCamelCaseTypes, span, NonCamelCaseTypes, span,
format!("{} `{}` should have a camel case identifier", format!("{} `{}` should have a camel case identifier",
sort, token::get_ident(ident))); sort, token::get_ident(ident)).as_slice());
} }
} }
@ -1331,7 +1335,8 @@ fn check_unnecessary_parens_core(cx: &Context, value: &ast::Expr, msg: &str) {
match value.node { match value.node {
ast::ExprParen(_) => { ast::ExprParen(_) => {
cx.span_lint(UnnecessaryParens, value.span, cx.span_lint(UnnecessaryParens, value.span,
format!("unnecessary parentheses around {}", msg)) format!("unnecessary parentheses around {}",
msg).as_slice())
} }
_ => {} _ => {}
} }
@ -1506,8 +1511,10 @@ fn check_missing_doc_attrs(cx: &Context,
} }
}); });
if !has_doc { if !has_doc {
cx.span_lint(MissingDoc, sp, cx.span_lint(MissingDoc,
format!("missing documentation for {}", desc)); sp,
format!("missing documentation for {}",
desc).as_slice());
} }
} }
@ -1675,7 +1682,7 @@ fn check_stability(cx: &Context, e: &ast::Expr) {
_ => format!("use of {} item", label) _ => format!("use of {} item", label)
}; };
cx.span_lint(lint, e.span, msg); cx.span_lint(lint, e.span, msg.as_slice());
} }
impl<'a> Visitor<()> for Context<'a> { impl<'a> Visitor<()> for Context<'a> {
@ -1906,8 +1913,11 @@ pub fn check_crate(tcx: &ty::ctxt,
// in the iteration code. // in the iteration code.
for (id, v) in tcx.sess.lints.borrow().iter() { for (id, v) in tcx.sess.lints.borrow().iter() {
for &(lint, span, ref msg) in v.iter() { for &(lint, span, ref msg) in v.iter() {
tcx.sess.span_bug(span, format!("unprocessed lint {:?} at {}: {}", tcx.sess.span_bug(span,
lint, tcx.map.node_to_str(*id), *msg)) format!("unprocessed lint {:?} at {}: {}",
lint,
tcx.map.node_to_str(*id),
*msg).as_slice())
} }
} }

@ -317,7 +317,7 @@ impl<'a> IrMaps<'a> {
self.tcx self.tcx
.sess .sess
.span_bug(span, format!("no variable registered for id {}", .span_bug(span, format!("no variable registered for id {}",
node_id)); node_id).as_slice());
} }
} }
} }
@ -606,8 +606,9 @@ impl<'a> Liveness<'a> {
// code have to agree about which AST nodes are worth // code have to agree about which AST nodes are worth
// creating liveness nodes for. // creating liveness nodes for.
self.ir.tcx.sess.span_bug( self.ir.tcx.sess.span_bug(
span, format!("no live node registered for node {}", span,
node_id)); format!("no live node registered for node {}",
node_id).as_slice());
} }
} }
} }

@ -224,7 +224,7 @@ pub fn deref_kind(tcx: &ty::ctxt, t: ty::t) -> deref_kind {
None => { None => {
tcx.sess.bug( tcx.sess.bug(
format!("deref_cat() invoked on non-derefable type {}", format!("deref_cat() invoked on non-derefable type {}",
ty_to_str(tcx, t))); ty_to_str(tcx, t)).as_slice());
} }
} }
} }
@ -578,7 +578,8 @@ impl<'t,TYPER:Typer> MemCategorizationContext<'t,TYPER> {
self.tcx().sess.span_bug( self.tcx().sess.span_bug(
span, span,
format!("Upvar of non-closure {} - {}", format!("Upvar of non-closure {} - {}",
fn_node_id, ty.repr(self.tcx()))); fn_node_id,
ty.repr(self.tcx())).as_slice());
} }
} }
} }
@ -727,7 +728,7 @@ impl<'t,TYPER:Typer> MemCategorizationContext<'t,TYPER> {
self.tcx().sess.span_bug( self.tcx().sess.span_bug(
node.span(), node.span(),
format!("Explicit deref of non-derefable type: {}", format!("Explicit deref of non-derefable type: {}",
base_cmt.ty.repr(self.tcx()))); base_cmt.ty.repr(self.tcx())).as_slice());
} }
} }
} }
@ -800,7 +801,7 @@ impl<'t,TYPER:Typer> MemCategorizationContext<'t,TYPER> {
self.tcx().sess.span_bug( self.tcx().sess.span_bug(
elt.span(), elt.span(),
format!("Explicit index of non-index type `{}`", format!("Explicit index of non-index type `{}`",
base_cmt.ty.repr(self.tcx()))); base_cmt.ty.repr(self.tcx())).as_slice());
} }
}; };
@ -884,9 +885,8 @@ impl<'t,TYPER:Typer> MemCategorizationContext<'t,TYPER> {
}, },
_ => { _ => {
tcx.sess.span_bug( tcx.sess.span_bug(pat.span,
pat.span, "type of slice pattern is not a slice");
format!("Type of slice pattern is not a slice"));
} }
} }
} }

@ -632,7 +632,7 @@ impl<'a> PrivacyVisitor<'a> {
UnnamedField(idx) => format!("field \\#{} of {} is private", UnnamedField(idx) => format!("field \\#{} of {} is private",
idx + 1, struct_desc), idx + 1, struct_desc),
}; };
self.tcx.sess.span_err(span, msg); self.tcx.sess.span_err(span, msg.as_slice());
} }
// Given the ID of a method, checks to ensure it's in scope. // Given the ID of a method, checks to ensure it's in scope.
@ -647,7 +647,8 @@ impl<'a> PrivacyVisitor<'a> {
self.report_error(self.ensure_public(span, self.report_error(self.ensure_public(span,
method_id, method_id,
None, None,
format!("method `{}`", string))); format!("method `{}`",
string).as_slice()));
} }
// Checks that a path is in scope. // Checks that a path is in scope.
@ -661,8 +662,12 @@ impl<'a> PrivacyVisitor<'a> {
.unwrap() .unwrap()
.identifier); .identifier);
let origdid = def_id_of_def(orig_def); let origdid = def_id_of_def(orig_def);
self.ensure_public(span, def, Some(origdid), self.ensure_public(span,
format!("{} `{}`", tyname, name)) def,
Some(origdid),
format!("{} `{}`",
tyname,
name).as_slice())
}; };
match *self.last_private_map.get(&path_id) { match *self.last_private_map.get(&path_id) {

@ -234,7 +234,7 @@ impl<'a> ReachableContext<'a> {
None => { None => {
self.tcx.sess.bug(format!("found unmapped ID in worklist: \ self.tcx.sess.bug(format!("found unmapped ID in worklist: \
{}", {}",
search_item)) search_item).as_slice())
} }
} }
} }
@ -324,9 +324,12 @@ impl<'a> ReachableContext<'a> {
ast_map::NodeVariant(_) | ast_map::NodeVariant(_) |
ast_map::NodeStructCtor(_) => {} ast_map::NodeStructCtor(_) => {}
_ => { _ => {
self.tcx.sess.bug(format!("found unexpected thingy in \ self.tcx
worklist: {}", .sess
self.tcx.map.node_to_str(search_item))) .bug(format!("found unexpected thingy in worklist: {}",
self.tcx
.map
.node_to_str(search_item)).as_slice())
} }
} }
} }

@ -1085,14 +1085,14 @@ impl<'a> Resolver<'a> {
self.resolve_error(sp, self.resolve_error(sp,
format!("duplicate definition of {} `{}`", format!("duplicate definition of {} `{}`",
namespace_error_to_str(duplicate_type), namespace_error_to_str(duplicate_type),
token::get_ident(name))); token::get_ident(name)).as_slice());
{ {
let r = child.span_for_namespace(ns); let r = child.span_for_namespace(ns);
for sp in r.iter() { for sp in r.iter() {
self.session.span_note(*sp, self.session.span_note(*sp,
format!("first definition of {} `{}` here", format!("first definition of {} `{}` here",
namespace_error_to_str(duplicate_type), namespace_error_to_str(duplicate_type),
token::get_ident(name))); token::get_ident(name)).as_slice());
} }
} }
} }
@ -2054,7 +2054,7 @@ impl<'a> Resolver<'a> {
import_directive.module_path import_directive.module_path
.as_slice(), .as_slice(),
import_directive.subclass)); import_directive.subclass));
self.resolve_error(import_directive.span, msg); self.resolve_error(import_directive.span, msg.as_slice());
} }
Indeterminate => { Indeterminate => {
// Bail out. We'll come around next time. // Bail out. We'll come around next time.
@ -2427,7 +2427,7 @@ impl<'a> Resolver<'a> {
`{}` in `{}`", `{}` in `{}`",
token::get_ident(source), token::get_ident(source),
self.module_to_str(&*containing_module)); self.module_to_str(&*containing_module));
self.resolve_error(directive.span, msg); self.resolve_error(directive.span, msg.as_slice());
return Failed; return Failed;
} }
let value_used_public = value_used_reexport || value_used_public; let value_used_public = value_used_reexport || value_used_public;
@ -2654,11 +2654,14 @@ impl<'a> Resolver<'a> {
format!("unresolved import. maybe \ format!("unresolved import. maybe \
a missing `extern crate \ a missing `extern crate \
{}`?", {}`?",
segment_name)); segment_name).as_slice());
return Failed; return Failed;
} }
self.resolve_error(span, format!("unresolved import: could not find `{}` in \ self.resolve_error(span,
`{}`.", segment_name, module_name)); format!("unresolved import: could not \
find `{}` in `{}`.",
segment_name,
module_name).as_slice());
return Failed; return Failed;
} }
Indeterminate => { Indeterminate => {
@ -2675,8 +2678,11 @@ impl<'a> Resolver<'a> {
match type_def.module_def { match type_def.module_def {
None => { None => {
// Not a module. // Not a module.
self.resolve_error(span, format!("not a module `{}`", self.resolve_error(
token::get_ident(name))); span,
format!("not a module `{}`",
token::get_ident(name))
.as_slice());
return Failed; return Failed;
} }
Some(ref module_def) => { Some(ref module_def) => {
@ -2717,9 +2723,10 @@ impl<'a> Resolver<'a> {
} }
None => { None => {
// There are no type bindings at all. // There are no type bindings at all.
self.resolve_error(span, self.resolve_error(
span,
format!("not a module `{}`", format!("not a module `{}`",
token::get_ident(name))); token::get_ident(name)).as_slice());
return Failed; return Failed;
} }
} }
@ -2764,16 +2771,15 @@ impl<'a> Resolver<'a> {
let mpath = self.idents_to_str(module_path); let mpath = self.idents_to_str(module_path);
match mpath.as_slice().rfind(':') { match mpath.as_slice().rfind(':') {
Some(idx) => { Some(idx) => {
self.resolve_error(span, self.resolve_error(
format!("unresolved import: could \ span,
not find `{}` in `{}`", format!("unresolved import: could not find `{}` \
// idx +- 1 to account for in `{}`",
// the colons on either // idx +- 1 to account for the colons on \
// side // either side
mpath.as_slice().slice_from(idx + 1),
mpath.as_slice() mpath.as_slice()
.slice_from(idx + 1), .slice_to(idx - 1)).as_slice());
mpath.as_slice()
.slice_to(idx - 1)));
}, },
None => (), None => (),
}; };
@ -3200,7 +3206,7 @@ impl<'a> Resolver<'a> {
} else { } else {
let err = format!("unresolved import (maybe you meant `{}::*`?)", let err = format!("unresolved import (maybe you meant `{}::*`?)",
sn.as_slice().slice(0, sn.len())); sn.as_slice().slice(0, sn.len()));
self.resolve_error(imports.get(index).span, err); self.resolve_error(imports.get(index).span, err.as_slice());
} }
} }
@ -3870,7 +3876,7 @@ impl<'a> Resolver<'a> {
}; };
let msg = format!("attempt to {} a nonexistent trait `{}`", usage_str, path_str); let msg = format!("attempt to {} a nonexistent trait `{}`", usage_str, path_str);
self.resolve_error(trait_reference.path.span, msg); self.resolve_error(trait_reference.path.span, msg.as_slice());
} }
Some(def) => { Some(def) => {
debug!("(resolving trait) found trait def: {:?}", def); debug!("(resolving trait) found trait def: {:?}", def);
@ -4071,7 +4077,7 @@ impl<'a> Resolver<'a> {
format!("variable `{}` from pattern \\#1 is \ format!("variable `{}` from pattern \\#1 is \
not bound in pattern \\#{}", not bound in pattern \\#{}",
token::get_name(key), token::get_name(key),
i + 1)); i + 1).as_slice());
} }
Some(binding_i) => { Some(binding_i) => {
if binding_0.binding_mode != binding_i.binding_mode { if binding_0.binding_mode != binding_i.binding_mode {
@ -4080,7 +4086,7 @@ impl<'a> Resolver<'a> {
format!("variable `{}` is bound with different \ format!("variable `{}` is bound with different \
mode in pattern \\#{} than in pattern \\#1", mode in pattern \\#{} than in pattern \\#1",
token::get_name(key), token::get_name(key),
i + 1)); i + 1).as_slice());
} }
} }
} }
@ -4093,7 +4099,7 @@ impl<'a> Resolver<'a> {
format!("variable `{}` from pattern \\#{} is \ format!("variable `{}` from pattern \\#{} is \
not bound in pattern \\#1", not bound in pattern \\#1",
token::get_name(key), token::get_name(key),
i + 1)); i + 1).as_slice());
} }
} }
} }
@ -4220,7 +4226,7 @@ impl<'a> Resolver<'a> {
None => { None => {
let msg = format!("use of undeclared type name `{}`", let msg = format!("use of undeclared type name `{}`",
self.path_idents_to_str(path)); self.path_idents_to_str(path));
self.resolve_error(ty.span, msg); self.resolve_error(ty.span, msg.as_slice());
} }
} }
@ -4285,12 +4291,12 @@ impl<'a> Resolver<'a> {
self.record_def(pattern.id, (def, lp)); self.record_def(pattern.id, (def, lp));
} }
FoundStructOrEnumVariant(..) => { FoundStructOrEnumVariant(..) => {
self.resolve_error(pattern.span, self.resolve_error(
format!("declaration of `{}` \ pattern.span,
shadows an enum \ format!("declaration of `{}` shadows an enum \
variant or unit-like \ variant or unit-like struct in \
struct in scope", scope",
token::get_name(renamed))); token::get_name(renamed)).as_slice());
} }
FoundConst(def, lp) if mode == RefutableMode => { FoundConst(def, lp) if mode == RefutableMode => {
debug!("(resolving pattern) resolving `{}` to \ debug!("(resolving pattern) resolving `{}` to \
@ -4359,9 +4365,10 @@ impl<'a> Resolver<'a> {
// in the same disjunct, which is an // in the same disjunct, which is an
// error // error
self.resolve_error(pattern.span, self.resolve_error(pattern.span,
format!("identifier `{}` is bound more \ format!("identifier `{}` is bound \
than once in the same pattern", more than once in the same \
path_to_str(path))); pattern",
path_to_str(path)).as_slice());
} }
// Not bound in the same pattern: do nothing // Not bound in the same pattern: do nothing
} }
@ -4407,7 +4414,10 @@ impl<'a> Resolver<'a> {
path.span, path.span,
format!("`{}` is not an enum variant or constant", format!("`{}` is not an enum variant or constant",
token::get_ident( token::get_ident(
path.segments.last().unwrap().identifier))) path.segments
.last()
.unwrap()
.identifier)).as_slice())
} }
None => { None => {
self.resolve_error(path.span, self.resolve_error(path.span,
@ -4435,16 +4445,20 @@ impl<'a> Resolver<'a> {
Some(_) => { Some(_) => {
self.resolve_error(path.span, self.resolve_error(path.span,
format!("`{}` is not an enum variant, struct or const", format!("`{}` is not an enum variant, struct or const",
token::get_ident(path.segments token::get_ident(
.last().unwrap() path.segments
.identifier))); .last()
.unwrap()
.identifier)).as_slice());
} }
None => { None => {
self.resolve_error(path.span, self.resolve_error(path.span,
format!("unresolved enum variant, struct or const `{}`", format!("unresolved enum variant, struct or const `{}`",
token::get_ident(path.segments token::get_ident(
.last().unwrap() path.segments
.identifier))); .last()
.unwrap()
.identifier)).as_slice());
} }
} }
@ -4485,7 +4499,7 @@ impl<'a> Resolver<'a> {
def: {:?}", result); def: {:?}", result);
let msg = format!("`{}` does not name a structure", let msg = format!("`{}` does not name a structure",
self.path_idents_to_str(path)); self.path_idents_to_str(path));
self.resolve_error(path.span, msg); self.resolve_error(path.span, msg.as_slice());
} }
} }
} }
@ -4705,7 +4719,7 @@ impl<'a> Resolver<'a> {
Failed => { Failed => {
let msg = format!("use of undeclared module `{}`", let msg = format!("use of undeclared module `{}`",
self.idents_to_str(module_path_idents.as_slice())); self.idents_to_str(module_path_idents.as_slice()));
self.resolve_error(path.span, msg); self.resolve_error(path.span, msg.as_slice());
return None; return None;
} }
@ -4776,7 +4790,7 @@ impl<'a> Resolver<'a> {
Failed => { Failed => {
let msg = format!("use of undeclared module `::{}`", let msg = format!("use of undeclared module `::{}`",
self.idents_to_str(module_path_idents.as_slice())); self.idents_to_str(module_path_idents.as_slice()));
self.resolve_error(path.span, msg); self.resolve_error(path.span, msg.as_slice());
return None; return None;
} }
@ -5096,12 +5110,12 @@ impl<'a> Resolver<'a> {
format!("`{}` is a structure name, but \ format!("`{}` is a structure name, but \
this expression \ this expression \
uses it like a function name", uses it like a function name",
wrong_name)); wrong_name).as_slice());
self.session.span_note(expr.span, self.session.span_note(expr.span,
format!("Did you mean to write: \ format!("Did you mean to write: \
`{} \\{ /* fields */ \\}`?", `{} \\{ /* fields */ \\}`?",
wrong_name)); wrong_name).as_slice());
} }
_ => { _ => {
@ -5119,10 +5133,11 @@ impl<'a> Resolver<'a> {
if method_scope && token::get_name(self.self_ident.name).get() if method_scope && token::get_name(self.self_ident.name).get()
== wrong_name.as_slice() { == wrong_name.as_slice() {
self.resolve_error(expr.span, self.resolve_error(
format!("`self` is not available in a \ expr.span,
static method. Maybe a `self` \ "`self` is not available \
argument is missing?")); in a static method. Maybe a \
`self` argument is missing?");
} else { } else {
let name = path_to_ident(path).name; let name = path_to_ident(path).name;
let mut msg = match self.find_fallback_in_self_type(name) { let mut msg = match self.find_fallback_in_self_type(name) {
@ -5130,7 +5145,7 @@ impl<'a> Resolver<'a> {
// limit search to 5 to reduce the number // limit search to 5 to reduce the number
// of stupid suggestions // of stupid suggestions
self.find_best_match_for_name(wrong_name.as_slice(), 5) self.find_best_match_for_name(wrong_name.as_slice(), 5)
.map_or("".into_owned(), .map_or("".to_strbuf(),
|x| format!("`{}`", x)) |x| format!("`{}`", x))
} }
Field => Field =>
@ -5147,8 +5162,11 @@ impl<'a> Resolver<'a> {
msg = format!(" Did you mean {}?", msg) msg = format!(" Did you mean {}?", msg)
} }
self.resolve_error(expr.span, format!("unresolved name `{}`.{}", self.resolve_error(
wrong_name, msg)); expr.span,
format!("unresolved name `{}`.{}",
wrong_name,
msg).as_slice());
} }
} }
} }
@ -5182,7 +5200,7 @@ impl<'a> Resolver<'a> {
def: {:?}", result); def: {:?}", result);
let msg = format!("`{}` does not name a structure", let msg = format!("`{}` does not name a structure",
self.path_idents_to_str(path)); self.path_idents_to_str(path));
self.resolve_error(path.span, msg); self.resolve_error(path.span, msg.as_slice());
} }
} }
@ -5211,10 +5229,12 @@ impl<'a> Resolver<'a> {
let renamed = mtwt::resolve(label); let renamed = mtwt::resolve(label);
match self.search_ribs(self.label_ribs.borrow().as_slice(), match self.search_ribs(self.label_ribs.borrow().as_slice(),
renamed, expr.span) { renamed, expr.span) {
None => None => {
self.resolve_error(expr.span, self.resolve_error(
expr.span,
format!("use of undeclared label `{}`", format!("use of undeclared label `{}`",
token::get_ident(label))), token::get_ident(label)).as_slice())
}
Some(DlDef(def @ DefLabel(_))) => { Some(DlDef(def @ DefLabel(_))) => {
// Since this def is a label, it is never read. // Since this def is a label, it is never read.
self.record_def(expr.id, (def, LastMod(AllPublic))) self.record_def(expr.id, (def, LastMod(AllPublic)))
@ -5343,8 +5363,12 @@ impl<'a> Resolver<'a> {
// times, so here is a sanity check it at least comes to // times, so here is a sanity check it at least comes to
// the same conclusion! - nmatsakis // the same conclusion! - nmatsakis
if def != *old_value { if def != *old_value {
self.session.bug(format!("node_id {:?} resolved first to {:?} \ self.session
and then {:?}", node_id, *old_value, def)); .bug(format!("node_id {:?} resolved first to {:?} and \
then {:?}",
node_id,
*old_value,
def).as_slice());
} }
}); });
} }
@ -5356,10 +5380,10 @@ impl<'a> Resolver<'a> {
match pat_binding_mode { match pat_binding_mode {
BindByValue(_) => {} BindByValue(_) => {}
BindByRef(..) => { BindByRef(..) => {
self.resolve_error( self.resolve_error(pat.span,
pat.span, format!("cannot use `ref` binding mode \
format!("cannot use `ref` binding mode with {}", with {}",
descr)); descr).as_slice());
} }
} }
} }

@ -341,7 +341,7 @@ impl<'a> LifetimeContext<'a> {
self.sess.span_err( self.sess.span_err(
lifetime_ref.span, lifetime_ref.span,
format!("use of undeclared lifetime name `'{}`", format!("use of undeclared lifetime name `'{}`",
token::get_name(lifetime_ref.name))); token::get_name(lifetime_ref.name)).as_slice());
} }
fn check_lifetime_names(&self, lifetimes: &Vec<ast::Lifetime>) { fn check_lifetime_names(&self, lifetimes: &Vec<ast::Lifetime>) {
@ -354,7 +354,7 @@ impl<'a> LifetimeContext<'a> {
self.sess.span_err( self.sess.span_err(
lifetime.span, lifetime.span,
format!("illegal lifetime parameter name: `{}`", format!("illegal lifetime parameter name: `{}`",
token::get_name(lifetime.name))); token::get_name(lifetime.name)).as_slice());
} }
} }
@ -366,7 +366,7 @@ impl<'a> LifetimeContext<'a> {
lifetime_j.span, lifetime_j.span,
format!("lifetime name `'{}` declared twice in \ format!("lifetime name `'{}` declared twice in \
the same scope", the same scope",
token::get_name(lifetime_j.name))); token::get_name(lifetime_j.name)).as_slice());
} }
} }
} }

@ -112,14 +112,17 @@ impl<'a> TypeFolder for SubstFolder<'a> {
let root_msg = match self.root_ty { let root_msg = match self.root_ty {
Some(root) => format!(" in the substitution of `{}`", Some(root) => format!(" in the substitution of `{}`",
root.repr(self.tcx)), root.repr(self.tcx)),
None => "".to_owned() None => "".to_strbuf()
}; };
let m = format!("can't use type parameters from outer \ let m = format!("can't use type parameters from outer \
function{}; try using a local type \ function{}; try using a local type \
parameter instead", root_msg); parameter instead",
root_msg);
match self.span { match self.span {
Some(span) => self.tcx.sess.span_err(span, m), Some(span) => {
None => self.tcx.sess.err(m) self.tcx.sess.span_err(span, m.as_slice())
}
None => self.tcx.sess.err(m.as_slice())
} }
ty::mk_err() ty::mk_err()
} }
@ -131,12 +134,15 @@ impl<'a> TypeFolder for SubstFolder<'a> {
let root_msg = match self.root_ty { let root_msg = match self.root_ty {
Some(root) => format!(" in the substitution of `{}`", Some(root) => format!(" in the substitution of `{}`",
root.repr(self.tcx)), root.repr(self.tcx)),
None => "".to_owned() None => "".to_strbuf()
}; };
let m = format!("missing `Self` type param{}", root_msg); let m = format!("missing `Self` type param{}",
root_msg);
match self.span { match self.span {
Some(span) => self.tcx.sess.span_err(span, m), Some(span) => {
None => self.tcx.sess.err(m) self.tcx.sess.span_err(span, m.as_slice())
}
None => self.tcx.sess.err(m.as_slice())
} }
ty::mk_err() ty::mk_err()
} }

@ -462,7 +462,7 @@ fn assert_is_binding_or_wild(bcx: &Block, p: @ast::Pat) {
bcx.sess().span_bug( bcx.sess().span_bug(
p.span, p.span,
format!("expected an identifier pattern but found p: {}", format!("expected an identifier pattern but found p: {}",
p.repr(bcx.tcx()))); p.repr(bcx.tcx())).as_slice());
} }
} }
@ -1229,8 +1229,10 @@ fn compare_values<'a>(
rhs: ValueRef, rhs: ValueRef,
rhs_t: ty::t) rhs_t: ty::t)
-> Result<'a> { -> Result<'a> {
let did = langcall(cx, None, let did = langcall(cx,
format!("comparison of `{}`", cx.ty_to_str(rhs_t)), None,
format!("comparison of `{}`",
cx.ty_to_str(rhs_t)).as_slice(),
StrEqFnLangItem); StrEqFnLangItem);
let result = callee::trans_lang_call(cx, did, [lhs, rhs], None); let result = callee::trans_lang_call(cx, did, [lhs, rhs], None);
Result { Result {
@ -1252,8 +1254,10 @@ fn compare_values<'a>(
Store(cx, lhs, scratch_lhs); Store(cx, lhs, scratch_lhs);
let scratch_rhs = alloca(cx, val_ty(rhs), "__rhs"); let scratch_rhs = alloca(cx, val_ty(rhs), "__rhs");
Store(cx, rhs, scratch_rhs); Store(cx, rhs, scratch_rhs);
let did = langcall(cx, None, let did = langcall(cx,
format!("comparison of `{}`", cx.ty_to_str(rhs_t)), None,
format!("comparison of `{}`",
cx.ty_to_str(rhs_t)).as_slice(),
UniqStrEqFnLangItem); UniqStrEqFnLangItem);
let result = callee::trans_lang_call(cx, did, [scratch_lhs, scratch_rhs], None); let result = callee::trans_lang_call(cx, did, [scratch_lhs, scratch_rhs], None);
Result { Result {
@ -2154,7 +2158,7 @@ fn bind_irrefutable_pat<'a>(
if bcx.sess().asm_comments() { if bcx.sess().asm_comments() {
add_comment(bcx, format!("bind_irrefutable_pat(pat={})", add_comment(bcx, format!("bind_irrefutable_pat(pat={})",
pat.repr(bcx.tcx()))); pat.repr(bcx.tcx())).as_slice());
} }
let _indenter = indenter(); let _indenter = indenter();
@ -2273,7 +2277,7 @@ fn bind_irrefutable_pat<'a>(
} }
ast::PatVec(..) => { ast::PatVec(..) => {
bcx.sess().span_bug(pat.span, bcx.sess().span_bug(pat.span,
format!("vector patterns are never irrefutable!")); "vector patterns are never irrefutable!");
} }
ast::PatWild | ast::PatWildMulti | ast::PatLit(_) | ast::PatRange(_, _) => () ast::PatWild | ast::PatWildMulti | ast::PatLit(_) | ast::PatRange(_, _) => ()
} }

@ -192,7 +192,8 @@ fn represent_type_uncached(cx: &CrateContext, t: ty::t) -> Repr {
if !cases.iter().enumerate().all(|(i,c)| c.discr == (i as Disr)) { if !cases.iter().enumerate().all(|(i,c)| c.discr == (i as Disr)) {
cx.sess().bug(format!("non-C-like enum {} with specified \ cx.sess().bug(format!("non-C-like enum {} with specified \
discriminants", discriminants",
ty::item_path_str(cx.tcx(), def_id))) ty::item_path_str(cx.tcx(),
def_id)).as_slice())
} }
if cases.len() == 1 { if cases.len() == 1 {

@ -341,7 +341,8 @@ fn require_alloc_fn(bcx: &Block, info_ty: ty::t, it: LangItem) -> ast::DefId {
Ok(id) => id, Ok(id) => id,
Err(s) => { Err(s) => {
bcx.sess().fatal(format!("allocation of `{}` {}", bcx.sess().fatal(format!("allocation of `{}` {}",
bcx.ty_to_str(info_ty), s)); bcx.ty_to_str(info_ty),
s).as_slice());
} }
} }
} }
@ -476,7 +477,7 @@ pub fn unset_split_stack(f: ValueRef) {
// silently mangles such symbols, breaking our linkage model. // silently mangles such symbols, breaking our linkage model.
pub fn note_unique_llvm_symbol(ccx: &CrateContext, sym: StrBuf) { pub fn note_unique_llvm_symbol(ccx: &CrateContext, sym: StrBuf) {
if ccx.all_llvm_symbols.borrow().contains(&sym) { if ccx.all_llvm_symbols.borrow().contains(&sym) {
ccx.sess().bug(format!("duplicate LLVM symbol: {}", sym)); ccx.sess().bug(format!("duplicate LLVM symbol: {}", sym).as_slice());
} }
ccx.all_llvm_symbols.borrow_mut().insert(sym); ccx.all_llvm_symbols.borrow_mut().insert(sym);
} }
@ -739,8 +740,11 @@ pub fn iter_structural_ty<'r,
for variant in (*variants).iter() { for variant in (*variants).iter() {
let variant_cx = let variant_cx =
fcx.new_temp_block("enum-iter-variant-".to_owned() + fcx.new_temp_block(
variant.disr_val.to_str()); format_strbuf!("enum-iter-variant-{}",
variant.disr_val
.to_str()
.as_slice()).as_slice());
match adt::trans_case(cx, &*repr, variant.disr_val) { match adt::trans_case(cx, &*repr, variant.disr_val) {
_match::single_result(r) => { _match::single_result(r) => {
AddCase(llswitch, r.val, variant_cx.llbb) AddCase(llswitch, r.val, variant_cx.llbb)
@ -839,7 +843,7 @@ pub fn fail_if_zero<'a>(
} }
_ => { _ => {
cx.sess().bug(format!("fail-if-zero on unexpected type: {}", cx.sess().bug(format!("fail-if-zero on unexpected type: {}",
ty_to_str(cx.tcx(), rhs_t))); ty_to_str(cx.tcx(), rhs_t)).as_slice());
} }
}; };
with_cond(cx, is_zero, |bcx| { with_cond(cx, is_zero, |bcx| {
@ -1504,7 +1508,7 @@ fn trans_enum_variant_or_tuple_like_struct(ccx: &CrateContext,
_ => ccx.sess().bug( _ => ccx.sess().bug(
format!("trans_enum_variant_or_tuple_like_struct: \ format!("trans_enum_variant_or_tuple_like_struct: \
unexpected ctor return type {}", unexpected ctor return type {}",
ty_to_str(ccx.tcx(), ctor_ty))) ty_to_str(ccx.tcx(), ctor_ty)).as_slice())
}; };
let arena = TypedArena::new(); let arena = TypedArena::new();
@ -2052,7 +2056,7 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef {
ref variant => { ref variant => {
ccx.sess().bug(format!("get_item_val(): unexpected variant: {:?}", ccx.sess().bug(format!("get_item_val(): unexpected variant: {:?}",
variant)) variant).as_slice())
} }
}; };
@ -2116,7 +2120,9 @@ pub fn write_metadata(cx: &CrateContext, krate: &ast::Crate) -> Vec<u8> {
let compressed = Vec::from_slice(encoder::metadata_encoding_version) let compressed = Vec::from_slice(encoder::metadata_encoding_version)
.append(match flate::deflate_bytes(metadata.as_slice()) { .append(match flate::deflate_bytes(metadata.as_slice()) {
Some(compressed) => compressed, Some(compressed) => compressed,
None => cx.sess().fatal(format!("failed to compress metadata")) None => {
cx.sess().fatal("failed to compress metadata")
}
}.as_slice()); }.as_slice());
let llmeta = C_bytes(cx, compressed.as_slice()); let llmeta = C_bytes(cx, compressed.as_slice());
let llconst = C_struct(cx, [llmeta], false); let llconst = C_struct(cx, [llmeta], false);


@ -750,9 +750,11 @@ impl<'a> Builder<'a> {
pub fn add_span_comment(&self, sp: Span, text: &str) { pub fn add_span_comment(&self, sp: Span, text: &str) {
if self.ccx.sess().asm_comments() { if self.ccx.sess().asm_comments() {
let s = format!("{} ({})", text, self.ccx.sess().codemap().span_to_str(sp)); let s = format!("{} ({})",
debug!("{}", s); text,
self.add_comment(s); self.ccx.sess().codemap().span_to_str(sp));
debug!("{}", s.as_slice());
self.add_comment(s.as_slice());
} }
} }
@ -761,7 +763,7 @@ impl<'a> Builder<'a> {
let sanitized = text.replace("$", ""); let sanitized = text.replace("$", "");
let comment_text = format!("\\# {}", sanitized.replace("\n", "\n\t# ")); let comment_text = format!("\\# {}", sanitized.replace("\n", "\n\t# "));
self.count_insn("inlineasm"); self.count_insn("inlineasm");
let asm = comment_text.with_c_str(|c| { let asm = comment_text.as_slice().with_c_str(|c| {
unsafe { unsafe {
llvm::LLVMConstInlineAsm(Type::func([], &Type::void(self.ccx)).to_ref(), llvm::LLVMConstInlineAsm(Type::func([], &Type::void(self.ccx)).to_ref(),
c, noname(), False, False) c, noname(), False, False)


@ -102,8 +102,9 @@ fn trans<'a>(bcx: &'a Block<'a>, expr: &ast::Expr) -> Callee<'a> {
_ => { _ => {
bcx.tcx().sess.span_bug( bcx.tcx().sess.span_bug(
expr.span, expr.span,
format!("type of callee is neither bare-fn nor closure: {}", format!("type of callee is neither bare-fn nor closure: \
bcx.ty_to_str(datum.ty))); {}",
bcx.ty_to_str(datum.ty)).as_slice());
} }
} }
} }
@ -151,7 +152,7 @@ fn trans<'a>(bcx: &'a Block<'a>, expr: &ast::Expr) -> Callee<'a> {
bcx.tcx().sess.span_bug( bcx.tcx().sess.span_bug(
ref_expr.span, ref_expr.span,
format!("cannot translate def {:?} \ format!("cannot translate def {:?} \
to a callable thing!", def)); to a callable thing!", def).as_slice());
} }
} }
} }


@ -329,7 +329,7 @@ impl<'a> CleanupMethods<'a> for FunctionContext<'a> {
self.ccx.sess().bug( self.ccx.sess().bug(
format!("no cleanup scope {} found", format!("no cleanup scope {} found",
self.ccx.tcx.map.node_to_str(cleanup_scope))); self.ccx.tcx.map.node_to_str(cleanup_scope)).as_slice());
} }
fn schedule_clean_in_custom_scope(&self, fn schedule_clean_in_custom_scope(&self,
@ -531,7 +531,7 @@ impl<'a> CleanupHelperMethods<'a> for FunctionContext<'a> {
LoopExit(id, _) => { LoopExit(id, _) => {
self.ccx.sess().bug(format!( self.ccx.sess().bug(format!(
"cannot exit from scope {:?}, \ "cannot exit from scope {:?}, \
not in scope", id)); not in scope", id).as_slice());
} }
} }
} }
@ -878,7 +878,8 @@ pub fn temporary_scope(tcx: &ty::ctxt,
r r
} }
None => { None => {
tcx.sess.bug(format!("no temporary scope available for expr {}", id)) tcx.sess.bug(format!("no temporary scope available for expr {}",
id).as_slice())
} }
} }
} }


@ -205,7 +205,7 @@ pub fn store_environment<'a>(
if ccx.sess().asm_comments() { if ccx.sess().asm_comments() {
add_comment(bcx, format!("Copy {} into closure", add_comment(bcx, format!("Copy {} into closure",
bv.to_str(ccx))); bv.to_str(ccx)).as_slice());
} }
let bound_data = GEPi(bcx, llbox, [0u, abi::box_field_body, i]); let bound_data = GEPi(bcx, llbox, [0u, abi::box_field_body, i]);
@ -386,8 +386,9 @@ pub fn get_wrapper_for_bare_fn(ccx: &CrateContext,
ast::DefVariant(_, did, _) | ast::DefStruct(did) => did, ast::DefVariant(_, did, _) | ast::DefStruct(did) => did,
_ => { _ => {
ccx.sess().bug(format!("get_wrapper_for_bare_fn: \ ccx.sess().bug(format!("get_wrapper_for_bare_fn: \
expected a statically resolved fn, got {:?}", expected a statically resolved fn, got \
def)); {:?}",
def).as_slice());
} }
}; };
@ -405,7 +406,7 @@ pub fn get_wrapper_for_bare_fn(ccx: &CrateContext,
_ => { _ => {
ccx.sess().bug(format!("get_wrapper_for_bare_fn: \ ccx.sess().bug(format!("get_wrapper_for_bare_fn: \
expected a closure ty, got {}", expected a closure ty, got {}",
closure_ty.repr(tcx))); closure_ty.repr(tcx)).as_slice());
} }
}; };


@ -108,7 +108,7 @@ pub fn gensym_name(name: &str) -> PathElem {
let num = token::gensym(name); let num = token::gensym(name);
// use one colon which will get translated to a period by the mangler, and // use one colon which will get translated to a period by the mangler, and
// we're guaranteed that `num` is globally unique for this crate. // we're guaranteed that `num` is globally unique for this crate.
PathName(token::gensym(format!("{}:{}", name, num))) PathName(token::gensym(format!("{}:{}", name, num).as_slice()))
} }
pub struct tydesc_info { pub struct tydesc_info {
@ -459,7 +459,7 @@ impl<'a> Block<'a> {
Some(&v) => v, Some(&v) => v,
None => { None => {
self.tcx().sess.bug(format!( self.tcx().sess.bug(format!(
"no def associated with node id {:?}", nid)); "no def associated with node id {:?}", nid).as_slice());
} }
} }
} }
@ -747,9 +747,10 @@ pub fn node_id_substs(bcx: &Block,
if !substs.tps.iter().all(|t| !ty::type_needs_infer(*t)) { if !substs.tps.iter().all(|t| !ty::type_needs_infer(*t)) {
bcx.sess().bug( bcx.sess().bug(
format!("type parameters for node {:?} include inference types: {}", format!("type parameters for node {:?} include inference types: \
{}",
node, node,
substs.repr(bcx.tcx()))); substs.repr(bcx.tcx())).as_slice());
} }
substs.substp(tcx, bcx.fcx.param_substs) substs.substp(tcx, bcx.fcx.param_substs)
@ -816,7 +817,7 @@ pub fn resolve_vtable_under_param_substs(tcx: &ty::ctxt,
_ => { _ => {
tcx.sess.bug(format!( tcx.sess.bug(format!(
"resolve_vtable_under_param_substs: asked to lookup \ "resolve_vtable_under_param_substs: asked to lookup \
but no vtables in the fn_ctxt!")) but no vtables in the fn_ctxt!").as_slice())
} }
} }
} }
@ -870,8 +871,8 @@ pub fn langcall(bcx: &Block,
Err(s) => { Err(s) => {
let msg = format!("{} {}", msg, s); let msg = format!("{} {}", msg, s);
match span { match span {
Some(span) => { bcx.tcx().sess.span_fatal(span, msg); } Some(span) => bcx.tcx().sess.span_fatal(span, msg.as_slice()),
None => { bcx.tcx().sess.fatal(msg); } None => bcx.tcx().sess.fatal(msg.as_slice()),
} }
} }
} }


@ -55,8 +55,9 @@ pub fn const_lit(cx: &CrateContext, e: &ast::Expr, lit: ast::Lit)
C_integral(Type::uint_from_ty(cx, t), i as u64, false) C_integral(Type::uint_from_ty(cx, t), i as u64, false)
} }
_ => cx.sess().span_bug(lit.span, _ => cx.sess().span_bug(lit.span,
format!("integer literal has type {} (expected int or uint)", format!("integer literal has type {} (expected int \
ty_to_str(cx.tcx(), lit_int_ty))) or uint)",
ty_to_str(cx.tcx(), lit_int_ty)).as_slice())
} }
} }
ast::LitFloat(ref fs, t) => { ast::LitFloat(ref fs, t) => {
@ -150,14 +151,14 @@ fn const_deref(cx: &CrateContext, v: ValueRef, t: ty::t, explicit: bool)
} }
_ => { _ => {
cx.sess().bug(format!("unexpected dereferenceable type {}", cx.sess().bug(format!("unexpected dereferenceable type {}",
ty_to_str(cx.tcx(), t))) ty_to_str(cx.tcx(), t)).as_slice())
} }
}; };
(dv, mt.ty) (dv, mt.ty)
} }
None => { None => {
cx.sess().bug(format!("can't dereference const of type {}", cx.sess().bug(format!("can't dereference const of type {}",
ty_to_str(cx.tcx(), t))) ty_to_str(cx.tcx(), t)).as_slice())
} }
} }
} }
@ -206,7 +207,7 @@ pub fn const_expr(cx: &CrateContext, e: &ast::Expr, is_local: bool) -> (ValueRef
cx.sess() cx.sess()
.span_bug(e.span, .span_bug(e.span,
format!("unexpected static function: {:?}", format!("unexpected static function: {:?}",
store)) store).as_slice())
} }
ty::AutoObject(..) => { ty::AutoObject(..) => {
cx.sess() cx.sess()
@ -256,11 +257,11 @@ pub fn const_expr(cx: &CrateContext, e: &ast::Expr, is_local: bool) -> (ValueRef
} }
} }
_ => { _ => {
cx.sess().span_bug(e.span, cx.sess()
format!("unimplemented \ .span_bug(e.span,
const autoref \ format!("unimplemented const \
{:?}", autoref {:?}",
autoref)) autoref).as_slice())
} }
} }
} }
@ -281,7 +282,7 @@ pub fn const_expr(cx: &CrateContext, e: &ast::Expr, is_local: bool) -> (ValueRef
} }
cx.sess().bug(format!("const {} of type {} has size {} instead of {}", cx.sess().bug(format!("const {} of type {} has size {} instead of {}",
e.repr(cx.tcx()), ty_to_str(cx.tcx(), ety), e.repr(cx.tcx()), ty_to_str(cx.tcx(), ety),
csize, tsize)); csize, tsize).as_slice());
} }
(llconst, inlineable) (llconst, inlineable)
} }


@ -165,7 +165,7 @@ pub fn trans_if<'a>(bcx: &'a Block<'a>,
} }
let name = format!("then-block-{}-", thn.id); let name = format!("then-block-{}-", thn.id);
let then_bcx_in = bcx.fcx.new_id_block(name, thn.id); let then_bcx_in = bcx.fcx.new_id_block(name.as_slice(), thn.id);
let then_bcx_out = trans_block(then_bcx_in, thn, dest); let then_bcx_out = trans_block(then_bcx_in, thn, dest);
debuginfo::clear_source_location(bcx.fcx); debuginfo::clear_source_location(bcx.fcx);
@ -287,7 +287,8 @@ pub fn trans_break_cont<'a>(bcx: &'a Block<'a>,
match bcx.tcx().def_map.borrow().find(&expr_id) { match bcx.tcx().def_map.borrow().find(&expr_id) {
Some(&ast::DefLabel(loop_id)) => loop_id, Some(&ast::DefLabel(loop_id)) => loop_id,
ref r => { ref r => {
bcx.tcx().sess.bug(format!("{:?} in def-map for label", r)) bcx.tcx().sess.bug(format!("{:?} in def-map for label",
r).as_slice())
} }
} }
} }


@ -317,16 +317,21 @@ pub fn create_global_var_metadata(cx: &CrateContext,
ast_map::NodeItem(item) => { ast_map::NodeItem(item) => {
match item.node { match item.node {
ast::ItemStatic(..) => (item.ident, item.span), ast::ItemStatic(..) => (item.ident, item.span),
_ => cx.sess().span_bug(item.span, _ => {
format!("debuginfo::create_global_var_metadata() - cx.sess()
Captured var-id refers to unexpected ast_item .span_bug(item.span,
variant: {:?}", format!("debuginfo::\
var_item)) create_global_var_metadata() -
Captured var-id refers to \
unexpected ast_item variant: {:?}",
var_item).as_slice())
}
} }
}, },
_ => cx.sess().bug(format!("debuginfo::create_global_var_metadata() - Captured var-id \ _ => cx.sess().bug(format!("debuginfo::create_global_var_metadata() \
refers to unexpected ast_map variant: {:?}", - Captured var-id refers to unexpected \
var_item)) ast_map variant: {:?}",
var_item).as_slice())
}; };
let filename = span_start(cx, span).file.name.clone(); let filename = span_start(cx, span).file.name.clone();
@ -340,7 +345,8 @@ pub fn create_global_var_metadata(cx: &CrateContext,
let namespace_node = namespace_for_item(cx, ast_util::local_def(node_id)); let namespace_node = namespace_for_item(cx, ast_util::local_def(node_id));
let var_name = token::get_ident(ident).get().to_str(); let var_name = token::get_ident(ident).get().to_str();
let linkage_name = namespace_node.mangled_name_of_contained_item(var_name); let linkage_name =
namespace_node.mangled_name_of_contained_item(var_name.as_slice());
let var_scope = namespace_node.scope; let var_scope = namespace_node.scope;
var_name.as_slice().with_c_str(|var_name| { var_name.as_slice().with_c_str(|var_name| {
@ -380,7 +386,7 @@ pub fn create_local_var_metadata(bcx: &Block, local: &ast::Local) {
None => { None => {
bcx.sess().span_bug(span, bcx.sess().span_bug(span,
format!("no entry in lllocals table for {:?}", format!("no entry in lllocals table for {:?}",
node_id)); node_id).as_slice());
} }
}; };
@ -430,13 +436,17 @@ pub fn create_captured_var_metadata(bcx: &Block,
"debuginfo::create_captured_var_metadata() - \ "debuginfo::create_captured_var_metadata() - \
Captured var-id refers to unexpected \ Captured var-id refers to unexpected \
ast_map variant: {:?}", ast_map variant: {:?}",
ast_item)); ast_item).as_slice());
} }
} }
} }
_ => { _ => {
cx.sess().span_bug(span, format!("debuginfo::create_captured_var_metadata() - \ cx.sess()
Captured var-id refers to unexpected ast_map variant: {:?}", ast_item)); .span_bug(span,
format!("debuginfo::create_captured_var_metadata() - \
Captured var-id refers to unexpected \
ast_map variant: {:?}",
ast_item).as_slice());
} }
}; };
@ -519,7 +529,7 @@ pub fn create_argument_metadata(bcx: &Block, arg: &ast::Arg) {
None => { None => {
bcx.sess().span_bug(span, bcx.sess().span_bug(span,
format!("no entry in llargs table for {:?}", format!("no entry in llargs table for {:?}",
node_id)); node_id).as_slice());
} }
}; };
@ -653,7 +663,7 @@ pub fn create_function_debug_context(cx: &CrateContext,
ast::ExprFnBlock(fn_decl, top_level_block) | ast::ExprFnBlock(fn_decl, top_level_block) |
ast::ExprProc(fn_decl, top_level_block) => { ast::ExprProc(fn_decl, top_level_block) => {
let name = format!("fn{}", token::gensym("fn")); let name = format!("fn{}", token::gensym("fn"));
let name = token::str_to_ident(name); let name = token::str_to_ident(name.as_slice());
(name, fn_decl, (name, fn_decl,
// This is not quite right. It should actually inherit the generics of the // This is not quite right. It should actually inherit the generics of the
// enclosing function. // enclosing function.
@ -681,7 +691,7 @@ pub fn create_function_debug_context(cx: &CrateContext,
cx.sess() cx.sess()
.bug(format!("create_function_debug_context: \ .bug(format!("create_function_debug_context: \
unexpected sort of node: {:?}", unexpected sort of node: {:?}",
fnitem)) fnitem).as_slice())
} }
} }
} }
@ -691,7 +701,8 @@ pub fn create_function_debug_context(cx: &CrateContext,
return FunctionDebugContext { repr: FunctionWithoutDebugInfo }; return FunctionDebugContext { repr: FunctionWithoutDebugInfo };
} }
_ => cx.sess().bug(format!("create_function_debug_context: \ _ => cx.sess().bug(format!("create_function_debug_context: \
unexpected sort of node: {:?}", fnitem)) unexpected sort of node: {:?}",
fnitem).as_slice())
}; };
// This can be the case for functions inlined from another crate // This can be the case for functions inlined from another crate
@ -1124,7 +1135,8 @@ fn scope_metadata(fcx: &FunctionContext,
let node = fcx.ccx.tcx.map.get(node_id); let node = fcx.ccx.tcx.map.get(node_id);
fcx.ccx.sess().span_bug(span, fcx.ccx.sess().span_bug(span,
format!("debuginfo: Could not find scope info for node {:?}", node)); format!("debuginfo: Could not find scope info for node {:?}",
node).as_slice());
} }
} }
} }
@ -1499,14 +1511,17 @@ fn describe_enum_variant(cx: &CrateContext,
// Get the argument names from the enum variant info // Get the argument names from the enum variant info
let mut arg_names: Vec<_> = match variant_info.arg_names { let mut arg_names: Vec<_> = match variant_info.arg_names {
Some(ref names) => { Some(ref names) => {
names.iter().map(|ident| token::get_ident(*ident).get().to_str()).collect() names.iter()
.map(|ident| {
token::get_ident(*ident).get().to_str().into_strbuf()
}).collect()
} }
None => variant_info.args.iter().map(|_| "".to_owned()).collect() None => variant_info.args.iter().map(|_| "".to_strbuf()).collect()
}; };
// If this is not a univariant enum, there is also the (unnamed) discriminant field // If this is not a univariant enum, there is also the (unnamed) discriminant field
if discriminant_type_metadata.is_some() { if discriminant_type_metadata.is_some() {
arg_names.insert(0, "".to_owned()); arg_names.insert(0, "".to_strbuf());
} }
// Build an array of (field name, field type) pairs to be captured in the factory closure. // Build an array of (field name, field type) pairs to be captured in the factory closure.
@ -1861,7 +1876,7 @@ fn boxed_type_metadata(cx: &CrateContext,
-> DICompositeType { -> DICompositeType {
let box_type_name = match content_type_name { let box_type_name = match content_type_name {
Some(content_type_name) => format!("Boxed<{}>", content_type_name), Some(content_type_name) => format!("Boxed<{}>", content_type_name),
None => "BoxedType".to_owned() None => "BoxedType".to_strbuf()
}; };
let box_llvm_type = Type::at_box(cx, content_llvm_type); let box_llvm_type = Type::at_box(cx, content_llvm_type);
@ -1913,7 +1928,7 @@ fn boxed_type_metadata(cx: &CrateContext,
return composite_type_metadata( return composite_type_metadata(
cx, cx,
box_llvm_type, box_llvm_type,
box_type_name, box_type_name.as_slice(),
member_descriptions, member_descriptions,
file_metadata, file_metadata,
file_metadata, file_metadata,
@ -1971,7 +1986,9 @@ fn vec_metadata(cx: &CrateContext,
let (element_size, element_align) = size_and_align_of(cx, element_llvm_type); let (element_size, element_align) = size_and_align_of(cx, element_llvm_type);
let vec_llvm_type = Type::vec(cx, &element_llvm_type); let vec_llvm_type = Type::vec(cx, &element_llvm_type);
let vec_type_name: &str = format!("[{}]", ppaux::ty_to_str(cx.tcx(), element_type)); let vec_type_name = format!("[{}]",
ppaux::ty_to_str(cx.tcx(), element_type));
let vec_type_name = vec_type_name.as_slice();
let member_llvm_types = vec_llvm_type.field_types(); let member_llvm_types = vec_llvm_type.field_types();
@ -2254,7 +2271,11 @@ fn type_metadata(cx: &CrateContext,
elements.as_slice(), elements.as_slice(),
usage_site_span).finalize(cx) usage_site_span).finalize(cx)
} }
_ => cx.sess().bug(format!("debuginfo: unexpected type in type_metadata: {:?}", sty)) _ => {
cx.sess().bug(format!("debuginfo: unexpected type in \
type_metadata: {:?}",
sty).as_slice())
}
}; };
debug_context(cx).created_types.borrow_mut().insert(cache_id, type_metadata); debug_context(cx).created_types.borrow_mut().insert(cache_id, type_metadata);
@ -2852,13 +2873,13 @@ impl NamespaceTreeNode {
None => {} None => {}
} }
let string = token::get_name(node.name); let string = token::get_name(node.name);
output.push_str(format!("{}", string.get().len())); output.push_str(format!("{}", string.get().len()).as_slice());
output.push_str(string.get()); output.push_str(string.get());
} }
let mut name = StrBuf::from_str("_ZN"); let mut name = StrBuf::from_str("_ZN");
fill_nested(self, &mut name); fill_nested(self, &mut name);
name.push_str(format!("{}", item_name.len())); name.push_str(format!("{}", item_name.len()).as_slice());
name.push_str(item_name); name.push_str(item_name);
name.push_char('E'); name.push_char('E');
name name
@ -2941,7 +2962,8 @@ fn namespace_for_item(cx: &CrateContext, def_id: ast::DefId) -> Rc<NamespaceTree
Some(node) => node, Some(node) => node,
None => { None => {
cx.sess().bug(format!("debuginfo::namespace_for_item(): \ cx.sess().bug(format!("debuginfo::namespace_for_item(): \
path too short for {:?}", def_id)); path too short for {:?}",
def_id).as_slice());
} }
} }
}) })


@ -422,7 +422,7 @@ fn trans_datum_unadjusted<'a>(bcx: &'a Block<'a>,
expr.span, expr.span,
format!("trans_rvalue_datum_unadjusted reached \ format!("trans_rvalue_datum_unadjusted reached \
fall-through case: {:?}", fall-through case: {:?}",
expr.node)); expr.node).as_slice());
} }
} }
} }
@ -636,7 +636,7 @@ fn trans_rvalue_stmt_unadjusted<'a>(bcx: &'a Block<'a>,
expr.span, expr.span,
format!("trans_rvalue_stmt_unadjusted reached \ format!("trans_rvalue_stmt_unadjusted reached \
fall-through case: {:?}", fall-through case: {:?}",
expr.node)); expr.node).as_slice());
} }
} }
} }
@ -765,8 +765,9 @@ fn trans_rvalue_dps_unadjusted<'a>(bcx: &'a Block<'a>,
_ => { _ => {
bcx.tcx().sess.span_bug( bcx.tcx().sess.span_bug(
expr.span, expr.span,
format!("trans_rvalue_dps_unadjusted reached fall-through case: {:?}", format!("trans_rvalue_dps_unadjusted reached fall-through \
expr.node)); case: {:?}",
expr.node).as_slice());
} }
} }
} }
@ -815,7 +816,7 @@ fn trans_def_dps_unadjusted<'a>(
_ => { _ => {
bcx.tcx().sess.span_bug(ref_expr.span, format!( bcx.tcx().sess.span_bug(ref_expr.span, format!(
"Non-DPS def {:?} referened by {}", "Non-DPS def {:?} referened by {}",
def, bcx.node_id_to_str(ref_expr.id))); def, bcx.node_id_to_str(ref_expr.id)).as_slice());
} }
} }
} }
@ -839,7 +840,7 @@ fn trans_def_fn_unadjusted<'a>(bcx: &'a Block<'a>,
bcx.tcx().sess.span_bug(ref_expr.span, format!( bcx.tcx().sess.span_bug(ref_expr.span, format!(
"trans_def_fn_unadjusted invoked on: {:?} for {}", "trans_def_fn_unadjusted invoked on: {:?} for {}",
def, def,
ref_expr.repr(bcx.tcx()))); ref_expr.repr(bcx.tcx())).as_slice());
} }
}; };
@ -865,7 +866,8 @@ pub fn trans_local_var<'a>(bcx: &'a Block<'a>,
Some(&val) => Datum(val, local_ty, Lvalue), Some(&val) => Datum(val, local_ty, Lvalue),
None => { None => {
bcx.sess().bug(format!( bcx.sess().bug(format!(
"trans_local_var: no llval for upvar {:?} found", nid)); "trans_local_var: no llval for upvar {:?} found",
nid).as_slice());
} }
} }
} }
@ -877,7 +879,8 @@ pub fn trans_local_var<'a>(bcx: &'a Block<'a>,
} }
_ => { _ => {
bcx.sess().unimpl(format!( bcx.sess().unimpl(format!(
"unsupported def type in trans_local_var: {:?}", def)); "unsupported def type in trans_local_var: {:?}",
def).as_slice());
} }
}; };
@ -889,7 +892,8 @@ pub fn trans_local_var<'a>(bcx: &'a Block<'a>,
Some(&v) => v, Some(&v) => v,
None => { None => {
bcx.sess().bug(format!( bcx.sess().bug(format!(
"trans_local_var: no datum for local/arg {:?} found", nid)); "trans_local_var: no datum for local/arg {:?} found",
nid).as_slice());
} }
}; };
debug!("take_local(nid={:?}, v={}, ty={})", debug!("take_local(nid={:?}, v={}, ty={})",
@ -922,7 +926,7 @@ pub fn with_field_tys<R>(tcx: &ty::ctxt,
tcx.sess.bug(format!( tcx.sess.bug(format!(
"cannot get field types from the enum type {} \ "cannot get field types from the enum type {} \
without a node ID", without a node ID",
ty.repr(tcx))); ty.repr(tcx)).as_slice());
} }
Some(node_id) => { Some(node_id) => {
let def = tcx.def_map.borrow().get_copy(&node_id); let def = tcx.def_map.borrow().get_copy(&node_id);
@ -947,7 +951,7 @@ pub fn with_field_tys<R>(tcx: &ty::ctxt,
_ => { _ => {
tcx.sess.bug(format!( tcx.sess.bug(format!(
"cannot get field types from the type {}", "cannot get field types from the type {}",
ty.repr(tcx))); ty.repr(tcx)).as_slice());
} }
} }
} }
@ -1586,16 +1590,22 @@ fn trans_imm_cast<'a>(bcx: &'a Block<'a>,
val_ty(lldiscrim_a), val_ty(lldiscrim_a),
lldiscrim_a, true), lldiscrim_a, true),
cast_float => SIToFP(bcx, lldiscrim_a, ll_t_out), cast_float => SIToFP(bcx, lldiscrim_a, ll_t_out),
_ => ccx.sess().bug(format!("translating unsupported cast: \ _ => {
ccx.sess().bug(format!("translating unsupported cast: \
{} ({:?}) -> {} ({:?})", {} ({:?}) -> {} ({:?})",
t_in.repr(bcx.tcx()), k_in, t_in.repr(bcx.tcx()),
t_out.repr(bcx.tcx()), k_out)) k_in,
t_out.repr(bcx.tcx()),
k_out).as_slice())
}
} }
} }
_ => ccx.sess().bug(format!("translating unsupported cast: \ _ => ccx.sess().bug(format!("translating unsupported cast: \
{} ({:?}) -> {} ({:?})", {} ({:?}) -> {} ({:?})",
t_in.repr(bcx.tcx()), k_in, t_in.repr(bcx.tcx()),
t_out.repr(bcx.tcx()), k_out)) k_in,
t_out.repr(bcx.tcx()),
k_out).as_slice())
}; };
return immediate_rvalue_bcx(bcx, newval, t_out).to_expr_datumblock(); return immediate_rvalue_bcx(bcx, newval, t_out).to_expr_datumblock();
} }
@ -1757,7 +1767,7 @@ fn deref_once<'a>(bcx: &'a Block<'a>,
bcx.tcx().sess.span_bug( bcx.tcx().sess.span_bug(
expr.span, expr.span,
format!("deref invoked on expr of illegal type {}", format!("deref invoked on expr of illegal type {}",
datum.ty.repr(bcx.tcx()))); datum.ty.repr(bcx.tcx())).as_slice());
} }
}; };


@ -81,13 +81,12 @@ pub fn llvm_calling_convention(ccx: &CrateContext,
match abi { match abi {
RustIntrinsic => { RustIntrinsic => {
// Intrinsics are emitted by monomorphic fn // Intrinsics are emitted by monomorphic fn
ccx.sess().bug(format!("asked to register intrinsic fn")); ccx.sess().bug("asked to register intrinsic fn");
} }
Rust => { Rust => {
// FIXME(#3678) Implement linking to foreign fns with Rust ABI // FIXME(#3678) Implement linking to foreign fns with Rust ABI
ccx.sess().unimpl( ccx.sess().unimpl("foreign functions with Rust ABI");
format!("foreign functions with Rust ABI"));
} }
// It's the ABI's job to select this, not us. // It's the ABI's job to select this, not us.
@ -203,13 +202,13 @@ pub fn register_foreign_item_fn(ccx: &CrateContext, abi: Abi, fty: ty::t,
ccx.sess().span_fatal(s, ccx.sess().span_fatal(s,
format!("ABI `{}` has no suitable calling convention \ format!("ABI `{}` has no suitable calling convention \
for target architecture", for target architecture",
abi.user_string(ccx.tcx()))) abi.user_string(ccx.tcx())).as_slice())
} }
None => { None => {
ccx.sess().fatal( ccx.sess().fatal(
format!("ABI `{}` has no suitable calling convention \ format!("ABI `{}` has no suitable calling convention \
for target architecture", for target architecture",
abi.user_string(ccx.tcx()))) abi.user_string(ccx.tcx())).as_slice())
} }
} }
} }
@ -371,7 +370,7 @@ pub fn trans_native_call<'a>(
ccx.sess().fatal( ccx.sess().fatal(
format!("ABI string `{}` has no suitable ABI \ format!("ABI string `{}` has no suitable ABI \
for target architecture", for target architecture",
fn_abi.user_string(ccx.tcx()))); fn_abi.user_string(ccx.tcx())).as_slice());
} }
}; };
@ -557,7 +556,7 @@ pub fn trans_rust_fn_with_foreign_abi(ccx: &CrateContext,
ccx.sess().bug(format!("build_rust_fn: extern fn {} has ty {}, \ ccx.sess().bug(format!("build_rust_fn: extern fn {} has ty {}, \
expected a bare fn ty", expected a bare fn ty",
ccx.tcx.map.path_to_str(id), ccx.tcx.map.path_to_str(id),
t.repr(tcx))); t.repr(tcx)).as_slice());
} }
}; };


@ -396,7 +396,7 @@ pub fn trans_intrinsic(ccx: &CrateContext,
intype = ty_to_str(ccx.tcx(), in_type), intype = ty_to_str(ccx.tcx(), in_type),
insize = in_type_size as uint, insize = in_type_size as uint,
outtype = ty_to_str(ccx.tcx(), out_type), outtype = ty_to_str(ccx.tcx(), out_type),
outsize = out_type_size as uint)); outsize = out_type_size as uint).as_slice());
} }
if !return_type_is_void(ccx, out_type) { if !return_type_is_void(ccx, out_type) {


@ -205,7 +205,8 @@ pub fn monomorphic_fn(ccx: &CrateContext,
hash_id.hash(&mut state); hash_id.hash(&mut state);
mono_ty.hash(&mut state); mono_ty.hash(&mut state);
exported_name(path, format!("h{}", state.result()), exported_name(path,
format!("h{}", state.result()).as_slice(),
ccx.link_meta.crateid.version_or_default()) ccx.link_meta.crateid.version_or_default())
}); });
debug!("monomorphize_fn mangled to {}", s); debug!("monomorphize_fn mangled to {}", s);
@ -287,7 +288,7 @@ pub fn monomorphic_fn(ccx: &CrateContext,
} }
_ => { _ => {
ccx.sess().bug(format!("can't monomorphize a {:?}", ccx.sess().bug(format!("can't monomorphize a {:?}",
map_node)) map_node).as_slice())
} }
} }
} }
@ -311,7 +312,8 @@ pub fn monomorphic_fn(ccx: &CrateContext,
ast_map::NodeBlock(..) | ast_map::NodeBlock(..) |
ast_map::NodePat(..) | ast_map::NodePat(..) |
ast_map::NodeLocal(..) => { ast_map::NodeLocal(..) => {
ccx.sess().bug(format!("can't monomorphize a {:?}", map_node)) ccx.sess().bug(format!("can't monomorphize a {:?}",
map_node).as_slice())
} }
}; };


@ -155,7 +155,7 @@ pub fn sizing_type_of(cx: &CrateContext, t: ty::t) -> Type {
ty::ty_self(_) | ty::ty_infer(..) | ty::ty_param(..) | ty::ty_self(_) | ty::ty_infer(..) | ty::ty_param(..) |
ty::ty_err(..) | ty::ty_vec(_, None) | ty::ty_str => { ty::ty_err(..) | ty::ty_vec(_, None) | ty::ty_str => {
cx.sess().bug(format!("fictitious type {:?} in sizing_type_of()", cx.sess().bug(format!("fictitious type {:?} in sizing_type_of()",
ty::get(t).sty)) ty::get(t).sty).as_slice())
} }
}; };


@ -2620,7 +2620,7 @@ pub fn node_id_to_trait_ref(cx: &ctxt, id: ast::NodeId) -> Rc<ty::TraitRef> {
Some(t) => t.clone(), Some(t) => t.clone(),
None => cx.sess.bug( None => cx.sess.bug(
format!("node_id_to_trait_ref: no trait ref for node `{}`", format!("node_id_to_trait_ref: no trait ref for node `{}`",
cx.map.node_to_str(id))) cx.map.node_to_str(id)).as_slice())
} }
} }
@ -2633,7 +2633,7 @@ pub fn node_id_to_type(cx: &ctxt, id: ast::NodeId) -> t {
Some(t) => t, Some(t) => t,
None => cx.sess.bug( None => cx.sess.bug(
format!("node_id_to_type: no type for node `{}`", format!("node_id_to_type: no type for node `{}`",
cx.map.node_to_str(id))) cx.map.node_to_str(id)).as_slice())
} }
} }
@ -2717,7 +2717,8 @@ pub fn ty_region(tcx: &ctxt,
ref s => { ref s => {
tcx.sess.span_bug( tcx.sess.span_bug(
span, span,
format!("ty_region() invoked on in appropriate ty: {:?}", s)); format!("ty_region() invoked on in appropriate ty: {:?}",
s).as_slice());
} }
} }
} }
@ -2774,11 +2775,12 @@ pub fn expr_span(cx: &ctxt, id: NodeId) -> Span {
} }
Some(f) => { Some(f) => {
cx.sess.bug(format!("Node id {} is not an expr: {:?}", cx.sess.bug(format!("Node id {} is not an expr: {:?}",
id, f)); id,
f).as_slice());
} }
None => { None => {
cx.sess.bug(format!("Node id {} is not present \ cx.sess.bug(format!("Node id {} is not present \
in the node map", id)); in the node map", id).as_slice());
} }
} }
} }
@ -2793,14 +2795,15 @@ pub fn local_var_name_str(cx: &ctxt, id: NodeId) -> InternedString {
_ => { _ => {
cx.sess.bug( cx.sess.bug(
format!("Variable id {} maps to {:?}, not local", format!("Variable id {} maps to {:?}, not local",
id, pat)); id,
pat).as_slice());
} }
} }
} }
r => { r => {
cx.sess.bug( cx.sess.bug(format!("Variable id {} maps to {:?}, not local",
format!("Variable id {} maps to {:?}, not local", id,
id, r)); r).as_slice());
} }
} }
} }
@ -2832,7 +2835,7 @@ pub fn adjust_ty(cx: &ctxt,
cx.sess.bug( cx.sess.bug(
format!("add_env adjustment on non-bare-fn: \ format!("add_env adjustment on non-bare-fn: \
{:?}", {:?}",
b)); b).as_slice());
} }
} }
} }
@ -2857,7 +2860,8 @@ pub fn adjust_ty(cx: &ctxt,
format!("the {}th autoderef failed: \ format!("the {}th autoderef failed: \
{}", {}",
i, i,
ty_to_str(cx, adjusted_ty))); ty_to_str(cx, adjusted_ty))
.as_slice());
} }
} }
} }
@ -2923,7 +2927,8 @@ pub fn adjust_ty(cx: &ctxt,
_ => { _ => {
cx.sess.span_bug( cx.sess.span_bug(
span, span,
format!("borrow-vec associated with bad sty: {:?}", get(ty).sty)); format!("borrow-vec associated with bad sty: {:?}",
get(ty).sty).as_slice());
} }
}, },
ty_vec(mt, Some(_)) => ty::mk_slice(cx, r, ty::mt {ty: mt.ty, mutbl: m}), ty_vec(mt, Some(_)) => ty::mk_slice(cx, r, ty::mt {ty: mt.ty, mutbl: m}),
@ -2931,7 +2936,8 @@ pub fn adjust_ty(cx: &ctxt,
ref s => { ref s => {
cx.sess.span_bug( cx.sess.span_bug(
span, span,
format!("borrow-vec associated with bad sty: {:?}", s)); format!("borrow-vec associated with bad sty: {:?}",
s).as_slice());
} }
} }
} }
@ -2947,7 +2953,7 @@ pub fn adjust_ty(cx: &ctxt,
cx.sess.span_bug( cx.sess.span_bug(
span, span,
format!("borrow-trait-obj associated with bad sty: {:?}", format!("borrow-trait-obj associated with bad sty: {:?}",
s)); s).as_slice());
} }
} }
} }
@ -2996,7 +3002,7 @@ pub fn resolve_expr(tcx: &ctxt, expr: &ast::Expr) -> ast::Def {
Some(&def) => def, Some(&def) => def,
None => { None => {
tcx.sess.span_bug(expr.span, format!( tcx.sess.span_bug(expr.span, format!(
"no def-map entry for expr {:?}", expr.id)); "no def-map entry for expr {:?}", expr.id).as_slice());
} }
} }
} }
@ -3070,9 +3076,11 @@ pub fn expr_kind(tcx: &ctxt, expr: &ast::Expr) -> ExprKind {
ast::DefLocal(..) => LvalueExpr, ast::DefLocal(..) => LvalueExpr,
def => { def => {
tcx.sess.span_bug(expr.span, format!( tcx.sess.span_bug(
"uncategorized def for expr {:?}: {:?}", expr.span,
expr.id, def)); format!("uncategorized def for expr {:?}: {:?}",
expr.id,
def).as_slice());
} }
} }
} }
@ -3193,7 +3201,7 @@ pub fn field_idx_strict(tcx: &ctxt, name: ast::Name, fields: &[field])
token::get_name(name), token::get_name(name),
fields.iter() fields.iter()
.map(|f| token::get_ident(f.ident).get().to_strbuf()) .map(|f| token::get_ident(f.ident).get().to_strbuf())
.collect::<Vec<StrBuf>>())); .collect::<Vec<StrBuf>>()).as_slice());
} }
pub fn method_idx(id: ast::Ident, meths: &[Rc<Method>]) -> Option<uint> { pub fn method_idx(id: ast::Ident, meths: &[Rc<Method>]) -> Option<uint> {
@ -3444,10 +3452,18 @@ pub fn provided_trait_methods(cx: &ctxt, id: ast::DefId) -> Vec<Rc<Method>> {
let (_, p) = ast_util::split_trait_methods(ms.as_slice()); let (_, p) = ast_util::split_trait_methods(ms.as_slice());
p.iter().map(|m| method(cx, ast_util::local_def(m.id))).collect() p.iter().map(|m| method(cx, ast_util::local_def(m.id))).collect()
} }
_ => cx.sess.bug(format!("provided_trait_methods: `{}` is not a trait", id)) _ => {
cx.sess.bug(format!("provided_trait_methods: `{}` is \
not a trait",
id).as_slice())
} }
} }
_ => cx.sess.bug(format!("provided_trait_methods: `{}` is not a trait", id)) }
_ => {
cx.sess.bug(format!("provided_trait_methods: `{}` is not a \
trait",
id).as_slice())
}
} }
} else { } else {
csearch::get_provided_trait_methods(cx, id) csearch::get_provided_trait_methods(cx, id)
@ -3800,7 +3816,7 @@ pub fn enum_variants(cx: &ctxt, id: ast::DefId) -> Rc<Vec<Rc<VariantInfo>>> {
cx.sess cx.sess
.span_err(e.span, .span_err(e.span,
format!("expected constant: {}", format!("expected constant: {}",
*err)); *err).as_slice());
} }
}, },
None => {} None => {}
@ -3963,7 +3979,7 @@ fn each_super_struct(cx: &ctxt, mut did: ast::DefId, f: |ast::DefId|) {
None => { None => {
cx.sess.bug( cx.sess.bug(
format!("ID not mapped to super-struct: {}", format!("ID not mapped to super-struct: {}",
cx.map.node_to_str(did.node))); cx.map.node_to_str(did.node)).as_slice());
} }
} }
} }
@ -3985,7 +4001,7 @@ pub fn lookup_struct_fields(cx: &ctxt, did: ast::DefId) -> Vec<field_ty> {
_ => { _ => {
cx.sess.bug( cx.sess.bug(
format!("ID not mapped to struct fields: {}", format!("ID not mapped to struct fields: {}",
cx.map.node_to_str(did.node))); cx.map.node_to_str(did.node)).as_slice());
} }
} }
}); });


@ -180,7 +180,7 @@ fn ast_path_substs<AC:AstConv,RS:RegionScope>(
format!("wrong number of lifetime parameters: \ format!("wrong number of lifetime parameters: \
expected {} but found {}", expected {} but found {}",
expected_num_region_params, expected_num_region_params,
supplied_num_region_params)); supplied_num_region_params).as_slice());
} }
match anon_regions { match anon_regions {
@ -204,7 +204,9 @@ fn ast_path_substs<AC:AstConv,RS:RegionScope>(
}; };
this.tcx().sess.span_fatal(path.span, this.tcx().sess.span_fatal(path.span,
format!("wrong number of type arguments: {} {} but found {}", format!("wrong number of type arguments: {} {} but found {}",
expected, required_ty_param_count, supplied_ty_param_count)); expected,
required_ty_param_count,
supplied_ty_param_count).as_slice());
} else if supplied_ty_param_count > formal_ty_param_count { } else if supplied_ty_param_count > formal_ty_param_count {
let expected = if required_ty_param_count < formal_ty_param_count { let expected = if required_ty_param_count < formal_ty_param_count {
"expected at most" "expected at most"
@ -213,7 +215,9 @@ fn ast_path_substs<AC:AstConv,RS:RegionScope>(
}; };
this.tcx().sess.span_fatal(path.span, this.tcx().sess.span_fatal(path.span,
format!("wrong number of type arguments: {} {} but found {}", format!("wrong number of type arguments: {} {} but found {}",
expected, formal_ty_param_count, supplied_ty_param_count)); expected,
formal_ty_param_count,
supplied_ty_param_count).as_slice());
} }
if supplied_ty_param_count > required_ty_param_count if supplied_ty_param_count > required_ty_param_count
@ -317,8 +321,11 @@ pub fn ast_ty_to_prim_ty(tcx: &ty::ctxt, ast_ty: &ast::Ty) -> Option<ty::t> {
match ast_ty.node { match ast_ty.node {
ast::TyPath(ref path, _, id) => { ast::TyPath(ref path, _, id) => {
let a_def = match tcx.def_map.borrow().find(&id) { let a_def = match tcx.def_map.borrow().find(&id) {
None => tcx.sess.span_bug( None => {
ast_ty.span, format!("unbound path {}", path_to_str(path))), tcx.sess.span_bug(ast_ty.span,
format!("unbound path {}",
path_to_str(path)).as_slice())
}
Some(&d) => d Some(&d) => d
}; };
match a_def { match a_def {
@ -382,8 +389,13 @@ pub fn ast_ty_to_builtin_ty<AC:AstConv,
match ast_ty.node { match ast_ty.node {
ast::TyPath(ref path, _, id) => { ast::TyPath(ref path, _, id) => {
let a_def = match this.tcx().def_map.borrow().find(&id) { let a_def = match this.tcx().def_map.borrow().find(&id) {
None => this.tcx().sess.span_bug( None => {
ast_ty.span, format!("unbound path {}", path_to_str(path))), this.tcx()
.sess
.span_bug(ast_ty.span,
format!("unbound path {}",
path_to_str(path)).as_slice())
}
Some(&d) => d Some(&d) => d
}; };
@ -493,8 +505,11 @@ fn mk_pointer<AC:AstConv,
RPtr(r) => { RPtr(r) => {
return ty::mk_str_slice(tcx, r, ast::MutImmutable); return ty::mk_str_slice(tcx, r, ast::MutImmutable);
} }
_ => tcx.sess.span_err(path.span, _ => {
format!("managed strings are not supported")), tcx.sess
.span_err(path.span,
"managed strings are not supported")
}
} }
} }
Some(&ast::DefTrait(trait_def_id)) => { Some(&ast::DefTrait(trait_def_id)) => {
@ -635,8 +650,12 @@ pub fn ast_ty_to_ty<AC:AstConv, RS:RegionScope>(
} }
ast::TyPath(ref path, ref bounds, id) => { ast::TyPath(ref path, ref bounds, id) => {
let a_def = match tcx.def_map.borrow().find(&id) { let a_def = match tcx.def_map.borrow().find(&id) {
None => tcx.sess.span_bug( None => {
ast_ty.span, format!("unbound path {}", path_to_str(path))), tcx.sess
.span_bug(ast_ty.span,
format!("unbound path {}",
path_to_str(path)).as_slice())
}
Some(&d) => d Some(&d) => d
}; };
// Kind bounds on path types are only supported for traits. // Kind bounds on path types are only supported for traits.
@ -653,8 +672,10 @@ pub fn ast_ty_to_ty<AC:AstConv, RS:RegionScope>(
let path_str = path_to_str(path); let path_str = path_to_str(path);
tcx.sess.span_err( tcx.sess.span_err(
ast_ty.span, ast_ty.span,
format!("reference to trait `{name}` where a type is expected; \ format!("reference to trait `{name}` where a \
try `Box<{name}>` or `&{name}`", name=path_str)); type is expected; try `Box<{name}>` or \
`&{name}`",
name=path_str).as_slice());
ty::mk_err() ty::mk_err()
} }
ast::DefTy(did) | ast::DefStruct(did) => { ast::DefTy(did) | ast::DefStruct(did) => {
@ -675,14 +696,16 @@ pub fn ast_ty_to_ty<AC:AstConv, RS:RegionScope>(
ast::DefMod(id) => { ast::DefMod(id) => {
tcx.sess.span_fatal(ast_ty.span, tcx.sess.span_fatal(ast_ty.span,
format!("found module name used as a type: {}", format!("found module name used as a type: {}",
tcx.map.node_to_str(id.node))); tcx.map.node_to_str(id.node)).as_slice());
} }
ast::DefPrimTy(_) => { ast::DefPrimTy(_) => {
fail!("DefPrimTy arm missed in previous ast_ty_to_prim_ty call"); fail!("DefPrimTy arm missed in previous ast_ty_to_prim_ty call");
} }
_ => { _ => {
tcx.sess.span_fatal(ast_ty.span, tcx.sess.span_fatal(ast_ty.span,
format!("found value name used as a type: {:?}", a_def)); format!("found value name used \
as a type: {:?}",
a_def).as_slice());
} }
} }
} }
@ -705,7 +728,9 @@ pub fn ast_ty_to_ty<AC:AstConv, RS:RegionScope>(
Err(ref r) => { Err(ref r) => {
tcx.sess.span_fatal( tcx.sess.span_fatal(
ast_ty.span, ast_ty.span,
format!("expected constant expr for vector length: {}", *r)); format!("expected constant expr for vector \
length: {}",
*r).as_slice());
} }
} }
} }
@ -897,8 +922,8 @@ fn conv_builtin_bounds(tcx: &ty::ctxt, ast_bounds: &Option<OwnedSlice<ast::TyPar
} }
tcx.sess.span_fatal( tcx.sess.span_fatal(
b.path.span, b.path.span,
format!("only the builtin traits can be used \ "only the builtin traits can be used as closure \
as closure or object bounds")); or object bounds");
} }
ast::StaticRegionTyParamBound => { ast::StaticRegionTyParamBound => {
builtin_bounds.add(ty::BoundStatic); builtin_bounds.add(ty::BoundStatic);
@ -907,8 +932,8 @@ fn conv_builtin_bounds(tcx: &ty::ctxt, ast_bounds: &Option<OwnedSlice<ast::TyPar
if !tcx.sess.features.issue_5723_bootstrap.get() { if !tcx.sess.features.issue_5723_bootstrap.get() {
tcx.sess.span_err( tcx.sess.span_err(
span, span,
format!("only the 'static lifetime is \ "only the 'static lifetime is accepted \
accepted here.")); here.");
} }
} }
} }


@ -263,7 +263,7 @@ pub fn check_pat_variant(pcx: &pat_ctxt, pat: &ast::Pat, path: &ast::Path,
npat = subpats_len, npat = subpats_len,
kind = kind_name, kind = kind_name,
narg = arg_len); narg = arg_len);
tcx.sess.span_err(pat.span, s); tcx.sess.span_err(pat.span, s.as_slice());
error_happened = true; error_happened = true;
} }
@ -280,7 +280,7 @@ pub fn check_pat_variant(pcx: &pat_ctxt, pat: &ast::Pat, path: &ast::Path,
{npat, plural, =1{# field} other{# fields}}, \ {npat, plural, =1{# field} other{# fields}}, \
but the corresponding {kind} has no fields", but the corresponding {kind} has no fields",
npat = subpats_len, npat = subpats_len,
kind = kind_name)); kind = kind_name).as_slice());
error_happened = true; error_happened = true;
} }
@ -324,7 +324,7 @@ pub fn check_struct_pat_fields(pcx: &pat_ctxt,
Some(&(_, true)) => { Some(&(_, true)) => {
tcx.sess.span_err(span, tcx.sess.span_err(span,
format!("field `{}` bound twice in pattern", format!("field `{}` bound twice in pattern",
token::get_ident(field.ident))); token::get_ident(field.ident)).as_slice());
} }
Some(&(index, ref mut used)) => { Some(&(index, ref mut used)) => {
*used = true; *used = true;
@ -344,7 +344,7 @@ pub fn check_struct_pat_fields(pcx: &pat_ctxt,
tcx.sess.span_err(span, tcx.sess.span_err(span,
format!("struct `{}` does not have a field named `{}`", format!("struct `{}` does not have a field named `{}`",
name, name,
token::get_ident(field.ident))); token::get_ident(field.ident)).as_slice());
} }
} }
} }
@ -356,9 +356,10 @@ pub fn check_struct_pat_fields(pcx: &pat_ctxt,
continue; continue;
} }
tcx.sess.span_err(span, tcx.sess
.span_err(span,
format!("pattern does not mention field `{}`", format!("pattern does not mention field `{}`",
token::get_name(field.name))); token::get_name(field.name)).as_slice());
} }
} }
} }
@ -381,10 +382,12 @@ pub fn check_struct_pat(pcx: &pat_ctxt, pat_id: ast::NodeId, span: Span,
} }
Some(&ast::DefStruct(..)) | Some(&ast::DefVariant(..)) => { Some(&ast::DefStruct(..)) | Some(&ast::DefVariant(..)) => {
let name = pprust::path_to_str(path); let name = pprust::path_to_str(path);
tcx.sess.span_err(span, tcx.sess
format!("mismatched types: expected `{}` but found `{}`", .span_err(span,
format!("mismatched types: expected `{}` but found \
`{}`",
fcx.infcx().ty_to_str(expected), fcx.infcx().ty_to_str(expected),
name)); name).as_slice());
} }
_ => { _ => {
tcx.sess.span_bug(span, "resolve didn't write in struct ID"); tcx.sess.span_bug(span, "resolve didn't write in struct ID");
@ -423,7 +426,7 @@ pub fn check_struct_like_enum_variant_pat(pcx: &pat_ctxt,
format!("mismatched types: expected `{}` but \ format!("mismatched types: expected `{}` but \
found `{}`", found `{}`",
fcx.infcx().ty_to_str(expected), fcx.infcx().ty_to_str(expected),
name)); name).as_slice());
} }
_ => { _ => {
tcx.sess.span_bug(span, "resolve didn't write in variant"); tcx.sess.span_bug(span, "resolve didn't write in variant");


@ -284,7 +284,7 @@ fn construct_transformed_self_ty_for_object(
_ => { _ => {
tcx.sess.span_bug(span, tcx.sess.span_bug(span,
format!("'impossible' transformed_self_ty: {}", format!("'impossible' transformed_self_ty: {}",
transformed_self_ty.repr(tcx))); transformed_self_ty.repr(tcx)).as_slice());
} }
} }
} }
@ -950,7 +950,7 @@ impl<'a> LookupContext<'a> {
ty_infer(TyVar(_)) => { ty_infer(TyVar(_)) => {
self.bug(format!("unexpected type: {}", self.bug(format!("unexpected type: {}",
self.ty_to_str(self_ty))); self.ty_to_str(self_ty)).as_slice());
} }
} }
} }
@ -1235,9 +1235,10 @@ impl<'a> LookupContext<'a> {
rcvr_ty, transformed_self_ty) { rcvr_ty, transformed_self_ty) {
Ok(_) => {} Ok(_) => {}
Err(_) => { Err(_) => {
self.bug(format!("{} was a subtype of {} but now is not?", self.bug(format!(
"{} was a subtype of {} but now is not?",
self.ty_to_str(rcvr_ty), self.ty_to_str(rcvr_ty),
self.ty_to_str(transformed_self_ty))); self.ty_to_str(transformed_self_ty)).as_slice());
} }
} }
@ -1465,16 +1466,16 @@ impl<'a> LookupContext<'a> {
self.tcx().sess.span_note( self.tcx().sess.span_note(
span, span,
format!("candidate \\#{} is `{}`", format!("candidate \\#{} is `{}`",
idx+1u, idx + 1u,
ty::item_path_str(self.tcx(), did))); ty::item_path_str(self.tcx(), did)).as_slice());
} }
fn report_param_candidate(&self, idx: uint, did: DefId) { fn report_param_candidate(&self, idx: uint, did: DefId) {
self.tcx().sess.span_note( self.tcx().sess.span_note(
self.span, self.span,
format!("candidate \\#{} derives from the bound `{}`", format!("candidate \\#{} derives from the bound `{}`",
idx+1u, idx + 1u,
ty::item_path_str(self.tcx(), did))); ty::item_path_str(self.tcx(), did)).as_slice());
} }
fn report_trait_candidate(&self, idx: uint, did: DefId) { fn report_trait_candidate(&self, idx: uint, did: DefId) {
@ -1482,8 +1483,8 @@ impl<'a> LookupContext<'a> {
self.span, self.span,
format!("candidate \\#{} derives from the type of the receiver, \ format!("candidate \\#{} derives from the type of the receiver, \
which is the trait `{}`", which is the trait `{}`",
idx+1u, idx + 1u,
ty::item_path_str(self.tcx(), did))); ty::item_path_str(self.tcx(), did)).as_slice());
} }
fn infcx(&'a self) -> &'a infer::InferCtxt<'a> { fn infcx(&'a self) -> &'a infer::InferCtxt<'a> {


@ -546,8 +546,11 @@ fn span_for_field(tcx: &ty::ctxt, field: &ty::field_ty, struct_id: ast::DefId) -
_ => false, _ => false,
}) { }) {
Some(f) => f.span, Some(f) => f.span,
None => tcx.sess.bug(format!("Could not find field {}", None => {
token::get_name(field.name))), tcx.sess
.bug(format!("Could not find field {}",
token::get_name(field.name)).as_slice())
}
} }
}, },
_ => tcx.sess.bug("Field found outside of a struct?"), _ => tcx.sess.bug("Field found outside of a struct?"),
@ -569,8 +572,9 @@ fn check_for_field_shadowing(tcx: &ty::ctxt,
match super_fields.iter().find(|sf| f.name == sf.name) { match super_fields.iter().find(|sf| f.name == sf.name) {
Some(prev_field) => { Some(prev_field) => {
tcx.sess.span_err(span_for_field(tcx, f, id), tcx.sess.span_err(span_for_field(tcx, f, id),
format!("field `{}` hides field declared in super-struct", format!("field `{}` hides field declared in \
token::get_name(f.name))); super-struct",
token::get_name(f.name)).as_slice());
tcx.sess.span_note(span_for_field(tcx, prev_field, parent_id), tcx.sess.span_note(span_for_field(tcx, prev_field, parent_id),
"previously declared here"); "previously declared here");
}, },
@ -593,11 +597,13 @@ fn check_fields_sized(tcx: &ty::ctxt,
if !ty::type_is_sized(tcx, t) { if !ty::type_is_sized(tcx, t) {
match f.node.kind { match f.node.kind {
ast::NamedField(ident, _) => { ast::NamedField(ident, _) => {
tcx.sess.span_err(f.span, format!("type `{}` is dynamically sized. \ tcx.sess.span_err(
f.span,
format!("type `{}` is dynamically sized. \
dynamically sized types may only \ dynamically sized types may only \
appear as the type of the final \ appear as the type of the final \
field in a struct", field in a struct",
token::get_ident(ident))); token::get_ident(ident)).as_slice());
} }
ast::UnnamedField(_) => { ast::UnnamedField(_) => {
tcx.sess.span_err(f.span, "dynamically sized type in field"); tcx.sess.span_err(f.span, "dynamically sized type in field");
@ -814,9 +820,10 @@ fn check_impl_methods_against_trait(ccx: &CrateCtxt,
None => { None => {
tcx.sess.span_err( tcx.sess.span_err(
impl_method.span, impl_method.span,
format!("method `{}` is not a member of trait `{}`", format!(
"method `{}` is not a member of trait `{}`",
token::get_ident(impl_method_ty.ident), token::get_ident(impl_method_ty.ident),
pprust::path_to_str(&ast_trait_ref.path))); pprust::path_to_str(&ast_trait_ref.path)).as_slice());
} }
} }
} }
@ -842,7 +849,7 @@ fn check_impl_methods_against_trait(ccx: &CrateCtxt,
tcx.sess.span_err( tcx.sess.span_err(
impl_span, impl_span,
format!("not all trait methods implemented, missing: {}", format!("not all trait methods implemented, missing: {}",
missing_methods.connect(", "))); missing_methods.connect(", ")).as_slice());
} }
} }
@ -886,7 +893,8 @@ fn compare_impl_method(tcx: &ty::ctxt,
format!("method `{}` has a `{}` declaration in the impl, \ format!("method `{}` has a `{}` declaration in the impl, \
but not in the trait", but not in the trait",
token::get_ident(trait_m.ident), token::get_ident(trait_m.ident),
pprust::explicit_self_to_str(impl_m.explicit_self))); pprust::explicit_self_to_str(
impl_m.explicit_self)).as_slice());
return; return;
} }
(_, &ast::SelfStatic) => { (_, &ast::SelfStatic) => {
@ -895,7 +903,8 @@ fn compare_impl_method(tcx: &ty::ctxt,
format!("method `{}` has a `{}` declaration in the trait, \ format!("method `{}` has a `{}` declaration in the trait, \
but not in the impl", but not in the impl",
token::get_ident(trait_m.ident), token::get_ident(trait_m.ident),
pprust::explicit_self_to_str(trait_m.explicit_self))); pprust::explicit_self_to_str(
trait_m.explicit_self)).as_slice());
return; return;
} }
_ => { _ => {
@ -914,7 +923,7 @@ fn compare_impl_method(tcx: &ty::ctxt,
other{# type parameters}}", other{# type parameters}}",
method = token::get_ident(trait_m.ident), method = token::get_ident(trait_m.ident),
nimpl = num_impl_m_type_params, nimpl = num_impl_m_type_params,
ntrait = num_trait_m_type_params)); ntrait = num_trait_m_type_params).as_slice());
return; return;
} }
@ -927,7 +936,7 @@ fn compare_impl_method(tcx: &ty::ctxt,
method = token::get_ident(trait_m.ident), method = token::get_ident(trait_m.ident),
nimpl = impl_m.fty.sig.inputs.len(), nimpl = impl_m.fty.sig.inputs.len(),
trait = ty::item_path_str(tcx, trait_m.def_id), trait = ty::item_path_str(tcx, trait_m.def_id),
ntrait = trait_m.fty.sig.inputs.len())); ntrait = trait_m.fty.sig.inputs.len()).as_slice());
return; return;
} }
@ -950,7 +959,7 @@ fn compare_impl_method(tcx: &ty::ctxt,
in the trait declaration", in the trait declaration",
token::get_ident(trait_m.ident), token::get_ident(trait_m.ident),
i, i,
extra_bounds.user_string(tcx))); extra_bounds.user_string(tcx)).as_slice());
return; return;
} }
@ -971,7 +980,9 @@ fn compare_impl_method(tcx: &ty::ctxt,
method = token::get_ident(trait_m.ident), method = token::get_ident(trait_m.ident),
typaram = i, typaram = i,
nimpl = impl_param_def.bounds.trait_bounds.len(), nimpl = impl_param_def.bounds.trait_bounds.len(),
ntrait = trait_param_def.bounds.trait_bounds.len())); ntrait = trait_param_def.bounds
.trait_bounds
.len()).as_slice());
return; return;
} }
} }
@ -1040,7 +1051,7 @@ fn compare_impl_method(tcx: &ty::ctxt,
impl_m_span, impl_m_span,
format!("method `{}` has an incompatible type for trait: {}", format!("method `{}` has an incompatible type for trait: {}",
token::get_ident(trait_m.ident), token::get_ident(trait_m.ident),
ty::type_err_to_str(tcx, terr))); ty::type_err_to_str(tcx, terr)).as_slice());
ty::note_and_explain_type_err(tcx, terr); ty::note_and_explain_type_err(tcx, terr);
} }
} }
@ -1099,7 +1110,8 @@ impl<'a> FnCtxt<'a> {
None => { None => {
self.tcx().sess.span_bug( self.tcx().sess.span_bug(
span, span,
format!("no type for local variable {:?}", nid)); format!("no type for local variable {:?}",
nid).as_slice());
} }
} }
} }
@ -1173,7 +1185,7 @@ impl<'a> FnCtxt<'a> {
Some(&t) => t, Some(&t) => t,
None => { None => {
self.tcx().sess.bug(format!("no type for expr in fcx {}", self.tcx().sess.bug(format!("no type for expr in fcx {}",
self.tag())); self.tag()).as_slice());
} }
} }
} }
@ -1185,7 +1197,7 @@ impl<'a> FnCtxt<'a> {
self.tcx().sess.bug( self.tcx().sess.bug(
format!("no type for node {}: {} in fcx {}", format!("no type for node {}: {} in fcx {}",
id, self.tcx().map.node_to_str(id), id, self.tcx().map.node_to_str(id),
self.tag())); self.tag()).as_slice());
} }
} }
} }
@ -1197,7 +1209,7 @@ impl<'a> FnCtxt<'a> {
self.tcx().sess.bug( self.tcx().sess.bug(
format!("no method entry for node {}: {} in fcx {}", format!("no method entry for node {}: {} in fcx {}",
id, self.tcx().map.node_to_str(id), id, self.tcx().map.node_to_str(id),
self.tag())); self.tag()).as_slice());
} }
} }
} }
@ -1350,7 +1362,7 @@ pub fn autoderef<T>(fcx: &FnCtxt, sp: Span, base_ty: ty::t,
// We've reached the recursion limit, error gracefully. // We've reached the recursion limit, error gracefully.
fcx.tcx().sess.span_err(sp, fcx.tcx().sess.span_err(sp,
format!("reached the recursion limit while auto-dereferencing {}", format!("reached the recursion limit while auto-dereferencing {}",
base_ty.repr(fcx.tcx()))); base_ty.repr(fcx.tcx())).as_slice());
(ty::mk_err(), 0, None) (ty::mk_err(), 0, None)
} }
@ -1607,7 +1619,7 @@ fn check_type_parameter_positions_in_path(function_context: &FnCtxt,
found {nsupplied, plural, =1{# lifetime parameter} \ found {nsupplied, plural, =1{# lifetime parameter} \
other{# lifetime parameters}}", other{# lifetime parameters}}",
nexpected = trait_region_parameter_count, nexpected = trait_region_parameter_count,
nsupplied = supplied_region_parameter_count)); nsupplied = supplied_region_parameter_count).as_slice());
} }
// Make sure the number of type parameters supplied on the trait // Make sure the number of type parameters supplied on the trait
@ -1638,7 +1650,8 @@ fn check_type_parameter_positions_in_path(function_context: &FnCtxt,
nexpected = required_ty_param_count, nexpected = required_ty_param_count,
nsupplied = supplied_ty_param_count) nsupplied = supplied_ty_param_count)
}; };
function_context.tcx().sess.span_err(path.span, msg) function_context.tcx().sess.span_err(path.span,
msg.as_slice())
} else if supplied_ty_param_count > formal_ty_param_count { } else if supplied_ty_param_count > formal_ty_param_count {
let msg = if required_ty_param_count < generics.type_param_defs().len() { let msg = if required_ty_param_count < generics.type_param_defs().len() {
format!("the {trait_or_impl} referenced by this path needs at most \ format!("the {trait_or_impl} referenced by this path needs at most \
@ -1659,7 +1672,8 @@ fn check_type_parameter_positions_in_path(function_context: &FnCtxt,
nexpected = formal_ty_param_count, nexpected = formal_ty_param_count,
nsupplied = supplied_ty_param_count) nsupplied = supplied_ty_param_count)
}; };
function_context.tcx().sess.span_err(path.span, msg) function_context.tcx().sess.span_err(path.span,
msg.as_slice())
} }
} }
_ => { _ => {
@ -1727,9 +1741,8 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
fty.sig.output fty.sig.output
} }
_ => { _ => {
fcx.tcx().sess.span_bug( fcx.tcx().sess.span_bug(callee_expr.span,
callee_expr.span, "method without bare fn type");
format!("method without bare fn type"));
} }
} }
} }
@ -1768,7 +1781,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
nexpected = expected_arg_count, nexpected = expected_arg_count,
nsupplied = supplied_arg_count); nsupplied = supplied_arg_count);
tcx.sess.span_err(sp, msg); tcx.sess.span_err(sp, msg.as_slice());
err_args(supplied_arg_count) err_args(supplied_arg_count)
} }
@ -1781,7 +1794,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
nexpected = expected_arg_count, nexpected = expected_arg_count,
nsupplied = supplied_arg_count); nsupplied = supplied_arg_count);
tcx.sess.span_err(sp, msg); tcx.sess.span_err(sp, msg.as_slice());
err_args(supplied_arg_count) err_args(supplied_arg_count)
}; };
@ -2484,7 +2497,8 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
tcx.sess.span_err( tcx.sess.span_err(
field.ident.span, field.ident.span,
format!("field `{}` specified more than once", format!("field `{}` specified more than once",
token::get_ident(field.ident.node))); token::get_ident(field.ident
.node)).as_slice());
error_happened = true; error_happened = true;
} }
Some((field_id, false)) => { Some((field_id, false)) => {
@ -2517,14 +2531,16 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
let name = class_field.name; let name = class_field.name;
let (_, seen) = *class_field_map.get(&name); let (_, seen) = *class_field_map.get(&name);
if !seen { if !seen {
missing_fields.push("`".to_owned() + token::get_name(name).get() + "`"); missing_fields.push(
format!("`{}`", token::get_name(name).get()))
} }
} }
tcx.sess.span_err(span, tcx.sess.span_err(span,
format!("missing {nfields, plural, =1{field} other{fields}}: {fields}", format!(
"missing {nfields, plural, =1{field} other{fields}}: {fields}",
nfields = missing_fields.len(), nfields = missing_fields.len(),
fields = missing_fields.connect(", "))); fields = missing_fields.connect(", ")).as_slice());
} }
} }
@ -3589,7 +3605,7 @@ pub fn check_representable(tcx: &ty::ctxt,
tcx.sess.span_err( tcx.sess.span_err(
sp, format!("illegal recursive {} type; \ sp, format!("illegal recursive {} type; \
wrap the inner value in a box to make it representable", wrap the inner value in a box to make it representable",
designation)); designation).as_slice());
return false return false
} }
ty::Representable | ty::ContainsRecursive => (), ty::Representable | ty::ContainsRecursive => (),
@ -3614,10 +3630,12 @@ pub fn check_instantiable(tcx: &ty::ctxt,
-> bool { -> bool {
let item_ty = ty::node_id_to_type(tcx, item_id); let item_ty = ty::node_id_to_type(tcx, item_id);
if !ty::is_instantiable(tcx, item_ty) { if !ty::is_instantiable(tcx, item_ty) {
tcx.sess.span_err(sp, format!("this type cannot be instantiated \ tcx.sess
without an instance of itself; \ .span_err(sp,
consider using `Option<{}>`", format!("this type cannot be instantiated without an \
ppaux::ty_to_str(tcx, item_ty))); instance of itself; consider using \
`Option<{}>`",
ppaux::ty_to_str(tcx, item_ty)).as_slice());
false false
} else { } else {
true true
@ -3670,11 +3688,16 @@ pub fn check_enum_variants_sized(ccx: &CrateCtxt,
// A struct value with an unsized final field is itself // A struct value with an unsized final field is itself
// unsized and we must track this in the type system. // unsized and we must track this in the type system.
if !ty::type_is_sized(ccx.tcx, *t) { if !ty::type_is_sized(ccx.tcx, *t) {
ccx.tcx.sess.span_err(args.get(i).ty.span, ccx.tcx
.sess
.span_err(
args.get(i).ty.span,
format!("type `{}` is dynamically sized. \ format!("type `{}` is dynamically sized. \
dynamically sized types may only \ dynamically sized types may only \
appear as the final type in a variant", appear as the final type in a \
ppaux::ty_to_str(ccx.tcx, *t))); variant",
ppaux::ty_to_str(ccx.tcx,
*t)).as_slice());
} }
} }
}, },
@ -3755,7 +3778,11 @@ pub fn check_enum_variants(ccx: &CrateCtxt,
ccx.tcx.sess.span_err(e.span, "expected signed integer constant"); ccx.tcx.sess.span_err(e.span, "expected signed integer constant");
} }
Err(ref err) => { Err(ref err) => {
ccx.tcx.sess.span_err(e.span, format!("expected constant: {}", *err)); ccx.tcx
.sess
.span_err(e.span,
format!("expected constant: {}",
*err).as_slice());
} }
} }
}, },
@ -3906,7 +3933,7 @@ pub fn instantiate_path(fcx: &FnCtxt,
found {nsupplied, plural, =1{# lifetime parameter} \ found {nsupplied, plural, =1{# lifetime parameter} \
other{# lifetime parameters}}", other{# lifetime parameters}}",
nexpected = num_expected_regions, nexpected = num_expected_regions,
nsupplied = num_supplied_regions)); nsupplied = num_supplied_regions).as_slice());
} }
fcx.infcx().region_vars_for_defs(span, tpt.generics.region_param_defs.as_slice()) fcx.infcx().region_vars_for_defs(span, tpt.generics.region_param_defs.as_slice())
@ -3945,7 +3972,7 @@ pub fn instantiate_path(fcx: &FnCtxt,
fcx.ccx.tcx.sess.span_err fcx.ccx.tcx.sess.span_err
(span, (span,
format!("too many type parameters provided: {} {}, found {}", format!("too many type parameters provided: {} {}, found {}",
expected, user_ty_param_count, ty_substs_len)); expected, user_ty_param_count, ty_substs_len).as_slice());
(fcx.infcx().next_ty_vars(ty_param_count), regions) (fcx.infcx().next_ty_vars(ty_param_count), regions)
} else if ty_substs_len < user_ty_param_req { } else if ty_substs_len < user_ty_param_req {
let expected = if user_ty_param_req < user_ty_param_count { let expected = if user_ty_param_req < user_ty_param_count {
@ -3953,10 +3980,12 @@ pub fn instantiate_path(fcx: &FnCtxt,
} else { } else {
"expected" "expected"
}; };
fcx.ccx.tcx.sess.span_err fcx.ccx.tcx.sess.span_err(
(span, span,
format!("not enough type parameters provided: {} {}, found {}", format!("not enough type parameters provided: {} {}, found {}",
expected, user_ty_param_req, ty_substs_len)); expected,
user_ty_param_req,
ty_substs_len).as_slice());
(fcx.infcx().next_ty_vars(ty_param_count), regions) (fcx.infcx().next_ty_vars(ty_param_count), regions)
} else { } else {
if ty_substs_len > user_ty_param_req if ty_substs_len > user_ty_param_req
@ -4128,8 +4157,9 @@ pub fn ast_expr_vstore_to_ty(fcx: &FnCtxt,
} }
} }
_ => { _ => {
fcx.ccx.tcx.sess.span_bug( fcx.ccx.tcx.sess.span_bug(e.span,
e.span, format!("vstore with unexpected contents")) "vstore with unexpected \
contents")
} }
} }
} }
@ -4184,8 +4214,9 @@ pub fn check_bounds_are_used(ccx: &CrateCtxt,
for (i, b) in tps_used.iter().enumerate() { for (i, b) in tps_used.iter().enumerate() {
if !*b { if !*b {
ccx.tcx.sess.span_err( ccx.tcx.sess.span_err(
span, format!("type parameter `{}` is unused", span,
token::get_ident(tps.get(i).ident))); format!("type parameter `{}` is unused",
token::get_ident(tps.get(i).ident)).as_slice());
} }
} }
} }
@ -4222,8 +4253,9 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &ast::ForeignItem) {
} }
op => { op => {
tcx.sess.span_err(it.span, tcx.sess.span_err(it.span,
format!("unrecognized atomic operation function: `{}`", format!("unrecognized atomic operation \
op)); function: `{}`",
op).as_slice());
return; return;
} }
} }
@ -4450,7 +4482,7 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &ast::ForeignItem) {
ref other => { ref other => {
tcx.sess.span_err(it.span, tcx.sess.span_err(it.span,
format!("unrecognized intrinsic function: `{}`", format!("unrecognized intrinsic function: `{}`",
*other)); *other).as_slice());
return; return;
} }
} }
@ -4468,9 +4500,11 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &ast::ForeignItem) {
let i_ty = ty::lookup_item_type(ccx.tcx, local_def(it.id)); let i_ty = ty::lookup_item_type(ccx.tcx, local_def(it.id));
let i_n_tps = i_ty.generics.type_param_defs().len(); let i_n_tps = i_ty.generics.type_param_defs().len();
if i_n_tps != n_tps { if i_n_tps != n_tps {
tcx.sess.span_err(it.span, format!("intrinsic has wrong number \ tcx.sess.span_err(it.span,
of type parameters: found {}, \ format!("intrinsic has wrong number of type \
expected {}", i_n_tps, n_tps)); parameters: found {}, expected {}",
i_n_tps,
n_tps).as_slice());
} else { } else {
require_same_types(tcx, require_same_types(tcx,
None, None,
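The hunks above all make the same mechanical change: with this commit `format!` builds a `StrBuf`, while the diagnostic entry points (`span_err`, `span_bug`, `span_note`) still take `&str`, so formatted messages gain an `.as_slice()` call and constant messages drop `format!` entirely. A minimal sketch of the pattern in the pre-1.0 Rust this tree targets; `emit` is a hypothetical stand-in for those diagnostic calls, not an API from the compiler:

    // Hypothetical stand-in for span_err/span_bug, which accept &str.
    fn emit(msg: &str) { println!("{}", msg); }

    fn main() {
        let nsupplied = 2u;
        // Formatted messages: convert the StrBuf to a slice at the call site.
        emit(format!("found {} lifetime parameters", nsupplied).as_slice());
        // Constant messages: pass the string literal directly, no format! needed.
        emit("method without bare fn type");
    }

The same conversion repeats through the rest of the type-checking and inference code below.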


@ -183,7 +183,7 @@ fn region_of_def(fcx: &FnCtxt, def: ast::Def) -> ty::Region {
} }
_ => { _ => {
tcx.sess.bug(format!("unexpected def in region_of_def: {:?}", tcx.sess.bug(format!("unexpected def in region_of_def: {:?}",
def)) def).as_slice())
} }
} }
} }
@ -880,7 +880,7 @@ fn constrain_autoderefs(rcx: &mut Rcx,
ty::ty_rptr(r, ref m) => (m.mutbl, r), ty::ty_rptr(r, ref m) => (m.mutbl, r),
_ => rcx.tcx().sess.span_bug(deref_expr.span, _ => rcx.tcx().sess.span_bug(deref_expr.span,
format!("bad overloaded deref type {}", format!("bad overloaded deref type {}",
method.ty.repr(rcx.tcx()))) method.ty.repr(rcx.tcx())).as_slice())
}; };
{ {
let mc = mc::MemCategorizationContext::new(rcx); let mc = mc::MemCategorizationContext::new(rcx);
@ -1235,7 +1235,8 @@ fn link_region(rcx: &Rcx,
rcx.tcx().sess.span_bug( rcx.tcx().sess.span_bug(
span, span,
format!("Illegal upvar id: {}", format!("Illegal upvar id: {}",
upvar_id.repr(rcx.tcx()))); upvar_id.repr(
rcx.tcx())).as_slice());
} }
} }
} }


@ -154,7 +154,7 @@ fn lookup_vtables_for_param(vcx: &VtableContext,
format!("failed to find an implementation of \ format!("failed to find an implementation of \
trait {} for {}", trait {} for {}",
vcx.infcx.trait_ref_to_str(&*trait_ref), vcx.infcx.trait_ref_to_str(&*trait_ref),
vcx.infcx.ty_to_str(ty))); vcx.infcx.ty_to_str(ty)).as_slice());
} }
} }
true true
@ -208,7 +208,7 @@ fn relate_trait_refs(vcx: &VtableContext,
format!("expected {}, but found {} ({})", format!("expected {}, but found {} ({})",
ppaux::trait_ref_to_str(tcx, &r_exp_trait_ref), ppaux::trait_ref_to_str(tcx, &r_exp_trait_ref),
ppaux::trait_ref_to_str(tcx, &r_act_trait_ref), ppaux::trait_ref_to_str(tcx, &r_act_trait_ref),
ty::type_err_to_str(tcx, err))); ty::type_err_to_str(tcx, err)).as_slice());
} }
} }
} }
@ -491,9 +491,9 @@ fn fixup_ty(vcx: &VtableContext,
Ok(new_type) => Some(new_type), Ok(new_type) => Some(new_type),
Err(e) if !is_early => { Err(e) if !is_early => {
tcx.sess.span_fatal(span, tcx.sess.span_fatal(span,
format!("cannot determine a type \ format!("cannot determine a type for this bounded type \
for this bounded type parameter: {}", parameter: {}",
fixup_err_to_str(e))) fixup_err_to_str(e)).as_slice())
} }
Err(_) => { Err(_) => {
None None
@ -550,8 +550,9 @@ pub fn early_resolve_expr(ex: &ast::Expr, fcx: &FnCtxt, is_early: bool) {
match (&ty::get(ty).sty, store) { match (&ty::get(ty).sty, store) {
(&ty::ty_rptr(_, mt), ty::RegionTraitStore(_, mutbl)) (&ty::ty_rptr(_, mt), ty::RegionTraitStore(_, mutbl))
if !mutability_allowed(mt.mutbl, mutbl) => { if !mutability_allowed(mt.mutbl, mutbl) => {
fcx.tcx().sess.span_err(ex.span, fcx.tcx()
format!("types differ in mutability")); .sess
.span_err(ex.span, "types differ in mutability");
} }
(&ty::ty_uniq(..), ty::UniqTraitStore) | (&ty::ty_uniq(..), ty::UniqTraitStore) |
@ -609,7 +610,7 @@ pub fn early_resolve_expr(ex: &ast::Expr, fcx: &FnCtxt, is_early: bool) {
ex.span, ex.span,
format!("can only cast an boxed pointer \ format!("can only cast an boxed pointer \
to a boxed object, not a {}", to a boxed object, not a {}",
ty::ty_sort_str(fcx.tcx(), ty))); ty::ty_sort_str(fcx.tcx(), ty)).as_slice());
} }
(_, ty::RegionTraitStore(..)) => { (_, ty::RegionTraitStore(..)) => {
@ -617,7 +618,7 @@ pub fn early_resolve_expr(ex: &ast::Expr, fcx: &FnCtxt, is_early: bool) {
ex.span, ex.span,
format!("can only cast an &-pointer \ format!("can only cast an &-pointer \
to an &-object, not a {}", to an &-object, not a {}",
ty::ty_sort_str(fcx.tcx(), ty))); ty::ty_sort_str(fcx.tcx(), ty)).as_slice());
} }
} }
} }


@ -402,7 +402,7 @@ impl<'cx> Resolver<'cx> {
span, span,
format!("cannot determine a type for \ format!("cannot determine a type for \
this expression: {}", this expression: {}",
infer::fixup_err_to_str(e))) infer::fixup_err_to_str(e)).as_slice())
} }
ResolvingLocal(span) => { ResolvingLocal(span) => {
@ -410,7 +410,7 @@ impl<'cx> Resolver<'cx> {
span, span,
format!("cannot determine a type for \ format!("cannot determine a type for \
this local variable: {}", this local variable: {}",
infer::fixup_err_to_str(e))) infer::fixup_err_to_str(e)).as_slice())
} }
ResolvingPattern(span) => { ResolvingPattern(span) => {
@ -418,7 +418,7 @@ impl<'cx> Resolver<'cx> {
span, span,
format!("cannot determine a type for \ format!("cannot determine a type for \
this pattern binding: {}", this pattern binding: {}",
infer::fixup_err_to_str(e))) infer::fixup_err_to_str(e)).as_slice())
} }
ResolvingUpvar(upvar_id) => { ResolvingUpvar(upvar_id) => {
@ -430,13 +430,15 @@ impl<'cx> Resolver<'cx> {
ty::local_var_name_str( ty::local_var_name_str(
self.tcx, self.tcx,
upvar_id.var_id).get().to_str(), upvar_id.var_id).get().to_str(),
infer::fixup_err_to_str(e))); infer::fixup_err_to_str(e)).as_slice());
} }
ResolvingImplRes(span) => { ResolvingImplRes(span) => {
self.tcx.sess.span_err( self.tcx
span, .sess
format!("cannot determine a type for impl supertrait")); .span_err(span,
"cannot determine a type for impl \
supertrait");
} }
} }
} }


@ -431,8 +431,9 @@ impl<'a> CoherenceChecker<'a> {
session.span_err( session.span_err(
self.span_of_impl(impl_a), self.span_of_impl(impl_a),
format!("conflicting implementations for trait `{}`", format!("conflicting implementations for trait `{}`",
ty::item_path_str(self.crate_context.tcx, ty::item_path_str(
trait_def_id))); self.crate_context.tcx,
trait_def_id)).as_slice());
if impl_b.krate == LOCAL_CRATE { if impl_b.krate == LOCAL_CRATE {
session.span_note(self.span_of_impl(impl_b), session.span_note(self.span_of_impl(impl_b),
"note conflicting implementation here"); "note conflicting implementation here");
@ -442,7 +443,7 @@ impl<'a> CoherenceChecker<'a> {
session.note( session.note(
format!("conflicting implementation in crate \ format!("conflicting implementation in crate \
`{}`", `{}`",
cdata.name)); cdata.name).as_slice());
} }
} }
} }


@ -123,7 +123,8 @@ impl<'a> AstConv for CrateCtxt<'a> {
} }
x => { x => {
self.tcx.sess.bug(format!("unexpected sort of node \ self.tcx.sess.bug(format!("unexpected sort of node \
in get_item_ty(): {:?}", x)); in get_item_ty(): {:?}",
x).as_slice());
} }
} }
} }
@ -134,7 +135,8 @@ impl<'a> AstConv for CrateCtxt<'a> {
fn ty_infer(&self, span: Span) -> ty::t { fn ty_infer(&self, span: Span) -> ty::t {
self.tcx.sess.span_err(span, "the type placeholder `_` is not \ self.tcx.sess.span_err(span, "the type placeholder `_` is not \
allowed within types on item signatures."); allowed within types on item \
signatures.");
ty::mk_err() ty::mk_err()
} }
} }
@ -573,7 +575,8 @@ pub fn ensure_no_ty_param_bounds(ccx: &CrateCtxt,
if ty_param.bounds.len() > 0 { if ty_param.bounds.len() > 0 {
ccx.tcx.sess.span_err( ccx.tcx.sess.span_err(
span, span,
format!("trait bounds are not allowed in {} definitions", thing)); format!("trait bounds are not allowed in {} definitions",
thing).as_slice());
} }
} }
} }
@ -711,8 +714,10 @@ pub fn convert_struct(ccx: &CrateCtxt,
if result.name != special_idents::unnamed_field.name { if result.name != special_idents::unnamed_field.name {
let dup = match seen_fields.find(&result.name) { let dup = match seen_fields.find(&result.name) {
Some(prev_span) => { Some(prev_span) => {
tcx.sess.span_err(f.span, tcx.sess.span_err(
format!("field `{}` is already declared", token::get_name(result.name))); f.span,
format!("field `{}` is already declared",
token::get_name(result.name)).as_slice());
tcx.sess.span_note(*prev_span, tcx.sess.span_note(*prev_span,
"previously declared here"); "previously declared here");
true true
@ -840,7 +845,7 @@ pub fn instantiate_trait_ref(ccx: &CrateCtxt,
ccx.tcx.sess.span_fatal( ccx.tcx.sess.span_fatal(
ast_trait_ref.path.span, ast_trait_ref.path.span,
format!("`{}` is not a trait", format!("`{}` is not a trait",
path_to_str(&ast_trait_ref.path))); path_to_str(&ast_trait_ref.path)).as_slice());
} }
} }
} }
@ -852,8 +857,10 @@ fn get_trait_def(ccx: &CrateCtxt, trait_id: ast::DefId) -> Rc<ty::TraitDef> {
match ccx.tcx.map.get(trait_id.node) { match ccx.tcx.map.get(trait_id.node) {
ast_map::NodeItem(item) => trait_def_of_item(ccx, item), ast_map::NodeItem(item) => trait_def_of_item(ccx, item),
_ => ccx.tcx.sess.bug(format!("get_trait_def({}): not an item", _ => {
trait_id.node)) ccx.tcx.sess.bug(format!("get_trait_def({}): not an item",
trait_id.node).as_slice())
}
} }
} }
@ -889,7 +896,7 @@ pub fn trait_def_of_item(ccx: &CrateCtxt, it: &ast::Item) -> Rc<ty::TraitDef> {
ref s => { ref s => {
tcx.sess.span_bug( tcx.sess.span_bug(
it.span, it.span,
format!("trait_def_of_item invoked on {:?}", s)); format!("trait_def_of_item invoked on {:?}", s).as_slice());
} }
} }
} }
@ -960,9 +967,7 @@ pub fn ty_of_item(ccx: &CrateCtxt, it: &ast::Item)
return tpt; return tpt;
} }
ast::ItemTrait(..) => { ast::ItemTrait(..) => {
tcx.sess.span_bug( tcx.sess.span_bug(it.span, "invoked ty_of_item on trait");
it.span,
format!("invoked ty_of_item on trait"));
} }
ast::ItemStruct(_, ref generics) => { ast::ItemStruct(_, ref generics) => {
let ty_generics = ty_generics_for_type(ccx, generics); let ty_generics = ty_generics_for_type(ccx, generics);
@ -1113,8 +1118,7 @@ fn ty_generics(ccx: &CrateCtxt,
if !ccx.tcx.sess.features.issue_5723_bootstrap.get() { if !ccx.tcx.sess.features.issue_5723_bootstrap.get() {
ccx.tcx.sess.span_err( ccx.tcx.sess.span_err(
span, span,
format!("only the 'static lifetime is \ "only the 'static lifetime is accepted here.");
accepted here."));
} }
} }
} }
@ -1151,7 +1155,8 @@ fn ty_generics(ccx: &CrateCtxt,
format!("incompatible bounds on type parameter {}, \ format!("incompatible bounds on type parameter {}, \
bound {} does not allow unsized type", bound {} does not allow unsized type",
token::get_ident(ident), token::get_ident(ident),
ppaux::trait_ref_to_str(tcx, &*trait_ref))); ppaux::trait_ref_to_str(tcx,
&*trait_ref)).as_slice());
} }
true true
}); });


@ -212,7 +212,7 @@ impl<'f> Coerce<'f> {
self.get_ref().infcx.tcx.sess.span_bug( self.get_ref().infcx.tcx.sess.span_bug(
self.get_ref().trace.origin.span(), self.get_ref().trace.origin.span(),
format!("failed to resolve even without \ format!("failed to resolve even without \
any force options: {:?}", e)); any force options: {:?}", e).as_slice());
} }
} }
} }


@ -118,10 +118,9 @@ pub trait Combine {
// I think it should never happen that we unify two // I think it should never happen that we unify two
// substs and one of them has a self_ty and one // substs and one of them has a self_ty and one
// doesn't...? I could be wrong about this. // doesn't...? I could be wrong about this.
self.infcx().tcx.sess.bug( self.infcx().tcx.sess.bug("substitution a had a self_ty \
format!("substitution a had a self_ty \ and substitution b didn't, or \
and substitution b didn't, \ vice versa");
or vice versa"));
} }
} }
} }
@ -405,7 +404,7 @@ pub fn super_tys<C:Combine>(this: &C, a: ty::t, b: ty::t) -> cres<ty::t> {
format!("{}: bot and var types should have been handled ({},{})", format!("{}: bot and var types should have been handled ({},{})",
this.tag(), this.tag(),
a.inf_str(this.infcx()), a.inf_str(this.infcx()),
b.inf_str(this.infcx()))); b.inf_str(this.infcx())).as_slice());
} }
// Relate integral variables to other types // Relate integral variables to other types


@ -355,7 +355,7 @@ impl<'a> ErrorReporting for InferCtxt<'a> {
format!("{}: {} ({})", format!("{}: {} ({})",
message_root_str, message_root_str,
expected_found_str, expected_found_str,
ty::type_err_to_str(self.tcx, terr))); ty::type_err_to_str(self.tcx, terr)).as_slice());
} }
fn report_and_explain_type_error(&self, fn report_and_explain_type_error(&self,
@ -430,7 +430,10 @@ impl<'a> ErrorReporting for InferCtxt<'a> {
span, span,
format!("lifetime of borrowed pointer outlives \ format!("lifetime of borrowed pointer outlives \
lifetime of captured variable `{}`...", lifetime of captured variable `{}`...",
ty::local_var_name_str(self.tcx, upvar_id.var_id).get().to_str())); ty::local_var_name_str(self.tcx,
upvar_id.var_id)
.get()
.to_str()).as_slice());
note_and_explain_region( note_and_explain_region(
self.tcx, self.tcx,
"...the borrowed pointer is valid for ", "...the borrowed pointer is valid for ",
@ -439,7 +442,10 @@ impl<'a> ErrorReporting for InferCtxt<'a> {
note_and_explain_region( note_and_explain_region(
self.tcx, self.tcx,
format!("...but `{}` is only valid for ", format!("...but `{}` is only valid for ",
ty::local_var_name_str(self.tcx, upvar_id.var_id).get().to_str()), ty::local_var_name_str(self.tcx,
upvar_id.var_id)
.get()
.to_str()).as_slice(),
sup, sup,
""); "");
} }
@ -483,7 +489,9 @@ impl<'a> ErrorReporting for InferCtxt<'a> {
span, span,
format!("captured variable `{}` does not \ format!("captured variable `{}` does not \
outlive the enclosing closure", outlive the enclosing closure",
ty::local_var_name_str(self.tcx, id).get().to_str())); ty::local_var_name_str(self.tcx,
id).get()
.to_str()).as_slice());
note_and_explain_region( note_and_explain_region(
self.tcx, self.tcx,
"captured variable is valid for ", "captured variable is valid for ",
@ -496,9 +504,8 @@ impl<'a> ErrorReporting for InferCtxt<'a> {
""); "");
} }
infer::IndexSlice(span) => { infer::IndexSlice(span) => {
self.tcx.sess.span_err( self.tcx.sess.span_err(span,
span, "index of slice outside its lifetime");
format!("index of slice outside its lifetime"));
note_and_explain_region( note_and_explain_region(
self.tcx, self.tcx,
"the slice is only valid for ", "the slice is only valid for ",
@ -591,7 +598,7 @@ impl<'a> ErrorReporting for InferCtxt<'a> {
span, span,
format!("in type `{}`, pointer has a longer lifetime than \ format!("in type `{}`, pointer has a longer lifetime than \
the data it references", the data it references",
ty.user_string(self.tcx))); ty.user_string(self.tcx)).as_slice());
note_and_explain_region( note_and_explain_region(
self.tcx, self.tcx,
"the pointer is valid for ", "the pointer is valid for ",
@ -1022,8 +1029,13 @@ impl<'a> Rebuilder<'a> {
} }
ast::TyPath(ref path, _, id) => { ast::TyPath(ref path, _, id) => {
let a_def = match self.tcx.def_map.borrow().find(&id) { let a_def = match self.tcx.def_map.borrow().find(&id) {
None => self.tcx.sess.fatal(format!("unbound path {}", None => {
pprust::path_to_str(path))), self.tcx
.sess
.fatal(format!(
"unbound path {}",
pprust::path_to_str(path)).as_slice())
}
Some(&d) => d Some(&d) => d
}; };
match a_def { match a_def {
@ -1209,18 +1221,18 @@ impl<'a> ErrorReportingHelpers for InferCtxt<'a> {
opt_explicit_self, generics); opt_explicit_self, generics);
let msg = format!("consider using an explicit lifetime \ let msg = format!("consider using an explicit lifetime \
parameter as shown: {}", suggested_fn); parameter as shown: {}", suggested_fn);
self.tcx.sess.span_note(span, msg); self.tcx.sess.span_note(span, msg.as_slice());
} }
fn report_inference_failure(&self, fn report_inference_failure(&self,
var_origin: RegionVariableOrigin) { var_origin: RegionVariableOrigin) {
let var_description = match var_origin { let var_description = match var_origin {
infer::MiscVariable(_) => "".to_owned(), infer::MiscVariable(_) => "".to_strbuf(),
infer::PatternRegion(_) => " for pattern".to_owned(), infer::PatternRegion(_) => " for pattern".to_strbuf(),
infer::AddrOfRegion(_) => " for borrow expression".to_owned(), infer::AddrOfRegion(_) => " for borrow expression".to_strbuf(),
infer::AddrOfSlice(_) => " for slice expression".to_owned(), infer::AddrOfSlice(_) => " for slice expression".to_strbuf(),
infer::Autoref(_) => " for autoref".to_owned(), infer::Autoref(_) => " for autoref".to_strbuf(),
infer::Coercion(_) => " for automatic coercion".to_owned(), infer::Coercion(_) => " for automatic coercion".to_strbuf(),
infer::LateBoundRegion(_, br) => { infer::LateBoundRegion(_, br) => {
format!(" for {}in function call", format!(" for {}in function call",
bound_region_to_str(self.tcx, "lifetime parameter ", true, br)) bound_region_to_str(self.tcx, "lifetime parameter ", true, br))
@ -1247,7 +1259,7 @@ impl<'a> ErrorReportingHelpers for InferCtxt<'a> {
var_origin.span(), var_origin.span(),
format!("cannot infer an appropriate lifetime{} \ format!("cannot infer an appropriate lifetime{} \
due to conflicting requirements", due to conflicting requirements",
var_description)); var_description).as_slice());
} }
fn note_region_origin(&self, origin: SubregionOrigin) { fn note_region_origin(&self, origin: SubregionOrigin) {
@ -1282,7 +1294,7 @@ impl<'a> ErrorReportingHelpers for InferCtxt<'a> {
self.tcx.sess.span_note( self.tcx.sess.span_note(
trace.origin.span(), trace.origin.span(),
format!("...so that {} ({})", format!("...so that {} ({})",
desc, values_str)); desc, values_str).as_slice());
} }
None => { None => {
// Really should avoid printing this error at // Really should avoid printing this error at
@ -1291,7 +1303,7 @@ impl<'a> ErrorReportingHelpers for InferCtxt<'a> {
// doing right now. - nmatsakis // doing right now. - nmatsakis
self.tcx.sess.span_note( self.tcx.sess.span_note(
trace.origin.span(), trace.origin.span(),
format!("...so that {}", desc)); format!("...so that {}", desc).as_slice());
} }
} }
} }
@ -1304,8 +1316,11 @@ impl<'a> ErrorReportingHelpers for InferCtxt<'a> {
infer::ReborrowUpvar(span, ref upvar_id) => { infer::ReborrowUpvar(span, ref upvar_id) => {
self.tcx.sess.span_note( self.tcx.sess.span_note(
span, span,
format!("...so that closure can access `{}`", format!(
ty::local_var_name_str(self.tcx, upvar_id.var_id).get().to_str())) "...so that closure can access `{}`",
ty::local_var_name_str(self.tcx, upvar_id.var_id)
.get()
.to_str()).as_slice())
} }
infer::InfStackClosure(span) => { infer::InfStackClosure(span) => {
self.tcx.sess.span_note( self.tcx.sess.span_note(
@ -1328,7 +1343,9 @@ impl<'a> ErrorReportingHelpers for InferCtxt<'a> {
span, span,
format!("...so that captured variable `{}` \ format!("...so that captured variable `{}` \
does not outlive the enclosing closure", does not outlive the enclosing closure",
ty::local_var_name_str(self.tcx, id).get().to_str())); ty::local_var_name_str(
self.tcx,
id).get().to_str()).as_slice());
} }
infer::IndexSlice(span) => { infer::IndexSlice(span) => {
self.tcx.sess.span_note( self.tcx.sess.span_note(


@ -249,7 +249,8 @@ impl<'f> Combine for Glb<'f> {
} }
this.get_ref().infcx.tcx.sess.span_bug( this.get_ref().infcx.tcx.sess.span_bug(
this.get_ref().trace.origin.span(), this.get_ref().trace.origin.span(),
format!("could not find original bound region for {:?}", r)) format!("could not find original bound region for {:?}",
r).as_slice())
} }
fn fresh_bound_variable(this: &Glb, binder_id: NodeId) -> ty::Region { fn fresh_bound_variable(this: &Glb, binder_id: NodeId) -> ty::Region {


@ -529,7 +529,7 @@ pub fn var_ids<T:Combine>(this: &T,
r => { r => {
this.infcx().tcx.sess.span_bug( this.infcx().tcx.sess.span_bug(
this.trace().origin.span(), this.trace().origin.span(),
format!("found non-region-vid: {:?}", r)); format!("found non-region-vid: {:?}", r).as_slice());
} }
}).collect() }).collect()
} }


@ -185,8 +185,9 @@ impl<'f> Combine for Lub<'f> {
this.get_ref().infcx.tcx.sess.span_bug( this.get_ref().infcx.tcx.sess.span_bug(
this.get_ref().trace.origin.span(), this.get_ref().trace.origin.span(),
format!("Region {:?} is not associated with \ format!("region {:?} is not associated with \
any bound region from A!", r0)) any bound region from A!",
r0).as_slice())
} }
} }


@ -695,7 +695,7 @@ impl<'a> InferCtxt<'a> {
format!("resolve_type_vars_if_possible() yielded {} \ format!("resolve_type_vars_if_possible() yielded {} \
when supplied with {}", when supplied with {}",
self.ty_to_str(dummy0), self.ty_to_str(dummy0),
self.ty_to_str(dummy1))); self.ty_to_str(dummy1)).as_slice());
} }
} }
} }
@ -729,7 +729,7 @@ impl<'a> InferCtxt<'a> {
err: Option<&ty::type_err>) { err: Option<&ty::type_err>) {
debug!("hi! expected_ty = {:?}, actual_ty = {}", expected_ty, actual_ty); debug!("hi! expected_ty = {:?}, actual_ty = {}", expected_ty, actual_ty);
let error_str = err.map_or("".to_owned(), |t_err| { let error_str = err.map_or("".to_strbuf(), |t_err| {
format!(" ({})", ty::type_err_to_str(self.tcx, t_err)) format!(" ({})", ty::type_err_to_str(self.tcx, t_err))
}); });
let resolved_expected = expected_ty.map(|e_ty| { let resolved_expected = expected_ty.map(|e_ty| {
@ -737,11 +737,19 @@ impl<'a> InferCtxt<'a> {
}); });
if !resolved_expected.map_or(false, |e| { ty::type_is_error(e) }) { if !resolved_expected.map_or(false, |e| { ty::type_is_error(e) }) {
match resolved_expected { match resolved_expected {
None => self.tcx.sess.span_err(sp, None => {
format!("{}{}", mk_msg(None, actual_ty), error_str)), self.tcx
.sess
.span_err(sp,
format!("{}{}",
mk_msg(None, actual_ty),
error_str).as_slice())
}
Some(e) => { Some(e) => {
self.tcx.sess.span_err(sp, self.tcx.sess.span_err(sp,
format!("{}{}", mk_msg(Some(self.ty_to_str(e)), actual_ty), error_str)); format!("{}{}",
mk_msg(Some(self.ty_to_str(e)), actual_ty),
error_str).as_slice());
} }
} }
for err in err.iter() { for err in err.iter() {


@ -297,7 +297,7 @@ impl<'a> RegionVarBindings<'a> {
origin.span(), origin.span(),
format!("cannot relate bound region: {} <= {}", format!("cannot relate bound region: {} <= {}",
sub.repr(self.tcx), sub.repr(self.tcx),
sup.repr(self.tcx))); sup.repr(self.tcx)).as_slice());
} }
(_, ReStatic) => { (_, ReStatic) => {
// all regions are subregions of static, so we can ignore this // all regions are subregions of static, so we can ignore this
@ -369,8 +369,8 @@ impl<'a> RegionVarBindings<'a> {
None => { None => {
self.tcx.sess.span_bug( self.tcx.sess.span_bug(
self.var_origins.borrow().get(rid.to_uint()).span(), self.var_origins.borrow().get(rid.to_uint()).span(),
format!("attempt to resolve region variable before \ "attempt to resolve region variable before values have \
values have been computed!")) been computed!")
} }
Some(ref values) => *values.get(rid.to_uint()) Some(ref values) => *values.get(rid.to_uint())
}; };
@ -546,7 +546,7 @@ impl<'a> RegionVarBindings<'a> {
self.tcx.sess.bug( self.tcx.sess.bug(
format!("cannot relate bound region: LUB({}, {})", format!("cannot relate bound region: LUB({}, {})",
a.repr(self.tcx), a.repr(self.tcx),
b.repr(self.tcx))); b.repr(self.tcx)).as_slice());
} }
(ReStatic, _) | (_, ReStatic) => { (ReStatic, _) | (_, ReStatic) => {
@ -561,7 +561,9 @@ impl<'a> RegionVarBindings<'a> {
self.tcx.sess.span_bug( self.tcx.sess.span_bug(
self.var_origins.borrow().get(v_id.to_uint()).span(), self.var_origins.borrow().get(v_id.to_uint()).span(),
format!("lub_concrete_regions invoked with \ format!("lub_concrete_regions invoked with \
non-concrete regions: {:?}, {:?}", a, b)); non-concrete regions: {:?}, {:?}",
a,
b).as_slice());
} }
(f @ ReFree(ref fr), ReScope(s_id)) | (f @ ReFree(ref fr), ReScope(s_id)) |
@ -647,7 +649,7 @@ impl<'a> RegionVarBindings<'a> {
self.tcx.sess.bug( self.tcx.sess.bug(
format!("cannot relate bound region: GLB({}, {})", format!("cannot relate bound region: GLB({}, {})",
a.repr(self.tcx), a.repr(self.tcx),
b.repr(self.tcx))); b.repr(self.tcx)).as_slice());
} }
(ReStatic, r) | (r, ReStatic) => { (ReStatic, r) | (r, ReStatic) => {
@ -665,7 +667,9 @@ impl<'a> RegionVarBindings<'a> {
self.tcx.sess.span_bug( self.tcx.sess.span_bug(
self.var_origins.borrow().get(v_id.to_uint()).span(), self.var_origins.borrow().get(v_id.to_uint()).span(),
format!("glb_concrete_regions invoked with \ format!("glb_concrete_regions invoked with \
non-concrete regions: {:?}, {:?}", a, b)); non-concrete regions: {:?}, {:?}",
a,
b).as_slice());
} }
(ReFree(ref fr), s @ ReScope(s_id)) | (ReFree(ref fr), s @ ReScope(s_id)) |
@ -1175,7 +1179,7 @@ impl<'a> RegionVarBindings<'a> {
upper_bounds.iter() upper_bounds.iter()
.map(|x| x.region) .map(|x| x.region)
.collect::<Vec<ty::Region>>() .collect::<Vec<ty::Region>>()
.repr(self.tcx))); .repr(self.tcx)).as_slice());
} }
fn collect_error_for_contracting_node( fn collect_error_for_contracting_node(
@ -1222,7 +1226,7 @@ impl<'a> RegionVarBindings<'a> {
upper_bounds.iter() upper_bounds.iter()
.map(|x| x.region) .map(|x| x.region)
.collect::<Vec<ty::Region>>() .collect::<Vec<ty::Region>>()
.repr(self.tcx))); .repr(self.tcx)).as_slice());
} }
fn collect_concrete_regions(&self, fn collect_concrete_regions(&self,


@ -87,7 +87,7 @@ impl<'a> UnifyInferCtxtMethods for InferCtxt<'a> {
Some(&ref var_val) => (*var_val).clone(), Some(&ref var_val) => (*var_val).clone(),
None => { None => {
tcx.sess.bug(format!( tcx.sess.bug(format!(
"failed lookup of vid `{}`", vid_u)); "failed lookup of vid `{}`", vid_u).as_slice());
} }
}; };
match var_val { match var_val {


@ -311,7 +311,8 @@ pub fn require_same_types(tcx: &ty::ctxt,
tcx.sess.span_err(span, tcx.sess.span_err(span,
format!("{}: {}", format!("{}: {}",
msg(), msg(),
ty::type_err_to_str(tcx, terr))); ty::type_err_to_str(tcx,
terr)).as_slice());
ty::note_and_explain_type_err(tcx, terr); ty::note_and_explain_type_err(tcx, terr);
false false
} }
@ -359,8 +360,10 @@ fn check_main_fn_ty(ccx: &CrateCtxt,
} }
_ => { _ => {
tcx.sess.span_bug(main_span, tcx.sess.span_bug(main_span,
format!("main has a non-function type: found `{}`", format!("main has a non-function type: found \
ppaux::ty_to_str(tcx, main_t))); `{}`",
ppaux::ty_to_str(tcx,
main_t)).as_slice());
} }
} }
} }
@ -411,8 +414,10 @@ fn check_start_fn_ty(ccx: &CrateCtxt,
} }
_ => { _ => {
tcx.sess.span_bug(start_span, tcx.sess.span_bug(start_span,
format!("start has a non-function type: found `{}`", format!("start has a non-function type: found \
ppaux::ty_to_str(tcx, start_t))); `{}`",
ppaux::ty_to_str(tcx,
start_t)).as_slice());
} }
} }
} }


@ -538,8 +538,8 @@ impl<'a> ConstraintContext<'a> {
Some(&index) => index, Some(&index) => index,
None => { None => {
self.tcx().sess.bug(format!( self.tcx().sess.bug(format!(
"No inferred index entry for {}", "no inferred index entry for {}",
self.tcx().map.node_to_str(param_id))); self.tcx().map.node_to_str(param_id)).as_slice());
} }
} }
} }
@ -787,7 +787,7 @@ impl<'a> ConstraintContext<'a> {
self.tcx().sess.bug( self.tcx().sess.bug(
format!("unexpected type encountered in \ format!("unexpected type encountered in \
variance inference: {}", variance inference: {}",
ty.repr(self.tcx()))); ty.repr(self.tcx())).as_slice());
} }
} }
} }
@ -858,9 +858,11 @@ impl<'a> ConstraintContext<'a> {
ty::ReEmpty => { ty::ReEmpty => {
// We don't expect to see anything but 'static or bound // We don't expect to see anything but 'static or bound
// regions when visiting member types or method types. // regions when visiting member types or method types.
self.tcx().sess.bug(format!("unexpected region encountered in \ self.tcx()
variance inference: {}", .sess
region.repr(self.tcx()))); .bug(format!("unexpected region encountered in variance \
inference: {}",
region.repr(self.tcx())).as_slice());
} }
} }
} }


@ -50,11 +50,11 @@ pub fn note_and_explain_region(cx: &ctxt,
(ref str, Some(span)) => { (ref str, Some(span)) => {
cx.sess.span_note( cx.sess.span_note(
span, span,
format!("{}{}{}", prefix, *str, suffix)); format!("{}{}{}", prefix, *str, suffix).as_slice());
} }
(ref str, None) => { (ref str, None) => {
cx.sess.note( cx.sess.note(
format!("{}{}{}", prefix, *str, suffix)); format!("{}{}{}", prefix, *str, suffix).as_slice());
} }
} }
} }
@ -255,13 +255,13 @@ pub fn ty_to_str(cx: &ctxt, typ: t) -> StrBuf {
match fn_style { match fn_style {
ast::NormalFn => {} ast::NormalFn => {}
_ => { _ => {
s.push_str(fn_style.to_str()); s.push_str(fn_style.to_str().as_slice());
s.push_char(' '); s.push_char(' ');
} }
}; };
if abi != abi::Rust { if abi != abi::Rust {
s.push_str(format!("extern {} ", abi.to_str())); s.push_str(format!("extern {} ", abi.to_str()).as_slice());
}; };
s.push_str("fn"); s.push_str("fn");
@ -292,7 +292,7 @@ pub fn ty_to_str(cx: &ctxt, typ: t) -> StrBuf {
match cty.fn_style { match cty.fn_style {
ast::NormalFn => {} ast::NormalFn => {}
_ => { _ => {
s.push_str(cty.fn_style.to_str()); s.push_str(cty.fn_style.to_str().as_slice());
s.push_char(' '); s.push_char(' ');
} }
}; };


@ -184,12 +184,12 @@ fn path(w: &mut fmt::Formatter, path: &clean::Path, print_all: bool,
for lifetime in last.lifetimes.iter() { for lifetime in last.lifetimes.iter() {
if counter > 0 { generics.push_str(", "); } if counter > 0 { generics.push_str(", "); }
counter += 1; counter += 1;
generics.push_str(format!("{}", *lifetime)); generics.push_str(format!("{}", *lifetime).as_slice());
} }
for ty in last.types.iter() { for ty in last.types.iter() {
if counter > 0 { generics.push_str(", "); } if counter > 0 { generics.push_str(", "); }
counter += 1; counter += 1;
generics.push_str(format!("{}", *ty)); generics.push_str(format!("{}", *ty).as_slice());
} }
generics.push_str("&gt;"); generics.push_str("&gt;");
} }
@ -323,18 +323,22 @@ impl fmt::Show for clean::Type {
{arrow, select, yes{ -&gt; {ret}} other{}}", {arrow, select, yes{ -&gt; {ret}} other{}}",
style = FnStyleSpace(decl.fn_style), style = FnStyleSpace(decl.fn_style),
lifetimes = if decl.lifetimes.len() == 0 { lifetimes = if decl.lifetimes.len() == 0 {
"".to_owned() "".to_strbuf()
} else { } else {
format!("&lt;{:#}&gt;", decl.lifetimes) format!("&lt;{:#}&gt;", decl.lifetimes)
}, },
args = decl.decl.inputs, args = decl.decl.inputs,
arrow = match decl.decl.output { clean::Unit => "no", _ => "yes" }, arrow = match decl.decl.output {
clean::Unit => "no",
_ => "yes",
},
ret = decl.decl.output, ret = decl.decl.output,
bounds = { bounds = {
let mut ret = StrBuf::new(); let mut ret = StrBuf::new();
match *region { match *region {
Some(ref lt) => { Some(ref lt) => {
ret.push_str(format!(": {}", *lt)); ret.push_str(format!(": {}",
*lt).as_slice());
} }
None => {} None => {}
} }
@ -347,7 +351,8 @@ impl fmt::Show for clean::Type {
} else { } else {
ret.push_str(" + "); ret.push_str(" + ");
} }
ret.push_str(format!("{}", *t)); ret.push_str(format!("{}",
*t).as_slice());
} }
} }
} }
@ -416,7 +421,10 @@ impl fmt::Show for clean::Type {
}, **t) }, **t)
} }
clean::BorrowedRef{ lifetime: ref l, mutability, type_: ref ty} => { clean::BorrowedRef{ lifetime: ref l, mutability, type_: ref ty} => {
let lt = match *l { Some(ref l) => format!("{} ", *l), _ => "".to_owned() }; let lt = match *l {
Some(ref l) => format!("{} ", *l),
_ => "".to_strbuf(),
};
write!(f, "&amp;{}{}{}", write!(f, "&amp;{}{}{}",
lt, lt,
match mutability { match mutability {
@ -460,10 +468,10 @@ impl<'a> fmt::Show for Method<'a> {
clean::SelfValue => args.push_str("self"), clean::SelfValue => args.push_str("self"),
clean::SelfOwned => args.push_str("~self"), clean::SelfOwned => args.push_str("~self"),
clean::SelfBorrowed(Some(ref lt), clean::Immutable) => { clean::SelfBorrowed(Some(ref lt), clean::Immutable) => {
args.push_str(format!("&amp;{} self", *lt)); args.push_str(format!("&amp;{} self", *lt).as_slice());
} }
clean::SelfBorrowed(Some(ref lt), clean::Mutable) => { clean::SelfBorrowed(Some(ref lt), clean::Mutable) => {
args.push_str(format!("&amp;{} mut self", *lt)); args.push_str(format!("&amp;{} mut self", *lt).as_slice());
} }
clean::SelfBorrowed(None, clean::Mutable) => { clean::SelfBorrowed(None, clean::Mutable) => {
args.push_str("&amp;mut self"); args.push_str("&amp;mut self");
@ -475,9 +483,9 @@ impl<'a> fmt::Show for Method<'a> {
for (i, input) in d.inputs.values.iter().enumerate() { for (i, input) in d.inputs.values.iter().enumerate() {
if i > 0 || args.len() > 0 { args.push_str(", "); } if i > 0 || args.len() > 0 { args.push_str(", "); }
if input.name.len() > 0 { if input.name.len() > 0 {
args.push_str(format!("{}: ", input.name)); args.push_str(format!("{}: ", input.name).as_slice());
} }
args.push_str(format!("{}", input.type_)); args.push_str(format!("{}", input.type_).as_slice());
} }
write!(f, write!(f,
"({args}){arrow, select, yes{ -&gt; {ret}} other{}}", "({args}){arrow, select, yes{ -&gt; {ret}} other{}}",


@ -407,8 +407,11 @@ pub fn run(mut krate: clean::Crate, dst: Path) -> io::IoResult<()> {
if path.exists() { if path.exists() {
for line in BufferedReader::new(File::open(path)).lines() { for line in BufferedReader::new(File::open(path)).lines() {
let line = try!(line); let line = try!(line);
if !line.starts_with(key) { continue } if !line.as_slice().starts_with(key) {
if line.starts_with(format!("{}['{}']", key, krate)) { continue
}
if line.as_slice().starts_with(
format!("{}['{}']", key, krate).as_slice()) {
continue continue
} }
ret.push(line.to_strbuf()); ret.push(line.to_strbuf());
@ -646,7 +649,7 @@ impl<'a> SourceCollector<'a> {
let title = format!("{} -- source", cur.filename_display()); let title = format!("{} -- source", cur.filename_display());
let page = layout::Page { let page = layout::Page {
title: title, title: title.as_slice(),
ty: "source", ty: "source",
root_path: root_path.as_slice(), root_path: root_path.as_slice(),
}; };
@ -1344,7 +1347,7 @@ fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item,
parents.push_str(": "); parents.push_str(": ");
for (i, p) in t.parents.iter().enumerate() { for (i, p) in t.parents.iter().enumerate() {
if i > 0 { parents.push_str(" + "); } if i > 0 { parents.push_str(" + "); }
parents.push_str(format!("{}", *p)); parents.push_str(format!("{}", *p).as_slice());
} }
} }


@ -132,7 +132,7 @@ pub fn opts() -> Vec<getopts::OptGroup> {
pub fn usage(argv0: &str) { pub fn usage(argv0: &str) {
println!("{}", println!("{}",
getopts::usage(format!("{} [options] <input>", argv0), getopts::usage(format!("{} [options] <input>", argv0).as_slice(),
opts().as_slice())); opts().as_slice()));
} }


@ -80,7 +80,7 @@ pub fn render(input: &str, mut output: Path, matches: &getopts::Matches) -> int
let mut css = StrBuf::new(); let mut css = StrBuf::new();
for name in matches.opt_strs("markdown-css").iter() { for name in matches.opt_strs("markdown-css").iter() {
let s = format!("<link rel=\"stylesheet\" type=\"text/css\" href=\"{}\">\n", name); let s = format!("<link rel=\"stylesheet\" type=\"text/css\" href=\"{}\">\n", name);
css.push_str(s) css.push_str(s.as_slice())
} }
let input_str = load_or_return!(input, 1, 2); let input_str = load_or_return!(input, 1, 2);


@ -213,7 +213,8 @@ fn maketest(s: &str, cratename: &str, loose_feature_gating: bool) -> StrBuf {
if !s.contains("extern crate") { if !s.contains("extern crate") {
if s.contains(cratename) { if s.contains(cratename) {
prog.push_str(format!("extern crate {};\n", cratename)); prog.push_str(format!("extern crate {};\n",
cratename).as_slice());
} }
} }
if s.contains("fn main") { if s.contains("fn main") {


@ -387,7 +387,7 @@ impl fmt::Show for UvError {
#[test] #[test]
fn error_smoke_test() { fn error_smoke_test() {
let err: UvError = UvError(uvll::EOF); let err: UvError = UvError(uvll::EOF);
assert_eq!(err.to_str(), "EOF: end of file".to_owned()); assert_eq!(err.to_str(), "EOF: end of file".to_strbuf());
} }
pub fn uv_error_to_io_error(uverr: UvError) -> IoError { pub fn uv_error_to_io_error(uverr: UvError) -> IoError {


@ -663,7 +663,7 @@ pub mod writer {
_ => Err(io::IoError { _ => Err(io::IoError {
kind: io::OtherIoError, kind: io::OtherIoError,
desc: "int too big", desc: "int too big",
detail: Some(format!("{}", n)) detail: Some(format_strbuf!("{}", n))
}) })
} }
} }
@ -676,7 +676,7 @@ pub mod writer {
Err(io::IoError { Err(io::IoError {
kind: io::OtherIoError, kind: io::OtherIoError,
desc: "int too big", desc: "int too big",
detail: Some(format!("{}", n)) detail: Some(format_strbuf!("{}", n))
}) })
} }
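Where the formatted text is stored rather than passed to a `&str` parameter, as with the `detail` field of the `IoError` above, the commit reaches for `format_strbuf!`, which yields the owned `StrBuf` directly instead of going through `format!(...).as_slice()`. A brief sketch, assuming `format_strbuf!` behaves as the hunks above use it:

    fn main() {
        let n = 300i;
        // Owned result that is kept around: build the StrBuf directly.
        let detail = Some(format_strbuf!("{}", n));
        assert_eq!(detail.unwrap().as_slice(), "300");
    }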


@ -16,10 +16,9 @@ use iter::Iterator;
use mem; use mem;
use option::{Option, Some, None}; use option::{Option, Some, None};
use slice::{ImmutableVector, MutableVector, Vector}; use slice::{ImmutableVector, MutableVector, Vector};
use str::OwnedStr; use str::{OwnedStr, Str, StrAllocating, StrSlice};
use str::Str;
use str::{StrAllocating, StrSlice};
use str; use str;
use strbuf::StrBuf;
use to_str::{IntoStr}; use to_str::{IntoStr};
use vec::Vec; use vec::Vec;
@ -249,7 +248,7 @@ impl OwnedAsciiCast for ~[u8] {
} }
} }
impl OwnedAsciiCast for ~str { impl OwnedAsciiCast for StrBuf {
#[inline] #[inline]
fn is_ascii(&self) -> bool { fn is_ascii(&self) -> bool {
self.as_slice().is_ascii() self.as_slice().is_ascii()
@ -257,7 +256,7 @@ impl OwnedAsciiCast for ~str {
#[inline] #[inline]
unsafe fn into_ascii_nocheck(self) -> Vec<Ascii> { unsafe fn into_ascii_nocheck(self) -> Vec<Ascii> {
let v: ~[u8] = mem::transmute(self); let v: Vec<u8> = mem::transmute(self);
v.into_ascii_nocheck() v.into_ascii_nocheck()
} }
} }
@ -314,17 +313,18 @@ impl<'a> AsciiStr for &'a [Ascii] {
impl IntoStr for ~[Ascii] { impl IntoStr for ~[Ascii] {
#[inline] #[inline]
fn into_str(self) -> ~str { fn into_str(self) -> StrBuf {
unsafe { mem::transmute(self) } let vector: Vec<Ascii> = self.as_slice().iter().map(|x| *x).collect();
vector.into_str()
} }
} }
impl IntoStr for Vec<Ascii> { impl IntoStr for Vec<Ascii> {
#[inline] #[inline]
fn into_str(self) -> ~str { fn into_str(self) -> StrBuf {
unsafe { unsafe {
let s: &str = mem::transmute(self.as_slice()); let s: &str = mem::transmute(self.as_slice());
s.to_owned() s.to_strbuf()
} }
} }
} }
@ -346,12 +346,12 @@ pub trait OwnedStrAsciiExt {
/// Convert the string to ASCII upper case: /// Convert the string to ASCII upper case:
/// ASCII letters 'a' to 'z' are mapped to 'A' to 'Z', /// ASCII letters 'a' to 'z' are mapped to 'A' to 'Z',
/// but non-ASCII letters are unchanged. /// but non-ASCII letters are unchanged.
fn into_ascii_upper(self) -> ~str; fn into_ascii_upper(self) -> StrBuf;
/// Convert the string to ASCII lower case: /// Convert the string to ASCII lower case:
/// ASCII letters 'A' to 'Z' are mapped to 'a' to 'z', /// ASCII letters 'A' to 'Z' are mapped to 'a' to 'z',
/// but non-ASCII letters are unchanged. /// but non-ASCII letters are unchanged.
fn into_ascii_lower(self) -> ~str; fn into_ascii_lower(self) -> StrBuf;
} }
/// Extension methods for ASCII-subset only operations on string slices /// Extension methods for ASCII-subset only operations on string slices
@ -359,12 +359,12 @@ pub trait StrAsciiExt {
/// Makes a copy of the string in ASCII upper case: /// Makes a copy of the string in ASCII upper case:
/// ASCII letters 'a' to 'z' are mapped to 'A' to 'Z', /// ASCII letters 'a' to 'z' are mapped to 'A' to 'Z',
/// but non-ASCII letters are unchanged. /// but non-ASCII letters are unchanged.
fn to_ascii_upper(&self) -> ~str; fn to_ascii_upper(&self) -> StrBuf;
/// Makes a copy of the string in ASCII lower case: /// Makes a copy of the string in ASCII lower case:
/// ASCII letters 'A' to 'Z' are mapped to 'a' to 'z', /// ASCII letters 'A' to 'Z' are mapped to 'a' to 'z',
/// but non-ASCII letters are unchanged. /// but non-ASCII letters are unchanged.
fn to_ascii_lower(&self) -> ~str; fn to_ascii_lower(&self) -> StrBuf;
/// Check that two strings are an ASCII case-insensitive match. /// Check that two strings are an ASCII case-insensitive match.
/// Same as `to_ascii_lower(a) == to_ascii_lower(b)`, /// Same as `to_ascii_lower(a) == to_ascii_lower(b)`,
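The doc comments above describe the ASCII-only case conversions; after this change both the borrowing (`to_ascii_*`) and the consuming (`into_ascii_*`) forms return a `StrBuf` rather than a `~str`. A minimal usage sketch, assuming the mid-2014 `std::ascii` exposes these traits under the names shown here:

    use std::ascii::{StrAsciiExt, OwnedStrAsciiExt};

    fn main() {
        // Borrowing form: copies the slice, non-ASCII letters are left alone.
        assert_eq!("url()ürl".to_ascii_upper().as_slice(), "URL()üRL");
        // Consuming form: takes and returns an owned StrBuf.
        let owned = "MiXeD".to_ascii_upper().into_ascii_lower();
        assert_eq!(owned.as_slice(), "mixed");
    }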
@ -374,12 +374,12 @@ pub trait StrAsciiExt {
impl<'a> StrAsciiExt for &'a str { impl<'a> StrAsciiExt for &'a str {
#[inline] #[inline]
fn to_ascii_upper(&self) -> ~str { fn to_ascii_upper(&self) -> StrBuf {
unsafe { str_copy_map_bytes(*self, ASCII_UPPER_MAP) } unsafe { str_copy_map_bytes(*self, ASCII_UPPER_MAP) }
} }
#[inline] #[inline]
fn to_ascii_lower(&self) -> ~str { fn to_ascii_lower(&self) -> StrBuf {
unsafe { str_copy_map_bytes(*self, ASCII_LOWER_MAP) } unsafe { str_copy_map_bytes(*self, ASCII_LOWER_MAP) }
} }
@ -394,36 +394,36 @@ impl<'a> StrAsciiExt for &'a str {
} }
} }
impl OwnedStrAsciiExt for ~str { impl OwnedStrAsciiExt for StrBuf {
#[inline] #[inline]
fn into_ascii_upper(self) -> ~str { fn into_ascii_upper(self) -> StrBuf {
unsafe { str_map_bytes(self, ASCII_UPPER_MAP) } unsafe { str_map_bytes(self, ASCII_UPPER_MAP) }
} }
#[inline] #[inline]
fn into_ascii_lower(self) -> ~str { fn into_ascii_lower(self) -> StrBuf {
unsafe { str_map_bytes(self, ASCII_LOWER_MAP) } unsafe { str_map_bytes(self, ASCII_LOWER_MAP) }
} }
} }
#[inline] #[inline]
unsafe fn str_map_bytes(string: ~str, map: &'static [u8]) -> ~str { unsafe fn str_map_bytes(string: StrBuf, map: &'static [u8]) -> StrBuf {
let mut bytes = string.into_bytes(); let mut bytes = string.into_bytes();
for b in bytes.mut_iter() { for b in bytes.mut_iter() {
*b = map[*b as uint]; *b = map[*b as uint];
} }
str::raw::from_utf8_owned(bytes) str::from_utf8(bytes.as_slice()).unwrap().to_strbuf()
} }
#[inline] #[inline]
unsafe fn str_copy_map_bytes(string: &str, map: &'static [u8]) -> ~str { unsafe fn str_copy_map_bytes(string: &str, map: &'static [u8]) -> StrBuf {
let mut s = string.to_owned(); let mut s = string.to_owned();
for b in str::raw::as_owned_vec(&mut s).mut_iter() { for b in str::raw::as_owned_vec(&mut s).mut_iter() {
*b = map[*b as uint]; *b = map[*b as uint];
} }
s s.into_strbuf()
} }
static ASCII_LOWER_MAP: &'static [u8] = &[ static ASCII_LOWER_MAP: &'static [u8] = &[
@ -552,15 +552,17 @@ mod tests {
assert_eq!("( ;".to_ascii(), v2ascii!([40, 32, 59])); assert_eq!("( ;".to_ascii(), v2ascii!([40, 32, 59]));
// FIXME: #5475 borrowchk error, owned vectors do not live long enough // FIXME: #5475 borrowchk error, owned vectors do not live long enough
// if chained-from directly // if chained-from directly
let v = box [40u8, 32u8, 59u8]; assert_eq!(v.to_ascii(), v2ascii!([40, 32, 59])); let v = box [40u8, 32u8, 59u8];
let v = "( ;".to_owned(); assert_eq!(v.to_ascii(), v2ascii!([40, 32, 59])); assert_eq!(v.to_ascii(), v2ascii!([40, 32, 59]));
let v = "( ;".to_strbuf();
assert_eq!(v.as_slice().to_ascii(), v2ascii!([40, 32, 59]));
assert_eq!("abCDef&?#".to_ascii().to_lower().into_str(), "abcdef&?#".to_owned()); assert_eq!("abCDef&?#".to_ascii().to_lower().into_str(), "abcdef&?#".to_strbuf());
assert_eq!("abCDef&?#".to_ascii().to_upper().into_str(), "ABCDEF&?#".to_owned()); assert_eq!("abCDef&?#".to_ascii().to_upper().into_str(), "ABCDEF&?#".to_strbuf());
assert_eq!("".to_ascii().to_lower().into_str(), "".to_owned()); assert_eq!("".to_ascii().to_lower().into_str(), "".to_strbuf());
assert_eq!("YMCA".to_ascii().to_lower().into_str(), "ymca".to_owned()); assert_eq!("YMCA".to_ascii().to_lower().into_str(), "ymca".to_strbuf());
assert_eq!("abcDEFxyz:.;".to_ascii().to_upper().into_str(), "ABCDEFXYZ:.;".to_owned()); assert_eq!("abcDEFxyz:.;".to_ascii().to_upper().into_str(), "ABCDEFXYZ:.;".to_strbuf());
assert!("aBcDeF&?#".to_ascii().eq_ignore_case("AbCdEf&?#".to_ascii())); assert!("aBcDeF&?#".to_ascii().eq_ignore_case("AbCdEf&?#".to_ascii()));
@ -572,16 +574,16 @@ mod tests {
#[test] #[test]
fn test_ascii_vec_ng() { fn test_ascii_vec_ng() {
assert_eq!("abCDef&?#".to_ascii().to_lower().into_str(), "abcdef&?#".to_owned()); assert_eq!("abCDef&?#".to_ascii().to_lower().into_str(), "abcdef&?#".to_strbuf());
assert_eq!("abCDef&?#".to_ascii().to_upper().into_str(), "ABCDEF&?#".to_owned()); assert_eq!("abCDef&?#".to_ascii().to_upper().into_str(), "ABCDEF&?#".to_strbuf());
assert_eq!("".to_ascii().to_lower().into_str(), "".to_owned()); assert_eq!("".to_ascii().to_lower().into_str(), "".to_strbuf());
assert_eq!("YMCA".to_ascii().to_lower().into_str(), "ymca".to_owned()); assert_eq!("YMCA".to_ascii().to_lower().into_str(), "ymca".to_strbuf());
assert_eq!("abcDEFxyz:.;".to_ascii().to_upper().into_str(), "ABCDEFXYZ:.;".to_owned()); assert_eq!("abcDEFxyz:.;".to_ascii().to_upper().into_str(), "ABCDEFXYZ:.;".to_strbuf());
} }
#[test] #[test]
fn test_owned_ascii_vec() { fn test_owned_ascii_vec() {
assert_eq!(("( ;".to_owned()).into_ascii(), vec2ascii![40, 32, 59]); assert_eq!(("( ;".to_strbuf()).into_ascii(), vec2ascii![40, 32, 59]);
assert_eq!((box [40u8, 32u8, 59u8]).into_ascii(), vec2ascii![40, 32, 59]); assert_eq!((box [40u8, 32u8, 59u8]).into_ascii(), vec2ascii![40, 32, 59]);
} }
@ -593,8 +595,8 @@ mod tests {
#[test] #[test]
fn test_ascii_into_str() { fn test_ascii_into_str() {
assert_eq!(vec2ascii![40, 32, 59].into_str(), "( ;".to_owned()); assert_eq!(vec2ascii![40, 32, 59].into_str(), "( ;".to_strbuf());
assert_eq!(vec2ascii!(40, 32, 59).into_str(), "( ;".to_owned()); assert_eq!(vec2ascii!(40, 32, 59).into_str(), "( ;".to_strbuf());
} }
#[test] #[test]
@ -641,70 +643,70 @@ mod tests {
assert_eq!((vec![40u8, 32u8, 59u8]).into_ascii_opt(), Some(vec2ascii![40, 32, 59])); assert_eq!((vec![40u8, 32u8, 59u8]).into_ascii_opt(), Some(vec2ascii![40, 32, 59]));
assert_eq!((vec![127u8, 128u8, 255u8]).into_ascii_opt(), None); assert_eq!((vec![127u8, 128u8, 255u8]).into_ascii_opt(), None);
assert_eq!(("( ;".to_owned()).into_ascii_opt(), Some(vec2ascii![40, 32, 59])); assert_eq!(("( ;".to_strbuf()).into_ascii_opt(), Some(vec2ascii![40, 32, 59]));
assert_eq!(("zoä华".to_owned()).into_ascii_opt(), None); assert_eq!(("zoä华".to_strbuf()).into_ascii_opt(), None);
} }
#[test] #[test]
fn test_to_ascii_upper() { fn test_to_ascii_upper() {
assert_eq!("url()URL()uRl()ürl".to_ascii_upper(), "URL()URL()URL()üRL".to_owned()); assert_eq!("url()URL()uRl()ürl".to_ascii_upper(), "URL()URL()URL()üRL".to_strbuf());
assert_eq!("hıß".to_ascii_upper(), "Hıß".to_owned()); assert_eq!("hıß".to_ascii_upper(), "Hıß".to_strbuf());
let mut i = 0; let mut i = 0;
while i <= 500 { while i <= 500 {
let upper = if 'a' as u32 <= i && i <= 'z' as u32 { i + 'A' as u32 - 'a' as u32 } let upper = if 'a' as u32 <= i && i <= 'z' as u32 { i + 'A' as u32 - 'a' as u32 }
else { i }; else { i };
assert_eq!(from_char(from_u32(i).unwrap()).to_ascii_upper(), assert_eq!(from_char(from_u32(i).unwrap()).to_ascii_upper(),
from_char(from_u32(upper).unwrap())) from_char(from_u32(upper).unwrap()).to_strbuf())
i += 1; i += 1;
} }
} }
#[test] #[test]
fn test_to_ascii_lower() { fn test_to_ascii_lower() {
assert_eq!("url()URL()uRl()Ürl".to_ascii_lower(), "url()url()url()Ürl".to_owned()); assert_eq!("url()URL()uRl()Ürl".to_ascii_lower(), "url()url()url()Ürl".to_strbuf());
// Dotted capital I, Kelvin sign, Sharp S. // Dotted capital I, Kelvin sign, Sharp S.
assert_eq!("ß".to_ascii_lower(), "ß".to_owned()); assert_eq!("ß".to_ascii_lower(), "ß".to_strbuf());
let mut i = 0; let mut i = 0;
while i <= 500 { while i <= 500 {
let lower = if 'A' as u32 <= i && i <= 'Z' as u32 { i + 'a' as u32 - 'A' as u32 } let lower = if 'A' as u32 <= i && i <= 'Z' as u32 { i + 'a' as u32 - 'A' as u32 }
else { i }; else { i };
assert_eq!(from_char(from_u32(i).unwrap()).to_ascii_lower(), assert_eq!(from_char(from_u32(i).unwrap()).to_ascii_lower(),
from_char(from_u32(lower).unwrap())) from_char(from_u32(lower).unwrap()).to_strbuf())
i += 1; i += 1;
} }
} }
#[test] #[test]
fn test_into_ascii_upper() { fn test_into_ascii_upper() {
assert_eq!(("url()URL()uRl()ürl".to_owned()).into_ascii_upper(), assert_eq!(("url()URL()uRl()ürl".to_strbuf()).into_ascii_upper(),
"URL()URL()URL()üRL".to_owned()); "URL()URL()URL()üRL".to_strbuf());
assert_eq!(("hıß".to_owned()).into_ascii_upper(), "Hıß".to_owned()); assert_eq!(("hıß".to_strbuf()).into_ascii_upper(), "Hıß".to_strbuf());
let mut i = 0; let mut i = 0;
while i <= 500 { while i <= 500 {
let upper = if 'a' as u32 <= i && i <= 'z' as u32 { i + 'A' as u32 - 'a' as u32 } let upper = if 'a' as u32 <= i && i <= 'z' as u32 { i + 'A' as u32 - 'a' as u32 }
else { i }; else { i };
assert_eq!(from_char(from_u32(i).unwrap()).into_ascii_upper(), assert_eq!(from_char(from_u32(i).unwrap()).to_strbuf().into_ascii_upper(),
from_char(from_u32(upper).unwrap())) from_char(from_u32(upper).unwrap()).to_strbuf())
i += 1; i += 1;
} }
} }
#[test] #[test]
fn test_into_ascii_lower() { fn test_into_ascii_lower() {
assert_eq!(("url()URL()uRl()Ürl".to_owned()).into_ascii_lower(), assert_eq!(("url()URL()uRl()Ürl".to_strbuf()).into_ascii_lower(),
"url()url()url()Ürl".to_owned()); "url()url()url()Ürl".to_strbuf());
// Dotted capital I, Kelvin sign, Sharp S. // Dotted capital I, Kelvin sign, Sharp S.
assert_eq!(("ß".to_owned()).into_ascii_lower(), "ß".to_owned()); assert_eq!(("ß".to_strbuf()).into_ascii_lower(), "ß".to_strbuf());
let mut i = 0; let mut i = 0;
while i <= 500 { while i <= 500 {
let lower = if 'A' as u32 <= i && i <= 'Z' as u32 { i + 'a' as u32 - 'A' as u32 } let lower = if 'A' as u32 <= i && i <= 'Z' as u32 { i + 'a' as u32 - 'A' as u32 }
else { i }; else { i };
assert_eq!(from_char(from_u32(i).unwrap()).into_ascii_lower(), assert_eq!(from_char(from_u32(i).unwrap()).to_strbuf().into_ascii_lower(),
from_char(from_u32(lower).unwrap())) from_char(from_u32(lower).unwrap()).to_strbuf())
i += 1; i += 1;
} }
} }
@ -724,8 +726,11 @@ mod tests {
let c = i; let c = i;
let lower = if 'A' as u32 <= c && c <= 'Z' as u32 { c + 'a' as u32 - 'A' as u32 } let lower = if 'A' as u32 <= c && c <= 'Z' as u32 { c + 'a' as u32 - 'A' as u32 }
else { c }; else { c };
assert!(from_char(from_u32(i).unwrap()). assert!(from_char(from_u32(i).unwrap()).as_slice()
eq_ignore_ascii_case(from_char(from_u32(lower).unwrap()))); .eq_ignore_ascii_case(
from_char(
from_u32(lower)
.unwrap())));
i += 1; i += 1;
} }
} }
@ -733,12 +738,12 @@ mod tests {
#[test] #[test]
fn test_to_str() { fn test_to_str() {
let s = Ascii{ chr: 't' as u8 }.to_str(); let s = Ascii{ chr: 't' as u8 }.to_str();
assert_eq!(s, "t".to_owned()); assert_eq!(s, "t".to_strbuf());
} }
#[test] #[test]
fn test_show() { fn test_show() {
let c = Ascii { chr: 't' as u8 }; let c = Ascii { chr: 't' as u8 };
assert_eq!(format!("{}", c), "t".to_owned()); assert_eq!(format_strbuf!("{}", c), "t".to_strbuf());
} }
} }


@ -990,7 +990,7 @@ mod test {
pub fn stress_factor() -> uint { pub fn stress_factor() -> uint {
match os::getenv("RUST_TEST_STRESS") { match os::getenv("RUST_TEST_STRESS") {
Some(val) => from_str::<uint>(val).unwrap(), Some(val) => from_str::<uint>(val.as_slice()).unwrap(),
None => 1, None => 1,
} }
} }
@ -1523,7 +1523,7 @@ mod sync_tests {
pub fn stress_factor() -> uint { pub fn stress_factor() -> uint {
match os::getenv("RUST_TEST_STRESS") { match os::getenv("RUST_TEST_STRESS") {
Some(val) => from_str::<uint>(val).unwrap(), Some(val) => from_str::<uint>(val.as_slice()).unwrap(),
None => 1, None => 1,
} }
} }


@ -27,9 +27,9 @@ general case.
The `format!` macro is intended to be familiar to those coming from C's The `format!` macro is intended to be familiar to those coming from C's
printf/fprintf functions or Python's `str.format` function. In its current printf/fprintf functions or Python's `str.format` function. In its current
revision, the `format!` macro returns a `~str` type which is the result of the revision, the `format!` macro returns a `StrBuf` type which is the result of
formatting. In the future it will also be able to pass in a stream to format the formatting. In the future it will also be able to pass in a stream to
arguments directly while performing minimal allocations. format arguments directly while performing minimal allocations.
Some examples of the `format!` extension are: Some examples of the `format!` extension are:
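The examples the paragraph refers to sit outside this hunk; as a stand-in, a minimal sketch of the revised return type described above (pre-1.0 Rust of this commit; the modern equivalent returns `String`):

    fn main() {
        let s = format!("{} has {} params", "foo", 2u);
        // `s` is a StrBuf in this revision; .as_slice() bridges to APIs that
        // still take &str.
        assert_eq!(s.as_slice(), "foo has 2 params");
    }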
@ -282,7 +282,7 @@ use std::io;
# #[allow(unused_must_use)] # #[allow(unused_must_use)]
# fn main() { # fn main() {
format_args!(fmt::format, "this returns {}", "~str"); format_args!(fmt::format, "this returns {}", "StrBuf");
let some_writer: &mut io::Writer = &mut io::stdout(); let some_writer: &mut io::Writer = &mut io::stdout();
format_args!(|args| { write!(some_writer, "{}", args) }, "print with a {}", "closure"); format_args!(|args| { write!(some_writer, "{}", args) }, "print with a {}", "closure");
@ -488,7 +488,7 @@ use io;
use option::None; use option::None;
use repr; use repr;
use result::{Ok, Err}; use result::{Ok, Err};
use str::{StrAllocating}; use str::{Str, StrAllocating};
use str; use str;
use strbuf::StrBuf; use strbuf::StrBuf;
use slice::Vector; use slice::Vector;
@ -545,10 +545,10 @@ pub trait Poly {
/// let s = format_args!(fmt::format, "Hello, {}!", "world"); /// let s = format_args!(fmt::format, "Hello, {}!", "world");
/// assert_eq!(s, "Hello, world!".to_owned()); /// assert_eq!(s, "Hello, world!".to_owned());
/// ``` /// ```
pub fn format(args: &Arguments) -> ~str { pub fn format(args: &Arguments) -> StrBuf{
let mut output = io::MemWriter::new(); let mut output = io::MemWriter::new();
let _ = write!(&mut output, "{}", args); let _ = write!(&mut output, "{}", args);
str::from_utf8(output.unwrap().as_slice()).unwrap().to_owned() str::from_utf8(output.unwrap().as_slice()).unwrap().into_strbuf()
} }
/// Temporary transition utility /// Temporary transition utility
@ -572,7 +572,7 @@ impl<T> Poly for T {
// this allocation of a new string // this allocation of a new string
_ => { _ => {
let s = repr::repr_to_str(self); let s = repr::repr_to_str(self);
f.pad(s) f.pad(s.as_slice())
} }
} }
} }

Some files were not shown because too many files have changed in this diff.