libstd: Remove ~str from all libstd modules except fmt and str.
parent e402e75f4e
commit 36195eb91f
204 changed files with 2102 additions and 1496 deletions
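
Most hunks below repeat one mechanical pattern: functions that returned the owned `~str` type now return `StrBuf`, owned strings are built with `format_strbuf!` or `.to_strbuf()`, and call sites that pass a formatted string to a `&str` parameter (diagnostics such as `self.err`, `sess.err`, `early_error`) borrow it with `.as_slice()`. A minimal sketch of that pattern, written in the pre-1.0 Rust dialect this commit targets (it will not compile on a modern toolchain); `label` and `report` are hypothetical names used for illustration, not items from the diff:

    // Illustrative sketch only -- pre-1.0 Rust as used at the time of this commit.
    // Before: owned strings were `~str`, e.g.
    //     fn label(n: uint) -> ~str { format!("value {}", n) }
    // After: owned return values become `StrBuf` ...
    fn label(n: uint) -> StrBuf {
        format_strbuf!("value {}", n)
    }

    // ... and call sites that need a borrowed `&str` add `.as_slice()`,
    // which is the fix repeated throughout the hunks below.
    fn report(err: |&str|, n: uint) {
        err(format!("value {}", n).as_slice());
    }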
@@ -96,7 +96,7 @@ pub fn parse_config(args: Vec<StrBuf> ) -> Config {
 let args_ = args.tail();
 if args.get(1).as_slice() == "-h" || args.get(1).as_slice() == "--help" {
 let message = format!("Usage: {} [OPTIONS] [TESTNAME...]", argv0);
-println!("{}", getopts::usage(message, groups.as_slice()));
+println!("{}", getopts::usage(message.as_slice(), groups.as_slice()));
 println!("");
 fail!()
 }
@@ -109,7 +109,7 @@ pub fn parse_config(args: Vec<StrBuf> ) -> Config {
 
 if matches.opt_present("h") || matches.opt_present("help") {
 let message = format!("Usage: {} [OPTIONS] [TESTNAME...]", argv0);
-println!("{}", getopts::usage(message, groups.as_slice()));
+println!("{}", getopts::usage(message.as_slice(), groups.as_slice()));
 println!("");
 fail!()
 }
@@ -157,9 +157,14 @@ fn iter_header(testfile: &Path, it: |&str| -> bool) -> bool {
 // module or function. This doesn't seem to be an optimization
 // with a warm page cache. Maybe with a cold one.
 let ln = ln.unwrap();
-if ln.starts_with("fn") || ln.starts_with("mod") {
+if ln.as_slice().starts_with("fn") ||
+ln.as_slice().starts_with("mod") {
 return true;
-} else { if !(it(ln.trim())) { return false; } }
+} else {
+if !(it(ln.as_slice().trim())) {
+return false;
+}
+}
 }
 return true;
 }
@@ -538,7 +538,8 @@ fn run_debuginfo_lldb_test(config: &Config, props: &TestProps, testfile: &Path)
 
 // Set breakpoints on every line that contains the string "#break"
 for line in breakpoint_lines.iter() {
-script_str.push_str(format!("breakpoint set --line {}\n", line));
+script_str.push_str(format!("breakpoint set --line {}\n",
+line).as_slice());
 }
 
 // Append the other commands
@@ -620,18 +621,18 @@ fn parse_debugger_commands(file_path: &Path, debugger_prefix: &str)
 for line in reader.lines() {
 match line {
 Ok(line) => {
-if line.contains("#break") {
+if line.as_slice().contains("#break") {
 breakpoint_lines.push(counter);
 }
 
 header::parse_name_value_directive(
-line,
+line.as_slice(),
 command_directive.to_strbuf()).map(|cmd| {
 commands.push(cmd)
 });
 
 header::parse_name_value_directive(
-line,
+line.as_slice(),
 check_directive.to_strbuf()).map(|cmd| {
 check_lines.push(cmd)
 });
@@ -274,12 +274,13 @@ impl<'a> Parser<'a> {
 self.cur.next();
 }
 Some((_, other)) => {
-self.err(
-format!("expected `{}` but found `{}`", c, other));
+self.err(format!("expected `{}` but found `{}`",
+c,
+other).as_slice());
 }
 None => {
-self.err(
-format!("expected `{}` but string was terminated", c));
+self.err(format!("expected `{}` but string was terminated",
+c).as_slice());
 }
 }
 }
@@ -307,7 +308,8 @@ impl<'a> Parser<'a> {
 Some((_, c @ '#')) | Some((_, c @ '{')) |
 Some((_, c @ '\\')) | Some((_, c @ '}')) => { c }
 Some((_, c)) => {
-self.err(format!("invalid escape character `{}`", c));
+self.err(format!("invalid escape character `{}`",
+c).as_slice());
 c
 }
 None => {
@@ -459,7 +461,7 @@ impl<'a> Parser<'a> {
 return None;
 }
 method => {
-self.err(format!("unknown method: `{}`", method));
+self.err(format!("unknown method: `{}`", method).as_slice());
 return None;
 }
 }
@@ -526,7 +528,7 @@ impl<'a> Parser<'a> {
 let word = self.word();
 if word != "offset" {
 self.err(format!("expected `offset`, found `{}`",
-word));
+word).as_slice());
 } else {
 self.must_consume(':');
 match self.integer() {
@@ -566,7 +568,7 @@ impl<'a> Parser<'a> {
 "many" => Keyword(Many),
 word => {
 self.err(format!("unexpected plural selector `{}`",
-word));
+word).as_slice());
 if word == "" {
 break
 } else {
@@ -46,7 +46,7 @@ macro_rules! rtassert (
 
 macro_rules! rtabort (
 ($($arg:tt)*) => ( {
-::macros::abort(format!($($arg)*));
+::macros::abort(format!($($arg)*).as_slice());
 } )
 )
 
@@ -147,7 +147,10 @@ pub fn expand_syntax_ext(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
 Some((err_pos, err_str)) => {
 let pos = expr.span.lo + syntax::codemap::Pos::from_uint(err_pos + 1);
 let span = syntax::codemap::mk_sp(pos,pos);
-cx.span_err(span, format!("invalid hex float literal in hexfloat!: {}", err_str));
+cx.span_err(span,
+format!("invalid hex float literal in hexfloat!: \
+{}",
+err_str).as_slice());
 return base::DummyResult::expr(sp);
 }
 _ => ()
@@ -302,8 +302,10 @@ pub fn mod_enabled(level: u32, module: &str) -> bool {
 enabled(level, module, unsafe { (*DIRECTIVES).iter() })
 }
 
-fn enabled(level: u32, module: &str,
-iter: slice::Items<directive::LogDirective>) -> bool {
+fn enabled(level: u32,
+module: &str,
+iter: slice::Items<directive::LogDirective>)
+-> bool {
 // Search for the longest match, the vector is assumed to be pre-sorted.
 for directive in iter.rev() {
 match directive.name {
@@ -322,7 +324,7 @@ fn enabled(level: u32, module: &str,
 /// `Once` primitive (and this function is called from that primitive).
 fn init() {
 let mut directives = match os::getenv("RUST_LOG") {
-Some(spec) => directive::parse_logging_spec(spec),
+Some(spec) => directive::parse_logging_spec(spec.as_slice()),
 None => Vec::new(),
 };
 
@@ -104,9 +104,10 @@ fn get_error(_: c_int) -> IoError {
 #[cfg(not(windows))]
 fn get_error(s: c_int) -> IoError {
 use std::io;
-use std::str::raw::from_c_str;
 
-let err_str = unsafe { from_c_str(gai_strerror(s)) };
+let err_str = unsafe {
+CString::new(gai_strerror(s), false).as_str().unwrap().to_strbuf()
+};
 IoError {
 kind: io::OtherIoError,
 desc: "unable to resolve host",
@@ -604,7 +604,7 @@ impl_to_biguint!(u32, FromPrimitive::from_u32)
 impl_to_biguint!(u64, FromPrimitive::from_u64)
 
 impl ToStrRadix for BigUint {
-fn to_str_radix(&self, radix: uint) -> ~str {
+fn to_str_radix(&self, radix: uint) -> StrBuf {
 assert!(1 < radix && radix <= 16);
 let (base, max_len) = get_radix_base(radix);
 if base == BigDigit::base {
@@ -627,15 +627,17 @@ impl ToStrRadix for BigUint {
 return result;
 }
 
-fn fill_concat(v: &[BigDigit], radix: uint, l: uint) -> ~str {
-if v.is_empty() { return "0".to_owned() }
+fn fill_concat(v: &[BigDigit], radix: uint, l: uint) -> StrBuf {
+if v.is_empty() {
+return "0".to_strbuf()
+}
 let mut s = StrBuf::with_capacity(v.len() * l);
 for n in v.iter().rev() {
 let ss = (*n as uint).to_str_radix(radix);
 s.push_str("0".repeat(l - ss.len()));
-s.push_str(ss);
+s.push_str(ss.as_slice());
 }
-s.as_slice().trim_left_chars('0').to_owned()
+s.as_slice().trim_left_chars('0').to_strbuf()
 }
 }
 }
@@ -1209,11 +1211,11 @@ impl_to_bigint!(u64, FromPrimitive::from_u64)
 
 impl ToStrRadix for BigInt {
 #[inline]
-fn to_str_radix(&self, radix: uint) -> ~str {
+fn to_str_radix(&self, radix: uint) -> StrBuf {
 match self.sign {
 Plus => self.data.to_str_radix(radix),
-Zero => "0".to_owned(),
-Minus => "-".to_owned() + self.data.to_str_radix(radix)
+Zero => "0".to_strbuf(),
+Minus => format_strbuf!("-{}", self.data.to_str_radix(radix)),
 }
 }
 }
@@ -175,11 +175,15 @@ impl<T: fmt::Show + Num + Ord> fmt::Show for Complex<T> {
 }
 
 impl<T: ToStrRadix + Num + Ord> ToStrRadix for Complex<T> {
-fn to_str_radix(&self, radix: uint) -> ~str {
+fn to_str_radix(&self, radix: uint) -> StrBuf {
 if self.im < Zero::zero() {
-format!("{}-{}i", self.re.to_str_radix(radix), (-self.im).to_str_radix(radix))
+format_strbuf!("{}-{}i",
+self.re.to_str_radix(radix),
+(-self.im).to_str_radix(radix))
 } else {
-format!("{}+{}i", self.re.to_str_radix(radix), self.im.to_str_radix(radix))
+format_strbuf!("{}+{}i",
+self.re.to_str_radix(radix),
+self.im.to_str_radix(radix))
 }
 }
 }
@@ -281,8 +281,10 @@ impl<T: fmt::Show> fmt::Show for Ratio<T> {
 }
 impl<T: ToStrRadix> ToStrRadix for Ratio<T> {
 /// Renders as `numer/denom` where the numbers are in base `radix`.
-fn to_str_radix(&self, radix: uint) -> ~str {
-format!("{}/{}", self.numer.to_str_radix(radix), self.denom.to_str_radix(radix))
+fn to_str_radix(&self, radix: uint) -> StrBuf {
+format_strbuf!("{}/{}",
+self.numer.to_str_radix(radix),
+self.denom.to_str_radix(radix))
 }
 }
 
@ -278,7 +278,10 @@ impl<'a> Parser<'a> {
|
|||
fn noteof(&mut self, expected: &str) -> Result<(), Error> {
|
||||
match self.next_char() {
|
||||
true => Ok(()),
|
||||
false => self.err(format!("Expected {} but got EOF.", expected)),
|
||||
false => {
|
||||
self.err(format!("Expected {} but got EOF.",
|
||||
expected).as_slice())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -286,8 +289,11 @@ impl<'a> Parser<'a> {
|
|||
match self.next_char() {
|
||||
true if self.cur() == expected => Ok(()),
|
||||
true => self.err(format!("Expected '{}' but got '{}'.",
|
||||
expected, self.cur())),
|
||||
false => self.err(format!("Expected '{}' but got EOF.", expected)),
|
||||
expected, self.cur()).as_slice()),
|
||||
false => {
|
||||
self.err(format!("Expected '{}' but got EOF.",
|
||||
expected).as_slice())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -429,8 +435,10 @@ impl<'a> Parser<'a> {
|
|||
try!(self.noteof("not a ']'"))
|
||||
let c2 = self.cur();
|
||||
if c2 < c {
|
||||
return self.err(format!(
|
||||
"Invalid character class range '{}-{}'", c, c2))
|
||||
return self.err(format!("Invalid character class \
|
||||
range '{}-{}'",
|
||||
c,
|
||||
c2).as_slice())
|
||||
}
|
||||
ranges.push((c, self.cur()))
|
||||
} else {
|
||||
|
@ -491,9 +499,12 @@ impl<'a> Parser<'a> {
|
|||
let closer =
|
||||
match self.pos('}') {
|
||||
Some(i) => i,
|
||||
None => return self.err(format!(
|
||||
"No closing brace for counted repetition starting at \
|
||||
position {}.", start)),
|
||||
None => {
|
||||
return self.err(format!("No closing brace for counted \
|
||||
repetition starting at position \
|
||||
{}.",
|
||||
start).as_slice())
|
||||
}
|
||||
};
|
||||
self.chari = closer;
|
||||
let greed = try!(self.get_next_greedy());
|
||||
|
@ -525,19 +536,19 @@ impl<'a> Parser<'a> {
|
|||
if min > MAX_REPEAT {
|
||||
return self.err(format!(
|
||||
"{} exceeds maximum allowed repetitions ({})",
|
||||
min, MAX_REPEAT));
|
||||
min, MAX_REPEAT).as_slice());
|
||||
}
|
||||
if max.is_some() {
|
||||
let m = max.unwrap();
|
||||
if m > MAX_REPEAT {
|
||||
return self.err(format!(
|
||||
"{} exceeds maximum allowed repetitions ({})",
|
||||
m, MAX_REPEAT));
|
||||
m, MAX_REPEAT).as_slice());
|
||||
}
|
||||
if m < min {
|
||||
return self.err(format!(
|
||||
"Max repetitions ({}) cannot be smaller than min \
|
||||
repetitions ({}).", m, min));
|
||||
repetitions ({}).", m, min).as_slice());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -600,7 +611,10 @@ impl<'a> Parser<'a> {
|
|||
if c.is_uppercase() { flags |= FLAG_NEGATED }
|
||||
Ok(Class(ranges, flags))
|
||||
}
|
||||
_ => self.err(format!("Invalid escape sequence '\\\\{}'", c)),
|
||||
_ => {
|
||||
self.err(format!("Invalid escape sequence '\\\\{}'",
|
||||
c).as_slice())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -619,7 +633,7 @@ impl<'a> Parser<'a> {
|
|||
Some(i) => i,
|
||||
None => return self.err(format!(
|
||||
"Missing '\\}' for unclosed '\\{' at position {}",
|
||||
self.chari)),
|
||||
self.chari).as_slice()),
|
||||
};
|
||||
if closer - self.chari + 1 == 0 {
|
||||
return self.err("No Unicode class name found.")
|
||||
|
@ -634,8 +648,10 @@ impl<'a> Parser<'a> {
|
|||
self.chari += 1;
|
||||
}
|
||||
match find_class(UNICODE_CLASSES, name.as_slice()) {
|
||||
None => return self.err(format!(
|
||||
"Could not find Unicode class '{}'", name)),
|
||||
None => {
|
||||
return self.err(format!("Could not find Unicode class '{}'",
|
||||
name).as_slice())
|
||||
}
|
||||
Some(ranges) => {
|
||||
Ok(Class(ranges, negated | (self.flags & FLAG_NOCASE)))
|
||||
}
|
||||
|
@ -659,8 +675,10 @@ impl<'a> Parser<'a> {
|
|||
let s = self.slice(start, end);
|
||||
match num::from_str_radix::<u32>(s.as_slice(), 8) {
|
||||
Some(n) => Ok(Literal(try!(self.char_from_u32(n)), FLAG_EMPTY)),
|
||||
None => self.err(format!(
|
||||
"Could not parse '{}' as octal number.", s)),
|
||||
None => {
|
||||
self.err(format!("Could not parse '{}' as octal number.",
|
||||
s).as_slice())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -674,8 +692,11 @@ impl<'a> Parser<'a> {
|
|||
let start = self.chari + 2;
|
||||
let closer =
|
||||
match self.pos('}') {
|
||||
None => return self.err(format!(
|
||||
"Missing '\\}' for unclosed '\\{' at position {}", start)),
|
||||
None => {
|
||||
return self.err(format!("Missing '\\}' for unclosed \
|
||||
'\\{' at position {}",
|
||||
start).as_slice())
|
||||
}
|
||||
Some(i) => i,
|
||||
};
|
||||
self.chari = closer;
|
||||
|
@ -689,7 +710,8 @@ impl<'a> Parser<'a> {
|
|||
fn parse_hex_two(&mut self) -> Result<Ast, Error> {
|
||||
let (start, end) = (self.chari, self.chari + 2);
|
||||
let bad = self.slice(start - 2, self.chars.len());
|
||||
try!(self.noteof(format!("Invalid hex escape sequence '{}'", bad)))
|
||||
try!(self.noteof(format!("Invalid hex escape sequence '{}'",
|
||||
bad).as_slice()))
|
||||
self.parse_hex_digits(self.slice(start, end).as_slice())
|
||||
}
|
||||
|
||||
|
@ -697,8 +719,10 @@ impl<'a> Parser<'a> {
|
|||
fn parse_hex_digits(&self, s: &str) -> Result<Ast, Error> {
|
||||
match num::from_str_radix::<u32>(s, 16) {
|
||||
Some(n) => Ok(Literal(try!(self.char_from_u32(n)), FLAG_EMPTY)),
|
||||
None => self.err(format!(
|
||||
"Could not parse '{}' as hex number.", s)),
|
||||
None => {
|
||||
self.err(format!("Could not parse '{}' as hex number.",
|
||||
s).as_slice())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -722,7 +746,8 @@ impl<'a> Parser<'a> {
|
|||
"Capture names can only have underscores, letters and digits.")
|
||||
}
|
||||
if self.names.contains(&name) {
|
||||
return self.err(format!("Duplicate capture group name '{}'.", name))
|
||||
return self.err(format!("Duplicate capture group name '{}'.",
|
||||
name).as_slice())
|
||||
}
|
||||
self.names.push(name.clone());
|
||||
self.chari = closer;
|
||||
|
@ -754,7 +779,7 @@ impl<'a> Parser<'a> {
|
|||
if sign < 0 {
|
||||
return self.err(format!(
|
||||
"Cannot negate flags twice in '{}'.",
|
||||
self.slice(start, self.chari + 1)))
|
||||
self.slice(start, self.chari + 1)).as_slice())
|
||||
}
|
||||
sign = -1;
|
||||
saw_flag = false;
|
||||
|
@ -765,7 +790,7 @@ impl<'a> Parser<'a> {
|
|||
if !saw_flag {
|
||||
return self.err(format!(
|
||||
"A valid flag does not follow negation in '{}'",
|
||||
self.slice(start, self.chari + 1)))
|
||||
self.slice(start, self.chari + 1)).as_slice())
|
||||
}
|
||||
flags = flags ^ flags;
|
||||
}
|
||||
|
@ -777,7 +802,7 @@ impl<'a> Parser<'a> {
|
|||
return Ok(())
|
||||
}
|
||||
_ => return self.err(format!(
|
||||
"Unrecognized flag '{}'.", self.cur())),
|
||||
"Unrecognized flag '{}'.", self.cur()).as_slice()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -871,16 +896,21 @@ impl<'a> Parser<'a> {
|
|||
fn parse_uint(&self, s: &str) -> Result<uint, Error> {
|
||||
match from_str::<uint>(s) {
|
||||
Some(i) => Ok(i),
|
||||
None => self.err(format!(
|
||||
"Expected an unsigned integer but got '{}'.", s)),
|
||||
None => {
|
||||
self.err(format!("Expected an unsigned integer but got '{}'.",
|
||||
s).as_slice())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn char_from_u32(&self, n: u32) -> Result<char, Error> {
|
||||
match char::from_u32(n) {
|
||||
Some(c) => Ok(c),
|
||||
None => self.err(format!(
|
||||
"Could not decode '{}' to unicode character.", n)),
|
||||
None => {
|
||||
self.err(format!("Could not decode '{}' to unicode \
|
||||
character.",
|
||||
n).as_slice())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -85,7 +85,7 @@ fn native(cx: &mut ExtCtxt, sp: codemap::Span, tts: &[ast::TokenTree])
|
|||
let re = match Regex::new(regex.to_owned()) {
|
||||
Ok(re) => re,
|
||||
Err(err) => {
|
||||
cx.span_err(sp, err.to_str());
|
||||
cx.span_err(sp, err.to_str().as_slice());
|
||||
return DummyResult::any(sp)
|
||||
}
|
||||
};
|
||||
|
@ -612,7 +612,7 @@ fn parse(cx: &mut ExtCtxt, tts: &[ast::TokenTree]) -> Option<StrBuf> {
|
|||
_ => {
|
||||
cx.span_err(entry.span, format!(
|
||||
"expected string literal but got `{}`",
|
||||
pprust::lit_to_str(lit)));
|
||||
pprust::lit_to_str(lit)).as_slice());
|
||||
return None
|
||||
}
|
||||
}
|
||||
|
@ -620,7 +620,7 @@ fn parse(cx: &mut ExtCtxt, tts: &[ast::TokenTree]) -> Option<StrBuf> {
|
|||
_ => {
|
||||
cx.span_err(entry.span, format!(
|
||||
"expected string literal but got `{}`",
|
||||
pprust::expr_to_str(entry)));
|
||||
pprust::expr_to_str(entry)).as_slice());
|
||||
return None
|
||||
}
|
||||
};
|
||||
|
|
|
@ -56,17 +56,24 @@ fn run_ar(sess: &Session, args: &str, cwd: Option<&Path>,
|
|||
Ok(prog) => {
|
||||
let o = prog.wait_with_output().unwrap();
|
||||
if !o.status.success() {
|
||||
sess.err(format!("{} failed with: {}", cmd, o.status));
|
||||
sess.err(format!("{} failed with: {}",
|
||||
cmd,
|
||||
o.status).as_slice());
|
||||
sess.note(format!("stdout ---\n{}",
|
||||
str::from_utf8(o.output.as_slice()).unwrap()));
|
||||
str::from_utf8(o.output
|
||||
.as_slice()).unwrap())
|
||||
.as_slice());
|
||||
sess.note(format!("stderr ---\n{}",
|
||||
str::from_utf8(o.error.as_slice()).unwrap()));
|
||||
str::from_utf8(o.error
|
||||
.as_slice()).unwrap())
|
||||
.as_slice());
|
||||
sess.abort_if_errors();
|
||||
}
|
||||
o
|
||||
},
|
||||
Err(e) => {
|
||||
sess.err(format!("could not exec `{}`: {}", ar.as_slice(), e));
|
||||
sess.err(format!("could not exec `{}`: {}", ar.as_slice(),
|
||||
e).as_slice());
|
||||
sess.abort_if_errors();
|
||||
fail!("rustc::back::archive::run_ar() should not reach this point");
|
||||
}
|
||||
|
@ -158,7 +165,7 @@ impl<'a> Archive<'a> {
|
|||
if skip.iter().any(|s| *s == filename) { continue }
|
||||
if filename.contains(".SYMDEF") { continue }
|
||||
|
||||
let filename = format!("r-{}-{}", name, filename);
|
||||
let filename = format_strbuf!("r-{}-{}", name, filename);
|
||||
let new_filename = file.with_filename(filename);
|
||||
try!(fs::rename(file, &new_filename));
|
||||
inputs.push(new_filename);
|
||||
|
@ -178,8 +185,8 @@ impl<'a> Archive<'a> {
|
|||
};
|
||||
// On Windows, static libraries sometimes show up as libfoo.a and other
|
||||
// times show up as foo.lib
|
||||
let oslibname = format!("{}{}.{}", osprefix, name, osext);
|
||||
let unixlibname = format!("lib{}.a", name);
|
||||
let oslibname = format_strbuf!("{}{}.{}", osprefix, name, osext);
|
||||
let unixlibname = format_strbuf!("lib{}.a", name);
|
||||
|
||||
let mut rustpath = filesearch::rust_path();
|
||||
rustpath.push(self.sess.target_filesearch().get_lib_path());
|
||||
|
@ -194,7 +201,8 @@ impl<'a> Archive<'a> {
|
|||
}
|
||||
}
|
||||
self.sess.fatal(format!("could not find native static library `{}`, \
|
||||
perhaps an -L flag is missing?", name));
|
||||
perhaps an -L flag is missing?",
|
||||
name).as_slice());
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -167,7 +167,9 @@ pub mod write {
|
|||
"dynamic-no-pic" => lib::llvm::RelocDynamicNoPic,
|
||||
_ => {
|
||||
sess.err(format!("{} is not a valid relocation mode",
|
||||
sess.opts.cg.relocation_model));
|
||||
sess.opts
|
||||
.cg
|
||||
.relocation_model).as_slice());
|
||||
sess.abort_if_errors();
|
||||
return;
|
||||
}
|
||||
|
@ -219,7 +221,8 @@ pub mod write {
|
|||
for pass in sess.opts.cg.passes.iter() {
|
||||
pass.as_slice().with_c_str(|s| {
|
||||
if !llvm::LLVMRustAddPass(mpm, s) {
|
||||
sess.warn(format!("unknown pass {}, ignoring", *pass));
|
||||
sess.warn(format!("unknown pass {}, ignoring",
|
||||
*pass).as_slice());
|
||||
}
|
||||
})
|
||||
}
|
||||
|
@ -360,8 +363,10 @@ pub mod write {
|
|||
match cmd.output() {
|
||||
Ok(prog) => {
|
||||
if !prog.status.success() {
|
||||
sess.err(format!("linking with `{}` failed: {}", pname, prog.status));
|
||||
sess.note(format!("{}", &cmd));
|
||||
sess.err(format!("linking with `{}` failed: {}",
|
||||
pname,
|
||||
prog.status).as_slice());
|
||||
sess.note(format!("{}", &cmd).as_slice());
|
||||
let mut note = prog.error.clone();
|
||||
note.push_all(prog.output.as_slice());
|
||||
sess.note(str::from_utf8(note.as_slice()).unwrap().to_owned());
|
||||
|
@ -369,7 +374,9 @@ pub mod write {
|
|||
}
|
||||
},
|
||||
Err(e) => {
|
||||
sess.err(format!("could not exec the linker `{}`: {}", pname, e));
|
||||
sess.err(format!("could not exec the linker `{}`: {}",
|
||||
pname,
|
||||
e).as_slice());
|
||||
sess.abort_if_errors();
|
||||
}
|
||||
}
|
||||
|
@ -666,7 +673,7 @@ pub fn mangle<PI: Iterator<PathElem>>(mut path: PI,
|
|||
|
||||
fn push(n: &mut StrBuf, s: &str) {
|
||||
let sani = sanitize(s);
|
||||
n.push_str(format!("{}{}", sani.len(), sani));
|
||||
n.push_str(format!("{}{}", sani.len(), sani).as_slice());
|
||||
}
|
||||
|
||||
// First, connect each component with <len, name> pairs.
|
||||
|
@ -774,7 +781,9 @@ fn remove(sess: &Session, path: &Path) {
|
|||
match fs::unlink(path) {
|
||||
Ok(..) => {}
|
||||
Err(e) => {
|
||||
sess.err(format!("failed to remove {}: {}", path.display(), e));
|
||||
sess.err(format!("failed to remove {}: {}",
|
||||
path.display(),
|
||||
e).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -815,7 +824,7 @@ pub fn filename_for_input(sess: &Session, crate_type: config::CrateType,
|
|||
let libname = output_lib_filename(id);
|
||||
match crate_type {
|
||||
config::CrateTypeRlib => {
|
||||
out_filename.with_filename(format!("lib{}.rlib", libname))
|
||||
out_filename.with_filename(format_strbuf!("lib{}.rlib", libname))
|
||||
}
|
||||
config::CrateTypeDylib => {
|
||||
let (prefix, suffix) = match sess.targ_cfg.os {
|
||||
|
@ -825,10 +834,13 @@ pub fn filename_for_input(sess: &Session, crate_type: config::CrateType,
|
|||
abi::OsAndroid => (loader::ANDROID_DLL_PREFIX, loader::ANDROID_DLL_SUFFIX),
|
||||
abi::OsFreebsd => (loader::FREEBSD_DLL_PREFIX, loader::FREEBSD_DLL_SUFFIX),
|
||||
};
|
||||
out_filename.with_filename(format!("{}{}{}", prefix, libname, suffix))
|
||||
out_filename.with_filename(format_strbuf!("{}{}{}",
|
||||
prefix,
|
||||
libname,
|
||||
suffix))
|
||||
}
|
||||
config::CrateTypeStaticlib => {
|
||||
out_filename.with_filename(format!("lib{}.a", libname))
|
||||
out_filename.with_filename(format_strbuf!("lib{}.a", libname))
|
||||
}
|
||||
config::CrateTypeExecutable => out_filename.clone(),
|
||||
}
|
||||
|
@ -855,12 +867,14 @@ fn link_binary_output(sess: &Session,
|
|||
let obj_is_writeable = is_writeable(&obj_filename);
|
||||
let out_is_writeable = is_writeable(&out_filename);
|
||||
if !out_is_writeable {
|
||||
sess.fatal(format!("output file {} is not writeable -- check its permissions.",
|
||||
out_filename.display()));
|
||||
sess.fatal(format!("output file {} is not writeable -- check its \
|
||||
permissions.",
|
||||
out_filename.display()).as_slice());
|
||||
}
|
||||
else if !obj_is_writeable {
|
||||
sess.fatal(format!("object file {} is not writeable -- check its permissions.",
|
||||
obj_filename.display()));
|
||||
sess.fatal(format!("object file {} is not writeable -- check its \
|
||||
permissions.",
|
||||
obj_filename.display()).as_slice());
|
||||
}
|
||||
|
||||
match crate_type {
|
||||
|
@ -936,7 +950,8 @@ fn link_rlib<'a>(sess: &'a Session,
|
|||
Ok(..) => {}
|
||||
Err(e) => {
|
||||
sess.err(format!("failed to write {}: {}",
|
||||
metadata.display(), e));
|
||||
metadata.display(),
|
||||
e).as_slice());
|
||||
sess.abort_if_errors();
|
||||
}
|
||||
}
|
||||
|
@ -956,7 +971,9 @@ fn link_rlib<'a>(sess: &'a Session,
|
|||
}) {
|
||||
Ok(()) => {}
|
||||
Err(e) => {
|
||||
sess.err(format!("failed to write compressed bytecode: {}", e));
|
||||
sess.err(format!("failed to write compressed bytecode: \
|
||||
{}",
|
||||
e).as_slice());
|
||||
sess.abort_if_errors()
|
||||
}
|
||||
}
|
||||
|
@ -1003,7 +1020,8 @@ fn link_staticlib(sess: &Session, obj_filename: &Path, out_filename: &Path) {
|
|||
let name = sess.cstore.get_crate_data(cnum).name.clone();
|
||||
let p = match *path {
|
||||
Some(ref p) => p.clone(), None => {
|
||||
sess.err(format!("could not find rlib for: `{}`", name));
|
||||
sess.err(format!("could not find rlib for: `{}`",
|
||||
name).as_slice());
|
||||
continue
|
||||
}
|
||||
};
|
||||
|
@ -1015,7 +1033,9 @@ fn link_staticlib(sess: &Session, obj_filename: &Path, out_filename: &Path) {
|
|||
cstore::NativeUnknown => "library",
|
||||
cstore::NativeFramework => "framework",
|
||||
};
|
||||
sess.warn(format!("unlinked native {}: {}", name, *lib));
|
||||
sess.warn(format!("unlinked native {}: {}",
|
||||
name,
|
||||
*lib).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1049,8 +1069,10 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool,
|
|||
match prog {
|
||||
Ok(prog) => {
|
||||
if !prog.status.success() {
|
||||
sess.err(format!("linking with `{}` failed: {}", pname, prog.status));
|
||||
sess.note(format!("{}", &cmd));
|
||||
sess.err(format!("linking with `{}` failed: {}",
|
||||
pname,
|
||||
prog.status).as_slice());
|
||||
sess.note(format!("{}", &cmd).as_slice());
|
||||
let mut output = prog.error.clone();
|
||||
output.push_all(prog.output.as_slice());
|
||||
sess.note(str::from_utf8(output.as_slice()).unwrap().to_owned());
|
||||
|
@ -1058,7 +1080,9 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool,
|
|||
}
|
||||
},
|
||||
Err(e) => {
|
||||
sess.err(format!("could not exec the linker `{}`: {}", pname, e));
|
||||
sess.err(format!("could not exec the linker `{}`: {}",
|
||||
pname,
|
||||
e).as_slice());
|
||||
sess.abort_if_errors();
|
||||
}
|
||||
}
|
||||
|
@ -1070,7 +1094,7 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool,
|
|||
match Command::new("dsymutil").arg(out_filename).status() {
|
||||
Ok(..) => {}
|
||||
Err(e) => {
|
||||
sess.err(format!("failed to run dsymutil: {}", e));
|
||||
sess.err(format!("failed to run dsymutil: {}", e).as_slice());
|
||||
sess.abort_if_errors();
|
||||
}
|
||||
}
|
||||
|
@ -1409,7 +1433,8 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session,
|
|||
// against the archive.
|
||||
if sess.lto() {
|
||||
let name = sess.cstore.get_crate_data(cnum).name.clone();
|
||||
time(sess.time_passes(), format!("altering {}.rlib", name),
|
||||
time(sess.time_passes(),
|
||||
format!("altering {}.rlib", name).as_slice(),
|
||||
(), |()| {
|
||||
let dst = tmpdir.join(cratepath.filename().unwrap());
|
||||
match fs::copy(&cratepath, &dst) {
|
||||
|
@ -1418,12 +1443,12 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session,
|
|||
sess.err(format!("failed to copy {} to {}: {}",
|
||||
cratepath.display(),
|
||||
dst.display(),
|
||||
e));
|
||||
e).as_slice());
|
||||
sess.abort_if_errors();
|
||||
}
|
||||
}
|
||||
let mut archive = Archive::open(sess, dst.clone());
|
||||
archive.remove_file(format!("{}.o", name));
|
||||
archive.remove_file(format!("{}.o", name).as_slice());
|
||||
let files = archive.files();
|
||||
if files.iter().any(|s| s.as_slice().ends_with(".o")) {
|
||||
cmd.arg(dst);
|
||||
|
|
|
@ -47,29 +47,46 @@ pub fn run(sess: &session::Session, llmod: ModuleRef,
|
|||
let path = match path {
|
||||
Some(p) => p,
|
||||
None => {
|
||||
sess.fatal(format!("could not find rlib for: `{}`", name));
|
||||
sess.fatal(format!("could not find rlib for: `{}`",
|
||||
name).as_slice());
|
||||
}
|
||||
};
|
||||
|
||||
let archive = ArchiveRO::open(&path).expect("wanted an rlib");
|
||||
debug!("reading {}", name);
|
||||
let bc = time(sess.time_passes(), format!("read {}.bc.deflate", name), (), |_|
|
||||
archive.read(format!("{}.bc.deflate", name)));
|
||||
let bc = time(sess.time_passes(),
|
||||
format!("read {}.bc.deflate", name).as_slice(),
|
||||
(),
|
||||
|_| {
|
||||
archive.read(format!("{}.bc.deflate",
|
||||
name).as_slice())
|
||||
});
|
||||
let bc = bc.expect("missing compressed bytecode in archive!");
|
||||
let bc = time(sess.time_passes(), format!("inflate {}.bc", name), (), |_|
|
||||
let bc = time(sess.time_passes(),
|
||||
format!("inflate {}.bc", name).as_slice(),
|
||||
(),
|
||||
|_| {
|
||||
match flate::inflate_bytes(bc) {
|
||||
Some(bc) => bc,
|
||||
None => sess.fatal(format!("failed to decompress bc of `{}`", name))
|
||||
None => {
|
||||
sess.fatal(format!("failed to decompress \
|
||||
bc of `{}`",
|
||||
name).as_slice())
|
||||
}
|
||||
}
|
||||
});
|
||||
let ptr = bc.as_slice().as_ptr();
|
||||
debug!("linking {}", name);
|
||||
time(sess.time_passes(), format!("ll link {}", name), (), |()| unsafe {
|
||||
time(sess.time_passes(),
|
||||
format!("ll link {}", name).as_slice(),
|
||||
(),
|
||||
|()| unsafe {
|
||||
if !llvm::LLVMRustLinkInExternalBitcode(llmod,
|
||||
ptr as *libc::c_char,
|
||||
bc.len() as libc::size_t) {
|
||||
link::llvm_err(sess,
|
||||
(format_strbuf!("failed to load bc of `{}`",
|
||||
name)));
|
||||
format_strbuf!("failed to load bc of `{}`",
|
||||
name.as_slice()));
|
||||
}
|
||||
});
|
||||
}
|
||||
|
|
|
@ -328,18 +328,23 @@ pub fn build_codegen_options(matches: &getopts::Matches) -> CodegenOptions
|
|||
if option_to_lookup.as_slice() != candidate { continue }
|
||||
if !setter(&mut cg, value) {
|
||||
match value {
|
||||
Some(..) => early_error(format!("codegen option `{}` takes \
|
||||
no value", key)),
|
||||
None => early_error(format!("codegen option `{0}` requires \
|
||||
Some(..) => {
|
||||
early_error(format!("codegen option `{}` takes no \
|
||||
value", key).as_slice())
|
||||
}
|
||||
None => {
|
||||
early_error(format!("codegen option `{0}` requires \
|
||||
a value (-C {0}=<value>)",
|
||||
key))
|
||||
key).as_slice())
|
||||
}
|
||||
}
|
||||
}
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
if !found {
|
||||
early_error(format!("unknown codegen option: `{}`", key));
|
||||
early_error(format!("unknown codegen option: `{}`",
|
||||
key).as_slice());
|
||||
}
|
||||
}
|
||||
return cg;
|
||||
|
@ -570,7 +575,10 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
|
|||
"staticlib" => CrateTypeStaticlib,
|
||||
"dylib" => CrateTypeDylib,
|
||||
"bin" => CrateTypeExecutable,
|
||||
_ => early_error(format!("unknown crate type: `{}`", part))
|
||||
_ => {
|
||||
early_error(format!("unknown crate type: `{}`",
|
||||
part).as_slice())
|
||||
}
|
||||
};
|
||||
crate_types.push(new_part)
|
||||
}
|
||||
|
@ -589,14 +597,17 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
|
|||
|
||||
let level_short = level_name.slice_chars(0, 1);
|
||||
let level_short = level_short.to_ascii().to_upper().into_str();
|
||||
let flags = matches.opt_strs(level_short).move_iter().collect::<Vec<_>>().append(
|
||||
matches.opt_strs(level_name).as_slice());
|
||||
let flags = matches.opt_strs(level_short.as_slice())
|
||||
.move_iter()
|
||||
.collect::<Vec<_>>()
|
||||
.append(matches.opt_strs(level_name).as_slice());
|
||||
for lint_name in flags.iter() {
|
||||
let lint_name = lint_name.replace("-", "_");
|
||||
let lint_name = lint_name.replace("-", "_").into_strbuf();
|
||||
match lint_dict.find_equiv(&lint_name) {
|
||||
None => {
|
||||
early_error(format!("unknown {} flag: {}",
|
||||
level_name, lint_name));
|
||||
level_name,
|
||||
lint_name).as_slice());
|
||||
}
|
||||
Some(lint) => {
|
||||
lint_opts.push((lint.lint, *level));
|
||||
|
@ -618,7 +629,8 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
|
|||
}
|
||||
}
|
||||
if this_bit == 0 {
|
||||
early_error(format!("unknown debug flag: {}", *debug_flag))
|
||||
early_error(format!("unknown debug flag: {}",
|
||||
*debug_flag).as_slice())
|
||||
}
|
||||
debugging_opts |= this_bit;
|
||||
}
|
||||
|
@ -638,7 +650,10 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
|
|||
"bc" => link::OutputTypeBitcode,
|
||||
"obj" => link::OutputTypeObject,
|
||||
"link" => link::OutputTypeExe,
|
||||
_ => early_error(format!("unknown emission type: `{}`", part))
|
||||
_ => {
|
||||
early_error(format!("unknown emission type: `{}`",
|
||||
part).as_slice())
|
||||
}
|
||||
};
|
||||
output_types.push(output_type)
|
||||
}
|
||||
|
@ -671,8 +686,9 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
|
|||
Some("2") => Default,
|
||||
Some("3") => Aggressive,
|
||||
Some(arg) => {
|
||||
early_error(format!("optimization level needs to be between 0-3 \
|
||||
(instead was `{}`)", arg));
|
||||
early_error(format!("optimization level needs to be \
|
||||
between 0-3 (instead was `{}`)",
|
||||
arg).as_slice());
|
||||
}
|
||||
}
|
||||
} else {
|
||||
|
@ -692,8 +708,9 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
|
|||
None |
|
||||
Some("2") => FullDebugInfo,
|
||||
Some(arg) => {
|
||||
early_error(format!("optimization level needs to be between 0-3 \
|
||||
(instead was `{}`)", arg));
|
||||
early_error(format!("optimization level needs to be between \
|
||||
0-3 (instead was `{}`)",
|
||||
arg).as_slice());
|
||||
}
|
||||
}
|
||||
} else {
|
||||
|
@ -725,9 +742,11 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
|
|||
|
||||
None => Auto,
|
||||
|
||||
Some(arg) => early_error(format!(
|
||||
"argument for --color must be auto, always or never (instead was `{}`)",
|
||||
arg))
|
||||
Some(arg) => {
|
||||
early_error(format!("argument for --color must be auto, always \
|
||||
or never (instead was `{}`)",
|
||||
arg).as_slice())
|
||||
}
|
||||
};
|
||||
|
||||
Options {
|
||||
|
|
|
@ -511,7 +511,7 @@ fn write_out_deps(sess: &Session,
|
|||
Ok(()) => {}
|
||||
Err(e) => {
|
||||
sess.fatal(format!("error writing dependencies to `{}`: {}",
|
||||
deps_filename.display(), e));
|
||||
deps_filename.display(), e).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -705,7 +705,8 @@ fn print_flowgraph<W:io::Writer>(analysis: CrateAnalysis,
|
|||
let m = "graphviz::render failed";
|
||||
io::IoError {
|
||||
detail: Some(match orig_detail {
|
||||
None => m.into_owned(), Some(d) => format!("{}: {}", m, d)
|
||||
None => m.into_strbuf(),
|
||||
Some(d) => format_strbuf!("{}: {}", m, d)
|
||||
}),
|
||||
..ioerr
|
||||
}
|
||||
|
|
|
@ -120,7 +120,8 @@ Additional help:
|
|||
-C help Print codegen options
|
||||
-W help Print 'lint' options and default settings
|
||||
-Z help Print internal options for debugging rustc\n",
|
||||
getopts::usage(message, config::optgroups().as_slice()));
|
||||
getopts::usage(message.as_slice(),
|
||||
config::optgroups().as_slice()));
|
||||
}
|
||||
|
||||
fn describe_warnings() {
|
||||
|
@ -305,16 +306,18 @@ pub fn parse_pretty(sess: &Session, name: &str) -> PpMode {
|
|||
(arg, "flowgraph") => {
|
||||
match arg.and_then(from_str) {
|
||||
Some(id) => PpmFlowGraph(id),
|
||||
None => sess.fatal(format_strbuf!("`pretty flowgraph=<nodeid>` needs \
|
||||
None => {
|
||||
sess.fatal(format!("`pretty flowgraph=<nodeid>` needs \
|
||||
an integer <nodeid>; got {}",
|
||||
arg.unwrap_or("nothing")).as_slice())
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
sess.fatal(format!(
|
||||
"argument to `pretty` must be one of `normal`, \
|
||||
`expanded`, `flowgraph=<nodeid>`, `typed`, `identified`, \
|
||||
or `expanded,identified`; got {}", name));
|
||||
or `expanded,identified`; got {}", name).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -406,9 +409,13 @@ fn monitor(f: proc():Send) {
|
|||
|
||||
match r.read_to_str() {
|
||||
Ok(s) => println!("{}", s),
|
||||
Err(e) => emitter.emit(None,
|
||||
format!("failed to read internal stderr: {}", e),
|
||||
diagnostic::Error),
|
||||
Err(e) => {
|
||||
emitter.emit(None,
|
||||
format!("failed to read internal \
|
||||
stderr: {}",
|
||||
e).as_slice(),
|
||||
diagnostic::Error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -141,7 +141,8 @@ impl Session {
|
|||
// This exists to help with refactoring to eliminate impossible
|
||||
// cases later on
|
||||
pub fn impossible_case(&self, sp: Span, msg: &str) -> ! {
|
||||
self.span_bug(sp, format!("impossible case reached: {}", msg));
|
||||
self.span_bug(sp,
|
||||
format!("impossible case reached: {}", msg).as_slice());
|
||||
}
|
||||
pub fn verbose(&self) -> bool { self.debugging_opt(config::VERBOSE) }
|
||||
pub fn time_passes(&self) -> bool { self.debugging_opt(config::TIME_PASSES) }
|
||||
|
|
|
@ -109,7 +109,7 @@ impl<'a> Context<'a> {
|
|||
self.sess.span_err(span, explain);
|
||||
self.sess.span_note(span, format!("add \\#![feature({})] to the \
|
||||
crate attributes to enable",
|
||||
feature));
|
||||
feature).as_slice());
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -65,7 +65,7 @@ pub fn with_version(krate: &str) -> Option<(InternedString, ast::StrStyle)> {
|
|||
_ => {
|
||||
Some((token::intern_and_get_ident(format!("{}\\#{}",
|
||||
krate,
|
||||
VERSION)),
|
||||
VERSION).as_slice()),
|
||||
ast::CookedStr))
|
||||
}
|
||||
}
|
||||
|
|
|
@ -327,7 +327,7 @@ fn mk_test_module(cx: &TestCtxt) -> @ast::Item {
|
|||
pub fn main() {
|
||||
#![main]
|
||||
use std::slice::Vector;
|
||||
test::test_main_static_x(::std::os::args().as_slice(), TESTS);
|
||||
test::test_main_static(::std::os::args().as_slice(), TESTS);
|
||||
}
|
||||
)).unwrap();
|
||||
|
||||
|
|
|
@ -91,7 +91,8 @@ fn warn_if_multiple_versions(diag: &SpanHandler, cstore: &CStore) {
|
|||
for ((name, _), dupes) in map.move_iter() {
|
||||
if dupes.len() == 1 { continue }
|
||||
diag.handler().warn(
|
||||
format!("using multiple versions of crate `{}`", name));
|
||||
format!("using multiple versions of crate `{}`",
|
||||
name).as_slice());
|
||||
for dupe in dupes.move_iter() {
|
||||
let data = cstore.get_crate_data(dupe);
|
||||
diag.span_note(data.span, "used here");
|
||||
|
@ -161,7 +162,7 @@ fn extract_crate_info(e: &Env, i: &ast::ViewItem) -> Option<CrateInfo> {
|
|||
Some(id) => id
|
||||
}
|
||||
}
|
||||
None => from_str(ident.get().to_str()).unwrap()
|
||||
None => from_str(ident.get().to_str().as_slice()).unwrap()
|
||||
};
|
||||
Some(CrateInfo {
|
||||
ident: ident.get().to_strbuf(),
|
||||
|
@ -224,7 +225,8 @@ fn visit_item(e: &Env, i: &ast::Item) {
|
|||
cstore::NativeUnknown
|
||||
} else {
|
||||
e.sess.span_err(m.span,
|
||||
format!("unknown kind: `{}`", k));
|
||||
format!("unknown kind: `{}`",
|
||||
k).as_slice());
|
||||
cstore::NativeUnknown
|
||||
}
|
||||
}
|
||||
|
@ -243,7 +245,9 @@ fn visit_item(e: &Env, i: &ast::Item) {
|
|||
}
|
||||
};
|
||||
if n.get().is_empty() {
|
||||
e.sess.span_err(m.span, "#[link(name = \"\")] given with empty name");
|
||||
e.sess.span_err(m.span,
|
||||
"#[link(name = \"\")] given with \
|
||||
empty name");
|
||||
} else {
|
||||
e.sess
|
||||
.cstore
|
||||
|
@ -425,7 +429,7 @@ impl<'a> CrateLoader for Loader<'a> {
|
|||
let message = format!("crate `{}` contains a macro_registrar fn but \
|
||||
only a version for triple `{}` could be found (need {})",
|
||||
info.ident, target_triple, driver::host_triple());
|
||||
self.env.sess.span_err(krate.span, message);
|
||||
self.env.sess.span_err(krate.span, message.as_slice());
|
||||
// need to abort now because the syntax expansion
|
||||
// code will shortly attempt to load and execute
|
||||
// code from the found library.
|
||||
|
|
|
@ -248,7 +248,7 @@ fn encode_symbol(ecx: &EncodeContext,
|
|||
}
|
||||
None => {
|
||||
ecx.diag.handler().bug(
|
||||
format!("encode_symbol: id not found {}", id));
|
||||
format!("encode_symbol: id not found {}", id).as_slice());
|
||||
}
|
||||
}
|
||||
ebml_w.end_tag();
|
||||
|
@ -375,7 +375,7 @@ fn encode_reexported_static_method(ebml_w: &mut Encoder,
|
|||
ebml_w.start_tag(tag_items_data_item_reexport_name);
|
||||
ebml_w.wr_str(format!("{}::{}",
|
||||
exp.name,
|
||||
token::get_ident(method_ident)));
|
||||
token::get_ident(method_ident)).as_slice());
|
||||
ebml_w.end_tag();
|
||||
ebml_w.end_tag();
|
||||
}
|
||||
|
@ -1439,7 +1439,10 @@ fn synthesize_crate_attrs(ecx: &EncodeContext,
|
|||
attr::mk_attr_inner(
|
||||
attr::mk_name_value_item_str(
|
||||
InternedString::new("crate_id"),
|
||||
token::intern_and_get_ident(ecx.link_meta.crateid.to_str())))
|
||||
token::intern_and_get_ident(ecx.link_meta
|
||||
.crateid
|
||||
.to_str()
|
||||
.as_slice())))
|
||||
}
|
||||
|
||||
let mut attrs = Vec::new();
|
||||
|
|
|
@ -137,15 +137,17 @@ impl<'a> Context<'a> {
|
|||
&Some(ref r) => format!("{} which `{}` depends on",
|
||||
message, r.ident)
|
||||
};
|
||||
self.sess.span_err(self.span, message);
|
||||
self.sess.span_err(self.span, message.as_slice());
|
||||
|
||||
let mismatches = self.rejected_via_triple.iter();
|
||||
if self.rejected_via_triple.len() > 0 {
|
||||
self.sess.span_note(self.span, format!("expected triple of {}", self.triple));
|
||||
self.sess.span_note(self.span,
|
||||
format!("expected triple of {}",
|
||||
self.triple).as_slice());
|
||||
for (i, &CrateMismatch{ ref path, ref got }) in mismatches.enumerate() {
|
||||
self.sess.fileline_note(self.span,
|
||||
format!("crate `{}` path \\#{}, triple {}: {}",
|
||||
self.ident, i+1, got, path.display()));
|
||||
self.ident, i+1, got, path.display()).as_slice());
|
||||
}
|
||||
}
|
||||
if self.rejected_via_hash.len() > 0 {
|
||||
|
@ -155,7 +157,7 @@ impl<'a> Context<'a> {
|
|||
for (i, &CrateMismatch{ ref path, .. }) in mismatches.enumerate() {
|
||||
self.sess.fileline_note(self.span,
|
||||
format!("crate `{}` path \\#{}: {}",
|
||||
self.ident, i+1, path.display()));
|
||||
self.ident, i+1, path.display()).as_slice());
|
||||
}
|
||||
match self.root {
|
||||
&None => {}
|
||||
|
@ -163,7 +165,7 @@ impl<'a> Context<'a> {
|
|||
for (i, path) in r.paths().iter().enumerate() {
|
||||
self.sess.fileline_note(self.span,
|
||||
format!("crate `{}` path \\#{}: {}",
|
||||
r.ident, i+1, path.display()));
|
||||
r.ident, i+1, path.display()).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -198,9 +200,10 @@ impl<'a> Context<'a> {
|
|||
None => return FileDoesntMatch,
|
||||
Some(file) => file,
|
||||
};
|
||||
if file.starts_with(rlib_prefix) && file.ends_with(".rlib") {
|
||||
if file.starts_with(rlib_prefix.as_slice()) &&
|
||||
file.ends_with(".rlib") {
|
||||
info!("rlib candidate: {}", path.display());
|
||||
match self.try_match(file, rlib_prefix, ".rlib") {
|
||||
match self.try_match(file, rlib_prefix.as_slice(), ".rlib") {
|
||||
Some(hash) => {
|
||||
info!("rlib accepted, hash: {}", hash);
|
||||
let slot = candidates.find_or_insert_with(hash, |_| {
|
||||
|
@ -215,9 +218,12 @@ impl<'a> Context<'a> {
|
|||
FileDoesntMatch
|
||||
}
|
||||
}
|
||||
} else if file.starts_with(dylib_prefix) && file.ends_with(dysuffix){
|
||||
} else if file.starts_with(dylib_prefix.as_slice()) &&
|
||||
file.ends_with(dysuffix){
|
||||
info!("dylib candidate: {}", path.display());
|
||||
match self.try_match(file, dylib_prefix, dysuffix) {
|
||||
match self.try_match(file,
|
||||
dylib_prefix.as_slice(),
|
||||
dysuffix) {
|
||||
Some(hash) => {
|
||||
info!("dylib accepted, hash: {}", hash);
|
||||
let slot = candidates.find_or_insert_with(hash, |_| {
|
||||
|
@ -271,18 +277,20 @@ impl<'a> Context<'a> {
|
|||
_ => {
|
||||
self.sess.span_err(self.span,
|
||||
format!("multiple matching crates for `{}`",
|
||||
self.crate_id.name));
|
||||
self.crate_id.name).as_slice());
|
||||
self.sess.note("candidates:");
|
||||
for lib in libraries.iter() {
|
||||
match lib.dylib {
|
||||
Some(ref p) => {
|
||||
self.sess.note(format!("path: {}", p.display()));
|
||||
self.sess.note(format!("path: {}",
|
||||
p.display()).as_slice());
|
||||
}
|
||||
None => {}
|
||||
}
|
||||
match lib.rlib {
|
||||
Some(ref p) => {
|
||||
self.sess.note(format!("path: {}", p.display()));
|
||||
self.sess.note(format!("path: {}",
|
||||
p.display()).as_slice());
|
||||
}
|
||||
None => {}
|
||||
}
|
||||
|
@ -375,10 +383,13 @@ impl<'a> Context<'a> {
|
|||
if ret.is_some() {
|
||||
self.sess.span_err(self.span,
|
||||
format!("multiple {} candidates for `{}` \
|
||||
found", flavor, self.crate_id.name));
|
||||
found",
|
||||
flavor,
|
||||
self.crate_id.name).as_slice());
|
||||
self.sess.span_note(self.span,
|
||||
format!(r"candidate \#1: {}",
|
||||
ret.get_ref().display()));
|
||||
ret.get_ref()
|
||||
.display()).as_slice());
|
||||
error = 1;
|
||||
ret = None;
|
||||
}
|
||||
|
@ -386,7 +397,7 @@ impl<'a> Context<'a> {
|
|||
error += 1;
|
||||
self.sess.span_note(self.span,
|
||||
format!(r"candidate \#{}: {}", error,
|
||||
lib.display()));
|
||||
lib.display()).as_slice());
|
||||
continue
|
||||
}
|
||||
*slot = Some(metadata);
|
||||
|
@ -450,7 +461,7 @@ impl<'a> Context<'a> {
|
|||
}
|
||||
|
||||
pub fn note_crateid_attr(diag: &SpanHandler, crateid: &CrateId) {
|
||||
diag.handler().note(format!("crate_id: {}", crateid.to_str()));
|
||||
diag.handler().note(format!("crate_id: {}", crateid.to_str()).as_slice());
|
||||
}
|
||||
|
||||
impl ArchiveMetadata {
|
||||
|
|
|
@ -155,7 +155,10 @@ fn parse_trait_store(st: &mut PState, conv: conv_did) -> ty::TraitStore {
|
|||
match next(st) {
|
||||
'~' => ty::UniqTraitStore,
|
||||
'&' => ty::RegionTraitStore(parse_region(st, conv), parse_mutability(st)),
|
||||
c => st.tcx.sess.bug(format!("parse_trait_store(): bad input '{}'", c))
|
||||
c => {
|
||||
st.tcx.sess.bug(format!("parse_trait_store(): bad input '{}'",
|
||||
c).as_slice())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1312,7 +1312,8 @@ fn decode_side_tables(xcx: &ExtendedDecodeContext,
|
|||
match c::astencode_tag::from_uint(tag) {
|
||||
None => {
|
||||
xcx.dcx.tcx.sess.bug(
|
||||
format!("unknown tag found in side tables: {:x}", tag));
|
||||
format!("unknown tag found in side tables: {:x}",
|
||||
tag).as_slice());
|
||||
}
|
||||
Some(value) => {
|
||||
let val_doc = entry_doc.get(c::tag_table_val as uint);
|
||||
|
@ -1376,7 +1377,8 @@ fn decode_side_tables(xcx: &ExtendedDecodeContext,
|
|||
}
|
||||
_ => {
|
||||
xcx.dcx.tcx.sess.bug(
|
||||
format!("unknown tag found in side tables: {:x}", tag));
|
||||
format!("unknown tag found in side tables: {:x}",
|
||||
tag).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -243,7 +243,7 @@ impl<'a> CheckLoanCtxt<'a> {
|
|||
if restr.loan_path != loan2.loan_path { continue; }
|
||||
|
||||
let old_pronoun = if new_loan.loan_path == old_loan.loan_path {
|
||||
"it".to_owned()
|
||||
"it".to_strbuf()
|
||||
} else {
|
||||
format!("`{}`",
|
||||
self.bccx.loan_path_to_str(&*old_loan.loan_path))
|
||||
|
@ -255,7 +255,8 @@ impl<'a> CheckLoanCtxt<'a> {
|
|||
new_loan.span,
|
||||
format!("cannot borrow `{}` as mutable \
|
||||
more than once at a time",
|
||||
self.bccx.loan_path_to_str(&*new_loan.loan_path)));
|
||||
self.bccx.loan_path_to_str(
|
||||
&*new_loan.loan_path)).as_slice());
|
||||
}
|
||||
|
||||
(ty::UniqueImmBorrow, _) => {
|
||||
|
@ -264,7 +265,7 @@ impl<'a> CheckLoanCtxt<'a> {
|
|||
format!("closure requires unique access to `{}` \
|
||||
but {} is already borrowed",
|
||||
self.bccx.loan_path_to_str(&*new_loan.loan_path),
|
||||
old_pronoun));
|
||||
old_pronoun).as_slice());
|
||||
}
|
||||
|
||||
(_, ty::UniqueImmBorrow) => {
|
||||
|
@ -273,7 +274,7 @@ impl<'a> CheckLoanCtxt<'a> {
|
|||
format!("cannot borrow `{}` as {} because \
|
||||
previous closure requires unique access",
|
||||
self.bccx.loan_path_to_str(&*new_loan.loan_path),
|
||||
new_loan.kind.to_user_str()));
|
||||
new_loan.kind.to_user_str()).as_slice());
|
||||
}
|
||||
|
||||
(_, _) => {
|
||||
|
@ -284,7 +285,7 @@ impl<'a> CheckLoanCtxt<'a> {
|
|||
self.bccx.loan_path_to_str(&*new_loan.loan_path),
|
||||
new_loan.kind.to_user_str(),
|
||||
old_pronoun,
|
||||
old_loan.kind.to_user_str()));
|
||||
old_loan.kind.to_user_str()).as_slice());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -293,7 +294,8 @@ impl<'a> CheckLoanCtxt<'a> {
|
|||
self.bccx.span_note(
|
||||
span,
|
||||
format!("borrow occurs due to use of `{}` in closure",
|
||||
self.bccx.loan_path_to_str(&*new_loan.loan_path)));
|
||||
self.bccx.loan_path_to_str(
|
||||
&*new_loan.loan_path)).as_slice());
|
||||
}
|
||||
_ => { }
|
||||
}
|
||||
|
@ -303,7 +305,8 @@ impl<'a> CheckLoanCtxt<'a> {
|
|||
format!("the mutable borrow prevents subsequent \
|
||||
moves, borrows, or modification of `{0}` \
|
||||
until the borrow ends",
|
||||
self.bccx.loan_path_to_str(&*old_loan.loan_path))
|
||||
self.bccx.loan_path_to_str(
|
||||
&*old_loan.loan_path))
|
||||
}
|
||||
|
||||
ty::ImmBorrow => {
|
||||
|
@ -340,7 +343,7 @@ impl<'a> CheckLoanCtxt<'a> {
|
|||
|
||||
self.bccx.span_note(
|
||||
old_loan.span,
|
||||
format!("{}; {}", borrow_summary, rule_summary));
|
||||
format!("{}; {}", borrow_summary, rule_summary).as_slice());
|
||||
|
||||
let old_loan_span = self.tcx().map.span(old_loan.kill_scope);
|
||||
self.bccx.span_end_note(old_loan_span,
|
||||
|
@ -428,14 +431,14 @@ impl<'a> CheckLoanCtxt<'a> {
|
|||
format!("cannot assign to {} {} `{}`",
|
||||
cmt.mutbl.to_user_str(),
|
||||
self.bccx.cmt_to_str(&*cmt),
|
||||
self.bccx.loan_path_to_str(&*lp)));
|
||||
self.bccx.loan_path_to_str(&*lp)).as_slice());
|
||||
}
|
||||
None => {
|
||||
self.bccx.span_err(
|
||||
expr.span,
|
||||
format!("cannot assign to {} {}",
|
||||
cmt.mutbl.to_user_str(),
|
||||
self.bccx.cmt_to_str(&*cmt)));
|
||||
self.bccx.cmt_to_str(&*cmt)).as_slice());
|
||||
}
|
||||
}
|
||||
return;
|
||||
|
@ -672,11 +675,11 @@ impl<'a> CheckLoanCtxt<'a> {
|
|||
self.bccx.span_err(
|
||||
expr.span,
|
||||
format!("cannot assign to `{}` because it is borrowed",
|
||||
self.bccx.loan_path_to_str(loan_path)));
|
||||
self.bccx.loan_path_to_str(loan_path)).as_slice());
|
||||
self.bccx.span_note(
|
||||
loan.span,
|
||||
format!("borrow of `{}` occurs here",
|
||||
self.bccx.loan_path_to_str(loan_path)));
|
||||
self.bccx.loan_path_to_str(loan_path)).as_slice());
|
||||
}
|
||||
|
||||
fn check_move_out_from_expr(&self, expr: &ast::Expr) {
|
||||
|
@ -702,11 +705,13 @@ impl<'a> CheckLoanCtxt<'a> {
|
|||
span,
|
||||
format!("cannot move out of `{}` \
|
||||
because it is borrowed",
|
||||
self.bccx.loan_path_to_str(move_path)));
|
||||
self.bccx.loan_path_to_str(
|
||||
move_path)).as_slice());
|
||||
self.bccx.span_note(
|
||||
loan_span,
|
||||
format!("borrow of `{}` occurs here",
|
||||
self.bccx.loan_path_to_str(&*loan_path)));
|
||||
self.bccx.loan_path_to_str(
|
||||
&*loan_path)).as_slice());
|
||||
}
|
||||
}
|
||||
true
|
||||
|
@ -745,11 +750,13 @@ impl<'a> CheckLoanCtxt<'a> {
|
|||
freevar.span,
|
||||
format!("cannot move `{}` into closure \
|
||||
because it is borrowed",
|
||||
this.bccx.loan_path_to_str(move_path)));
|
||||
this.bccx.loan_path_to_str(
|
||||
move_path)).as_slice());
|
||||
this.bccx.span_note(
|
||||
loan_span,
|
||||
format!("borrow of `{}` occurs here",
|
||||
this.bccx.loan_path_to_str(&*loan_path)));
|
||||
this.bccx.loan_path_to_str(
|
||||
&*loan_path)).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -174,8 +174,9 @@ fn check_aliasability(bccx: &BorrowckCtxt,
|
|||
// static item resides in immutable memory and mutating it would
|
||||
// cause segfaults.
|
||||
bccx.tcx.sess.span_err(borrow_span,
|
||||
format!("borrow of immutable static items with \
|
||||
unsafe interior is not allowed"));
|
||||
"borrow of immutable static items \
|
||||
with unsafe interior is not \
|
||||
allowed");
|
||||
Err(())
|
||||
}
|
||||
mc::InteriorSafe => {
|
||||
|
@ -290,7 +291,8 @@ impl<'a> GatherLoanCtxt<'a> {
|
|||
ty::ReInfer(..) => {
|
||||
self.tcx().sess.span_bug(
|
||||
cmt.span,
|
||||
format!("invalid borrow lifetime: {:?}", loan_region));
|
||||
format!("invalid borrow lifetime: {:?}",
|
||||
loan_region).as_slice());
|
||||
}
|
||||
};
|
||||
debug!("loan_scope = {:?}", loan_scope);
|
||||
|
|
|
@ -131,7 +131,7 @@ fn report_cannot_move_out_of(bccx: &BorrowckCtxt, move_from: mc::cmt) {
|
|||
bccx.span_err(
|
||||
move_from.span,
|
||||
format!("cannot move out of {}",
|
||||
bccx.cmt_to_str(&*move_from)));
|
||||
bccx.cmt_to_str(&*move_from)).as_slice());
|
||||
}
|
||||
|
||||
mc::cat_downcast(ref b) |
|
||||
|
@ -143,7 +143,7 @@ fn report_cannot_move_out_of(bccx: &BorrowckCtxt, move_from: mc::cmt) {
|
|||
move_from.span,
|
||||
format!("cannot move out of type `{}`, \
|
||||
which defines the `Drop` trait",
|
||||
b.ty.user_string(bccx.tcx)));
|
||||
b.ty.user_string(bccx.tcx)).as_slice());
|
||||
},
|
||||
_ => fail!("this path should not cause illegal move")
|
||||
}
|
||||
|
@ -163,10 +163,10 @@ fn note_move_destination(bccx: &BorrowckCtxt,
|
|||
format!("attempting to move value to here (to prevent the move, \
|
||||
use `ref {0}` or `ref mut {0}` to capture value by \
|
||||
reference)",
|
||||
pat_name));
|
||||
pat_name).as_slice());
|
||||
} else {
|
||||
bccx.span_note(move_to_span,
|
||||
format!("and here (use `ref {0}` or `ref mut {0}`)",
|
||||
pat_name));
|
||||
pat_name).as_slice());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -461,7 +461,7 @@ impl<'a> BorrowckCtxt<'a> {
use_span,
format!("{} of possibly uninitialized variable: `{}`",
verb,
self.loan_path_to_str(lp)));
self.loan_path_to_str(lp)).as_slice());
}
_ => {
let partially = if lp == moved_lp {""} else {"partially "};

@ -470,7 +470,7 @@ impl<'a> BorrowckCtxt<'a> {
format!("{} of {}moved value: `{}`",
verb,
partially,
self.loan_path_to_str(lp)));
self.loan_path_to_str(lp)).as_slice());
}
}

@ -482,8 +482,12 @@ impl<'a> BorrowckCtxt<'a> {
Some(ast_map::NodeExpr(expr)) => {
(ty::expr_ty_adjusted(self.tcx, expr), expr.span)
}
r => self.tcx.sess.bug(format!("MoveExpr({:?}) maps to {:?}, not Expr",
move.id, r))
r => {
self.tcx.sess.bug(format!("MoveExpr({:?}) maps to \
{:?}, not Expr",
move.id,
r).as_slice())
}
};
let suggestion = move_suggestion(self.tcx, expr_ty,
"moved by default (use `copy` to override)");

@ -491,16 +495,18 @@ impl<'a> BorrowckCtxt<'a> {
expr_span,
format!("`{}` moved here because it has type `{}`, which is {}",
self.loan_path_to_str(moved_lp),
expr_ty.user_string(self.tcx), suggestion));
expr_ty.user_string(self.tcx),
suggestion).as_slice());
}

move_data::MovePat => {
let pat_ty = ty::node_id_to_type(self.tcx, move.id);
self.tcx.sess.span_note(self.tcx.map.span(move.id),
format!("`{}` moved here because it has type `{}`, \
which is moved by default (use `ref` to override)",
which is moved by default (use `ref` to \
override)",
self.loan_path_to_str(moved_lp),
pat_ty.user_string(self.tcx)));
pat_ty.user_string(self.tcx)).as_slice());
}

move_data::Captured => {

@ -508,8 +514,12 @@ impl<'a> BorrowckCtxt<'a> {
Some(ast_map::NodeExpr(expr)) => {
(ty::expr_ty_adjusted(self.tcx, expr), expr.span)
}
r => self.tcx.sess.bug(format!("Captured({:?}) maps to {:?}, not Expr",
move.id, r))
r => {
self.tcx.sess.bug(format!("Captured({:?}) maps to \
{:?}, not Expr",
move.id,
r).as_slice())
}
};
let suggestion = move_suggestion(self.tcx, expr_ty,
"moved by default (make a copy and \

@ -519,7 +529,8 @@ impl<'a> BorrowckCtxt<'a> {
format!("`{}` moved into closure environment here because it \
has type `{}`, which is {}",
self.loan_path_to_str(moved_lp),
expr_ty.user_string(self.tcx), suggestion));
expr_ty.user_string(self.tcx),
suggestion).as_slice());
}
}

@ -547,10 +558,8 @@ impl<'a> BorrowckCtxt<'a> {
self.tcx.sess.span_err(
span,
format!("re-assignment of immutable variable `{}`",
self.loan_path_to_str(lp)));
self.tcx.sess.span_note(
assign.span,
format!("prior assignment occurs here"));
self.loan_path_to_str(lp)).as_slice());
self.tcx.sess.span_note(assign.span, "prior assignment occurs here");
}

pub fn span_err(&self, s: Span, m: &str) {

@ -657,23 +666,23 @@ impl<'a> BorrowckCtxt<'a> {
self.tcx.sess.span_err(
span,
format!("{} in an aliasable location",
prefix));
prefix).as_slice());
}
mc::AliasableStatic(..) |
mc::AliasableStaticMut(..) => {
self.tcx.sess.span_err(
span,
format!("{} in a static location", prefix));
format!("{} in a static location", prefix).as_slice());
}
mc::AliasableManaged => {
self.tcx.sess.span_err(
span,
format!("{} in a `@` pointer", prefix));
format!("{} in a `@` pointer", prefix).as_slice());
}
mc::AliasableBorrowed => {
self.tcx.sess.span_err(
span,
format!("{} in a `&` reference", prefix));
format!("{} in a `&` reference", prefix).as_slice());
}
}
}

@ -710,12 +719,13 @@ impl<'a> BorrowckCtxt<'a> {
};
note_and_explain_region(
self.tcx,
format!("{} would have to be valid for ", descr),
format!("{} would have to be valid for ",
descr).as_slice(),
loan_scope,
"...");
note_and_explain_region(
self.tcx,
format!("...but {} is only valid for ", descr),
format!("...but {} is only valid for ", descr).as_slice(),
ptr_scope,
"");
}

@ -739,7 +749,7 @@ impl<'a> BorrowckCtxt<'a> {
}
mc::PositionalField(idx) => {
out.push_char('#'); // invent a notation here
out.push_str(idx.to_str());
out.push_str(idx.to_str().as_slice());
}
}
}
@ -508,7 +508,9 @@ impl<'a> CFGBuilder<'a> {
fn add_returning_edge(&mut self,
_from_expr: @ast::Expr,
from_index: CFGIndex) {
let mut data = CFGEdgeData {exiting_scopes: vec!() };
let mut data = CFGEdgeData {
exiting_scopes: vec!(),
};
for &LoopScope { loop_id: id, .. } in self.loop_scopes.iter().rev() {
data.exiting_scopes.push(id);
}

@ -533,13 +535,15 @@ impl<'a> CFGBuilder<'a> {
}
self.tcx.sess.span_bug(
expr.span,
format!("no loop scope for id {:?}", loop_id));
format!("no loop scope for id {:?}",
loop_id).as_slice());
}

r => {
self.tcx.sess.span_bug(
expr.span,
format!("bad entry `{:?}` in def_map for label", r));
format!("bad entry `{:?}` in def_map for label",
r).as_slice());
}
}
}
@ -82,7 +82,9 @@ impl<'a> dot::Labeller<'a, Node<'a>, Edge<'a>> for LabelledCFG<'a> {
let s = self.ast_map.node_to_str(node_id);
// left-aligns the lines
let s = replace_newline_with_backslash_l(s);
label = label.append(format!("exiting scope_{} {}", i, s.as_slice()));
label = label.append(format!("exiting scope_{} {}",
i,
s.as_slice()).as_slice());
}
dot::EscStr(label.into_maybe_owned())
}
@ -107,7 +107,7 @@ fn check_expr(v: &mut CheckCrateVisitor, e: &Expr, is_const: bool) {
.span_err(e.span,
format!("can not cast to `{}` in a constant \
expression",
ppaux::ty_to_str(v.tcx, ety).as_slice()))
ppaux::ty_to_str(v.tcx, ety)).as_slice())
}
}
ExprPath(ref pth) => {
@ -57,10 +57,14 @@ impl<'a> CheckLoopVisitor<'a> {
match cx {
Loop => {}
Closure => {
self.sess.span_err(span, format!("`{}` inside of a closure", name));
self.sess.span_err(span,
format!("`{}` inside of a closure",
name).as_slice());
}
Normal => {
self.sess.span_err(span, format!("`{}` outside of loop", name));
self.sess.span_err(span,
format!("`{}` outside of loop",
name).as_slice());
}
}
}
@ -74,7 +74,7 @@ fn check_expr(cx: &mut MatchCheckCtxt, ex: &Expr) {
// We know the type is inhabited, so this must be wrong
cx.tcx.sess.span_err(ex.span, format!("non-exhaustive patterns: \
type {} is non-empty",
ty_to_str(cx.tcx, pat_ty)));
ty_to_str(cx.tcx, pat_ty)).as_slice());
}
// If the type *is* empty, it's vacuously exhaustive
return;

@ -164,8 +164,8 @@ fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, pats: Vec<@Pat> ) {
match ty::get(ty).sty {
ty::ty_bool => {
match *ctor {
val(const_bool(true)) => Some("true".to_owned()),
val(const_bool(false)) => Some("false".to_owned()),
val(const_bool(true)) => Some("true".to_strbuf()),
val(const_bool(false)) => Some("false".to_strbuf()),
_ => None
}
}

@ -177,7 +177,11 @@ fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, pats: Vec<@Pat> ) {
let variants = ty::enum_variants(cx.tcx, id);

match variants.iter().find(|v| v.id == vid) {
Some(v) => Some(token::get_ident(v.name).get().to_str()),
Some(v) => {
Some(token::get_ident(v.name).get()
.to_str()
.into_strbuf())
}
None => {
fail!("check_exhaustive: bad variant in ctor")
}

@ -185,7 +189,9 @@ fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, pats: Vec<@Pat> ) {
}
ty::ty_vec(..) | ty::ty_rptr(..) => {
match *ctor {
vec(n) => Some(format!("vectors of length {}", n)),
vec(n) => {
Some(format_strbuf!("vectors of length {}", n))
}
_ => None
}
}

@ -193,11 +199,11 @@ fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, pats: Vec<@Pat> ) {
}
}
};
let msg = "non-exhaustive patterns".to_owned() + match ext {
Some(ref s) => format!(": {} not covered", *s),
None => "".to_owned()
};
cx.tcx.sess.span_err(sp, msg);
let msg = format_strbuf!("non-exhaustive patterns{}", match ext {
Some(ref s) => format_strbuf!(": {} not covered", *s),
None => "".to_strbuf()
});
cx.tcx.sess.span_err(sp, msg.as_slice());
}

type matrix = Vec<Vec<@Pat> > ;
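The `check_exhaustive` hunk above replaces `~str` concatenation and `to_owned()` with `format_strbuf!` and `to_strbuf()`, building one owned message and then lending it to `span_err` as a slice. A hedged sketch of the same shape in current Rust, with `String` in place of `StrBuf` and plain `format!` standing in for `format_strbuf!`; the sample values are invented for illustration.

fn main() {
    // Which constructor, if any, is known to be missing from the match.
    let ext: Option<String> = Some("`false`".to_string());
    // Build one owned message up front (the old code concatenated ~str values),
    // then borrow it where a &str is needed, like `msg.as_slice()` in the diff.
    let msg = format!("non-exhaustive patterns{}", match ext {
        Some(ref s) => format!(": {} not covered", s),
        None => String::new(),
    });
    assert_eq!(msg, "non-exhaustive patterns: `false` not covered");
    println!("{}", msg.as_str()); // `.as_str()` ~ the diff's `.as_slice()`
}
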
@ -739,7 +745,8 @@ fn specialize(cx: &MatchCheckCtxt,
|
|||
pat_span,
|
||||
format!("struct pattern resolved to {}, \
|
||||
not a struct",
|
||||
ty_to_str(cx.tcx, left_ty)));
|
||||
ty_to_str(cx.tcx,
|
||||
left_ty)).as_slice());
|
||||
}
|
||||
}
|
||||
let args = class_fields.iter().map(|class_field| {
|
||||
|
@ -980,9 +987,10 @@ fn check_legality_of_move_bindings(cx: &MatchCheckCtxt,
|
|||
_ => {
|
||||
cx.tcx.sess.span_bug(
|
||||
p.span,
|
||||
format!("binding pattern {} is \
|
||||
not an identifier: {:?}",
|
||||
p.id, p.node));
|
||||
format!("binding pattern {} is not an \
|
||||
identifier: {:?}",
|
||||
p.id,
|
||||
p.node).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -436,10 +436,11 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
|
|||
// (#5900). Fall back to doing a limited lookup to get past it.
|
||||
let ety = ty::expr_ty_opt(tcx.ty_ctxt(), e)
|
||||
.or_else(|| astconv::ast_ty_to_prim_ty(tcx.ty_ctxt(), target_ty))
|
||||
.unwrap_or_else(|| tcx.ty_ctxt().sess.span_fatal(
|
||||
target_ty.span,
|
||||
format!("target type not found for const cast")
|
||||
));
|
||||
.unwrap_or_else(|| {
|
||||
tcx.ty_ctxt().sess.span_fatal(target_ty.span,
|
||||
"target type not found for \
|
||||
const cast")
|
||||
});
|
||||
|
||||
let base = eval_const_expr_partial(tcx, base);
|
||||
match base {
|
||||
|
|
|
@ -102,14 +102,14 @@ impl<'a, O:DataFlowOperator> pprust::PpAnn for DataFlowContext<'a, O> {
|
|||
let gens_str = if gens.iter().any(|&u| u != 0) {
|
||||
format!(" gen: {}", bits_to_str(gens))
|
||||
} else {
|
||||
"".to_owned()
|
||||
"".to_strbuf()
|
||||
};
|
||||
|
||||
let kills = self.kills.slice(start, end);
|
||||
let kills_str = if kills.iter().any(|&u| u != 0) {
|
||||
format!(" kill: {}", bits_to_str(kills))
|
||||
} else {
|
||||
"".to_owned()
|
||||
"".to_strbuf()
|
||||
};
|
||||
|
||||
try!(ps.synth_comment(format_strbuf!("id {}: {}{}{}",
|
||||
|
@ -653,7 +653,8 @@ impl<'a, 'b, O:DataFlowOperator> PropagationContext<'a, 'b, O> {
|
|||
from_expr.span,
|
||||
format!("pop_scopes(from_expr={}, to_scope={:?}) \
|
||||
to_scope does not enclose from_expr",
|
||||
from_expr.repr(tcx), to_scope.loop_id));
|
||||
from_expr.repr(tcx),
|
||||
to_scope.loop_id).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -765,7 +766,8 @@ impl<'a, 'b, O:DataFlowOperator> PropagationContext<'a, 'b, O> {
|
|||
None => {
|
||||
self.tcx().sess.span_bug(
|
||||
expr.span,
|
||||
format!("no loop scope for id {:?}", loop_id));
|
||||
format!("no loop scope for id {:?}",
|
||||
loop_id).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -773,7 +775,8 @@ impl<'a, 'b, O:DataFlowOperator> PropagationContext<'a, 'b, O> {
|
|||
r => {
|
||||
self.tcx().sess.span_bug(
|
||||
expr.span,
|
||||
format!("bad entry `{:?}` in def_map for label", r));
|
||||
format!("bad entry `{:?}` in def_map for label",
|
||||
r).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -789,7 +792,9 @@ impl<'a, 'b, O:DataFlowOperator> PropagationContext<'a, 'b, O> {
|
|||
|
||||
fn reset(&mut self, bits: &mut [uint]) {
|
||||
let e = if self.dfcx.oper.initial_value() {uint::MAX} else {0};
|
||||
for b in bits.mut_iter() { *b = e; }
|
||||
for b in bits.mut_iter() {
|
||||
*b = e;
|
||||
}
|
||||
}
|
||||
|
||||
fn add_to_entry_set(&mut self, id: ast::NodeId, pred_bits: &[uint]) {
|
||||
|
@ -841,7 +846,7 @@ fn bits_to_str(words: &[uint]) -> StrBuf {
|
|||
let mut v = word;
|
||||
for _ in range(0u, uint::BYTES) {
|
||||
result.push_char(sep);
|
||||
result.push_str(format!("{:02x}", v & 0xFF));
|
||||
result.push_str(format!("{:02x}", v & 0xFF).as_slice());
|
||||
v >>= 8;
|
||||
sep = '-';
|
||||
}
|
||||
|
|
|
@ -118,7 +118,7 @@ fn calculate_type(sess: &session::Session,
|
|||
let src = sess.cstore.get_used_crate_source(cnum).unwrap();
|
||||
if src.rlib.is_some() { return }
|
||||
sess.err(format!("dependency `{}` not found in rlib format",
|
||||
data.name));
|
||||
data.name).as_slice());
|
||||
});
|
||||
return Vec::new();
|
||||
}
|
||||
|
@ -187,7 +187,7 @@ fn calculate_type(sess: &session::Session,
|
|||
match kind {
|
||||
cstore::RequireStatic => "rlib",
|
||||
cstore::RequireDynamic => "dylib",
|
||||
}));
|
||||
}).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -211,7 +211,8 @@ fn add_library(sess: &session::Session,
|
|||
if link2 != link || link == cstore::RequireStatic {
|
||||
let data = sess.cstore.get_crate_data(cnum);
|
||||
sess.err(format!("cannot satisfy dependencies so `{}` only \
|
||||
shows up once", data.name));
|
||||
shows up once",
|
||||
data.name).as_slice());
|
||||
sess.note("having upstream crates all available in one format \
|
||||
will likely make this go away");
|
||||
}
|
||||
|
|
|
@ -48,8 +48,9 @@ impl<'a> EffectCheckVisitor<'a> {
|
|||
SafeContext => {
|
||||
// Report an error.
|
||||
self.tcx.sess.span_err(span,
|
||||
format!("{} requires unsafe function or block",
|
||||
description))
|
||||
format!("{} requires unsafe function or \
|
||||
block",
|
||||
description).as_slice())
|
||||
}
|
||||
UnsafeBlock(block_id) => {
|
||||
// OK, but record this.
|
||||
|
|
|
@ -422,7 +422,7 @@ impl<'d,'t,TYPER:mc::Typer> ExprUseVisitor<'d,'t,TYPER> {
|
|||
self.tcx().sess.span_bug(
|
||||
callee.span,
|
||||
format!("unxpected callee type {}",
|
||||
callee_ty.repr(self.tcx())));
|
||||
callee_ty.repr(self.tcx())).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -448,9 +448,7 @@ impl<'d,'t,TYPER:mc::Typer> ExprUseVisitor<'d,'t,TYPER> {
|
|||
}
|
||||
|
||||
ast::StmtMac(..) => {
|
||||
self.tcx().sess.span_bug(
|
||||
stmt.span,
|
||||
format!("unexpanded stmt macro"));
|
||||
self.tcx().sess.span_bug(stmt.span, "unexpanded stmt macro");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -518,7 +516,7 @@ impl<'d,'t,TYPER:mc::Typer> ExprUseVisitor<'d,'t,TYPER> {
|
|||
_ => {
|
||||
self.tcx().sess.span_bug(
|
||||
with_expr.span,
|
||||
format!("with expression doesn't evaluate to a struct"));
|
||||
"with expression doesn't evaluate to a struct");
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -601,7 +599,7 @@ impl<'d,'t,TYPER:mc::Typer> ExprUseVisitor<'d,'t,TYPER> {
|
|||
ty::ty_rptr(r, ref m) => (m.mutbl, r),
|
||||
_ => self.tcx().sess.span_bug(expr.span,
|
||||
format!("bad overloaded deref type {}",
|
||||
method_ty.repr(self.tcx())))
|
||||
method_ty.repr(self.tcx())).as_slice())
|
||||
};
|
||||
let bk = ty::BorrowKind::from_mutbl(m);
|
||||
self.delegate.borrow(expr.id, expr.span, cmt,
|
||||
|
|
|
@ -127,10 +127,12 @@ fn check_impl_of_trait(cx: &mut Context, it: &Item, trait_ref: &TraitRef, self_t
|
|||
check_builtin_bounds(cx, self_ty, trait_def.bounds, |missing| {
|
||||
cx.tcx.sess.span_err(self_type.span,
|
||||
format!("the type `{}', which does not fulfill `{}`, cannot implement this \
|
||||
trait", ty_to_str(cx.tcx, self_ty), missing.user_string(cx.tcx)));
|
||||
trait",
|
||||
ty_to_str(cx.tcx, self_ty),
|
||||
missing.user_string(cx.tcx)).as_slice());
|
||||
cx.tcx.sess.span_note(self_type.span,
|
||||
format!("types implementing this trait must fulfill `{}`",
|
||||
trait_def.bounds.user_string(cx.tcx)));
|
||||
trait_def.bounds.user_string(cx.tcx)).as_slice());
|
||||
});
|
||||
|
||||
// If this is a destructor, check kinds.
|
||||
|
@ -210,8 +212,9 @@ fn with_appropriate_checker(cx: &Context,
|
|||
b(check_for_bare)
|
||||
}
|
||||
ref s => {
|
||||
cx.tcx.sess.bug(
|
||||
format!("expect fn type in kind checker, not {:?}", s));
|
||||
cx.tcx.sess.bug(format!("expect fn type in kind checker, not \
|
||||
{:?}",
|
||||
s).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -392,7 +395,7 @@ pub fn check_typaram_bounds(cx: &Context,
|
|||
format!("instantiating a type parameter with an incompatible type \
|
||||
`{}`, which does not fulfill `{}`",
|
||||
ty_to_str(cx.tcx, ty),
|
||||
missing.user_string(cx.tcx)));
|
||||
missing.user_string(cx.tcx)).as_slice());
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -403,19 +406,26 @@ pub fn check_freevar_bounds(cx: &Context, sp: Span, ty: ty::t,
|
|||
// Will be Some if the freevar is implicitly borrowed (stack closure).
|
||||
// Emit a less mysterious error message in this case.
|
||||
match referenced_ty {
|
||||
Some(rty) => cx.tcx.sess.span_err(sp,
|
||||
format!("cannot implicitly borrow variable of type `{}` in a bounded \
|
||||
stack closure (implicit reference does not fulfill `{}`)",
|
||||
ty_to_str(cx.tcx, rty), missing.user_string(cx.tcx))),
|
||||
None => cx.tcx.sess.span_err(sp,
|
||||
Some(rty) => {
|
||||
cx.tcx.sess.span_err(sp,
|
||||
format!("cannot implicitly borrow variable of type `{}` in a \
|
||||
bounded stack closure (implicit reference does not \
|
||||
fulfill `{}`)",
|
||||
ty_to_str(cx.tcx, rty),
|
||||
missing.user_string(cx.tcx)).as_slice())
|
||||
}
|
||||
None => {
|
||||
cx.tcx.sess.span_err(sp,
|
||||
format!("cannot capture variable of type `{}`, which does \
|
||||
not fulfill `{}`, in a bounded closure",
|
||||
ty_to_str(cx.tcx, ty), missing.user_string(cx.tcx))),
|
||||
ty_to_str(cx.tcx, ty),
|
||||
missing.user_string(cx.tcx)).as_slice())
|
||||
}
|
||||
}
|
||||
cx.tcx.sess.span_note(
|
||||
sp,
|
||||
format!("this closure's environment must satisfy `{}`",
|
||||
bounds.user_string(cx.tcx)));
|
||||
bounds.user_string(cx.tcx)).as_slice());
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -426,7 +436,7 @@ pub fn check_trait_cast_bounds(cx: &Context, sp: Span, ty: ty::t,
|
|||
format!("cannot pack type `{}`, which does not fulfill \
|
||||
`{}`, as a trait bounded by {}",
|
||||
ty_to_str(cx.tcx, ty), missing.user_string(cx.tcx),
|
||||
bounds.user_string(cx.tcx)));
|
||||
bounds.user_string(cx.tcx)).as_slice());
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -436,9 +446,10 @@ fn check_copy(cx: &Context, ty: ty::t, sp: Span, reason: &str) {
|
|||
ty::type_contents(cx.tcx, ty).to_str());
|
||||
if ty::type_moves_by_default(cx.tcx, ty) {
|
||||
cx.tcx.sess.span_err(
|
||||
sp, format!("copying a value of non-copyable type `{}`",
|
||||
ty_to_str(cx.tcx, ty)));
|
||||
cx.tcx.sess.span_note(sp, format!("{}", reason));
|
||||
sp,
|
||||
format!("copying a value of non-copyable type `{}`",
|
||||
ty_to_str(cx.tcx, ty)).as_slice());
|
||||
cx.tcx.sess.span_note(sp, format!("{}", reason).as_slice());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -448,7 +459,8 @@ pub fn check_static(tcx: &ty::ctxt, ty: ty::t, sp: Span) -> bool {
|
|||
ty::ty_param(..) => {
|
||||
tcx.sess.span_err(sp,
|
||||
format!("value may contain references; \
|
||||
add `'static` bound to `{}`", ty_to_str(tcx, ty)));
|
||||
add `'static` bound to `{}`",
|
||||
ty_to_str(tcx, ty)).as_slice());
|
||||
}
|
||||
_ => {
|
||||
tcx.sess.span_err(sp, "value may contain references");
|
||||
|
@ -564,8 +576,11 @@ pub fn check_cast_for_escaping_regions(
|
|||
// Ensure that `ty` has a statically known size (i.e., it has the `Sized` bound).
|
||||
fn check_sized(tcx: &ty::ctxt, ty: ty::t, name: StrBuf, sp: Span) {
|
||||
if !ty::type_is_sized(tcx, ty) {
|
||||
tcx.sess.span_err(sp, format!("variable `{}` has dynamically sized type `{}`",
|
||||
name, ty_to_str(tcx, ty)));
|
||||
tcx.sess.span_err(sp,
|
||||
format!("variable `{}` has dynamically sized type \
|
||||
`{}`",
|
||||
name,
|
||||
ty_to_str(tcx, ty)).as_slice());
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -152,7 +152,8 @@ impl<'a> LanguageItemCollector<'a> {
|
|||
match self.items.items.get(item_index) {
|
||||
&Some(original_def_id) if original_def_id != item_def_id => {
|
||||
self.session.err(format!("duplicate entry for `{}`",
|
||||
LanguageItems::item_name(item_index)));
|
||||
LanguageItems::item_name(
|
||||
item_index)).as_slice());
|
||||
}
|
||||
&Some(_) | &None => {
|
||||
// OK.
|
||||
|
|
|
@ -506,8 +506,10 @@ impl<'a> Context<'a> {
|
|||
let mut note = None;
|
||||
let msg = match src {
|
||||
Default => {
|
||||
format!("{}, \\#[{}({})] on by default", msg,
|
||||
level_to_str(level), self.lint_to_str(lint))
|
||||
format_strbuf!("{}, \\#[{}({})] on by default",
|
||||
msg,
|
||||
level_to_str(level),
|
||||
self.lint_to_str(lint))
|
||||
},
|
||||
CommandLine => {
|
||||
format!("{} [-{} {}]", msg,
|
||||
|
@ -522,8 +524,8 @@ impl<'a> Context<'a> {
|
|||
}
|
||||
};
|
||||
match level {
|
||||
Warn => { self.tcx.sess.span_warn(span, msg); }
|
||||
Deny | Forbid => { self.tcx.sess.span_err(span, msg); }
|
||||
Warn => self.tcx.sess.span_warn(span, msg.as_slice()),
|
||||
Deny | Forbid => self.tcx.sess.span_err(span, msg.as_slice()),
|
||||
Allow => fail!(),
|
||||
}
|
||||
|
||||
|
@ -552,7 +554,7 @@ impl<'a> Context<'a> {
|
|||
UnrecognizedLint,
|
||||
meta.span,
|
||||
format!("unknown `{}` attribute: `{}`",
|
||||
level_to_str(level), lintname));
|
||||
level_to_str(level), lintname).as_slice());
|
||||
}
|
||||
Some(lint) => {
|
||||
let lint = lint.lint;
|
||||
|
@ -561,7 +563,8 @@ impl<'a> Context<'a> {
|
|||
self.tcx.sess.span_err(meta.span,
|
||||
format!("{}({}) overruled by outer forbid({})",
|
||||
level_to_str(level),
|
||||
lintname, lintname));
|
||||
lintname,
|
||||
lintname).as_slice());
|
||||
} else if now != level {
|
||||
let src = self.get_source(lint);
|
||||
self.lint_stack.push((lint, now, src));
|
||||
|
@ -965,13 +968,13 @@ fn check_heap_type(cx: &Context, span: Span, ty: ty::t) {
|
|||
if n_uniq > 0 && lint != ManagedHeapMemory {
|
||||
let s = ty_to_str(cx.tcx, ty);
|
||||
let m = format!("type uses owned (Box type) pointers: {}", s);
|
||||
cx.span_lint(lint, span, m);
|
||||
cx.span_lint(lint, span, m.as_slice());
|
||||
}
|
||||
|
||||
if n_box > 0 && lint != OwnedHeapMemory {
|
||||
let s = ty_to_str(cx.tcx, ty);
|
||||
let m = format!("type uses managed (@ type) pointers: {}", s);
|
||||
cx.span_lint(lint, span, m);
|
||||
cx.span_lint(lint, span, m.as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1122,7 +1125,8 @@ fn check_attrs_usage(cx: &Context, attrs: &[ast::Attribute]) {
|
|||
for &(obs_attr, obs_alter) in obsolete_attrs.iter() {
|
||||
if name.equiv(&obs_attr) {
|
||||
cx.span_lint(AttributeUsage, attr.span,
|
||||
format!("obsolete attribute: {:s}", obs_alter));
|
||||
format!("obsolete attribute: {:s}",
|
||||
obs_alter).as_slice());
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
@ -1233,7 +1237,7 @@ fn check_item_non_camel_case_types(cx: &Context, it: &ast::Item) {
|
|||
cx.span_lint(
|
||||
NonCamelCaseTypes, span,
|
||||
format!("{} `{}` should have a camel case identifier",
|
||||
sort, token::get_ident(ident)));
|
||||
sort, token::get_ident(ident)).as_slice());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1331,7 +1335,8 @@ fn check_unnecessary_parens_core(cx: &Context, value: &ast::Expr, msg: &str) {
|
|||
match value.node {
|
||||
ast::ExprParen(_) => {
|
||||
cx.span_lint(UnnecessaryParens, value.span,
|
||||
format!("unnecessary parentheses around {}", msg))
|
||||
format!("unnecessary parentheses around {}",
|
||||
msg).as_slice())
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
@ -1506,8 +1511,10 @@ fn check_missing_doc_attrs(cx: &Context,
|
|||
}
|
||||
});
|
||||
if !has_doc {
|
||||
cx.span_lint(MissingDoc, sp,
|
||||
format!("missing documentation for {}", desc));
|
||||
cx.span_lint(MissingDoc,
|
||||
sp,
|
||||
format!("missing documentation for {}",
|
||||
desc).as_slice());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1675,7 +1682,7 @@ fn check_stability(cx: &Context, e: &ast::Expr) {
|
|||
_ => format!("use of {} item", label)
|
||||
};
|
||||
|
||||
cx.span_lint(lint, e.span, msg);
|
||||
cx.span_lint(lint, e.span, msg.as_slice());
|
||||
}
|
||||
|
||||
impl<'a> Visitor<()> for Context<'a> {
|
||||
|
@ -1906,8 +1913,11 @@ pub fn check_crate(tcx: &ty::ctxt,
|
|||
// in the iteration code.
|
||||
for (id, v) in tcx.sess.lints.borrow().iter() {
|
||||
for &(lint, span, ref msg) in v.iter() {
|
||||
tcx.sess.span_bug(span, format!("unprocessed lint {:?} at {}: {}",
|
||||
lint, tcx.map.node_to_str(*id), *msg))
|
||||
tcx.sess.span_bug(span,
|
||||
format!("unprocessed lint {:?} at {}: {}",
|
||||
lint,
|
||||
tcx.map.node_to_str(*id),
|
||||
*msg).as_slice())
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -317,7 +317,7 @@ impl<'a> IrMaps<'a> {
|
|||
self.tcx
|
||||
.sess
|
||||
.span_bug(span, format!("no variable registered for id {}",
|
||||
node_id));
|
||||
node_id).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -606,8 +606,9 @@ impl<'a> Liveness<'a> {
|
|||
// code have to agree about which AST nodes are worth
|
||||
// creating liveness nodes for.
|
||||
self.ir.tcx.sess.span_bug(
|
||||
span, format!("no live node registered for node {}",
|
||||
node_id));
|
||||
span,
|
||||
format!("no live node registered for node {}",
|
||||
node_id).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -224,7 +224,7 @@ pub fn deref_kind(tcx: &ty::ctxt, t: ty::t) -> deref_kind {
|
|||
None => {
|
||||
tcx.sess.bug(
|
||||
format!("deref_cat() invoked on non-derefable type {}",
|
||||
ty_to_str(tcx, t)));
|
||||
ty_to_str(tcx, t)).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -578,7 +578,8 @@ impl<'t,TYPER:Typer> MemCategorizationContext<'t,TYPER> {
|
|||
self.tcx().sess.span_bug(
|
||||
span,
|
||||
format!("Upvar of non-closure {} - {}",
|
||||
fn_node_id, ty.repr(self.tcx())));
|
||||
fn_node_id,
|
||||
ty.repr(self.tcx())).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -727,7 +728,7 @@ impl<'t,TYPER:Typer> MemCategorizationContext<'t,TYPER> {
|
|||
self.tcx().sess.span_bug(
|
||||
node.span(),
|
||||
format!("Explicit deref of non-derefable type: {}",
|
||||
base_cmt.ty.repr(self.tcx())));
|
||||
base_cmt.ty.repr(self.tcx())).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -800,7 +801,7 @@ impl<'t,TYPER:Typer> MemCategorizationContext<'t,TYPER> {
|
|||
self.tcx().sess.span_bug(
|
||||
elt.span(),
|
||||
format!("Explicit index of non-index type `{}`",
|
||||
base_cmt.ty.repr(self.tcx())));
|
||||
base_cmt.ty.repr(self.tcx())).as_slice());
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -884,9 +885,8 @@ impl<'t,TYPER:Typer> MemCategorizationContext<'t,TYPER> {
|
|||
},
|
||||
|
||||
_ => {
|
||||
tcx.sess.span_bug(
|
||||
pat.span,
|
||||
format!("Type of slice pattern is not a slice"));
|
||||
tcx.sess.span_bug(pat.span,
|
||||
"type of slice pattern is not a slice");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -632,7 +632,7 @@ impl<'a> PrivacyVisitor<'a> {
|
|||
UnnamedField(idx) => format!("field \\#{} of {} is private",
|
||||
idx + 1, struct_desc),
|
||||
};
|
||||
self.tcx.sess.span_err(span, msg);
|
||||
self.tcx.sess.span_err(span, msg.as_slice());
|
||||
}
|
||||
|
||||
// Given the ID of a method, checks to ensure it's in scope.
|
||||
|
@ -647,7 +647,8 @@ impl<'a> PrivacyVisitor<'a> {
|
|||
self.report_error(self.ensure_public(span,
|
||||
method_id,
|
||||
None,
|
||||
format!("method `{}`", string)));
|
||||
format!("method `{}`",
|
||||
string).as_slice()));
|
||||
}
|
||||
|
||||
// Checks that a path is in scope.
|
||||
|
@ -661,8 +662,12 @@ impl<'a> PrivacyVisitor<'a> {
|
|||
.unwrap()
|
||||
.identifier);
|
||||
let origdid = def_id_of_def(orig_def);
|
||||
self.ensure_public(span, def, Some(origdid),
|
||||
format!("{} `{}`", tyname, name))
|
||||
self.ensure_public(span,
|
||||
def,
|
||||
Some(origdid),
|
||||
format!("{} `{}`",
|
||||
tyname,
|
||||
name).as_slice())
|
||||
};
|
||||
|
||||
match *self.last_private_map.get(&path_id) {
|
||||
|
|
|
@ -234,7 +234,7 @@ impl<'a> ReachableContext<'a> {
|
|||
None => {
|
||||
self.tcx.sess.bug(format!("found unmapped ID in worklist: \
|
||||
{}",
|
||||
search_item))
|
||||
search_item).as_slice())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -324,9 +324,12 @@ impl<'a> ReachableContext<'a> {
|
|||
ast_map::NodeVariant(_) |
|
||||
ast_map::NodeStructCtor(_) => {}
|
||||
_ => {
|
||||
self.tcx.sess.bug(format!("found unexpected thingy in \
|
||||
worklist: {}",
|
||||
self.tcx.map.node_to_str(search_item)))
|
||||
self.tcx
|
||||
.sess
|
||||
.bug(format!("found unexpected thingy in worklist: {}",
|
||||
self.tcx
|
||||
.map
|
||||
.node_to_str(search_item)).as_slice())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1085,14 +1085,14 @@ impl<'a> Resolver<'a> {
|
|||
self.resolve_error(sp,
|
||||
format!("duplicate definition of {} `{}`",
|
||||
namespace_error_to_str(duplicate_type),
|
||||
token::get_ident(name)));
|
||||
token::get_ident(name)).as_slice());
|
||||
{
|
||||
let r = child.span_for_namespace(ns);
|
||||
for sp in r.iter() {
|
||||
self.session.span_note(*sp,
|
||||
format!("first definition of {} `{}` here",
|
||||
namespace_error_to_str(duplicate_type),
|
||||
token::get_ident(name)));
|
||||
token::get_ident(name)).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -2054,7 +2054,7 @@ impl<'a> Resolver<'a> {
|
|||
import_directive.module_path
|
||||
.as_slice(),
|
||||
import_directive.subclass));
|
||||
self.resolve_error(import_directive.span, msg);
|
||||
self.resolve_error(import_directive.span, msg.as_slice());
|
||||
}
|
||||
Indeterminate => {
|
||||
// Bail out. We'll come around next time.
|
||||
|
@ -2427,7 +2427,7 @@ impl<'a> Resolver<'a> {
|
|||
`{}` in `{}`",
|
||||
token::get_ident(source),
|
||||
self.module_to_str(&*containing_module));
|
||||
self.resolve_error(directive.span, msg);
|
||||
self.resolve_error(directive.span, msg.as_slice());
|
||||
return Failed;
|
||||
}
|
||||
let value_used_public = value_used_reexport || value_used_public;
|
||||
|
@ -2654,11 +2654,14 @@ impl<'a> Resolver<'a> {
|
|||
format!("unresolved import. maybe \
|
||||
a missing `extern crate \
|
||||
{}`?",
|
||||
segment_name));
|
||||
segment_name).as_slice());
|
||||
return Failed;
|
||||
}
|
||||
self.resolve_error(span, format!("unresolved import: could not find `{}` in \
|
||||
`{}`.", segment_name, module_name));
|
||||
self.resolve_error(span,
|
||||
format!("unresolved import: could not \
|
||||
find `{}` in `{}`.",
|
||||
segment_name,
|
||||
module_name).as_slice());
|
||||
return Failed;
|
||||
}
|
||||
Indeterminate => {
|
||||
|
@ -2675,8 +2678,11 @@ impl<'a> Resolver<'a> {
|
|||
match type_def.module_def {
|
||||
None => {
|
||||
// Not a module.
|
||||
self.resolve_error(span, format!("not a module `{}`",
|
||||
token::get_ident(name)));
|
||||
self.resolve_error(
|
||||
span,
|
||||
format!("not a module `{}`",
|
||||
token::get_ident(name))
|
||||
.as_slice());
|
||||
return Failed;
|
||||
}
|
||||
Some(ref module_def) => {
|
||||
|
@ -2717,9 +2723,10 @@ impl<'a> Resolver<'a> {
|
|||
}
|
||||
None => {
|
||||
// There are no type bindings at all.
|
||||
self.resolve_error(span,
|
||||
self.resolve_error(
|
||||
span,
|
||||
format!("not a module `{}`",
|
||||
token::get_ident(name)));
|
||||
token::get_ident(name)).as_slice());
|
||||
return Failed;
|
||||
}
|
||||
}
|
||||
|
@ -2764,16 +2771,15 @@ impl<'a> Resolver<'a> {
|
|||
let mpath = self.idents_to_str(module_path);
|
||||
match mpath.as_slice().rfind(':') {
|
||||
Some(idx) => {
|
||||
self.resolve_error(span,
|
||||
format!("unresolved import: could \
|
||||
not find `{}` in `{}`",
|
||||
// idx +- 1 to account for
|
||||
// the colons on either
|
||||
// side
|
||||
self.resolve_error(
|
||||
span,
|
||||
format!("unresolved import: could not find `{}` \
|
||||
in `{}`",
|
||||
// idx +- 1 to account for the colons on \
|
||||
// either side
|
||||
mpath.as_slice().slice_from(idx + 1),
|
||||
mpath.as_slice()
|
||||
.slice_from(idx + 1),
|
||||
mpath.as_slice()
|
||||
.slice_to(idx - 1)));
|
||||
.slice_to(idx - 1)).as_slice());
|
||||
},
|
||||
None => (),
|
||||
};
|
||||
|
@ -3200,7 +3206,7 @@ impl<'a> Resolver<'a> {
|
|||
} else {
|
||||
let err = format!("unresolved import (maybe you meant `{}::*`?)",
|
||||
sn.as_slice().slice(0, sn.len()));
|
||||
self.resolve_error(imports.get(index).span, err);
|
||||
self.resolve_error(imports.get(index).span, err.as_slice());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -3870,7 +3876,7 @@ impl<'a> Resolver<'a> {
|
|||
};
|
||||
|
||||
let msg = format!("attempt to {} a nonexistent trait `{}`", usage_str, path_str);
|
||||
self.resolve_error(trait_reference.path.span, msg);
|
||||
self.resolve_error(trait_reference.path.span, msg.as_slice());
|
||||
}
|
||||
Some(def) => {
|
||||
debug!("(resolving trait) found trait def: {:?}", def);
|
||||
|
@ -4071,7 +4077,7 @@ impl<'a> Resolver<'a> {
|
|||
format!("variable `{}` from pattern \\#1 is \
|
||||
not bound in pattern \\#{}",
|
||||
token::get_name(key),
|
||||
i + 1));
|
||||
i + 1).as_slice());
|
||||
}
|
||||
Some(binding_i) => {
|
||||
if binding_0.binding_mode != binding_i.binding_mode {
|
||||
|
@ -4080,7 +4086,7 @@ impl<'a> Resolver<'a> {
|
|||
format!("variable `{}` is bound with different \
|
||||
mode in pattern \\#{} than in pattern \\#1",
|
||||
token::get_name(key),
|
||||
i + 1));
|
||||
i + 1).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -4093,7 +4099,7 @@ impl<'a> Resolver<'a> {
|
|||
format!("variable `{}` from pattern \\#{} is \
|
||||
not bound in pattern \\#1",
|
||||
token::get_name(key),
|
||||
i + 1));
|
||||
i + 1).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -4220,7 +4226,7 @@ impl<'a> Resolver<'a> {
|
|||
None => {
|
||||
let msg = format!("use of undeclared type name `{}`",
|
||||
self.path_idents_to_str(path));
|
||||
self.resolve_error(ty.span, msg);
|
||||
self.resolve_error(ty.span, msg.as_slice());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -4285,12 +4291,12 @@ impl<'a> Resolver<'a> {
|
|||
self.record_def(pattern.id, (def, lp));
|
||||
}
|
||||
FoundStructOrEnumVariant(..) => {
|
||||
self.resolve_error(pattern.span,
|
||||
format!("declaration of `{}` \
|
||||
shadows an enum \
|
||||
variant or unit-like \
|
||||
struct in scope",
|
||||
token::get_name(renamed)));
|
||||
self.resolve_error(
|
||||
pattern.span,
|
||||
format!("declaration of `{}` shadows an enum \
|
||||
variant or unit-like struct in \
|
||||
scope",
|
||||
token::get_name(renamed)).as_slice());
|
||||
}
|
||||
FoundConst(def, lp) if mode == RefutableMode => {
|
||||
debug!("(resolving pattern) resolving `{}` to \
|
||||
|
@ -4359,9 +4365,10 @@ impl<'a> Resolver<'a> {
|
|||
// in the same disjunct, which is an
|
||||
// error
|
||||
self.resolve_error(pattern.span,
|
||||
format!("identifier `{}` is bound more \
|
||||
than once in the same pattern",
|
||||
path_to_str(path)));
|
||||
format!("identifier `{}` is bound \
|
||||
more than once in the same \
|
||||
pattern",
|
||||
path_to_str(path)).as_slice());
|
||||
}
|
||||
// Not bound in the same pattern: do nothing
|
||||
}
|
||||
|
@ -4407,7 +4414,10 @@ impl<'a> Resolver<'a> {
|
|||
path.span,
|
||||
format!("`{}` is not an enum variant or constant",
|
||||
token::get_ident(
|
||||
path.segments.last().unwrap().identifier)))
|
||||
path.segments
|
||||
.last()
|
||||
.unwrap()
|
||||
.identifier)).as_slice())
|
||||
}
|
||||
None => {
|
||||
self.resolve_error(path.span,
|
||||
|
@ -4435,16 +4445,20 @@ impl<'a> Resolver<'a> {
|
|||
Some(_) => {
|
||||
self.resolve_error(path.span,
|
||||
format!("`{}` is not an enum variant, struct or const",
|
||||
token::get_ident(path.segments
|
||||
.last().unwrap()
|
||||
.identifier)));
|
||||
token::get_ident(
|
||||
path.segments
|
||||
.last()
|
||||
.unwrap()
|
||||
.identifier)).as_slice());
|
||||
}
|
||||
None => {
|
||||
self.resolve_error(path.span,
|
||||
format!("unresolved enum variant, struct or const `{}`",
|
||||
token::get_ident(path.segments
|
||||
.last().unwrap()
|
||||
.identifier)));
|
||||
token::get_ident(
|
||||
path.segments
|
||||
.last()
|
||||
.unwrap()
|
||||
.identifier)).as_slice());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -4485,7 +4499,7 @@ impl<'a> Resolver<'a> {
|
|||
def: {:?}", result);
|
||||
let msg = format!("`{}` does not name a structure",
|
||||
self.path_idents_to_str(path));
|
||||
self.resolve_error(path.span, msg);
|
||||
self.resolve_error(path.span, msg.as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -4705,7 +4719,7 @@ impl<'a> Resolver<'a> {
|
|||
Failed => {
|
||||
let msg = format!("use of undeclared module `{}`",
|
||||
self.idents_to_str(module_path_idents.as_slice()));
|
||||
self.resolve_error(path.span, msg);
|
||||
self.resolve_error(path.span, msg.as_slice());
|
||||
return None;
|
||||
}
|
||||
|
||||
|
@ -4776,7 +4790,7 @@ impl<'a> Resolver<'a> {
|
|||
Failed => {
|
||||
let msg = format!("use of undeclared module `::{}`",
|
||||
self.idents_to_str(module_path_idents.as_slice()));
|
||||
self.resolve_error(path.span, msg);
|
||||
self.resolve_error(path.span, msg.as_slice());
|
||||
return None;
|
||||
}
|
||||
|
||||
|
@ -5096,12 +5110,12 @@ impl<'a> Resolver<'a> {
|
|||
format!("`{}` is a structure name, but \
|
||||
this expression \
|
||||
uses it like a function name",
|
||||
wrong_name));
|
||||
wrong_name).as_slice());
|
||||
|
||||
self.session.span_note(expr.span,
|
||||
format!("Did you mean to write: \
|
||||
`{} \\{ /* fields */ \\}`?",
|
||||
wrong_name));
|
||||
wrong_name).as_slice());
|
||||
|
||||
}
|
||||
_ => {
|
||||
|
@ -5119,10 +5133,11 @@ impl<'a> Resolver<'a> {
|
|||
|
||||
if method_scope && token::get_name(self.self_ident.name).get()
|
||||
== wrong_name.as_slice() {
|
||||
self.resolve_error(expr.span,
|
||||
format!("`self` is not available in a \
|
||||
static method. Maybe a `self` \
|
||||
argument is missing?"));
|
||||
self.resolve_error(
|
||||
expr.span,
|
||||
"`self` is not available \
|
||||
in a static method. Maybe a \
|
||||
`self` argument is missing?");
|
||||
} else {
|
||||
let name = path_to_ident(path).name;
|
||||
let mut msg = match self.find_fallback_in_self_type(name) {
|
||||
|
@ -5130,7 +5145,7 @@ impl<'a> Resolver<'a> {
|
|||
// limit search to 5 to reduce the number
|
||||
// of stupid suggestions
|
||||
self.find_best_match_for_name(wrong_name.as_slice(), 5)
|
||||
.map_or("".into_owned(),
|
||||
.map_or("".to_strbuf(),
|
||||
|x| format!("`{}`", x))
|
||||
}
|
||||
Field =>
|
||||
|
@ -5147,8 +5162,11 @@ impl<'a> Resolver<'a> {
|
|||
msg = format!(" Did you mean {}?", msg)
|
||||
}
|
||||
|
||||
self.resolve_error(expr.span, format!("unresolved name `{}`.{}",
|
||||
wrong_name, msg));
|
||||
self.resolve_error(
|
||||
expr.span,
|
||||
format!("unresolved name `{}`.{}",
|
||||
wrong_name,
|
||||
msg).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -5182,7 +5200,7 @@ impl<'a> Resolver<'a> {
|
|||
def: {:?}", result);
|
||||
let msg = format!("`{}` does not name a structure",
|
||||
self.path_idents_to_str(path));
|
||||
self.resolve_error(path.span, msg);
|
||||
self.resolve_error(path.span, msg.as_slice());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -5211,10 +5229,12 @@ impl<'a> Resolver<'a> {
|
|||
let renamed = mtwt::resolve(label);
|
||||
match self.search_ribs(self.label_ribs.borrow().as_slice(),
|
||||
renamed, expr.span) {
|
||||
None =>
|
||||
self.resolve_error(expr.span,
|
||||
None => {
|
||||
self.resolve_error(
|
||||
expr.span,
|
||||
format!("use of undeclared label `{}`",
|
||||
token::get_ident(label))),
|
||||
token::get_ident(label)).as_slice())
|
||||
}
|
||||
Some(DlDef(def @ DefLabel(_))) => {
|
||||
// Since this def is a label, it is never read.
|
||||
self.record_def(expr.id, (def, LastMod(AllPublic)))
|
||||
|
@ -5343,8 +5363,12 @@ impl<'a> Resolver<'a> {
|
|||
// times, so here is a sanity check it at least comes to
|
||||
// the same conclusion! - nmatsakis
|
||||
if def != *old_value {
|
||||
self.session.bug(format!("node_id {:?} resolved first to {:?} \
|
||||
and then {:?}", node_id, *old_value, def));
|
||||
self.session
|
||||
.bug(format!("node_id {:?} resolved first to {:?} and \
|
||||
then {:?}",
|
||||
node_id,
|
||||
*old_value,
|
||||
def).as_slice());
|
||||
}
|
||||
});
|
||||
}
|
||||
|
@ -5356,10 +5380,10 @@ impl<'a> Resolver<'a> {
|
|||
match pat_binding_mode {
|
||||
BindByValue(_) => {}
|
||||
BindByRef(..) => {
|
||||
self.resolve_error(
|
||||
pat.span,
|
||||
format!("cannot use `ref` binding mode with {}",
|
||||
descr));
|
||||
self.resolve_error(pat.span,
|
||||
format!("cannot use `ref` binding mode \
|
||||
with {}",
|
||||
descr).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -341,7 +341,7 @@ impl<'a> LifetimeContext<'a> {
|
|||
self.sess.span_err(
|
||||
lifetime_ref.span,
|
||||
format!("use of undeclared lifetime name `'{}`",
|
||||
token::get_name(lifetime_ref.name)));
|
||||
token::get_name(lifetime_ref.name)).as_slice());
|
||||
}
|
||||
|
||||
fn check_lifetime_names(&self, lifetimes: &Vec<ast::Lifetime>) {
|
||||
|
@ -354,7 +354,7 @@ impl<'a> LifetimeContext<'a> {
|
|||
self.sess.span_err(
|
||||
lifetime.span,
|
||||
format!("illegal lifetime parameter name: `{}`",
|
||||
token::get_name(lifetime.name)));
|
||||
token::get_name(lifetime.name)).as_slice());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -366,7 +366,7 @@ impl<'a> LifetimeContext<'a> {
|
|||
lifetime_j.span,
|
||||
format!("lifetime name `'{}` declared twice in \
|
||||
the same scope",
|
||||
token::get_name(lifetime_j.name)));
|
||||
token::get_name(lifetime_j.name)).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -112,14 +112,17 @@ impl<'a> TypeFolder for SubstFolder<'a> {
|
|||
let root_msg = match self.root_ty {
|
||||
Some(root) => format!(" in the substitution of `{}`",
|
||||
root.repr(self.tcx)),
|
||||
None => "".to_owned()
|
||||
None => "".to_strbuf()
|
||||
};
|
||||
let m = format!("can't use type parameters from outer \
|
||||
function{}; try using a local type \
|
||||
parameter instead", root_msg);
|
||||
parameter instead",
|
||||
root_msg);
|
||||
match self.span {
|
||||
Some(span) => self.tcx.sess.span_err(span, m),
|
||||
None => self.tcx.sess.err(m)
|
||||
Some(span) => {
|
||||
self.tcx.sess.span_err(span, m.as_slice())
|
||||
}
|
||||
None => self.tcx.sess.err(m.as_slice())
|
||||
}
|
||||
ty::mk_err()
|
||||
}
|
||||
|
@ -131,12 +134,15 @@ impl<'a> TypeFolder for SubstFolder<'a> {
|
|||
let root_msg = match self.root_ty {
|
||||
Some(root) => format!(" in the substitution of `{}`",
|
||||
root.repr(self.tcx)),
|
||||
None => "".to_owned()
|
||||
None => "".to_strbuf()
|
||||
};
|
||||
let m = format!("missing `Self` type param{}", root_msg);
|
||||
let m = format!("missing `Self` type param{}",
|
||||
root_msg);
|
||||
match self.span {
|
||||
Some(span) => self.tcx.sess.span_err(span, m),
|
||||
None => self.tcx.sess.err(m)
|
||||
Some(span) => {
|
||||
self.tcx.sess.span_err(span, m.as_slice())
|
||||
}
|
||||
None => self.tcx.sess.err(m.as_slice())
|
||||
}
|
||||
ty::mk_err()
|
||||
}
|
||||
|
|
|
@ -462,7 +462,7 @@ fn assert_is_binding_or_wild(bcx: &Block, p: @ast::Pat) {
|
|||
bcx.sess().span_bug(
|
||||
p.span,
|
||||
format!("expected an identifier pattern but found p: {}",
|
||||
p.repr(bcx.tcx())));
|
||||
p.repr(bcx.tcx())).as_slice());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1229,8 +1229,10 @@ fn compare_values<'a>(
|
|||
rhs: ValueRef,
|
||||
rhs_t: ty::t)
|
||||
-> Result<'a> {
|
||||
let did = langcall(cx, None,
|
||||
format!("comparison of `{}`", cx.ty_to_str(rhs_t)),
|
||||
let did = langcall(cx,
|
||||
None,
|
||||
format!("comparison of `{}`",
|
||||
cx.ty_to_str(rhs_t)).as_slice(),
|
||||
StrEqFnLangItem);
|
||||
let result = callee::trans_lang_call(cx, did, [lhs, rhs], None);
|
||||
Result {
|
||||
|
@ -1252,8 +1254,10 @@ fn compare_values<'a>(
|
|||
Store(cx, lhs, scratch_lhs);
|
||||
let scratch_rhs = alloca(cx, val_ty(rhs), "__rhs");
|
||||
Store(cx, rhs, scratch_rhs);
|
||||
let did = langcall(cx, None,
|
||||
format!("comparison of `{}`", cx.ty_to_str(rhs_t)),
|
||||
let did = langcall(cx,
|
||||
None,
|
||||
format!("comparison of `{}`",
|
||||
cx.ty_to_str(rhs_t)).as_slice(),
|
||||
UniqStrEqFnLangItem);
|
||||
let result = callee::trans_lang_call(cx, did, [scratch_lhs, scratch_rhs], None);
|
||||
Result {
|
||||
|
@ -2154,7 +2158,7 @@ fn bind_irrefutable_pat<'a>(
|
|||
|
||||
if bcx.sess().asm_comments() {
|
||||
add_comment(bcx, format!("bind_irrefutable_pat(pat={})",
|
||||
pat.repr(bcx.tcx())));
|
||||
pat.repr(bcx.tcx())).as_slice());
|
||||
}
|
||||
|
||||
let _indenter = indenter();
|
||||
|
@ -2273,7 +2277,7 @@ fn bind_irrefutable_pat<'a>(
|
|||
}
|
||||
ast::PatVec(..) => {
|
||||
bcx.sess().span_bug(pat.span,
|
||||
format!("vector patterns are never irrefutable!"));
|
||||
"vector patterns are never irrefutable!");
|
||||
}
|
||||
ast::PatWild | ast::PatWildMulti | ast::PatLit(_) | ast::PatRange(_, _) => ()
|
||||
}
|
||||
|
|
|
@ -192,7 +192,8 @@ fn represent_type_uncached(cx: &CrateContext, t: ty::t) -> Repr {
|
|||
if !cases.iter().enumerate().all(|(i,c)| c.discr == (i as Disr)) {
|
||||
cx.sess().bug(format!("non-C-like enum {} with specified \
|
||||
discriminants",
|
||||
ty::item_path_str(cx.tcx(), def_id)))
|
||||
ty::item_path_str(cx.tcx(),
|
||||
def_id)).as_slice())
|
||||
}
|
||||
|
||||
if cases.len() == 1 {
|
||||
|
|
|
@ -341,7 +341,8 @@ fn require_alloc_fn(bcx: &Block, info_ty: ty::t, it: LangItem) -> ast::DefId {
|
|||
Ok(id) => id,
|
||||
Err(s) => {
|
||||
bcx.sess().fatal(format!("allocation of `{}` {}",
|
||||
bcx.ty_to_str(info_ty), s));
|
||||
bcx.ty_to_str(info_ty),
|
||||
s).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -476,7 +477,7 @@ pub fn unset_split_stack(f: ValueRef) {
|
|||
// silently mangles such symbols, breaking our linkage model.
|
||||
pub fn note_unique_llvm_symbol(ccx: &CrateContext, sym: StrBuf) {
|
||||
if ccx.all_llvm_symbols.borrow().contains(&sym) {
|
||||
ccx.sess().bug(format!("duplicate LLVM symbol: {}", sym));
|
||||
ccx.sess().bug(format!("duplicate LLVM symbol: {}", sym).as_slice());
|
||||
}
|
||||
ccx.all_llvm_symbols.borrow_mut().insert(sym);
|
||||
}
|
||||
|
@ -739,8 +740,11 @@ pub fn iter_structural_ty<'r,
|
|||
|
||||
for variant in (*variants).iter() {
|
||||
let variant_cx =
|
||||
fcx.new_temp_block("enum-iter-variant-".to_owned() +
|
||||
variant.disr_val.to_str());
|
||||
fcx.new_temp_block(
|
||||
format_strbuf!("enum-iter-variant-{}",
|
||||
variant.disr_val
|
||||
.to_str()
|
||||
.as_slice()).as_slice());
|
||||
match adt::trans_case(cx, &*repr, variant.disr_val) {
|
||||
_match::single_result(r) => {
|
||||
AddCase(llswitch, r.val, variant_cx.llbb)
|
||||
|
@ -839,7 +843,7 @@ pub fn fail_if_zero<'a>(
|
|||
}
|
||||
_ => {
|
||||
cx.sess().bug(format!("fail-if-zero on unexpected type: {}",
|
||||
ty_to_str(cx.tcx(), rhs_t)));
|
||||
ty_to_str(cx.tcx(), rhs_t)).as_slice());
|
||||
}
|
||||
};
|
||||
with_cond(cx, is_zero, |bcx| {
|
||||
|
@ -1504,7 +1508,7 @@ fn trans_enum_variant_or_tuple_like_struct(ccx: &CrateContext,
|
|||
_ => ccx.sess().bug(
|
||||
format!("trans_enum_variant_or_tuple_like_struct: \
|
||||
unexpected ctor return type {}",
|
||||
ty_to_str(ccx.tcx(), ctor_ty)))
|
||||
ty_to_str(ccx.tcx(), ctor_ty)).as_slice())
|
||||
};
|
||||
|
||||
let arena = TypedArena::new();
|
||||
|
@ -2052,7 +2056,7 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef {
|
|||
|
||||
ref variant => {
|
||||
ccx.sess().bug(format!("get_item_val(): unexpected variant: {:?}",
|
||||
variant))
|
||||
variant).as_slice())
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -2116,7 +2120,9 @@ pub fn write_metadata(cx: &CrateContext, krate: &ast::Crate) -> Vec<u8> {
|
|||
let compressed = Vec::from_slice(encoder::metadata_encoding_version)
|
||||
.append(match flate::deflate_bytes(metadata.as_slice()) {
|
||||
Some(compressed) => compressed,
|
||||
None => cx.sess().fatal(format!("failed to compress metadata"))
|
||||
None => {
|
||||
cx.sess().fatal("failed to compress metadata")
|
||||
}
|
||||
}.as_slice());
|
||||
let llmeta = C_bytes(cx, compressed.as_slice());
|
||||
let llconst = C_struct(cx, [llmeta], false);
|
||||
|
|
|
@ -750,9 +750,11 @@ impl<'a> Builder<'a> {
|
|||
|
||||
pub fn add_span_comment(&self, sp: Span, text: &str) {
|
||||
if self.ccx.sess().asm_comments() {
|
||||
let s = format!("{} ({})", text, self.ccx.sess().codemap().span_to_str(sp));
|
||||
debug!("{}", s);
|
||||
self.add_comment(s);
|
||||
let s = format!("{} ({})",
|
||||
text,
|
||||
self.ccx.sess().codemap().span_to_str(sp));
|
||||
debug!("{}", s.as_slice());
|
||||
self.add_comment(s.as_slice());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -761,7 +763,7 @@ impl<'a> Builder<'a> {
|
|||
let sanitized = text.replace("$", "");
|
||||
let comment_text = format!("\\# {}", sanitized.replace("\n", "\n\t# "));
|
||||
self.count_insn("inlineasm");
|
||||
let asm = comment_text.with_c_str(|c| {
|
||||
let asm = comment_text.as_slice().with_c_str(|c| {
|
||||
unsafe {
|
||||
llvm::LLVMConstInlineAsm(Type::func([], &Type::void(self.ccx)).to_ref(),
|
||||
c, noname(), False, False)
|
||||
|
|
|
@ -102,8 +102,9 @@ fn trans<'a>(bcx: &'a Block<'a>, expr: &ast::Expr) -> Callee<'a> {
|
|||
_ => {
|
||||
bcx.tcx().sess.span_bug(
|
||||
expr.span,
|
||||
format!("type of callee is neither bare-fn nor closure: {}",
|
||||
bcx.ty_to_str(datum.ty)));
|
||||
format!("type of callee is neither bare-fn nor closure: \
|
||||
{}",
|
||||
bcx.ty_to_str(datum.ty)).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -151,7 +152,7 @@ fn trans<'a>(bcx: &'a Block<'a>, expr: &ast::Expr) -> Callee<'a> {
|
|||
bcx.tcx().sess.span_bug(
|
||||
ref_expr.span,
|
||||
format!("cannot translate def {:?} \
|
||||
to a callable thing!", def));
|
||||
to a callable thing!", def).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -329,7 +329,7 @@ impl<'a> CleanupMethods<'a> for FunctionContext<'a> {
|
|||
|
||||
self.ccx.sess().bug(
|
||||
format!("no cleanup scope {} found",
|
||||
self.ccx.tcx.map.node_to_str(cleanup_scope)));
|
||||
self.ccx.tcx.map.node_to_str(cleanup_scope)).as_slice());
|
||||
}
|
||||
|
||||
fn schedule_clean_in_custom_scope(&self,
|
||||
|
@ -531,7 +531,7 @@ impl<'a> CleanupHelperMethods<'a> for FunctionContext<'a> {
|
|||
LoopExit(id, _) => {
|
||||
self.ccx.sess().bug(format!(
|
||||
"cannot exit from scope {:?}, \
|
||||
not in scope", id));
|
||||
not in scope", id).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -878,7 +878,8 @@ pub fn temporary_scope(tcx: &ty::ctxt,
|
|||
r
|
||||
}
|
||||
None => {
|
||||
tcx.sess.bug(format!("no temporary scope available for expr {}", id))
|
||||
tcx.sess.bug(format!("no temporary scope available for expr {}",
|
||||
id).as_slice())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -205,7 +205,7 @@ pub fn store_environment<'a>(
|
|||
|
||||
if ccx.sess().asm_comments() {
|
||||
add_comment(bcx, format!("Copy {} into closure",
|
||||
bv.to_str(ccx)));
|
||||
bv.to_str(ccx)).as_slice());
|
||||
}
|
||||
|
||||
let bound_data = GEPi(bcx, llbox, [0u, abi::box_field_body, i]);
|
||||
|
@ -386,8 +386,9 @@ pub fn get_wrapper_for_bare_fn(ccx: &CrateContext,
|
|||
ast::DefVariant(_, did, _) | ast::DefStruct(did) => did,
|
||||
_ => {
|
||||
ccx.sess().bug(format!("get_wrapper_for_bare_fn: \
|
||||
expected a statically resolved fn, got {:?}",
|
||||
def));
|
||||
expected a statically resolved fn, got \
|
||||
{:?}",
|
||||
def).as_slice());
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -405,7 +406,7 @@ pub fn get_wrapper_for_bare_fn(ccx: &CrateContext,
|
|||
_ => {
|
||||
ccx.sess().bug(format!("get_wrapper_for_bare_fn: \
|
||||
expected a closure ty, got {}",
|
||||
closure_ty.repr(tcx)));
|
||||
closure_ty.repr(tcx)).as_slice());
|
||||
}
|
||||
};
|
||||
|
||||
|
|
|
@ -108,7 +108,7 @@ pub fn gensym_name(name: &str) -> PathElem {
|
|||
let num = token::gensym(name);
|
||||
// use one colon which will get translated to a period by the mangler, and
|
||||
// we're guaranteed that `num` is globally unique for this crate.
|
||||
PathName(token::gensym(format!("{}:{}", name, num)))
|
||||
PathName(token::gensym(format!("{}:{}", name, num).as_slice()))
|
||||
}
|
||||
|
||||
pub struct tydesc_info {
|
||||
|
@ -459,7 +459,7 @@ impl<'a> Block<'a> {
|
|||
Some(&v) => v,
|
||||
None => {
|
||||
self.tcx().sess.bug(format!(
|
||||
"no def associated with node id {:?}", nid));
|
||||
"no def associated with node id {:?}", nid).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -747,9 +747,10 @@ pub fn node_id_substs(bcx: &Block,
|
|||
|
||||
if !substs.tps.iter().all(|t| !ty::type_needs_infer(*t)) {
|
||||
bcx.sess().bug(
|
||||
format!("type parameters for node {:?} include inference types: {}",
|
||||
format!("type parameters for node {:?} include inference types: \
|
||||
{}",
|
||||
node,
|
||||
substs.repr(bcx.tcx())));
|
||||
substs.repr(bcx.tcx())).as_slice());
|
||||
}
|
||||
|
||||
substs.substp(tcx, bcx.fcx.param_substs)
|
||||
|
@ -816,7 +817,7 @@ pub fn resolve_vtable_under_param_substs(tcx: &ty::ctxt,
|
|||
_ => {
|
||||
tcx.sess.bug(format!(
|
||||
"resolve_vtable_under_param_substs: asked to lookup \
|
||||
but no vtables in the fn_ctxt!"))
|
||||
but no vtables in the fn_ctxt!").as_slice())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -870,8 +871,8 @@ pub fn langcall(bcx: &Block,
|
|||
Err(s) => {
|
||||
let msg = format!("{} {}", msg, s);
|
||||
match span {
|
||||
Some(span) => { bcx.tcx().sess.span_fatal(span, msg); }
|
||||
None => { bcx.tcx().sess.fatal(msg); }
|
||||
Some(span) => bcx.tcx().sess.span_fatal(span, msg.as_slice()),
|
||||
None => bcx.tcx().sess.fatal(msg.as_slice()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -55,8 +55,9 @@ pub fn const_lit(cx: &CrateContext, e: &ast::Expr, lit: ast::Lit)
|
|||
C_integral(Type::uint_from_ty(cx, t), i as u64, false)
|
||||
}
|
||||
_ => cx.sess().span_bug(lit.span,
|
||||
format!("integer literal has type {} (expected int or uint)",
|
||||
ty_to_str(cx.tcx(), lit_int_ty)))
|
||||
format!("integer literal has type {} (expected int \
|
||||
or uint)",
|
||||
ty_to_str(cx.tcx(), lit_int_ty)).as_slice())
|
||||
}
|
||||
}
|
||||
ast::LitFloat(ref fs, t) => {
|
||||
|
@ -150,14 +151,14 @@ fn const_deref(cx: &CrateContext, v: ValueRef, t: ty::t, explicit: bool)
|
|||
}
|
||||
_ => {
|
||||
cx.sess().bug(format!("unexpected dereferenceable type {}",
|
||||
ty_to_str(cx.tcx(), t)))
|
||||
ty_to_str(cx.tcx(), t)).as_slice())
|
||||
}
|
||||
};
|
||||
(dv, mt.ty)
|
||||
}
|
||||
None => {
|
||||
cx.sess().bug(format!("can't dereference const of type {}",
|
||||
ty_to_str(cx.tcx(), t)))
|
||||
ty_to_str(cx.tcx(), t)).as_slice())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -206,7 +207,7 @@ pub fn const_expr(cx: &CrateContext, e: &ast::Expr, is_local: bool) -> (ValueRef
|
|||
cx.sess()
|
||||
.span_bug(e.span,
|
||||
format!("unexpected static function: {:?}",
|
||||
store))
|
||||
store).as_slice())
|
||||
}
|
||||
ty::AutoObject(..) => {
|
||||
cx.sess()
|
||||
|
@ -256,11 +257,11 @@ pub fn const_expr(cx: &CrateContext, e: &ast::Expr, is_local: bool) -> (ValueRef
|
|||
}
|
||||
}
|
||||
_ => {
|
||||
cx.sess().span_bug(e.span,
|
||||
format!("unimplemented \
|
||||
const autoref \
|
||||
{:?}",
|
||||
autoref))
|
||||
cx.sess()
|
||||
.span_bug(e.span,
|
||||
format!("unimplemented const \
|
||||
autoref {:?}",
|
||||
autoref).as_slice())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -281,7 +282,7 @@ pub fn const_expr(cx: &CrateContext, e: &ast::Expr, is_local: bool) -> (ValueRef
|
|||
}
|
||||
cx.sess().bug(format!("const {} of type {} has size {} instead of {}",
|
||||
e.repr(cx.tcx()), ty_to_str(cx.tcx(), ety),
|
||||
csize, tsize));
|
||||
csize, tsize).as_slice());
|
||||
}
|
||||
(llconst, inlineable)
|
||||
}
|
||||
|
|
|
@ -165,7 +165,7 @@ pub fn trans_if<'a>(bcx: &'a Block<'a>,
|
|||
}
|
||||
|
||||
let name = format!("then-block-{}-", thn.id);
|
||||
let then_bcx_in = bcx.fcx.new_id_block(name, thn.id);
|
||||
let then_bcx_in = bcx.fcx.new_id_block(name.as_slice(), thn.id);
|
||||
let then_bcx_out = trans_block(then_bcx_in, thn, dest);
|
||||
debuginfo::clear_source_location(bcx.fcx);
|
||||
|
||||
|
@ -287,7 +287,8 @@ pub fn trans_break_cont<'a>(bcx: &'a Block<'a>,
|
|||
match bcx.tcx().def_map.borrow().find(&expr_id) {
|
||||
Some(&ast::DefLabel(loop_id)) => loop_id,
|
||||
ref r => {
|
||||
bcx.tcx().sess.bug(format!("{:?} in def-map for label", r))
|
||||
bcx.tcx().sess.bug(format!("{:?} in def-map for label",
|
||||
r).as_slice())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -317,16 +317,21 @@ pub fn create_global_var_metadata(cx: &CrateContext,
|
|||
ast_map::NodeItem(item) => {
|
||||
match item.node {
|
||||
ast::ItemStatic(..) => (item.ident, item.span),
|
||||
_ => cx.sess().span_bug(item.span,
|
||||
format!("debuginfo::create_global_var_metadata() -
|
||||
Captured var-id refers to unexpected ast_item
|
||||
variant: {:?}",
|
||||
var_item))
|
||||
_ => {
|
||||
cx.sess()
|
||||
.span_bug(item.span,
|
||||
format!("debuginfo::\
|
||||
create_global_var_metadata() -
|
||||
Captured var-id refers to \
|
||||
unexpected ast_item variant: {:?}",
|
||||
var_item).as_slice())
|
||||
}
|
||||
}
|
||||
},
|
||||
_ => cx.sess().bug(format!("debuginfo::create_global_var_metadata() - Captured var-id \
|
||||
refers to unexpected ast_map variant: {:?}",
|
||||
var_item))
|
||||
_ => cx.sess().bug(format!("debuginfo::create_global_var_metadata() \
|
||||
- Captured var-id refers to unexpected \
|
||||
ast_map variant: {:?}",
|
||||
var_item).as_slice())
|
||||
};
|
||||
|
||||
let filename = span_start(cx, span).file.name.clone();
|
||||
|
@ -340,7 +345,8 @@ pub fn create_global_var_metadata(cx: &CrateContext,
|
|||
|
||||
let namespace_node = namespace_for_item(cx, ast_util::local_def(node_id));
|
||||
let var_name = token::get_ident(ident).get().to_str();
|
||||
let linkage_name = namespace_node.mangled_name_of_contained_item(var_name);
|
||||
let linkage_name =
|
||||
namespace_node.mangled_name_of_contained_item(var_name.as_slice());
|
||||
let var_scope = namespace_node.scope;
|
||||
|
||||
var_name.as_slice().with_c_str(|var_name| {
|
||||
|
@ -380,7 +386,7 @@ pub fn create_local_var_metadata(bcx: &Block, local: &ast::Local) {
|
|||
None => {
|
||||
bcx.sess().span_bug(span,
|
||||
format!("no entry in lllocals table for {:?}",
|
||||
node_id));
|
||||
node_id).as_slice());
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -430,13 +436,17 @@ pub fn create_captured_var_metadata(bcx: &Block,
|
|||
"debuginfo::create_captured_var_metadata() - \
|
||||
Captured var-id refers to unexpected \
|
||||
ast_map variant: {:?}",
|
||||
ast_item));
|
||||
ast_item).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
cx.sess().span_bug(span, format!("debuginfo::create_captured_var_metadata() - \
|
||||
Captured var-id refers to unexpected ast_map variant: {:?}", ast_item));
|
||||
cx.sess()
|
||||
.span_bug(span,
|
||||
format!("debuginfo::create_captured_var_metadata() - \
|
||||
Captured var-id refers to unexpected \
|
||||
ast_map variant: {:?}",
|
||||
ast_item).as_slice());
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -519,7 +529,7 @@ pub fn create_argument_metadata(bcx: &Block, arg: &ast::Arg) {
|
|||
None => {
|
||||
bcx.sess().span_bug(span,
|
||||
format!("no entry in llargs table for {:?}",
|
||||
node_id));
|
||||
node_id).as_slice());
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -653,7 +663,7 @@ pub fn create_function_debug_context(cx: &CrateContext,
|
|||
ast::ExprFnBlock(fn_decl, top_level_block) |
|
||||
ast::ExprProc(fn_decl, top_level_block) => {
|
||||
let name = format!("fn{}", token::gensym("fn"));
|
||||
let name = token::str_to_ident(name);
|
||||
let name = token::str_to_ident(name.as_slice());
|
||||
(name, fn_decl,
|
||||
// This is not quite right. It should actually inherit the generics of the
|
||||
// enclosing function.
|
||||
|
@ -681,7 +691,7 @@ pub fn create_function_debug_context(cx: &CrateContext,
|
|||
cx.sess()
|
||||
.bug(format!("create_function_debug_context: \
|
||||
unexpected sort of node: {:?}",
|
||||
fnitem))
|
||||
fnitem).as_slice())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -691,7 +701,8 @@ pub fn create_function_debug_context(cx: &CrateContext,
|
|||
return FunctionDebugContext { repr: FunctionWithoutDebugInfo };
|
||||
}
|
||||
_ => cx.sess().bug(format!("create_function_debug_context: \
|
||||
unexpected sort of node: {:?}", fnitem))
|
||||
unexpected sort of node: {:?}",
|
||||
fnitem).as_slice())
|
||||
};
|
||||
|
||||
// This can be the case for functions inlined from another crate
|
||||
|
@ -1124,7 +1135,8 @@ fn scope_metadata(fcx: &FunctionContext,
|
|||
let node = fcx.ccx.tcx.map.get(node_id);
|
||||
|
||||
fcx.ccx.sess().span_bug(span,
|
||||
format!("debuginfo: Could not find scope info for node {:?}", node));
|
||||
format!("debuginfo: Could not find scope info for node {:?}",
|
||||
node).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -1499,14 +1511,17 @@ fn describe_enum_variant(cx: &CrateContext,
     // Get the argument names from the enum variant info
     let mut arg_names: Vec<_> = match variant_info.arg_names {
         Some(ref names) => {
-            names.iter().map(|ident| token::get_ident(*ident).get().to_str()).collect()
+            names.iter()
+                 .map(|ident| {
+                     token::get_ident(*ident).get().to_str().into_strbuf()
+                 }).collect()
         }
-        None => variant_info.args.iter().map(|_| "".to_owned()).collect()
+        None => variant_info.args.iter().map(|_| "".to_strbuf()).collect()
     };

     // If this is not a univariant enum, there is also the (unnamed) discriminant field
     if discriminant_type_metadata.is_some() {
-        arg_names.insert(0, "".to_owned());
+        arg_names.insert(0, "".to_strbuf());
     }

     // Build an array of (field name, field type) pairs to be captured in the factory closure.
|
||||
|
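In the describe_enum_variant hunk above, placeholder argument names move from ~str built with to_owned() to StrBuf built with to_strbuf()/into_strbuf(). As a rough modern analogue only: the helper name and sample data below are invented, and today's String/to_string stand in for StrBuf/to_strbuf.

    // Collect owned argument names, falling back to "" placeholders when the
    // variant records no names (cf. `"".to_strbuf()` in the hunk above).
    fn arg_names_for(names: Option<&[&str]>, arity: usize) -> Vec<String> {
        match names {
            Some(names) => names.iter().map(|ident| ident.to_string()).collect(),
            None => (0..arity).map(|_| "".to_string()).collect(),
        }
    }

    fn main() {
        let names: &[&str] = &["x", "y"];
        assert_eq!(arg_names_for(Some(names), 2),
                   vec!["x".to_string(), "y".to_string()]);
        assert_eq!(arg_names_for(None, 3).len(), 3);
    }
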
@@ -1861,7 +1876,7 @@ fn boxed_type_metadata(cx: &CrateContext,
                       -> DICompositeType {
     let box_type_name = match content_type_name {
         Some(content_type_name) => format!("Boxed<{}>", content_type_name),
-        None => "BoxedType".to_owned()
+        None => "BoxedType".to_strbuf()
     };

     let box_llvm_type = Type::at_box(cx, content_llvm_type);
|
||||
|
@ -1913,7 +1928,7 @@ fn boxed_type_metadata(cx: &CrateContext,
|
|||
return composite_type_metadata(
|
||||
cx,
|
||||
box_llvm_type,
|
||||
box_type_name,
|
||||
box_type_name.as_slice(),
|
||||
member_descriptions,
|
||||
file_metadata,
|
||||
file_metadata,
|
||||
|
@ -1971,7 +1986,9 @@ fn vec_metadata(cx: &CrateContext,
|
|||
let (element_size, element_align) = size_and_align_of(cx, element_llvm_type);
|
||||
|
||||
let vec_llvm_type = Type::vec(cx, &element_llvm_type);
|
||||
let vec_type_name: &str = format!("[{}]", ppaux::ty_to_str(cx.tcx(), element_type));
|
||||
let vec_type_name = format!("[{}]",
|
||||
ppaux::ty_to_str(cx.tcx(), element_type));
|
||||
let vec_type_name = vec_type_name.as_slice();
|
||||
|
||||
let member_llvm_types = vec_llvm_type.field_types();
|
||||
|
||||
|
@ -2254,7 +2271,11 @@ fn type_metadata(cx: &CrateContext,
|
|||
elements.as_slice(),
|
||||
usage_site_span).finalize(cx)
|
||||
}
|
||||
_ => cx.sess().bug(format!("debuginfo: unexpected type in type_metadata: {:?}", sty))
|
||||
_ => {
|
||||
cx.sess().bug(format!("debuginfo: unexpected type in \
|
||||
type_metadata: {:?}",
|
||||
sty).as_slice())
|
||||
}
|
||||
};
|
||||
|
||||
debug_context(cx).created_types.borrow_mut().insert(cache_id, type_metadata);
|
||||
|
@@ -2852,13 +2873,13 @@ impl NamespaceTreeNode {
                 None => {}
             }
             let string = token::get_name(node.name);
-            output.push_str(format!("{}", string.get().len()));
+            output.push_str(format!("{}", string.get().len()).as_slice());
             output.push_str(string.get());
         }

         let mut name = StrBuf::from_str("_ZN");
         fill_nested(self, &mut name);
-        name.push_str(format!("{}", item_name.len()));
+        name.push_str(format!("{}", item_name.len()).as_slice());
         name.push_str(item_name);
         name.push_char('E');
         name
|
||||
|
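The NamespaceTreeNode hunk above assembles an Itanium-style _ZN...E symbol by pushing length-prefixed pieces into a growable StrBuf; the only change is that the format!("{}", len) temporaries now have to be borrowed with .as_slice() before push_str. Below is a simplified sketch of that buffer-building shape in current Rust, not the compiler's real mangling; mangle_simple is an invented name, and write! appends the length digits without a temporary string.

    use std::fmt::Write;

    // Length-prefix each segment into one growing buffer, as the hunk does
    // with push_str(format!("{}", len)) followed by the segment itself.
    fn mangle_simple(segments: &[&str]) -> String {
        let mut name = String::from("_ZN");
        for seg in segments {
            write!(name, "{}", seg.len()).unwrap();
            name.push_str(seg);
        }
        name.push('E');
        name
    }

    fn main() {
        assert_eq!(mangle_simple(&["std", "io"]), "_ZN3std2ioE");
    }
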
@@ -2941,7 +2962,8 @@ fn namespace_for_item(cx: &CrateContext, def_id: ast::DefId) -> Rc<NamespaceTree
             Some(node) => node,
             None => {
                 cx.sess().bug(format!("debuginfo::namespace_for_item(): \
-                                       path too short for {:?}", def_id));
+                                       path too short for {:?}",
+                                      def_id).as_slice());
             }
         }
     })
|
||||
|
|
|
@ -422,7 +422,7 @@ fn trans_datum_unadjusted<'a>(bcx: &'a Block<'a>,
|
|||
expr.span,
|
||||
format!("trans_rvalue_datum_unadjusted reached \
|
||||
fall-through case: {:?}",
|
||||
expr.node));
|
||||
expr.node).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -636,7 +636,7 @@ fn trans_rvalue_stmt_unadjusted<'a>(bcx: &'a Block<'a>,
|
|||
expr.span,
|
||||
format!("trans_rvalue_stmt_unadjusted reached \
|
||||
fall-through case: {:?}",
|
||||
expr.node));
|
||||
expr.node).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -765,8 +765,9 @@ fn trans_rvalue_dps_unadjusted<'a>(bcx: &'a Block<'a>,
|
|||
_ => {
|
||||
bcx.tcx().sess.span_bug(
|
||||
expr.span,
|
||||
format!("trans_rvalue_dps_unadjusted reached fall-through case: {:?}",
|
||||
expr.node));
|
||||
format!("trans_rvalue_dps_unadjusted reached fall-through \
|
||||
case: {:?}",
|
||||
expr.node).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -815,7 +816,7 @@ fn trans_def_dps_unadjusted<'a>(
|
|||
_ => {
|
||||
bcx.tcx().sess.span_bug(ref_expr.span, format!(
|
||||
"Non-DPS def {:?} referened by {}",
|
||||
def, bcx.node_id_to_str(ref_expr.id)));
|
||||
def, bcx.node_id_to_str(ref_expr.id)).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -839,7 +840,7 @@ fn trans_def_fn_unadjusted<'a>(bcx: &'a Block<'a>,
|
|||
bcx.tcx().sess.span_bug(ref_expr.span, format!(
|
||||
"trans_def_fn_unadjusted invoked on: {:?} for {}",
|
||||
def,
|
||||
ref_expr.repr(bcx.tcx())));
|
||||
ref_expr.repr(bcx.tcx())).as_slice());
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -865,7 +866,8 @@ pub fn trans_local_var<'a>(bcx: &'a Block<'a>,
|
|||
Some(&val) => Datum(val, local_ty, Lvalue),
|
||||
None => {
|
||||
bcx.sess().bug(format!(
|
||||
"trans_local_var: no llval for upvar {:?} found", nid));
|
||||
"trans_local_var: no llval for upvar {:?} found",
|
||||
nid).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -877,7 +879,8 @@ pub fn trans_local_var<'a>(bcx: &'a Block<'a>,
|
|||
}
|
||||
_ => {
|
||||
bcx.sess().unimpl(format!(
|
||||
"unsupported def type in trans_local_var: {:?}", def));
|
||||
"unsupported def type in trans_local_var: {:?}",
|
||||
def).as_slice());
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -889,7 +892,8 @@ pub fn trans_local_var<'a>(bcx: &'a Block<'a>,
|
|||
Some(&v) => v,
|
||||
None => {
|
||||
bcx.sess().bug(format!(
|
||||
"trans_local_var: no datum for local/arg {:?} found", nid));
|
||||
"trans_local_var: no datum for local/arg {:?} found",
|
||||
nid).as_slice());
|
||||
}
|
||||
};
|
||||
debug!("take_local(nid={:?}, v={}, ty={})",
|
||||
|
@ -922,7 +926,7 @@ pub fn with_field_tys<R>(tcx: &ty::ctxt,
|
|||
tcx.sess.bug(format!(
|
||||
"cannot get field types from the enum type {} \
|
||||
without a node ID",
|
||||
ty.repr(tcx)));
|
||||
ty.repr(tcx)).as_slice());
|
||||
}
|
||||
Some(node_id) => {
|
||||
let def = tcx.def_map.borrow().get_copy(&node_id);
|
||||
|
@ -947,7 +951,7 @@ pub fn with_field_tys<R>(tcx: &ty::ctxt,
|
|||
_ => {
|
||||
tcx.sess.bug(format!(
|
||||
"cannot get field types from the type {}",
|
||||
ty.repr(tcx)));
|
||||
ty.repr(tcx)).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1586,16 +1590,22 @@ fn trans_imm_cast<'a>(bcx: &'a Block<'a>,
|
|||
val_ty(lldiscrim_a),
|
||||
lldiscrim_a, true),
|
||||
cast_float => SIToFP(bcx, lldiscrim_a, ll_t_out),
|
||||
_ => ccx.sess().bug(format!("translating unsupported cast: \
|
||||
_ => {
|
||||
ccx.sess().bug(format!("translating unsupported cast: \
|
||||
{} ({:?}) -> {} ({:?})",
|
||||
t_in.repr(bcx.tcx()), k_in,
|
||||
t_out.repr(bcx.tcx()), k_out))
|
||||
t_in.repr(bcx.tcx()),
|
||||
k_in,
|
||||
t_out.repr(bcx.tcx()),
|
||||
k_out).as_slice())
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => ccx.sess().bug(format!("translating unsupported cast: \
|
||||
{} ({:?}) -> {} ({:?})",
|
||||
t_in.repr(bcx.tcx()), k_in,
|
||||
t_out.repr(bcx.tcx()), k_out))
|
||||
t_in.repr(bcx.tcx()),
|
||||
k_in,
|
||||
t_out.repr(bcx.tcx()),
|
||||
k_out).as_slice())
|
||||
};
|
||||
return immediate_rvalue_bcx(bcx, newval, t_out).to_expr_datumblock();
|
||||
}
|
||||
|
@ -1757,7 +1767,7 @@ fn deref_once<'a>(bcx: &'a Block<'a>,
|
|||
bcx.tcx().sess.span_bug(
|
||||
expr.span,
|
||||
format!("deref invoked on expr of illegal type {}",
|
||||
datum.ty.repr(bcx.tcx())));
|
||||
datum.ty.repr(bcx.tcx())).as_slice());
|
||||
}
|
||||
};
|
||||
|
||||
|
|
|
@@ -81,13 +81,12 @@ pub fn llvm_calling_convention(ccx: &CrateContext,
     match abi {
         RustIntrinsic => {
             // Intrinsics are emitted by monomorphic fn
-            ccx.sess().bug(format!("asked to register intrinsic fn"));
+            ccx.sess().bug("asked to register intrinsic fn");
         }

         Rust => {
             // FIXME(#3678) Implement linking to foreign fns with Rust ABI
-            ccx.sess().unimpl(
-                format!("foreign functions with Rust ABI"));
+            ccx.sess().unimpl("foreign functions with Rust ABI");
         }

         // It's the ABI's job to select this, not us.
|
||||
|
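Besides adding .as_slice(), the hunk above also drops format! altogether where the message has no interpolated values: a string literal is already a &str, so it can be handed to Session::bug or Session::unimpl directly. A small sketch of that distinction, with an invented unimpl function standing in for the session methods:

    // Stand-in for a diagnostic API that borrows its message.
    fn unimpl(msg: &str) {
        eprintln!("unimplemented: {}", msg);
    }

    fn main() {
        // No interpolation: pass the literal directly, no format! needed.
        unimpl("foreign functions with Rust ABI");

        // With interpolation, build the String and borrow it at the call.
        let abi = "stdcall";
        unimpl(&format!("foreign functions with the `{}` ABI", abi));
    }
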
@ -203,13 +202,13 @@ pub fn register_foreign_item_fn(ccx: &CrateContext, abi: Abi, fty: ty::t,
|
|||
ccx.sess().span_fatal(s,
|
||||
format!("ABI `{}` has no suitable calling convention \
|
||||
for target architecture",
|
||||
abi.user_string(ccx.tcx())))
|
||||
abi.user_string(ccx.tcx())).as_slice())
|
||||
}
|
||||
None => {
|
||||
ccx.sess().fatal(
|
||||
format!("ABI `{}` has no suitable calling convention \
|
||||
for target architecture",
|
||||
abi.user_string(ccx.tcx())))
|
||||
abi.user_string(ccx.tcx())).as_slice())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -371,7 +370,7 @@ pub fn trans_native_call<'a>(
|
|||
ccx.sess().fatal(
|
||||
format!("ABI string `{}` has no suitable ABI \
|
||||
for target architecture",
|
||||
fn_abi.user_string(ccx.tcx())));
|
||||
fn_abi.user_string(ccx.tcx())).as_slice());
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -557,7 +556,7 @@ pub fn trans_rust_fn_with_foreign_abi(ccx: &CrateContext,
|
|||
ccx.sess().bug(format!("build_rust_fn: extern fn {} has ty {}, \
|
||||
expected a bare fn ty",
|
||||
ccx.tcx.map.path_to_str(id),
|
||||
t.repr(tcx)));
|
||||
t.repr(tcx)).as_slice());
|
||||
}
|
||||
};
|
||||
|
||||
|
|
|
@ -396,7 +396,7 @@ pub fn trans_intrinsic(ccx: &CrateContext,
|
|||
intype = ty_to_str(ccx.tcx(), in_type),
|
||||
insize = in_type_size as uint,
|
||||
outtype = ty_to_str(ccx.tcx(), out_type),
|
||||
outsize = out_type_size as uint));
|
||||
outsize = out_type_size as uint).as_slice());
|
||||
}
|
||||
|
||||
if !return_type_is_void(ccx, out_type) {
|
||||
|
|
|
@ -205,7 +205,8 @@ pub fn monomorphic_fn(ccx: &CrateContext,
|
|||
hash_id.hash(&mut state);
|
||||
mono_ty.hash(&mut state);
|
||||
|
||||
exported_name(path, format!("h{}", state.result()),
|
||||
exported_name(path,
|
||||
format!("h{}", state.result()).as_slice(),
|
||||
ccx.link_meta.crateid.version_or_default())
|
||||
});
|
||||
debug!("monomorphize_fn mangled to {}", s);
|
||||
|
@ -287,7 +288,7 @@ pub fn monomorphic_fn(ccx: &CrateContext,
|
|||
}
|
||||
_ => {
|
||||
ccx.sess().bug(format!("can't monomorphize a {:?}",
|
||||
map_node))
|
||||
map_node).as_slice())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -311,7 +312,8 @@ pub fn monomorphic_fn(ccx: &CrateContext,
|
|||
ast_map::NodeBlock(..) |
|
||||
ast_map::NodePat(..) |
|
||||
ast_map::NodeLocal(..) => {
|
||||
ccx.sess().bug(format!("can't monomorphize a {:?}", map_node))
|
||||
ccx.sess().bug(format!("can't monomorphize a {:?}",
|
||||
map_node).as_slice())
|
||||
}
|
||||
};
|
||||
|
||||
|
|
|
@ -155,7 +155,7 @@ pub fn sizing_type_of(cx: &CrateContext, t: ty::t) -> Type {
|
|||
ty::ty_self(_) | ty::ty_infer(..) | ty::ty_param(..) |
|
||||
ty::ty_err(..) | ty::ty_vec(_, None) | ty::ty_str => {
|
||||
cx.sess().bug(format!("fictitious type {:?} in sizing_type_of()",
|
||||
ty::get(t).sty))
|
||||
ty::get(t).sty).as_slice())
|
||||
}
|
||||
};
|
||||
|
||||
|
|
|
@ -2620,7 +2620,7 @@ pub fn node_id_to_trait_ref(cx: &ctxt, id: ast::NodeId) -> Rc<ty::TraitRef> {
|
|||
Some(t) => t.clone(),
|
||||
None => cx.sess.bug(
|
||||
format!("node_id_to_trait_ref: no trait ref for node `{}`",
|
||||
cx.map.node_to_str(id)))
|
||||
cx.map.node_to_str(id)).as_slice())
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2633,7 +2633,7 @@ pub fn node_id_to_type(cx: &ctxt, id: ast::NodeId) -> t {
|
|||
Some(t) => t,
|
||||
None => cx.sess.bug(
|
||||
format!("node_id_to_type: no type for node `{}`",
|
||||
cx.map.node_to_str(id)))
|
||||
cx.map.node_to_str(id)).as_slice())
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2717,7 +2717,8 @@ pub fn ty_region(tcx: &ctxt,
|
|||
ref s => {
|
||||
tcx.sess.span_bug(
|
||||
span,
|
||||
format!("ty_region() invoked on in appropriate ty: {:?}", s));
|
||||
format!("ty_region() invoked on in appropriate ty: {:?}",
|
||||
s).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -2774,11 +2775,12 @@ pub fn expr_span(cx: &ctxt, id: NodeId) -> Span {
|
|||
}
|
||||
Some(f) => {
|
||||
cx.sess.bug(format!("Node id {} is not an expr: {:?}",
|
||||
id, f));
|
||||
id,
|
||||
f).as_slice());
|
||||
}
|
||||
None => {
|
||||
cx.sess.bug(format!("Node id {} is not present \
|
||||
in the node map", id));
|
||||
in the node map", id).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -2793,14 +2795,15 @@ pub fn local_var_name_str(cx: &ctxt, id: NodeId) -> InternedString {
|
|||
_ => {
|
||||
cx.sess.bug(
|
||||
format!("Variable id {} maps to {:?}, not local",
|
||||
id, pat));
|
||||
id,
|
||||
pat).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
r => {
|
||||
cx.sess.bug(
|
||||
format!("Variable id {} maps to {:?}, not local",
|
||||
id, r));
|
||||
cx.sess.bug(format!("Variable id {} maps to {:?}, not local",
|
||||
id,
|
||||
r).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -2832,7 +2835,7 @@ pub fn adjust_ty(cx: &ctxt,
|
|||
cx.sess.bug(
|
||||
format!("add_env adjustment on non-bare-fn: \
|
||||
{:?}",
|
||||
b));
|
||||
b).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -2857,7 +2860,8 @@ pub fn adjust_ty(cx: &ctxt,
|
|||
format!("the {}th autoderef failed: \
|
||||
{}",
|
||||
i,
|
||||
ty_to_str(cx, adjusted_ty)));
|
||||
ty_to_str(cx, adjusted_ty))
|
||||
.as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -2923,7 +2927,8 @@ pub fn adjust_ty(cx: &ctxt,
|
|||
_ => {
|
||||
cx.sess.span_bug(
|
||||
span,
|
||||
format!("borrow-vec associated with bad sty: {:?}", get(ty).sty));
|
||||
format!("borrow-vec associated with bad sty: {:?}",
|
||||
get(ty).sty).as_slice());
|
||||
}
|
||||
},
|
||||
ty_vec(mt, Some(_)) => ty::mk_slice(cx, r, ty::mt {ty: mt.ty, mutbl: m}),
|
||||
|
@ -2931,7 +2936,8 @@ pub fn adjust_ty(cx: &ctxt,
|
|||
ref s => {
|
||||
cx.sess.span_bug(
|
||||
span,
|
||||
format!("borrow-vec associated with bad sty: {:?}", s));
|
||||
format!("borrow-vec associated with bad sty: {:?}",
|
||||
s).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -2947,7 +2953,7 @@ pub fn adjust_ty(cx: &ctxt,
|
|||
cx.sess.span_bug(
|
||||
span,
|
||||
format!("borrow-trait-obj associated with bad sty: {:?}",
|
||||
s));
|
||||
s).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -2996,7 +3002,7 @@ pub fn resolve_expr(tcx: &ctxt, expr: &ast::Expr) -> ast::Def {
|
|||
Some(&def) => def,
|
||||
None => {
|
||||
tcx.sess.span_bug(expr.span, format!(
|
||||
"no def-map entry for expr {:?}", expr.id));
|
||||
"no def-map entry for expr {:?}", expr.id).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -3070,9 +3076,11 @@ pub fn expr_kind(tcx: &ctxt, expr: &ast::Expr) -> ExprKind {
|
|||
ast::DefLocal(..) => LvalueExpr,
|
||||
|
||||
def => {
|
||||
tcx.sess.span_bug(expr.span, format!(
|
||||
"uncategorized def for expr {:?}: {:?}",
|
||||
expr.id, def));
|
||||
tcx.sess.span_bug(
|
||||
expr.span,
|
||||
format!("uncategorized def for expr {:?}: {:?}",
|
||||
expr.id,
|
||||
def).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -3193,7 +3201,7 @@ pub fn field_idx_strict(tcx: &ctxt, name: ast::Name, fields: &[field])
|
|||
token::get_name(name),
|
||||
fields.iter()
|
||||
.map(|f| token::get_ident(f.ident).get().to_strbuf())
|
||||
.collect::<Vec<StrBuf>>()));
|
||||
.collect::<Vec<StrBuf>>()).as_slice());
|
||||
}
|
||||
|
||||
pub fn method_idx(id: ast::Ident, meths: &[Rc<Method>]) -> Option<uint> {
|
||||
|
@ -3444,10 +3452,18 @@ pub fn provided_trait_methods(cx: &ctxt, id: ast::DefId) -> Vec<Rc<Method>> {
|
|||
let (_, p) = ast_util::split_trait_methods(ms.as_slice());
|
||||
p.iter().map(|m| method(cx, ast_util::local_def(m.id))).collect()
|
||||
}
|
||||
_ => cx.sess.bug(format!("provided_trait_methods: `{}` is not a trait", id))
|
||||
_ => {
|
||||
cx.sess.bug(format!("provided_trait_methods: `{}` is \
|
||||
not a trait",
|
||||
id).as_slice())
|
||||
}
|
||||
}
|
||||
_ => cx.sess.bug(format!("provided_trait_methods: `{}` is not a trait", id))
|
||||
}
|
||||
_ => {
|
||||
cx.sess.bug(format!("provided_trait_methods: `{}` is not a \
|
||||
trait",
|
||||
id).as_slice())
|
||||
}
|
||||
}
|
||||
} else {
|
||||
csearch::get_provided_trait_methods(cx, id)
|
||||
|
@ -3800,7 +3816,7 @@ pub fn enum_variants(cx: &ctxt, id: ast::DefId) -> Rc<Vec<Rc<VariantInfo>>> {
|
|||
cx.sess
|
||||
.span_err(e.span,
|
||||
format!("expected constant: {}",
|
||||
*err));
|
||||
*err).as_slice());
|
||||
}
|
||||
},
|
||||
None => {}
|
||||
|
@ -3963,7 +3979,7 @@ fn each_super_struct(cx: &ctxt, mut did: ast::DefId, f: |ast::DefId|) {
|
|||
None => {
|
||||
cx.sess.bug(
|
||||
format!("ID not mapped to super-struct: {}",
|
||||
cx.map.node_to_str(did.node)));
|
||||
cx.map.node_to_str(did.node)).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -3985,7 +4001,7 @@ pub fn lookup_struct_fields(cx: &ctxt, did: ast::DefId) -> Vec<field_ty> {
|
|||
_ => {
|
||||
cx.sess.bug(
|
||||
format!("ID not mapped to struct fields: {}",
|
||||
cx.map.node_to_str(did.node)));
|
||||
cx.map.node_to_str(did.node)).as_slice());
|
||||
}
|
||||
}
|
||||
});
|
||||
|
|
|
@ -180,7 +180,7 @@ fn ast_path_substs<AC:AstConv,RS:RegionScope>(
|
|||
format!("wrong number of lifetime parameters: \
|
||||
expected {} but found {}",
|
||||
expected_num_region_params,
|
||||
supplied_num_region_params));
|
||||
supplied_num_region_params).as_slice());
|
||||
}
|
||||
|
||||
match anon_regions {
|
||||
|
@ -204,7 +204,9 @@ fn ast_path_substs<AC:AstConv,RS:RegionScope>(
|
|||
};
|
||||
this.tcx().sess.span_fatal(path.span,
|
||||
format!("wrong number of type arguments: {} {} but found {}",
|
||||
expected, required_ty_param_count, supplied_ty_param_count));
|
||||
expected,
|
||||
required_ty_param_count,
|
||||
supplied_ty_param_count).as_slice());
|
||||
} else if supplied_ty_param_count > formal_ty_param_count {
|
||||
let expected = if required_ty_param_count < formal_ty_param_count {
|
||||
"expected at most"
|
||||
|
@ -213,7 +215,9 @@ fn ast_path_substs<AC:AstConv,RS:RegionScope>(
|
|||
};
|
||||
this.tcx().sess.span_fatal(path.span,
|
||||
format!("wrong number of type arguments: {} {} but found {}",
|
||||
expected, formal_ty_param_count, supplied_ty_param_count));
|
||||
expected,
|
||||
formal_ty_param_count,
|
||||
supplied_ty_param_count).as_slice());
|
||||
}
|
||||
|
||||
if supplied_ty_param_count > required_ty_param_count
|
||||
|
@ -317,8 +321,11 @@ pub fn ast_ty_to_prim_ty(tcx: &ty::ctxt, ast_ty: &ast::Ty) -> Option<ty::t> {
|
|||
match ast_ty.node {
|
||||
ast::TyPath(ref path, _, id) => {
|
||||
let a_def = match tcx.def_map.borrow().find(&id) {
|
||||
None => tcx.sess.span_bug(
|
||||
ast_ty.span, format!("unbound path {}", path_to_str(path))),
|
||||
None => {
|
||||
tcx.sess.span_bug(ast_ty.span,
|
||||
format!("unbound path {}",
|
||||
path_to_str(path)).as_slice())
|
||||
}
|
||||
Some(&d) => d
|
||||
};
|
||||
match a_def {
|
||||
|
@ -382,8 +389,13 @@ pub fn ast_ty_to_builtin_ty<AC:AstConv,
|
|||
match ast_ty.node {
|
||||
ast::TyPath(ref path, _, id) => {
|
||||
let a_def = match this.tcx().def_map.borrow().find(&id) {
|
||||
None => this.tcx().sess.span_bug(
|
||||
ast_ty.span, format!("unbound path {}", path_to_str(path))),
|
||||
None => {
|
||||
this.tcx()
|
||||
.sess
|
||||
.span_bug(ast_ty.span,
|
||||
format!("unbound path {}",
|
||||
path_to_str(path)).as_slice())
|
||||
}
|
||||
Some(&d) => d
|
||||
};
|
||||
|
||||
|
@ -493,8 +505,11 @@ fn mk_pointer<AC:AstConv,
|
|||
RPtr(r) => {
|
||||
return ty::mk_str_slice(tcx, r, ast::MutImmutable);
|
||||
}
|
||||
_ => tcx.sess.span_err(path.span,
|
||||
format!("managed strings are not supported")),
|
||||
_ => {
|
||||
tcx.sess
|
||||
.span_err(path.span,
|
||||
"managed strings are not supported")
|
||||
}
|
||||
}
|
||||
}
|
||||
Some(&ast::DefTrait(trait_def_id)) => {
|
||||
|
@ -635,8 +650,12 @@ pub fn ast_ty_to_ty<AC:AstConv, RS:RegionScope>(
|
|||
}
|
||||
ast::TyPath(ref path, ref bounds, id) => {
|
||||
let a_def = match tcx.def_map.borrow().find(&id) {
|
||||
None => tcx.sess.span_bug(
|
||||
ast_ty.span, format!("unbound path {}", path_to_str(path))),
|
||||
None => {
|
||||
tcx.sess
|
||||
.span_bug(ast_ty.span,
|
||||
format!("unbound path {}",
|
||||
path_to_str(path)).as_slice())
|
||||
}
|
||||
Some(&d) => d
|
||||
};
|
||||
// Kind bounds on path types are only supported for traits.
|
||||
|
@ -653,8 +672,10 @@ pub fn ast_ty_to_ty<AC:AstConv, RS:RegionScope>(
|
|||
let path_str = path_to_str(path);
|
||||
tcx.sess.span_err(
|
||||
ast_ty.span,
|
||||
format!("reference to trait `{name}` where a type is expected; \
|
||||
try `Box<{name}>` or `&{name}`", name=path_str));
|
||||
format!("reference to trait `{name}` where a \
|
||||
type is expected; try `Box<{name}>` or \
|
||||
`&{name}`",
|
||||
name=path_str).as_slice());
|
||||
ty::mk_err()
|
||||
}
|
||||
ast::DefTy(did) | ast::DefStruct(did) => {
|
||||
|
@ -675,14 +696,16 @@ pub fn ast_ty_to_ty<AC:AstConv, RS:RegionScope>(
|
|||
ast::DefMod(id) => {
|
||||
tcx.sess.span_fatal(ast_ty.span,
|
||||
format!("found module name used as a type: {}",
|
||||
tcx.map.node_to_str(id.node)));
|
||||
tcx.map.node_to_str(id.node)).as_slice());
|
||||
}
|
||||
ast::DefPrimTy(_) => {
|
||||
fail!("DefPrimTy arm missed in previous ast_ty_to_prim_ty call");
|
||||
}
|
||||
_ => {
|
||||
tcx.sess.span_fatal(ast_ty.span,
|
||||
format!("found value name used as a type: {:?}", a_def));
|
||||
format!("found value name used \
|
||||
as a type: {:?}",
|
||||
a_def).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -705,7 +728,9 @@ pub fn ast_ty_to_ty<AC:AstConv, RS:RegionScope>(
|
|||
Err(ref r) => {
|
||||
tcx.sess.span_fatal(
|
||||
ast_ty.span,
|
||||
format!("expected constant expr for vector length: {}", *r));
|
||||
format!("expected constant expr for vector \
|
||||
length: {}",
|
||||
*r).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -897,8 +922,8 @@ fn conv_builtin_bounds(tcx: &ty::ctxt, ast_bounds: &Option<OwnedSlice<ast::TyPar
|
|||
}
|
||||
tcx.sess.span_fatal(
|
||||
b.path.span,
|
||||
format!("only the builtin traits can be used \
|
||||
as closure or object bounds"));
|
||||
"only the builtin traits can be used as closure \
|
||||
or object bounds");
|
||||
}
|
||||
ast::StaticRegionTyParamBound => {
|
||||
builtin_bounds.add(ty::BoundStatic);
|
||||
|
@ -907,8 +932,8 @@ fn conv_builtin_bounds(tcx: &ty::ctxt, ast_bounds: &Option<OwnedSlice<ast::TyPar
|
|||
if !tcx.sess.features.issue_5723_bootstrap.get() {
|
||||
tcx.sess.span_err(
|
||||
span,
|
||||
format!("only the 'static lifetime is \
|
||||
accepted here."));
|
||||
"only the 'static lifetime is accepted \
|
||||
here.");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -263,7 +263,7 @@ pub fn check_pat_variant(pcx: &pat_ctxt, pat: &ast::Pat, path: &ast::Path,
|
|||
npat = subpats_len,
|
||||
kind = kind_name,
|
||||
narg = arg_len);
|
||||
tcx.sess.span_err(pat.span, s);
|
||||
tcx.sess.span_err(pat.span, s.as_slice());
|
||||
error_happened = true;
|
||||
}
|
||||
|
||||
|
@ -280,7 +280,7 @@ pub fn check_pat_variant(pcx: &pat_ctxt, pat: &ast::Pat, path: &ast::Path,
|
|||
{npat, plural, =1{# field} other{# fields}}, \
|
||||
but the corresponding {kind} has no fields",
|
||||
npat = subpats_len,
|
||||
kind = kind_name));
|
||||
kind = kind_name).as_slice());
|
||||
error_happened = true;
|
||||
}
|
||||
|
||||
|
@ -324,7 +324,7 @@ pub fn check_struct_pat_fields(pcx: &pat_ctxt,
|
|||
Some(&(_, true)) => {
|
||||
tcx.sess.span_err(span,
|
||||
format!("field `{}` bound twice in pattern",
|
||||
token::get_ident(field.ident)));
|
||||
token::get_ident(field.ident)).as_slice());
|
||||
}
|
||||
Some(&(index, ref mut used)) => {
|
||||
*used = true;
|
||||
|
@ -344,7 +344,7 @@ pub fn check_struct_pat_fields(pcx: &pat_ctxt,
|
|||
tcx.sess.span_err(span,
|
||||
format!("struct `{}` does not have a field named `{}`",
|
||||
name,
|
||||
token::get_ident(field.ident)));
|
||||
token::get_ident(field.ident)).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -356,9 +356,10 @@ pub fn check_struct_pat_fields(pcx: &pat_ctxt,
|
|||
continue;
|
||||
}
|
||||
|
||||
tcx.sess.span_err(span,
|
||||
tcx.sess
|
||||
.span_err(span,
|
||||
format!("pattern does not mention field `{}`",
|
||||
token::get_name(field.name)));
|
||||
token::get_name(field.name)).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -381,10 +382,12 @@ pub fn check_struct_pat(pcx: &pat_ctxt, pat_id: ast::NodeId, span: Span,
|
|||
}
|
||||
Some(&ast::DefStruct(..)) | Some(&ast::DefVariant(..)) => {
|
||||
let name = pprust::path_to_str(path);
|
||||
tcx.sess.span_err(span,
|
||||
format!("mismatched types: expected `{}` but found `{}`",
|
||||
tcx.sess
|
||||
.span_err(span,
|
||||
format!("mismatched types: expected `{}` but found \
|
||||
`{}`",
|
||||
fcx.infcx().ty_to_str(expected),
|
||||
name));
|
||||
name).as_slice());
|
||||
}
|
||||
_ => {
|
||||
tcx.sess.span_bug(span, "resolve didn't write in struct ID");
|
||||
|
@ -423,7 +426,7 @@ pub fn check_struct_like_enum_variant_pat(pcx: &pat_ctxt,
|
|||
format!("mismatched types: expected `{}` but \
|
||||
found `{}`",
|
||||
fcx.infcx().ty_to_str(expected),
|
||||
name));
|
||||
name).as_slice());
|
||||
}
|
||||
_ => {
|
||||
tcx.sess.span_bug(span, "resolve didn't write in variant");
|
||||
|
|
|
@ -284,7 +284,7 @@ fn construct_transformed_self_ty_for_object(
|
|||
_ => {
|
||||
tcx.sess.span_bug(span,
|
||||
format!("'impossible' transformed_self_ty: {}",
|
||||
transformed_self_ty.repr(tcx)));
|
||||
transformed_self_ty.repr(tcx)).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -950,7 +950,7 @@ impl<'a> LookupContext<'a> {
|
|||
|
||||
ty_infer(TyVar(_)) => {
|
||||
self.bug(format!("unexpected type: {}",
|
||||
self.ty_to_str(self_ty)));
|
||||
self.ty_to_str(self_ty)).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1235,9 +1235,10 @@ impl<'a> LookupContext<'a> {
|
|||
rcvr_ty, transformed_self_ty) {
|
||||
Ok(_) => {}
|
||||
Err(_) => {
|
||||
self.bug(format!("{} was a subtype of {} but now is not?",
|
||||
self.bug(format!(
|
||||
"{} was a subtype of {} but now is not?",
|
||||
self.ty_to_str(rcvr_ty),
|
||||
self.ty_to_str(transformed_self_ty)));
|
||||
self.ty_to_str(transformed_self_ty)).as_slice());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1465,16 +1466,16 @@ impl<'a> LookupContext<'a> {
|
|||
self.tcx().sess.span_note(
|
||||
span,
|
||||
format!("candidate \\#{} is `{}`",
|
||||
idx+1u,
|
||||
ty::item_path_str(self.tcx(), did)));
|
||||
idx + 1u,
|
||||
ty::item_path_str(self.tcx(), did)).as_slice());
|
||||
}
|
||||
|
||||
fn report_param_candidate(&self, idx: uint, did: DefId) {
|
||||
self.tcx().sess.span_note(
|
||||
self.span,
|
||||
format!("candidate \\#{} derives from the bound `{}`",
|
||||
idx+1u,
|
||||
ty::item_path_str(self.tcx(), did)));
|
||||
idx + 1u,
|
||||
ty::item_path_str(self.tcx(), did)).as_slice());
|
||||
}
|
||||
|
||||
fn report_trait_candidate(&self, idx: uint, did: DefId) {
|
||||
|
@ -1482,8 +1483,8 @@ impl<'a> LookupContext<'a> {
|
|||
self.span,
|
||||
format!("candidate \\#{} derives from the type of the receiver, \
|
||||
which is the trait `{}`",
|
||||
idx+1u,
|
||||
ty::item_path_str(self.tcx(), did)));
|
||||
idx + 1u,
|
||||
ty::item_path_str(self.tcx(), did)).as_slice());
|
||||
}
|
||||
|
||||
fn infcx(&'a self) -> &'a infer::InferCtxt<'a> {
|
||||
|
|
|
@ -546,8 +546,11 @@ fn span_for_field(tcx: &ty::ctxt, field: &ty::field_ty, struct_id: ast::DefId) -
|
|||
_ => false,
|
||||
}) {
|
||||
Some(f) => f.span,
|
||||
None => tcx.sess.bug(format!("Could not find field {}",
|
||||
token::get_name(field.name))),
|
||||
None => {
|
||||
tcx.sess
|
||||
.bug(format!("Could not find field {}",
|
||||
token::get_name(field.name)).as_slice())
|
||||
}
|
||||
}
|
||||
},
|
||||
_ => tcx.sess.bug("Field found outside of a struct?"),
|
||||
|
@ -569,8 +572,9 @@ fn check_for_field_shadowing(tcx: &ty::ctxt,
|
|||
match super_fields.iter().find(|sf| f.name == sf.name) {
|
||||
Some(prev_field) => {
|
||||
tcx.sess.span_err(span_for_field(tcx, f, id),
|
||||
format!("field `{}` hides field declared in super-struct",
|
||||
token::get_name(f.name)));
|
||||
format!("field `{}` hides field declared in \
|
||||
super-struct",
|
||||
token::get_name(f.name)).as_slice());
|
||||
tcx.sess.span_note(span_for_field(tcx, prev_field, parent_id),
|
||||
"previously declared here");
|
||||
},
|
||||
|
@ -593,11 +597,13 @@ fn check_fields_sized(tcx: &ty::ctxt,
|
|||
if !ty::type_is_sized(tcx, t) {
|
||||
match f.node.kind {
|
||||
ast::NamedField(ident, _) => {
|
||||
tcx.sess.span_err(f.span, format!("type `{}` is dynamically sized. \
|
||||
tcx.sess.span_err(
|
||||
f.span,
|
||||
format!("type `{}` is dynamically sized. \
|
||||
dynamically sized types may only \
|
||||
appear as the type of the final \
|
||||
field in a struct",
|
||||
token::get_ident(ident)));
|
||||
token::get_ident(ident)).as_slice());
|
||||
}
|
||||
ast::UnnamedField(_) => {
|
||||
tcx.sess.span_err(f.span, "dynamically sized type in field");
|
||||
|
@ -814,9 +820,10 @@ fn check_impl_methods_against_trait(ccx: &CrateCtxt,
|
|||
None => {
|
||||
tcx.sess.span_err(
|
||||
impl_method.span,
|
||||
format!("method `{}` is not a member of trait `{}`",
|
||||
format!(
|
||||
"method `{}` is not a member of trait `{}`",
|
||||
token::get_ident(impl_method_ty.ident),
|
||||
pprust::path_to_str(&ast_trait_ref.path)));
|
||||
pprust::path_to_str(&ast_trait_ref.path)).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -842,7 +849,7 @@ fn check_impl_methods_against_trait(ccx: &CrateCtxt,
|
|||
tcx.sess.span_err(
|
||||
impl_span,
|
||||
format!("not all trait methods implemented, missing: {}",
|
||||
missing_methods.connect(", ")));
|
||||
missing_methods.connect(", ")).as_slice());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -886,7 +893,8 @@ fn compare_impl_method(tcx: &ty::ctxt,
|
|||
format!("method `{}` has a `{}` declaration in the impl, \
|
||||
but not in the trait",
|
||||
token::get_ident(trait_m.ident),
|
||||
pprust::explicit_self_to_str(impl_m.explicit_self)));
|
||||
pprust::explicit_self_to_str(
|
||||
impl_m.explicit_self)).as_slice());
|
||||
return;
|
||||
}
|
||||
(_, &ast::SelfStatic) => {
|
||||
|
@ -895,7 +903,8 @@ fn compare_impl_method(tcx: &ty::ctxt,
|
|||
format!("method `{}` has a `{}` declaration in the trait, \
|
||||
but not in the impl",
|
||||
token::get_ident(trait_m.ident),
|
||||
pprust::explicit_self_to_str(trait_m.explicit_self)));
|
||||
pprust::explicit_self_to_str(
|
||||
trait_m.explicit_self)).as_slice());
|
||||
return;
|
||||
}
|
||||
_ => {
|
||||
|
@ -914,7 +923,7 @@ fn compare_impl_method(tcx: &ty::ctxt,
|
|||
other{# type parameters}}",
|
||||
method = token::get_ident(trait_m.ident),
|
||||
nimpl = num_impl_m_type_params,
|
||||
ntrait = num_trait_m_type_params));
|
||||
ntrait = num_trait_m_type_params).as_slice());
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -927,7 +936,7 @@ fn compare_impl_method(tcx: &ty::ctxt,
|
|||
method = token::get_ident(trait_m.ident),
|
||||
nimpl = impl_m.fty.sig.inputs.len(),
|
||||
trait = ty::item_path_str(tcx, trait_m.def_id),
|
||||
ntrait = trait_m.fty.sig.inputs.len()));
|
||||
ntrait = trait_m.fty.sig.inputs.len()).as_slice());
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -950,7 +959,7 @@ fn compare_impl_method(tcx: &ty::ctxt,
|
|||
in the trait declaration",
|
||||
token::get_ident(trait_m.ident),
|
||||
i,
|
||||
extra_bounds.user_string(tcx)));
|
||||
extra_bounds.user_string(tcx)).as_slice());
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -971,7 +980,9 @@ fn compare_impl_method(tcx: &ty::ctxt,
|
|||
method = token::get_ident(trait_m.ident),
|
||||
typaram = i,
|
||||
nimpl = impl_param_def.bounds.trait_bounds.len(),
|
||||
ntrait = trait_param_def.bounds.trait_bounds.len()));
|
||||
ntrait = trait_param_def.bounds
|
||||
.trait_bounds
|
||||
.len()).as_slice());
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
@ -1040,7 +1051,7 @@ fn compare_impl_method(tcx: &ty::ctxt,
|
|||
impl_m_span,
|
||||
format!("method `{}` has an incompatible type for trait: {}",
|
||||
token::get_ident(trait_m.ident),
|
||||
ty::type_err_to_str(tcx, terr)));
|
||||
ty::type_err_to_str(tcx, terr)).as_slice());
|
||||
ty::note_and_explain_type_err(tcx, terr);
|
||||
}
|
||||
}
|
||||
|
@ -1099,7 +1110,8 @@ impl<'a> FnCtxt<'a> {
|
|||
None => {
|
||||
self.tcx().sess.span_bug(
|
||||
span,
|
||||
format!("no type for local variable {:?}", nid));
|
||||
format!("no type for local variable {:?}",
|
||||
nid).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1173,7 +1185,7 @@ impl<'a> FnCtxt<'a> {
|
|||
Some(&t) => t,
|
||||
None => {
|
||||
self.tcx().sess.bug(format!("no type for expr in fcx {}",
|
||||
self.tag()));
|
||||
self.tag()).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1185,7 +1197,7 @@ impl<'a> FnCtxt<'a> {
|
|||
self.tcx().sess.bug(
|
||||
format!("no type for node {}: {} in fcx {}",
|
||||
id, self.tcx().map.node_to_str(id),
|
||||
self.tag()));
|
||||
self.tag()).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1197,7 +1209,7 @@ impl<'a> FnCtxt<'a> {
|
|||
self.tcx().sess.bug(
|
||||
format!("no method entry for node {}: {} in fcx {}",
|
||||
id, self.tcx().map.node_to_str(id),
|
||||
self.tag()));
|
||||
self.tag()).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1350,7 +1362,7 @@ pub fn autoderef<T>(fcx: &FnCtxt, sp: Span, base_ty: ty::t,
|
|||
// We've reached the recursion limit, error gracefully.
|
||||
fcx.tcx().sess.span_err(sp,
|
||||
format!("reached the recursion limit while auto-dereferencing {}",
|
||||
base_ty.repr(fcx.tcx())));
|
||||
base_ty.repr(fcx.tcx())).as_slice());
|
||||
(ty::mk_err(), 0, None)
|
||||
}
|
||||
|
||||
|
@ -1607,7 +1619,7 @@ fn check_type_parameter_positions_in_path(function_context: &FnCtxt,
|
|||
found {nsupplied, plural, =1{# lifetime parameter} \
|
||||
other{# lifetime parameters}}",
|
||||
nexpected = trait_region_parameter_count,
|
||||
nsupplied = supplied_region_parameter_count));
|
||||
nsupplied = supplied_region_parameter_count).as_slice());
|
||||
}
|
||||
|
||||
// Make sure the number of type parameters supplied on the trait
|
||||
|
@ -1638,7 +1650,8 @@ fn check_type_parameter_positions_in_path(function_context: &FnCtxt,
|
|||
nexpected = required_ty_param_count,
|
||||
nsupplied = supplied_ty_param_count)
|
||||
};
|
||||
function_context.tcx().sess.span_err(path.span, msg)
|
||||
function_context.tcx().sess.span_err(path.span,
|
||||
msg.as_slice())
|
||||
} else if supplied_ty_param_count > formal_ty_param_count {
|
||||
let msg = if required_ty_param_count < generics.type_param_defs().len() {
|
||||
format!("the {trait_or_impl} referenced by this path needs at most \
|
||||
|
@ -1659,7 +1672,8 @@ fn check_type_parameter_positions_in_path(function_context: &FnCtxt,
|
|||
nexpected = formal_ty_param_count,
|
||||
nsupplied = supplied_ty_param_count)
|
||||
};
|
||||
function_context.tcx().sess.span_err(path.span, msg)
|
||||
function_context.tcx().sess.span_err(path.span,
|
||||
msg.as_slice())
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
|
@ -1727,9 +1741,8 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
|
|||
fty.sig.output
|
||||
}
|
||||
_ => {
|
||||
fcx.tcx().sess.span_bug(
|
||||
callee_expr.span,
|
||||
format!("method without bare fn type"));
|
||||
fcx.tcx().sess.span_bug(callee_expr.span,
|
||||
"method without bare fn type");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1768,7 +1781,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
|
|||
nexpected = expected_arg_count,
|
||||
nsupplied = supplied_arg_count);
|
||||
|
||||
tcx.sess.span_err(sp, msg);
|
||||
tcx.sess.span_err(sp, msg.as_slice());
|
||||
|
||||
err_args(supplied_arg_count)
|
||||
}
|
||||
|
@ -1781,7 +1794,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
|
|||
nexpected = expected_arg_count,
|
||||
nsupplied = supplied_arg_count);
|
||||
|
||||
tcx.sess.span_err(sp, msg);
|
||||
tcx.sess.span_err(sp, msg.as_slice());
|
||||
|
||||
err_args(supplied_arg_count)
|
||||
};
|
||||
|
@ -2484,7 +2497,8 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
|
|||
tcx.sess.span_err(
|
||||
field.ident.span,
|
||||
format!("field `{}` specified more than once",
|
||||
token::get_ident(field.ident.node)));
|
||||
token::get_ident(field.ident
|
||||
.node)).as_slice());
|
||||
error_happened = true;
|
||||
}
|
||||
Some((field_id, false)) => {
|
||||
|
@@ -2517,14 +2531,16 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
             let name = class_field.name;
             let (_, seen) = *class_field_map.get(&name);
             if !seen {
-                missing_fields.push("`".to_owned() + token::get_name(name).get() + "`");
+                missing_fields.push(
+                    format!("`{}`", token::get_name(name).get()))
             }
         }

         tcx.sess.span_err(span,
-            format!("missing {nfields, plural, =1{field} other{fields}}: {fields}",
+            format!(
+                "missing {nfields, plural, =1{field} other{fields}}: {fields}",
                 nfields = missing_fields.len(),
-                fields = missing_fields.connect(", ")));
+                fields = missing_fields.connect(", ")).as_slice());
         }
     }
 }
|
||||
|
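The missing-fields hunk above also replaces building the backtick-wrapped name by concatenation (to_owned() followed by +) with a single format! call. A modern sketch of the same substitution; the field names are made up, and today's join corresponds to the snapshot's connect:

    fn main() {
        let class_fields = ["x", "y", "z"];
        let seen = ["x"];

        // Wrap each missing field name in backticks with format! instead of
        // building it by concatenation.
        let missing: Vec<String> = class_fields
            .iter()
            .filter(|name| !seen.contains(*name))
            .map(|name| format!("`{}`", name))
            .collect();

        println!("missing fields: {}", missing.join(", "));
    }
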
@ -3589,7 +3605,7 @@ pub fn check_representable(tcx: &ty::ctxt,
|
|||
tcx.sess.span_err(
|
||||
sp, format!("illegal recursive {} type; \
|
||||
wrap the inner value in a box to make it representable",
|
||||
designation));
|
||||
designation).as_slice());
|
||||
return false
|
||||
}
|
||||
ty::Representable | ty::ContainsRecursive => (),
|
||||
|
@ -3614,10 +3630,12 @@ pub fn check_instantiable(tcx: &ty::ctxt,
|
|||
-> bool {
|
||||
let item_ty = ty::node_id_to_type(tcx, item_id);
|
||||
if !ty::is_instantiable(tcx, item_ty) {
|
||||
tcx.sess.span_err(sp, format!("this type cannot be instantiated \
|
||||
without an instance of itself; \
|
||||
consider using `Option<{}>`",
|
||||
ppaux::ty_to_str(tcx, item_ty)));
|
||||
tcx.sess
|
||||
.span_err(sp,
|
||||
format!("this type cannot be instantiated without an \
|
||||
instance of itself; consider using \
|
||||
`Option<{}>`",
|
||||
ppaux::ty_to_str(tcx, item_ty)).as_slice());
|
||||
false
|
||||
} else {
|
||||
true
|
||||
|
@ -3670,11 +3688,16 @@ pub fn check_enum_variants_sized(ccx: &CrateCtxt,
|
|||
// A struct value with an unsized final field is itself
|
||||
// unsized and we must track this in the type system.
|
||||
if !ty::type_is_sized(ccx.tcx, *t) {
|
||||
ccx.tcx.sess.span_err(args.get(i).ty.span,
|
||||
ccx.tcx
|
||||
.sess
|
||||
.span_err(
|
||||
args.get(i).ty.span,
|
||||
format!("type `{}` is dynamically sized. \
|
||||
dynamically sized types may only \
|
||||
appear as the final type in a variant",
|
||||
ppaux::ty_to_str(ccx.tcx, *t)));
|
||||
appear as the final type in a \
|
||||
variant",
|
||||
ppaux::ty_to_str(ccx.tcx,
|
||||
*t)).as_slice());
|
||||
}
|
||||
}
|
||||
},
|
||||
|
@ -3755,7 +3778,11 @@ pub fn check_enum_variants(ccx: &CrateCtxt,
|
|||
ccx.tcx.sess.span_err(e.span, "expected signed integer constant");
|
||||
}
|
||||
Err(ref err) => {
|
||||
ccx.tcx.sess.span_err(e.span, format!("expected constant: {}", *err));
|
||||
ccx.tcx
|
||||
.sess
|
||||
.span_err(e.span,
|
||||
format!("expected constant: {}",
|
||||
*err).as_slice());
|
||||
}
|
||||
}
|
||||
},
|
||||
|
@ -3906,7 +3933,7 @@ pub fn instantiate_path(fcx: &FnCtxt,
|
|||
found {nsupplied, plural, =1{# lifetime parameter} \
|
||||
other{# lifetime parameters}}",
|
||||
nexpected = num_expected_regions,
|
||||
nsupplied = num_supplied_regions));
|
||||
nsupplied = num_supplied_regions).as_slice());
|
||||
}
|
||||
|
||||
fcx.infcx().region_vars_for_defs(span, tpt.generics.region_param_defs.as_slice())
|
||||
|
@ -3945,7 +3972,7 @@ pub fn instantiate_path(fcx: &FnCtxt,
|
|||
fcx.ccx.tcx.sess.span_err
|
||||
(span,
|
||||
format!("too many type parameters provided: {} {}, found {}",
|
||||
expected, user_ty_param_count, ty_substs_len));
|
||||
expected, user_ty_param_count, ty_substs_len).as_slice());
|
||||
(fcx.infcx().next_ty_vars(ty_param_count), regions)
|
||||
} else if ty_substs_len < user_ty_param_req {
|
||||
let expected = if user_ty_param_req < user_ty_param_count {
|
||||
|
@ -3953,10 +3980,12 @@ pub fn instantiate_path(fcx: &FnCtxt,
|
|||
} else {
|
||||
"expected"
|
||||
};
|
||||
fcx.ccx.tcx.sess.span_err
|
||||
(span,
|
||||
fcx.ccx.tcx.sess.span_err(
|
||||
span,
|
||||
format!("not enough type parameters provided: {} {}, found {}",
|
||||
expected, user_ty_param_req, ty_substs_len));
|
||||
expected,
|
||||
user_ty_param_req,
|
||||
ty_substs_len).as_slice());
|
||||
(fcx.infcx().next_ty_vars(ty_param_count), regions)
|
||||
} else {
|
||||
if ty_substs_len > user_ty_param_req
|
||||
|
@ -4128,8 +4157,9 @@ pub fn ast_expr_vstore_to_ty(fcx: &FnCtxt,
|
|||
}
|
||||
}
|
||||
_ => {
|
||||
fcx.ccx.tcx.sess.span_bug(
|
||||
e.span, format!("vstore with unexpected contents"))
|
||||
fcx.ccx.tcx.sess.span_bug(e.span,
|
||||
"vstore with unexpected \
|
||||
contents")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -4184,8 +4214,9 @@ pub fn check_bounds_are_used(ccx: &CrateCtxt,
|
|||
for (i, b) in tps_used.iter().enumerate() {
|
||||
if !*b {
|
||||
ccx.tcx.sess.span_err(
|
||||
span, format!("type parameter `{}` is unused",
|
||||
token::get_ident(tps.get(i).ident)));
|
||||
span,
|
||||
format!("type parameter `{}` is unused",
|
||||
token::get_ident(tps.get(i).ident)).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -4222,8 +4253,9 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &ast::ForeignItem) {
|
|||
}
|
||||
op => {
|
||||
tcx.sess.span_err(it.span,
|
||||
format!("unrecognized atomic operation function: `{}`",
|
||||
op));
|
||||
format!("unrecognized atomic operation \
|
||||
function: `{}`",
|
||||
op).as_slice());
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
@ -4450,7 +4482,7 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &ast::ForeignItem) {
|
|||
ref other => {
|
||||
tcx.sess.span_err(it.span,
|
||||
format!("unrecognized intrinsic function: `{}`",
|
||||
*other));
|
||||
*other).as_slice());
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
@ -4468,9 +4500,11 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &ast::ForeignItem) {
|
|||
let i_ty = ty::lookup_item_type(ccx.tcx, local_def(it.id));
|
||||
let i_n_tps = i_ty.generics.type_param_defs().len();
|
||||
if i_n_tps != n_tps {
|
||||
tcx.sess.span_err(it.span, format!("intrinsic has wrong number \
|
||||
of type parameters: found {}, \
|
||||
expected {}", i_n_tps, n_tps));
|
||||
tcx.sess.span_err(it.span,
|
||||
format!("intrinsic has wrong number of type \
|
||||
parameters: found {}, expected {}",
|
||||
i_n_tps,
|
||||
n_tps).as_slice());
|
||||
} else {
|
||||
require_same_types(tcx,
|
||||
None,
|
||||
|
|
|
@ -183,7 +183,7 @@ fn region_of_def(fcx: &FnCtxt, def: ast::Def) -> ty::Region {
|
|||
}
|
||||
_ => {
|
||||
tcx.sess.bug(format!("unexpected def in region_of_def: {:?}",
|
||||
def))
|
||||
def).as_slice())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -880,7 +880,7 @@ fn constrain_autoderefs(rcx: &mut Rcx,
|
|||
ty::ty_rptr(r, ref m) => (m.mutbl, r),
|
||||
_ => rcx.tcx().sess.span_bug(deref_expr.span,
|
||||
format!("bad overloaded deref type {}",
|
||||
method.ty.repr(rcx.tcx())))
|
||||
method.ty.repr(rcx.tcx())).as_slice())
|
||||
};
|
||||
{
|
||||
let mc = mc::MemCategorizationContext::new(rcx);
|
||||
|
@ -1235,7 +1235,8 @@ fn link_region(rcx: &Rcx,
|
|||
rcx.tcx().sess.span_bug(
|
||||
span,
|
||||
format!("Illegal upvar id: {}",
|
||||
upvar_id.repr(rcx.tcx())));
|
||||
upvar_id.repr(
|
||||
rcx.tcx())).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -154,7 +154,7 @@ fn lookup_vtables_for_param(vcx: &VtableContext,
|
|||
format!("failed to find an implementation of \
|
||||
trait {} for {}",
|
||||
vcx.infcx.trait_ref_to_str(&*trait_ref),
|
||||
vcx.infcx.ty_to_str(ty)));
|
||||
vcx.infcx.ty_to_str(ty)).as_slice());
|
||||
}
|
||||
}
|
||||
true
|
||||
|
@ -208,7 +208,7 @@ fn relate_trait_refs(vcx: &VtableContext,
|
|||
format!("expected {}, but found {} ({})",
|
||||
ppaux::trait_ref_to_str(tcx, &r_exp_trait_ref),
|
||||
ppaux::trait_ref_to_str(tcx, &r_act_trait_ref),
|
||||
ty::type_err_to_str(tcx, err)));
|
||||
ty::type_err_to_str(tcx, err)).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -491,9 +491,9 @@ fn fixup_ty(vcx: &VtableContext,
|
|||
Ok(new_type) => Some(new_type),
|
||||
Err(e) if !is_early => {
|
||||
tcx.sess.span_fatal(span,
|
||||
format!("cannot determine a type \
|
||||
for this bounded type parameter: {}",
|
||||
fixup_err_to_str(e)))
|
||||
format!("cannot determine a type for this bounded type \
|
||||
parameter: {}",
|
||||
fixup_err_to_str(e)).as_slice())
|
||||
}
|
||||
Err(_) => {
|
||||
None
|
||||
|
@ -550,8 +550,9 @@ pub fn early_resolve_expr(ex: &ast::Expr, fcx: &FnCtxt, is_early: bool) {
|
|||
match (&ty::get(ty).sty, store) {
|
||||
(&ty::ty_rptr(_, mt), ty::RegionTraitStore(_, mutbl))
|
||||
if !mutability_allowed(mt.mutbl, mutbl) => {
|
||||
fcx.tcx().sess.span_err(ex.span,
|
||||
format!("types differ in mutability"));
|
||||
fcx.tcx()
|
||||
.sess
|
||||
.span_err(ex.span, "types differ in mutability");
|
||||
}
|
||||
|
||||
(&ty::ty_uniq(..), ty::UniqTraitStore) |
|
||||
|
@ -609,7 +610,7 @@ pub fn early_resolve_expr(ex: &ast::Expr, fcx: &FnCtxt, is_early: bool) {
|
|||
ex.span,
|
||||
format!("can only cast an boxed pointer \
|
||||
to a boxed object, not a {}",
|
||||
ty::ty_sort_str(fcx.tcx(), ty)));
|
||||
ty::ty_sort_str(fcx.tcx(), ty)).as_slice());
|
||||
}
|
||||
|
||||
(_, ty::RegionTraitStore(..)) => {
|
||||
|
@ -617,7 +618,7 @@ pub fn early_resolve_expr(ex: &ast::Expr, fcx: &FnCtxt, is_early: bool) {
|
|||
ex.span,
|
||||
format!("can only cast an &-pointer \
|
||||
to an &-object, not a {}",
|
||||
ty::ty_sort_str(fcx.tcx(), ty)));
|
||||
ty::ty_sort_str(fcx.tcx(), ty)).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -402,7 +402,7 @@ impl<'cx> Resolver<'cx> {
|
|||
span,
|
||||
format!("cannot determine a type for \
|
||||
this expression: {}",
|
||||
infer::fixup_err_to_str(e)))
|
||||
infer::fixup_err_to_str(e)).as_slice())
|
||||
}
|
||||
|
||||
ResolvingLocal(span) => {
|
||||
|
@ -410,7 +410,7 @@ impl<'cx> Resolver<'cx> {
|
|||
span,
|
||||
format!("cannot determine a type for \
|
||||
this local variable: {}",
|
||||
infer::fixup_err_to_str(e)))
|
||||
infer::fixup_err_to_str(e)).as_slice())
|
||||
}
|
||||
|
||||
ResolvingPattern(span) => {
|
||||
|
@ -418,7 +418,7 @@ impl<'cx> Resolver<'cx> {
|
|||
span,
|
||||
format!("cannot determine a type for \
|
||||
this pattern binding: {}",
|
||||
infer::fixup_err_to_str(e)))
|
||||
infer::fixup_err_to_str(e)).as_slice())
|
||||
}
|
||||
|
||||
ResolvingUpvar(upvar_id) => {
|
||||
|
@ -430,13 +430,15 @@ impl<'cx> Resolver<'cx> {
|
|||
ty::local_var_name_str(
|
||||
self.tcx,
|
||||
upvar_id.var_id).get().to_str(),
|
||||
infer::fixup_err_to_str(e)));
|
||||
infer::fixup_err_to_str(e)).as_slice());
|
||||
}
|
||||
|
||||
ResolvingImplRes(span) => {
|
||||
self.tcx.sess.span_err(
|
||||
span,
|
||||
format!("cannot determine a type for impl supertrait"));
|
||||
self.tcx
|
||||
.sess
|
||||
.span_err(span,
|
||||
"cannot determine a type for impl \
|
||||
supertrait");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -431,8 +431,9 @@ impl<'a> CoherenceChecker<'a> {
|
|||
session.span_err(
|
||||
self.span_of_impl(impl_a),
|
||||
format!("conflicting implementations for trait `{}`",
|
||||
ty::item_path_str(self.crate_context.tcx,
|
||||
trait_def_id)));
|
||||
ty::item_path_str(
|
||||
self.crate_context.tcx,
|
||||
trait_def_id)).as_slice());
|
||||
if impl_b.krate == LOCAL_CRATE {
|
||||
session.span_note(self.span_of_impl(impl_b),
|
||||
"note conflicting implementation here");
|
||||
|
@ -442,7 +443,7 @@ impl<'a> CoherenceChecker<'a> {
|
|||
session.note(
|
||||
format!("conflicting implementation in crate \
|
||||
`{}`",
|
||||
cdata.name));
|
||||
cdata.name).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -123,7 +123,8 @@ impl<'a> AstConv for CrateCtxt<'a> {
|
|||
}
|
||||
x => {
|
||||
self.tcx.sess.bug(format!("unexpected sort of node \
|
||||
in get_item_ty(): {:?}", x));
|
||||
in get_item_ty(): {:?}",
|
||||
x).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -134,7 +135,8 @@ impl<'a> AstConv for CrateCtxt<'a> {
|
|||
|
||||
fn ty_infer(&self, span: Span) -> ty::t {
|
||||
self.tcx.sess.span_err(span, "the type placeholder `_` is not \
|
||||
allowed within types on item signatures.");
|
||||
allowed within types on item \
|
||||
signatures.");
|
||||
ty::mk_err()
|
||||
}
|
||||
}
|
||||
|
@ -573,7 +575,8 @@ pub fn ensure_no_ty_param_bounds(ccx: &CrateCtxt,
|
|||
if ty_param.bounds.len() > 0 {
|
||||
ccx.tcx.sess.span_err(
|
||||
span,
|
||||
format!("trait bounds are not allowed in {} definitions", thing));
|
||||
format!("trait bounds are not allowed in {} definitions",
|
||||
thing).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -711,8 +714,10 @@ pub fn convert_struct(ccx: &CrateCtxt,
|
|||
if result.name != special_idents::unnamed_field.name {
|
||||
let dup = match seen_fields.find(&result.name) {
|
||||
Some(prev_span) => {
|
||||
tcx.sess.span_err(f.span,
|
||||
format!("field `{}` is already declared", token::get_name(result.name)));
|
||||
tcx.sess.span_err(
|
||||
f.span,
|
||||
format!("field `{}` is already declared",
|
||||
token::get_name(result.name)).as_slice());
|
||||
tcx.sess.span_note(*prev_span,
|
||||
"previously declared here");
|
||||
true
|
||||
|
@ -840,7 +845,7 @@ pub fn instantiate_trait_ref(ccx: &CrateCtxt,
|
|||
ccx.tcx.sess.span_fatal(
|
||||
ast_trait_ref.path.span,
|
||||
format!("`{}` is not a trait",
|
||||
path_to_str(&ast_trait_ref.path)));
|
||||
path_to_str(&ast_trait_ref.path)).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -852,8 +857,10 @@ fn get_trait_def(ccx: &CrateCtxt, trait_id: ast::DefId) -> Rc<ty::TraitDef> {
|
|||
|
||||
match ccx.tcx.map.get(trait_id.node) {
|
||||
ast_map::NodeItem(item) => trait_def_of_item(ccx, item),
|
||||
_ => ccx.tcx.sess.bug(format!("get_trait_def({}): not an item",
|
||||
trait_id.node))
|
||||
_ => {
|
||||
ccx.tcx.sess.bug(format!("get_trait_def({}): not an item",
|
||||
trait_id.node).as_slice())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -889,7 +896,7 @@ pub fn trait_def_of_item(ccx: &CrateCtxt, it: &ast::Item) -> Rc<ty::TraitDef> {
|
|||
ref s => {
|
||||
tcx.sess.span_bug(
|
||||
it.span,
|
||||
format!("trait_def_of_item invoked on {:?}", s));
|
||||
format!("trait_def_of_item invoked on {:?}", s).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -960,9 +967,7 @@ pub fn ty_of_item(ccx: &CrateCtxt, it: &ast::Item)
|
|||
return tpt;
|
||||
}
|
||||
ast::ItemTrait(..) => {
|
||||
tcx.sess.span_bug(
|
||||
it.span,
|
||||
format!("invoked ty_of_item on trait"));
|
||||
tcx.sess.span_bug(it.span, "invoked ty_of_item on trait");
|
||||
}
|
||||
ast::ItemStruct(_, ref generics) => {
|
||||
let ty_generics = ty_generics_for_type(ccx, generics);
|
||||
|
@ -1113,8 +1118,7 @@ fn ty_generics(ccx: &CrateCtxt,
|
|||
if !ccx.tcx.sess.features.issue_5723_bootstrap.get() {
|
||||
ccx.tcx.sess.span_err(
|
||||
span,
|
||||
format!("only the 'static lifetime is \
|
||||
accepted here."));
|
||||
"only the 'static lifetime is accepted here.");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1151,7 +1155,8 @@ fn ty_generics(ccx: &CrateCtxt,
|
|||
format!("incompatible bounds on type parameter {}, \
|
||||
bound {} does not allow unsized type",
|
||||
token::get_ident(ident),
|
||||
ppaux::trait_ref_to_str(tcx, &*trait_ref)));
|
||||
ppaux::trait_ref_to_str(tcx,
|
||||
&*trait_ref)).as_slice());
|
||||
}
|
||||
true
|
||||
});
|
||||
|
|
|
@ -212,7 +212,7 @@ impl<'f> Coerce<'f> {
|
|||
self.get_ref().infcx.tcx.sess.span_bug(
|
||||
self.get_ref().trace.origin.span(),
|
||||
format!("failed to resolve even without \
|
||||
any force options: {:?}", e));
|
||||
any force options: {:?}", e).as_slice());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -118,10 +118,9 @@ pub trait Combine {
|
|||
// I think it should never happen that we unify two
|
||||
// substs and one of them has a self_ty and one
|
||||
// doesn't...? I could be wrong about this.
|
||||
self.infcx().tcx.sess.bug(
|
||||
format!("substitution a had a self_ty \
|
||||
and substitution b didn't, \
|
||||
or vice versa"));
|
||||
self.infcx().tcx.sess.bug("substitution a had a self_ty \
|
||||
and substitution b didn't, or \
|
||||
vice versa");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -405,7 +404,7 @@ pub fn super_tys<C:Combine>(this: &C, a: ty::t, b: ty::t) -> cres<ty::t> {
|
|||
format!("{}: bot and var types should have been handled ({},{})",
|
||||
this.tag(),
|
||||
a.inf_str(this.infcx()),
|
||||
b.inf_str(this.infcx())));
|
||||
b.inf_str(this.infcx())).as_slice());
|
||||
}
|
||||
|
||||
// Relate integral variables to other types
|
||||
|
|
|
@@ -355,7 +355,7 @@ impl<'a> ErrorReporting for InferCtxt<'a> {
format!("{}: {} ({})",
message_root_str,
expected_found_str,
ty::type_err_to_str(self.tcx, terr)));
ty::type_err_to_str(self.tcx, terr)).as_slice());
}

fn report_and_explain_type_error(&self,

@@ -430,7 +430,10 @@ impl<'a> ErrorReporting for InferCtxt<'a> {
span,
format!("lifetime of borrowed pointer outlives \
lifetime of captured variable `{}`...",
ty::local_var_name_str(self.tcx, upvar_id.var_id).get().to_str()));
ty::local_var_name_str(self.tcx,
upvar_id.var_id)
.get()
.to_str()).as_slice());
note_and_explain_region(
self.tcx,
"...the borrowed pointer is valid for ",

@@ -439,7 +442,10 @@ impl<'a> ErrorReporting for InferCtxt<'a> {
note_and_explain_region(
self.tcx,
format!("...but `{}` is only valid for ",
ty::local_var_name_str(self.tcx, upvar_id.var_id).get().to_str()),
ty::local_var_name_str(self.tcx,
upvar_id.var_id)
.get()
.to_str()).as_slice(),
sup,
"");
}

@@ -483,7 +489,9 @@ impl<'a> ErrorReporting for InferCtxt<'a> {
span,
format!("captured variable `{}` does not \
outlive the enclosing closure",
ty::local_var_name_str(self.tcx, id).get().to_str()));
ty::local_var_name_str(self.tcx,
id).get()
.to_str()).as_slice());
note_and_explain_region(
self.tcx,
"captured variable is valid for ",

@@ -496,9 +504,8 @@ impl<'a> ErrorReporting for InferCtxt<'a> {
"");
}
infer::IndexSlice(span) => {
self.tcx.sess.span_err(
span,
format!("index of slice outside its lifetime"));
self.tcx.sess.span_err(span,
"index of slice outside its lifetime");
note_and_explain_region(
self.tcx,
"the slice is only valid for ",

@@ -591,7 +598,7 @@ impl<'a> ErrorReporting for InferCtxt<'a> {
span,
format!("in type `{}`, pointer has a longer lifetime than \
the data it references",
ty.user_string(self.tcx)));
ty.user_string(self.tcx)).as_slice());
note_and_explain_region(
self.tcx,
"the pointer is valid for ",

@@ -1022,8 +1029,13 @@ impl<'a> Rebuilder<'a> {
}
ast::TyPath(ref path, _, id) => {
let a_def = match self.tcx.def_map.borrow().find(&id) {
None => self.tcx.sess.fatal(format!("unbound path {}",
pprust::path_to_str(path))),
None => {
self.tcx
.sess
.fatal(format!(
"unbound path {}",
pprust::path_to_str(path)).as_slice())
}
Some(&d) => d
};
match a_def {

@@ -1209,18 +1221,18 @@ impl<'a> ErrorReportingHelpers for InferCtxt<'a> {
opt_explicit_self, generics);
let msg = format!("consider using an explicit lifetime \
parameter as shown: {}", suggested_fn);
self.tcx.sess.span_note(span, msg);
self.tcx.sess.span_note(span, msg.as_slice());
}

fn report_inference_failure(&self,
var_origin: RegionVariableOrigin) {
let var_description = match var_origin {
infer::MiscVariable(_) => "".to_owned(),
infer::PatternRegion(_) => " for pattern".to_owned(),
infer::AddrOfRegion(_) => " for borrow expression".to_owned(),
infer::AddrOfSlice(_) => " for slice expression".to_owned(),
infer::Autoref(_) => " for autoref".to_owned(),
infer::Coercion(_) => " for automatic coercion".to_owned(),
infer::MiscVariable(_) => "".to_strbuf(),
infer::PatternRegion(_) => " for pattern".to_strbuf(),
infer::AddrOfRegion(_) => " for borrow expression".to_strbuf(),
infer::AddrOfSlice(_) => " for slice expression".to_strbuf(),
infer::Autoref(_) => " for autoref".to_strbuf(),
infer::Coercion(_) => " for automatic coercion".to_strbuf(),
infer::LateBoundRegion(_, br) => {
format!(" for {}in function call",
bound_region_to_str(self.tcx, "lifetime parameter ", true, br))

@@ -1247,7 +1259,7 @@ impl<'a> ErrorReportingHelpers for InferCtxt<'a> {
var_origin.span(),
format!("cannot infer an appropriate lifetime{} \
due to conflicting requirements",
var_description));
var_description).as_slice());
}

fn note_region_origin(&self, origin: SubregionOrigin) {

@@ -1282,7 +1294,7 @@ impl<'a> ErrorReportingHelpers for InferCtxt<'a> {
self.tcx.sess.span_note(
trace.origin.span(),
format!("...so that {} ({})",
desc, values_str));
desc, values_str).as_slice());
}
None => {
// Really should avoid printing this error at

@@ -1291,7 +1303,7 @@ impl<'a> ErrorReportingHelpers for InferCtxt<'a> {
// doing right now. - nmatsakis
self.tcx.sess.span_note(
trace.origin.span(),
format!("...so that {}", desc));
format!("...so that {}", desc).as_slice());
}
}
}

@@ -1304,8 +1316,11 @@ impl<'a> ErrorReportingHelpers for InferCtxt<'a> {
infer::ReborrowUpvar(span, ref upvar_id) => {
self.tcx.sess.span_note(
span,
format!("...so that closure can access `{}`",
ty::local_var_name_str(self.tcx, upvar_id.var_id).get().to_str()))
format!(
"...so that closure can access `{}`",
ty::local_var_name_str(self.tcx, upvar_id.var_id)
.get()
.to_str()).as_slice())
}
infer::InfStackClosure(span) => {
self.tcx.sess.span_note(

@@ -1328,7 +1343,9 @@ impl<'a> ErrorReportingHelpers for InferCtxt<'a> {
span,
format!("...so that captured variable `{}` \
does not outlive the enclosing closure",
ty::local_var_name_str(self.tcx, id).get().to_str()));
ty::local_var_name_str(
self.tcx,
id).get().to_str()).as_slice());
}
infer::IndexSlice(span) => {
self.tcx.sess.span_note(

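The error-reporting hunks also swap `.to_owned()` (which produced `~str`) for `.to_strbuf()` when building message fragments, so that every arm of a match yields the owned string type the rest of the function expects. A small sketch of the idea in today's Rust, where `String` plays the role of `StrBuf` and the enum is an illustrative stand-in:

    // All arms must agree on one owned string type.
    enum Origin { Misc, Pattern, Autoref }

    fn describe(origin: Origin) -> String {
        match origin {
            Origin::Misc => "".to_string(),
            Origin::Pattern => " for pattern".to_string(),
            Origin::Autoref => " for autoref".to_string(),
        }
    }

    fn main() {
        println!("cannot infer an appropriate lifetime{}", describe(Origin::Pattern));
    }
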
@@ -249,7 +249,8 @@ impl<'f> Combine for Glb<'f> {
}
this.get_ref().infcx.tcx.sess.span_bug(
this.get_ref().trace.origin.span(),
format!("could not find original bound region for {:?}", r))
format!("could not find original bound region for {:?}",
r).as_slice())
}

fn fresh_bound_variable(this: &Glb, binder_id: NodeId) -> ty::Region {

@@ -529,7 +529,7 @@ pub fn var_ids<T:Combine>(this: &T,
r => {
this.infcx().tcx.sess.span_bug(
this.trace().origin.span(),
format!("found non-region-vid: {:?}", r));
format!("found non-region-vid: {:?}", r).as_slice());
}
}).collect()
}

@@ -185,8 +185,9 @@ impl<'f> Combine for Lub<'f> {

this.get_ref().infcx.tcx.sess.span_bug(
this.get_ref().trace.origin.span(),
format!("Region {:?} is not associated with \
any bound region from A!", r0))
format!("region {:?} is not associated with \
any bound region from A!",
r0).as_slice())
}
}

@@ -695,7 +695,7 @@ impl<'a> InferCtxt<'a> {
format!("resolve_type_vars_if_possible() yielded {} \
when supplied with {}",
self.ty_to_str(dummy0),
self.ty_to_str(dummy1)));
self.ty_to_str(dummy1)).as_slice());
}
}
}

@@ -729,7 +729,7 @@ impl<'a> InferCtxt<'a> {
err: Option<&ty::type_err>) {
debug!("hi! expected_ty = {:?}, actual_ty = {}", expected_ty, actual_ty);

let error_str = err.map_or("".to_owned(), |t_err| {
let error_str = err.map_or("".to_strbuf(), |t_err| {
format!(" ({})", ty::type_err_to_str(self.tcx, t_err))
});
let resolved_expected = expected_ty.map(|e_ty| {

@@ -737,11 +737,19 @@ impl<'a> InferCtxt<'a> {
});
if !resolved_expected.map_or(false, |e| { ty::type_is_error(e) }) {
match resolved_expected {
None => self.tcx.sess.span_err(sp,
format!("{}{}", mk_msg(None, actual_ty), error_str)),
None => {
self.tcx
.sess
.span_err(sp,
format!("{}{}",
mk_msg(None, actual_ty),
error_str).as_slice())
}
Some(e) => {
self.tcx.sess.span_err(sp,
format!("{}{}", mk_msg(Some(self.ty_to_str(e)), actual_ty), error_str));
format!("{}{}",
mk_msg(Some(self.ty_to_str(e)), actual_ty),
error_str).as_slice());
}
}
for err in err.iter() {

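In the `map_or` call above, the default value has to have the same type as the closure result, which is why `"".to_owned()` becomes `"".to_strbuf()` once `format!` returns `StrBuf`. A present-day analogue with `String`:

    fn main() {
        let err: Option<&str> = Some("expected uint, found bool");
        // Default and closure result are both owned Strings.
        let error_str = err.map_or(String::new(), |e| format!(" ({})", e));
        println!("mismatched types{}", error_str);
    }
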
@@ -297,7 +297,7 @@ impl<'a> RegionVarBindings<'a> {
origin.span(),
format!("cannot relate bound region: {} <= {}",
sub.repr(self.tcx),
sup.repr(self.tcx)));
sup.repr(self.tcx)).as_slice());
}
(_, ReStatic) => {
// all regions are subregions of static, so we can ignore this

@@ -369,8 +369,8 @@ impl<'a> RegionVarBindings<'a> {
None => {
self.tcx.sess.span_bug(
self.var_origins.borrow().get(rid.to_uint()).span(),
format!("attempt to resolve region variable before \
values have been computed!"))
"attempt to resolve region variable before values have \
been computed!")
}
Some(ref values) => *values.get(rid.to_uint())
};

@@ -546,7 +546,7 @@ impl<'a> RegionVarBindings<'a> {
self.tcx.sess.bug(
format!("cannot relate bound region: LUB({}, {})",
a.repr(self.tcx),
b.repr(self.tcx)));
b.repr(self.tcx)).as_slice());
}

(ReStatic, _) | (_, ReStatic) => {

@@ -561,7 +561,9 @@ impl<'a> RegionVarBindings<'a> {
self.tcx.sess.span_bug(
self.var_origins.borrow().get(v_id.to_uint()).span(),
format!("lub_concrete_regions invoked with \
non-concrete regions: {:?}, {:?}", a, b));
non-concrete regions: {:?}, {:?}",
a,
b).as_slice());
}

(f @ ReFree(ref fr), ReScope(s_id)) |

@@ -647,7 +649,7 @@ impl<'a> RegionVarBindings<'a> {
self.tcx.sess.bug(
format!("cannot relate bound region: GLB({}, {})",
a.repr(self.tcx),
b.repr(self.tcx)));
b.repr(self.tcx)).as_slice());
}

(ReStatic, r) | (r, ReStatic) => {

@@ -665,7 +667,9 @@ impl<'a> RegionVarBindings<'a> {
self.tcx.sess.span_bug(
self.var_origins.borrow().get(v_id.to_uint()).span(),
format!("glb_concrete_regions invoked with \
non-concrete regions: {:?}, {:?}", a, b));
non-concrete regions: {:?}, {:?}",
a,
b).as_slice());
}

(ReFree(ref fr), s @ ReScope(s_id)) |

@@ -1175,7 +1179,7 @@ impl<'a> RegionVarBindings<'a> {
upper_bounds.iter()
.map(|x| x.region)
.collect::<Vec<ty::Region>>()
.repr(self.tcx)));
.repr(self.tcx)).as_slice());
}

fn collect_error_for_contracting_node(

@@ -1222,7 +1226,7 @@ impl<'a> RegionVarBindings<'a> {
upper_bounds.iter()
.map(|x| x.region)
.collect::<Vec<ty::Region>>()
.repr(self.tcx)));
.repr(self.tcx)).as_slice());
}

fn collect_concrete_regions(&self,

@@ -87,7 +87,7 @@ impl<'a> UnifyInferCtxtMethods for InferCtxt<'a> {
Some(&ref var_val) => (*var_val).clone(),
None => {
tcx.sess.bug(format!(
"failed lookup of vid `{}`", vid_u));
"failed lookup of vid `{}`", vid_u).as_slice());
}
};
match var_val {

@@ -311,7 +311,8 @@ pub fn require_same_types(tcx: &ty::ctxt,
tcx.sess.span_err(span,
format!("{}: {}",
msg(),
ty::type_err_to_str(tcx, terr)));
ty::type_err_to_str(tcx,
terr)).as_slice());
ty::note_and_explain_type_err(tcx, terr);
false
}

@@ -359,8 +360,10 @@ fn check_main_fn_ty(ccx: &CrateCtxt,
}
_ => {
tcx.sess.span_bug(main_span,
format!("main has a non-function type: found `{}`",
ppaux::ty_to_str(tcx, main_t)));
format!("main has a non-function type: found \
`{}`",
ppaux::ty_to_str(tcx,
main_t)).as_slice());
}
}
}

@@ -411,8 +414,10 @@ fn check_start_fn_ty(ccx: &CrateCtxt,
}
_ => {
tcx.sess.span_bug(start_span,
format!("start has a non-function type: found `{}`",
ppaux::ty_to_str(tcx, start_t)));
format!("start has a non-function type: found \
`{}`",
ppaux::ty_to_str(tcx,
start_t)).as_slice());
}
}
}

@@ -538,8 +538,8 @@ impl<'a> ConstraintContext<'a> {
Some(&index) => index,
None => {
self.tcx().sess.bug(format!(
"No inferred index entry for {}",
self.tcx().map.node_to_str(param_id)));
"no inferred index entry for {}",
self.tcx().map.node_to_str(param_id)).as_slice());
}
}
}

@@ -787,7 +787,7 @@ impl<'a> ConstraintContext<'a> {
self.tcx().sess.bug(
format!("unexpected type encountered in \
variance inference: {}",
ty.repr(self.tcx())));
ty.repr(self.tcx())).as_slice());
}
}
}

@@ -858,9 +858,11 @@ impl<'a> ConstraintContext<'a> {
ty::ReEmpty => {
// We don't expect to see anything but 'static or bound
// regions when visiting member types or method types.
self.tcx().sess.bug(format!("unexpected region encountered in \
variance inference: {}",
region.repr(self.tcx())));
self.tcx()
.sess
.bug(format!("unexpected region encountered in variance \
inference: {}",
region.repr(self.tcx())).as_slice());
}
}
}

@@ -50,11 +50,11 @@ pub fn note_and_explain_region(cx: &ctxt,
(ref str, Some(span)) => {
cx.sess.span_note(
span,
format!("{}{}{}", prefix, *str, suffix));
format!("{}{}{}", prefix, *str, suffix).as_slice());
}
(ref str, None) => {
cx.sess.note(
format!("{}{}{}", prefix, *str, suffix));
format!("{}{}{}", prefix, *str, suffix).as_slice());
}
}
}

@@ -255,13 +255,13 @@ pub fn ty_to_str(cx: &ctxt, typ: t) -> StrBuf {
match fn_style {
ast::NormalFn => {}
_ => {
s.push_str(fn_style.to_str());
s.push_str(fn_style.to_str().as_slice());
s.push_char(' ');
}
};

if abi != abi::Rust {
s.push_str(format!("extern {} ", abi.to_str()));
s.push_str(format!("extern {} ", abi.to_str()).as_slice());
};

s.push_str("fn");

@@ -292,7 +292,7 @@ pub fn ty_to_str(cx: &ctxt, typ: t) -> StrBuf {
match cty.fn_style {
ast::NormalFn => {}
_ => {
s.push_str(cty.fn_style.to_str());
s.push_str(cty.fn_style.to_str().as_slice());
s.push_char(' ');
}
};

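The ppaux hunks show the other common shape of this migration: appending formatted pieces to a growing buffer, where `push_str` wants a `&str` and the `format!` result therefore gets borrowed with `.as_slice()`. A rough modern equivalent, with `write!` shown as the allocation-free alternative; the `abi` value is just sample data:

    use std::fmt::Write;

    fn main() {
        let abi = "C";
        let mut s = String::new();
        s.push_str(&format!("extern {} ", abi)); // direct counterpart of the diff
        write!(s, "fn()").unwrap();              // same effect without a temporary
        assert_eq!(s, "extern C fn()");
    }
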
@@ -184,12 +184,12 @@ fn path(w: &mut fmt::Formatter, path: &clean::Path, print_all: bool,
for lifetime in last.lifetimes.iter() {
if counter > 0 { generics.push_str(", "); }
counter += 1;
generics.push_str(format!("{}", *lifetime));
generics.push_str(format!("{}", *lifetime).as_slice());
}
for ty in last.types.iter() {
if counter > 0 { generics.push_str(", "); }
counter += 1;
generics.push_str(format!("{}", *ty));
generics.push_str(format!("{}", *ty).as_slice());
}
generics.push_str(">");
}

@@ -323,18 +323,22 @@ impl fmt::Show for clean::Type {
{arrow, select, yes{ -> {ret}} other{}}",
style = FnStyleSpace(decl.fn_style),
lifetimes = if decl.lifetimes.len() == 0 {
"".to_owned()
"".to_strbuf()
} else {
format!("<{:#}>", decl.lifetimes)
},
args = decl.decl.inputs,
arrow = match decl.decl.output { clean::Unit => "no", _ => "yes" },
arrow = match decl.decl.output {
clean::Unit => "no",
_ => "yes",
},
ret = decl.decl.output,
bounds = {
let mut ret = StrBuf::new();
match *region {
Some(ref lt) => {
ret.push_str(format!(": {}", *lt));
ret.push_str(format!(": {}",
*lt).as_slice());
}
None => {}
}

@@ -347,7 +351,8 @@ impl fmt::Show for clean::Type {
} else {
ret.push_str(" + ");
}
ret.push_str(format!("{}", *t));
ret.push_str(format!("{}",
*t).as_slice());
}
}
}

@@ -416,7 +421,10 @@ impl fmt::Show for clean::Type {
}, **t)
}
clean::BorrowedRef{ lifetime: ref l, mutability, type_: ref ty} => {
let lt = match *l { Some(ref l) => format!("{} ", *l), _ => "".to_owned() };
let lt = match *l {
Some(ref l) => format!("{} ", *l),
_ => "".to_strbuf(),
};
write!(f, "&{}{}{}",
lt,
match mutability {

@@ -460,10 +468,10 @@ impl<'a> fmt::Show for Method<'a> {
clean::SelfValue => args.push_str("self"),
clean::SelfOwned => args.push_str("~self"),
clean::SelfBorrowed(Some(ref lt), clean::Immutable) => {
args.push_str(format!("&{} self", *lt));
args.push_str(format!("&{} self", *lt).as_slice());
}
clean::SelfBorrowed(Some(ref lt), clean::Mutable) => {
args.push_str(format!("&{} mut self", *lt));
args.push_str(format!("&{} mut self", *lt).as_slice());
}
clean::SelfBorrowed(None, clean::Mutable) => {
args.push_str("&mut self");

@@ -475,9 +483,9 @@ impl<'a> fmt::Show for Method<'a> {
for (i, input) in d.inputs.values.iter().enumerate() {
if i > 0 || args.len() > 0 { args.push_str(", "); }
if input.name.len() > 0 {
args.push_str(format!("{}: ", input.name));
args.push_str(format!("{}: ", input.name).as_slice());
}
args.push_str(format!("{}", input.type_));
args.push_str(format!("{}", input.type_).as_slice());
}
write!(f,
"({args}){arrow, select, yes{ -> {ret}} other{}}",

@@ -407,8 +407,11 @@ pub fn run(mut krate: clean::Crate, dst: Path) -> io::IoResult<()> {
if path.exists() {
for line in BufferedReader::new(File::open(path)).lines() {
let line = try!(line);
if !line.starts_with(key) { continue }
if line.starts_with(format!("{}['{}']", key, krate)) {
if !line.as_slice().starts_with(key) {
continue
}
if line.as_slice().starts_with(
format!("{}['{}']", key, krate).as_slice()) {
continue
}
ret.push(line.to_strbuf());

@@ -646,7 +649,7 @@ impl<'a> SourceCollector<'a> {

let title = format!("{} -- source", cur.filename_display());
let page = layout::Page {
title: title,
title: title.as_slice(),
ty: "source",
root_path: root_path.as_slice(),
};

@@ -1344,7 +1347,7 @@ fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item,
parents.push_str(": ");
for (i, p) in t.parents.iter().enumerate() {
if i > 0 { parents.push_str(" + "); }
parents.push_str(format!("{}", *p));
parents.push_str(format!("{}", *p).as_slice());
}
}

@@ -132,7 +132,7 @@ pub fn opts() -> Vec<getopts::OptGroup> {

pub fn usage(argv0: &str) {
println!("{}",
getopts::usage(format!("{} [options] <input>", argv0),
getopts::usage(format!("{} [options] <input>", argv0).as_slice(),
opts().as_slice()));
}

@@ -80,7 +80,7 @@ pub fn render(input: &str, mut output: Path, matches: &getopts::Matches) -> int
let mut css = StrBuf::new();
for name in matches.opt_strs("markdown-css").iter() {
let s = format!("<link rel=\"stylesheet\" type=\"text/css\" href=\"{}\">\n", name);
css.push_str(s)
css.push_str(s.as_slice())
}

let input_str = load_or_return!(input, 1, 2);

@@ -213,7 +213,8 @@ fn maketest(s: &str, cratename: &str, loose_feature_gating: bool) -> StrBuf {

if !s.contains("extern crate") {
if s.contains(cratename) {
prog.push_str(format!("extern crate {};\n", cratename));
prog.push_str(format!("extern crate {};\n",
cratename).as_slice());
}
}
if s.contains("fn main") {

@@ -387,7 +387,7 @@ impl fmt::Show for UvError {
#[test]
fn error_smoke_test() {
let err: UvError = UvError(uvll::EOF);
assert_eq!(err.to_str(), "EOF: end of file".to_owned());
assert_eq!(err.to_str(), "EOF: end of file".to_strbuf());
}

pub fn uv_error_to_io_error(uverr: UvError) -> IoError {

@@ -663,7 +663,7 @@ pub mod writer {
_ => Err(io::IoError {
kind: io::OtherIoError,
desc: "int too big",
detail: Some(format!("{}", n))
detail: Some(format_strbuf!("{}", n))
})
}
}

@@ -676,7 +676,7 @@ pub mod writer {
Err(io::IoError {
kind: io::OtherIoError,
desc: "int too big",
detail: Some(format!("{}", n))
detail: Some(format_strbuf!("{}", n))
})
}

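In the ebml writer the error's `detail` field stores an owned string, so the message is now formatted straight into that owned type with `format_strbuf!`. A sketch of the same shape in current Rust, with a simplified stand-in for the era's `io::IoError`:

    // Simplified stand-in for the io::IoError of this era.
    struct IoError {
        desc: &'static str,
        detail: Option<String>,
    }

    fn main() {
        let n: u64 = 1 << 40;
        let err = IoError { desc: "int too big", detail: Some(format!("{}", n)) };
        println!("{}: {}", err.desc, err.detail.unwrap());
    }
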
@@ -16,10 +16,9 @@ use iter::Iterator;
use mem;
use option::{Option, Some, None};
use slice::{ImmutableVector, MutableVector, Vector};
use str::OwnedStr;
use str::Str;
use str::{StrAllocating, StrSlice};
use str::{OwnedStr, Str, StrAllocating, StrSlice};
use str;
use strbuf::StrBuf;
use to_str::{IntoStr};
use vec::Vec;

@@ -249,7 +248,7 @@ impl OwnedAsciiCast for ~[u8] {
}
}

impl OwnedAsciiCast for ~str {
impl OwnedAsciiCast for StrBuf {
#[inline]
fn is_ascii(&self) -> bool {
self.as_slice().is_ascii()

@@ -257,7 +256,7 @@ impl OwnedAsciiCast for ~str {

#[inline]
unsafe fn into_ascii_nocheck(self) -> Vec<Ascii> {
let v: ~[u8] = mem::transmute(self);
let v: Vec<u8> = mem::transmute(self);
v.into_ascii_nocheck()
}
}

@@ -314,17 +313,18 @@ impl<'a> AsciiStr for &'a [Ascii] {

impl IntoStr for ~[Ascii] {
#[inline]
fn into_str(self) -> ~str {
unsafe { mem::transmute(self) }
fn into_str(self) -> StrBuf {
let vector: Vec<Ascii> = self.as_slice().iter().map(|x| *x).collect();
vector.into_str()
}
}

impl IntoStr for Vec<Ascii> {
#[inline]
fn into_str(self) -> ~str {
fn into_str(self) -> StrBuf {
unsafe {
let s: &str = mem::transmute(self.as_slice());
s.to_owned()
s.to_strbuf()
}
}
}

@@ -346,12 +346,12 @@ pub trait OwnedStrAsciiExt {
/// Convert the string to ASCII upper case:
/// ASCII letters 'a' to 'z' are mapped to 'A' to 'Z',
/// but non-ASCII letters are unchanged.
fn into_ascii_upper(self) -> ~str;
fn into_ascii_upper(self) -> StrBuf;

/// Convert the string to ASCII lower case:
/// ASCII letters 'A' to 'Z' are mapped to 'a' to 'z',
/// but non-ASCII letters are unchanged.
fn into_ascii_lower(self) -> ~str;
fn into_ascii_lower(self) -> StrBuf;
}

/// Extension methods for ASCII-subset only operations on string slices

@@ -359,12 +359,12 @@ pub trait StrAsciiExt {
/// Makes a copy of the string in ASCII upper case:
/// ASCII letters 'a' to 'z' are mapped to 'A' to 'Z',
/// but non-ASCII letters are unchanged.
fn to_ascii_upper(&self) -> ~str;
fn to_ascii_upper(&self) -> StrBuf;

/// Makes a copy of the string in ASCII lower case:
/// ASCII letters 'A' to 'Z' are mapped to 'a' to 'z',
/// but non-ASCII letters are unchanged.
fn to_ascii_lower(&self) -> ~str;
fn to_ascii_lower(&self) -> StrBuf;

/// Check that two strings are an ASCII case-insensitive match.
/// Same as `to_ascii_lower(a) == to_ascii_lower(b)`,

@@ -374,12 +374,12 @@ pub trait StrAsciiExt {

impl<'a> StrAsciiExt for &'a str {
#[inline]
fn to_ascii_upper(&self) -> ~str {
fn to_ascii_upper(&self) -> StrBuf {
unsafe { str_copy_map_bytes(*self, ASCII_UPPER_MAP) }
}

#[inline]
fn to_ascii_lower(&self) -> ~str {
fn to_ascii_lower(&self) -> StrBuf {
unsafe { str_copy_map_bytes(*self, ASCII_LOWER_MAP) }
}

@@ -394,36 +394,36 @@ impl<'a> StrAsciiExt for &'a str {
}
}

impl OwnedStrAsciiExt for ~str {
impl OwnedStrAsciiExt for StrBuf {
#[inline]
fn into_ascii_upper(self) -> ~str {
fn into_ascii_upper(self) -> StrBuf {
unsafe { str_map_bytes(self, ASCII_UPPER_MAP) }
}

#[inline]
fn into_ascii_lower(self) -> ~str {
fn into_ascii_lower(self) -> StrBuf {
unsafe { str_map_bytes(self, ASCII_LOWER_MAP) }
}
}

#[inline]
unsafe fn str_map_bytes(string: ~str, map: &'static [u8]) -> ~str {
unsafe fn str_map_bytes(string: StrBuf, map: &'static [u8]) -> StrBuf {
let mut bytes = string.into_bytes();

for b in bytes.mut_iter() {
*b = map[*b as uint];
}

str::raw::from_utf8_owned(bytes)
str::from_utf8(bytes.as_slice()).unwrap().to_strbuf()
}

#[inline]
unsafe fn str_copy_map_bytes(string: &str, map: &'static [u8]) -> ~str {
unsafe fn str_copy_map_bytes(string: &str, map: &'static [u8]) -> StrBuf {
let mut s = string.to_owned();
for b in str::raw::as_owned_vec(&mut s).mut_iter() {
*b = map[*b as uint];
}
s
s.into_strbuf()
}

static ASCII_LOWER_MAP: &'static [u8] = &[

@@ -552,15 +552,17 @@ mod tests {
assert_eq!("( ;".to_ascii(), v2ascii!([40, 32, 59]));
// FIXME: #5475 borrowchk error, owned vectors do not live long enough
// if chained-from directly
let v = box [40u8, 32u8, 59u8]; assert_eq!(v.to_ascii(), v2ascii!([40, 32, 59]));
let v = "( ;".to_owned(); assert_eq!(v.to_ascii(), v2ascii!([40, 32, 59]));
let v = box [40u8, 32u8, 59u8];
assert_eq!(v.to_ascii(), v2ascii!([40, 32, 59]));
let v = "( ;".to_strbuf();
assert_eq!(v.as_slice().to_ascii(), v2ascii!([40, 32, 59]));

assert_eq!("abCDef&?#".to_ascii().to_lower().into_str(), "abcdef&?#".to_owned());
assert_eq!("abCDef&?#".to_ascii().to_upper().into_str(), "ABCDEF&?#".to_owned());
assert_eq!("abCDef&?#".to_ascii().to_lower().into_str(), "abcdef&?#".to_strbuf());
assert_eq!("abCDef&?#".to_ascii().to_upper().into_str(), "ABCDEF&?#".to_strbuf());

assert_eq!("".to_ascii().to_lower().into_str(), "".to_owned());
assert_eq!("YMCA".to_ascii().to_lower().into_str(), "ymca".to_owned());
assert_eq!("abcDEFxyz:.;".to_ascii().to_upper().into_str(), "ABCDEFXYZ:.;".to_owned());
assert_eq!("".to_ascii().to_lower().into_str(), "".to_strbuf());
assert_eq!("YMCA".to_ascii().to_lower().into_str(), "ymca".to_strbuf());
assert_eq!("abcDEFxyz:.;".to_ascii().to_upper().into_str(), "ABCDEFXYZ:.;".to_strbuf());

assert!("aBcDeF&?#".to_ascii().eq_ignore_case("AbCdEf&?#".to_ascii()));

@@ -572,16 +574,16 @@ mod tests {

#[test]
fn test_ascii_vec_ng() {
assert_eq!("abCDef&?#".to_ascii().to_lower().into_str(), "abcdef&?#".to_owned());
assert_eq!("abCDef&?#".to_ascii().to_upper().into_str(), "ABCDEF&?#".to_owned());
assert_eq!("".to_ascii().to_lower().into_str(), "".to_owned());
assert_eq!("YMCA".to_ascii().to_lower().into_str(), "ymca".to_owned());
assert_eq!("abcDEFxyz:.;".to_ascii().to_upper().into_str(), "ABCDEFXYZ:.;".to_owned());
assert_eq!("abCDef&?#".to_ascii().to_lower().into_str(), "abcdef&?#".to_strbuf());
assert_eq!("abCDef&?#".to_ascii().to_upper().into_str(), "ABCDEF&?#".to_strbuf());
assert_eq!("".to_ascii().to_lower().into_str(), "".to_strbuf());
assert_eq!("YMCA".to_ascii().to_lower().into_str(), "ymca".to_strbuf());
assert_eq!("abcDEFxyz:.;".to_ascii().to_upper().into_str(), "ABCDEFXYZ:.;".to_strbuf());
}

#[test]
fn test_owned_ascii_vec() {
assert_eq!(("( ;".to_owned()).into_ascii(), vec2ascii![40, 32, 59]);
assert_eq!(("( ;".to_strbuf()).into_ascii(), vec2ascii![40, 32, 59]);
assert_eq!((box [40u8, 32u8, 59u8]).into_ascii(), vec2ascii![40, 32, 59]);
}

@@ -593,8 +595,8 @@ mod tests {

#[test]
fn test_ascii_into_str() {
assert_eq!(vec2ascii![40, 32, 59].into_str(), "( ;".to_owned());
assert_eq!(vec2ascii!(40, 32, 59).into_str(), "( ;".to_owned());
assert_eq!(vec2ascii![40, 32, 59].into_str(), "( ;".to_strbuf());
assert_eq!(vec2ascii!(40, 32, 59).into_str(), "( ;".to_strbuf());
}

#[test]

@@ -641,70 +643,70 @@ mod tests {
assert_eq!((vec![40u8, 32u8, 59u8]).into_ascii_opt(), Some(vec2ascii![40, 32, 59]));
assert_eq!((vec![127u8, 128u8, 255u8]).into_ascii_opt(), None);

assert_eq!(("( ;".to_owned()).into_ascii_opt(), Some(vec2ascii![40, 32, 59]));
assert_eq!(("zoä华".to_owned()).into_ascii_opt(), None);
assert_eq!(("( ;".to_strbuf()).into_ascii_opt(), Some(vec2ascii![40, 32, 59]));
assert_eq!(("zoä华".to_strbuf()).into_ascii_opt(), None);
}

#[test]
fn test_to_ascii_upper() {
assert_eq!("url()URL()uRl()ürl".to_ascii_upper(), "URL()URL()URL()üRL".to_owned());
assert_eq!("hıKß".to_ascii_upper(), "HıKß".to_owned());
assert_eq!("url()URL()uRl()ürl".to_ascii_upper(), "URL()URL()URL()üRL".to_strbuf());
assert_eq!("hıKß".to_ascii_upper(), "HıKß".to_strbuf());

let mut i = 0;
while i <= 500 {
let upper = if 'a' as u32 <= i && i <= 'z' as u32 { i + 'A' as u32 - 'a' as u32 }
else { i };
assert_eq!(from_char(from_u32(i).unwrap()).to_ascii_upper(),
from_char(from_u32(upper).unwrap()))
from_char(from_u32(upper).unwrap()).to_strbuf())
i += 1;
}
}

#[test]
fn test_to_ascii_lower() {
assert_eq!("url()URL()uRl()Ürl".to_ascii_lower(), "url()url()url()Ürl".to_owned());
assert_eq!("url()URL()uRl()Ürl".to_ascii_lower(), "url()url()url()Ürl".to_strbuf());
// Dotted capital I, Kelvin sign, Sharp S.
assert_eq!("HİKß".to_ascii_lower(), "hİKß".to_owned());
assert_eq!("HİKß".to_ascii_lower(), "hİKß".to_strbuf());

let mut i = 0;
while i <= 500 {
let lower = if 'A' as u32 <= i && i <= 'Z' as u32 { i + 'a' as u32 - 'A' as u32 }
else { i };
assert_eq!(from_char(from_u32(i).unwrap()).to_ascii_lower(),
from_char(from_u32(lower).unwrap()))
from_char(from_u32(lower).unwrap()).to_strbuf())
i += 1;
}
}

#[test]
fn test_into_ascii_upper() {
assert_eq!(("url()URL()uRl()ürl".to_owned()).into_ascii_upper(),
"URL()URL()URL()üRL".to_owned());
assert_eq!(("hıKß".to_owned()).into_ascii_upper(), "HıKß".to_owned());
assert_eq!(("url()URL()uRl()ürl".to_strbuf()).into_ascii_upper(),
"URL()URL()URL()üRL".to_strbuf());
assert_eq!(("hıKß".to_strbuf()).into_ascii_upper(), "HıKß".to_strbuf());

let mut i = 0;
while i <= 500 {
let upper = if 'a' as u32 <= i && i <= 'z' as u32 { i + 'A' as u32 - 'a' as u32 }
else { i };
assert_eq!(from_char(from_u32(i).unwrap()).into_ascii_upper(),
from_char(from_u32(upper).unwrap()))
assert_eq!(from_char(from_u32(i).unwrap()).to_strbuf().into_ascii_upper(),
from_char(from_u32(upper).unwrap()).to_strbuf())
i += 1;
}
}

#[test]
fn test_into_ascii_lower() {
assert_eq!(("url()URL()uRl()Ürl".to_owned()).into_ascii_lower(),
"url()url()url()Ürl".to_owned());
assert_eq!(("url()URL()uRl()Ürl".to_strbuf()).into_ascii_lower(),
"url()url()url()Ürl".to_strbuf());
// Dotted capital I, Kelvin sign, Sharp S.
assert_eq!(("HİKß".to_owned()).into_ascii_lower(), "hİKß".to_owned());
assert_eq!(("HİKß".to_strbuf()).into_ascii_lower(), "hİKß".to_strbuf());

let mut i = 0;
while i <= 500 {
let lower = if 'A' as u32 <= i && i <= 'Z' as u32 { i + 'a' as u32 - 'A' as u32 }
else { i };
assert_eq!(from_char(from_u32(i).unwrap()).into_ascii_lower(),
from_char(from_u32(lower).unwrap()))
assert_eq!(from_char(from_u32(i).unwrap()).to_strbuf().into_ascii_lower(),
from_char(from_u32(lower).unwrap()).to_strbuf())
i += 1;
}
}

@@ -724,8 +726,11 @@ mod tests {
let c = i;
let lower = if 'A' as u32 <= c && c <= 'Z' as u32 { c + 'a' as u32 - 'A' as u32 }
else { c };
assert!(from_char(from_u32(i).unwrap()).
eq_ignore_ascii_case(from_char(from_u32(lower).unwrap())));
assert!(from_char(from_u32(i).unwrap()).as_slice()
.eq_ignore_ascii_case(
from_char(
from_u32(lower)
.unwrap())));
i += 1;
}
}

@@ -733,12 +738,12 @@ mod tests {
#[test]
fn test_to_str() {
let s = Ascii{ chr: 't' as u8 }.to_str();
assert_eq!(s, "t".to_owned());
assert_eq!(s, "t".to_strbuf());
}

#[test]
fn test_show() {
let c = Ascii { chr: 't' as u8 };
assert_eq!(format!("{}", c), "t".to_owned());
assert_eq!(format_strbuf!("{}", c), "t".to_strbuf());
}
}

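The ascii.rs changes are mostly signature migrations: `IntoStr::into_str` and the `StrAsciiExt`/`OwnedStrAsciiExt` methods now return `StrBuf` rather than `~str`, and the impl that used to live on `~str` moves to `StrBuf`. A small extension-trait sketch in today's Rust showing the same shape, with `String` in the owned-return position; the trait and method names here are illustrative, not the library's:

    // Illustrative extension trait returning an owned String.
    trait AsciiUpperExt {
        fn to_ascii_upper_copy(&self) -> String;
    }

    impl AsciiUpperExt for str {
        fn to_ascii_upper_copy(&self) -> String {
            // Uppercase ASCII letters only; other characters pass through.
            self.chars()
                .map(|c| if c.is_ascii() { c.to_ascii_uppercase() } else { c })
                .collect()
        }
    }

    fn main() {
        assert_eq!("url()URL()uRl()ürl".to_ascii_upper_copy(), "URL()URL()URL()üRL");
    }
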
@@ -990,7 +990,7 @@ mod test {

pub fn stress_factor() -> uint {
match os::getenv("RUST_TEST_STRESS") {
Some(val) => from_str::<uint>(val).unwrap(),
Some(val) => from_str::<uint>(val.as_slice()).unwrap(),
None => 1,
}
}

@@ -1523,7 +1523,7 @@ mod sync_tests {

pub fn stress_factor() -> uint {
match os::getenv("RUST_TEST_STRESS") {
Some(val) => from_str::<uint>(val).unwrap(),
Some(val) => from_str::<uint>(val.as_slice()).unwrap(),
None => 1,
}
}

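`os::getenv` now hands back an owned `StrBuf`, so the numeric parse in `stress_factor` has to go through `.as_slice()`. The same idea with today's `std::env::var`, which returns an owned `String`; defaulting to 1 on a missing or unparsable value is this sketch's choice, not the original's:

    use std::env;

    pub fn stress_factor() -> usize {
        match env::var("RUST_TEST_STRESS") {
            // Parse a borrowed slice of the owned value.
            Ok(val) => val.as_str().parse().unwrap_or(1),
            Err(_) => 1,
        }
    }

    fn main() {
        println!("stress factor: {}", stress_factor());
    }
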
@@ -27,9 +27,9 @@ general case.

The `format!` macro is intended to be familiar to those coming from C's
printf/fprintf functions or Python's `str.format` function. In its current
revision, the `format!` macro returns a `~str` type which is the result of the
formatting. In the future it will also be able to pass in a stream to format
arguments directly while performing minimal allocations.
revision, the `format!` macro returns a `StrBuf` type which is the result of
the formatting. In the future it will also be able to pass in a stream to
format arguments directly while performing minimal allocations.

Some examples of the `format!` extension are:

@@ -282,7 +282,7 @@ use std::io;

# #[allow(unused_must_use)]
# fn main() {
format_args!(fmt::format, "this returns {}", "~str");
format_args!(fmt::format, "this returns {}", "StrBuf");

let some_writer: &mut io::Writer = &mut io::stdout();
format_args!(|args| { write!(some_writer, "{}", args) }, "print with a {}", "closure");

@@ -488,7 +488,7 @@ use io;
use option::None;
use repr;
use result::{Ok, Err};
use str::{StrAllocating};
use str::{Str, StrAllocating};
use str;
use strbuf::StrBuf;
use slice::Vector;

@@ -545,10 +545,10 @@ pub trait Poly {
/// let s = format_args!(fmt::format, "Hello, {}!", "world");
/// assert_eq!(s, "Hello, world!".to_owned());
/// ```
pub fn format(args: &Arguments) -> ~str {
pub fn format(args: &Arguments) -> StrBuf {
let mut output = io::MemWriter::new();
let _ = write!(&mut output, "{}", args);
str::from_utf8(output.unwrap().as_slice()).unwrap().to_owned()
str::from_utf8(output.unwrap().as_slice()).unwrap().into_strbuf()
}

/// Temporary transition utility

@@ -572,7 +572,7 @@ impl<T> Poly for T {
// this allocation of a new string
_ => {
let s = repr::repr_to_str(self);
f.pad(s)
f.pad(s.as_slice())
}
}
}

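`fmt::format` now collects its output into a `StrBuf`, and the `Poly` impl borrows that buffer with `.as_slice()` before handing it to `Formatter::pad`. The analogous pattern in current Rust, formatting into an owned `String` and padding a borrowed slice of it; the `Padded` type is a made-up example:

    use std::fmt;

    struct Padded(u32);

    impl fmt::Display for Padded {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            let s = format!("#{}", self.0); // owned buffer, like StrBuf here
            f.pad(s.as_str())               // pad() takes &str and applies width/alignment
        }
    }

    fn main() {
        assert_eq!(format!("{:>6}", Padded(7)), "    #7");
    }
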
Some files were not shown because too many files have changed in this diff.