auto merge of #15493 : brson/rust/tostr, r=pcwalton

This updates https://github.com/rust-lang/rust/pull/15075.

Rename `ToStr::to_str` to `ToString::to_string`. The naive renaming leaves two `to_string` methods defined on strings in the prelude (the other comes from `collections::str::StrAllocating`). To remedy this I removed `StrAllocating::to_string`, so all conversions from `&str` to `String` now go through `Show`. This measurably slows that conversion, but the sense I get from others is that it's best to unify `to_string` now and address performance for all `to_string` conversions in `core::fmt`. `String::from_str(...)` still works as a manual fast path.
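
For readers skimming the diff, here is a minimal, self-contained sketch (in the Rust of this era, not the exact code landed in this patch) of what unifying `to_string` on top of `Show` means, alongside the `String::from_str` fast path mentioned above:

```rust
use std::fmt;

// Sketch of the unified approach: any type implementing `Show` gets
// `to_string` by going through the `format!` machinery in `core::fmt`,
// so `&str`, numbers, `Bitv`, etc. all share one code path.
pub trait ToString {
    fn to_string(&self) -> String;
}

impl<T: fmt::Show> ToString for T {
    fn to_string(&self) -> String {
        format!("{}", *self)
    }
}

fn main() {
    // Unified path: routed through the formatting machinery.
    let formatted: String = 42i.to_string();
    // Manual fast path for string slices: a plain copy, no formatting.
    let copied: String = String::from_str("42");
    assert_eq!(formatted, copied);
}
```

The trade-off is that converting a `&str` this way walks the formatting machinery instead of doing a straight byte copy, which is the performance cost referred to above.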

Note that the patch was done with a script, which ended up renaming a number of other `*_to_str` functions, particularly inside rustc. All the ones I saw looked correct, and I didn't notice any additional API breakage.

Closes #15046.
Committed by bors · 2014-07-08 20:06:40 +00:00 · commit 8bb34a3146
208 changed files with 1557 additions and 1390 deletions

View file

@ -187,7 +187,7 @@ pub fn log_config(config: &Config) {
opt_str(&config.filter opt_str(&config.filter
.as_ref() .as_ref()
.map(|re| { .map(|re| {
re.to_str().into_string() re.to_string().into_string()
})))); }))));
logv(c, format!("runtool: {}", opt_str(&config.runtool))); logv(c, format!("runtool: {}", opt_str(&config.runtool)));
logv(c, format!("host-rustcflags: {}", logv(c, format!("host-rustcflags: {}",

View file

@ -31,7 +31,7 @@ pub fn load_errors(re: &Regex, testfile: &Path) -> Vec<ExpectedError> {
fn parse_expected(line_num: uint, line: &str, re: &Regex) -> Option<ExpectedError> { fn parse_expected(line_num: uint, line: &str, re: &Regex) -> Option<ExpectedError> {
re.captures(line).and_then(|caps| { re.captures(line).and_then(|caps| {
let adjusts = caps.name("adjusts").len(); let adjusts = caps.name("adjusts").len();
let kind = caps.name("kind").to_ascii().to_lower().into_str(); let kind = caps.name("kind").to_ascii().to_lower().into_string();
let msg = caps.name("msg").trim().to_string(); let msg = caps.name("msg").trim().to_string();
debug!("line={} kind={} msg={}", line_num, kind, msg); debug!("line={} kind={} msg={}", line_num, kind, msg);

View file

@ -41,7 +41,7 @@ pub fn make_new_path(path: &str) -> String {
Some(curr) => { Some(curr) => {
format!("{}{}{}", path, path_div(), curr) format!("{}{}{}", path, path_div(), curr)
} }
None => path.to_str() None => path.to_string()
} }
} }

View file

@ -465,7 +465,7 @@ fn stringifier(channel: &DuplexStream<String, uint>) {
let mut value: uint; let mut value: uint;
loop { loop {
value = channel.recv(); value = channel.recv();
channel.send(value.to_str()); channel.send(value.to_string());
if value == 0 { break; } if value == 0 { break; }
} }
} }
@ -478,7 +478,7 @@ send strings (the first type parameter) and receive `uint` messages
(the second type parameter). The body itself simply loops, reading (the second type parameter). The body itself simply loops, reading
from the channel and then sending its response back. The actual from the channel and then sending its response back. The actual
response itself is simply the stringified version of the received value, response itself is simply the stringified version of the received value,
`uint::to_str(value)`. `uint::to_string(value)`.
Here is the code for the parent task: Here is the code for the parent task:
@ -492,7 +492,7 @@ use std::comm::duplex;
# let mut value: uint; # let mut value: uint;
# loop { # loop {
# value = channel.recv(); # value = channel.recv();
# channel.send(value.to_str()); # channel.send(value.to_string());
# if value == 0u { break; } # if value == 0u { break; }
# } # }
# } # }

View file

@ -23,7 +23,7 @@ msgstr ""
#| "[tarball]: http://static.rust-lang.org/dist/rust-nightly.tar.gz [win-exe]: " #| "[tarball]: http://static.rust-lang.org/dist/rust-nightly.tar.gz [win-exe]: "
#| "http://static.rust-lang.org/dist/rust-nightly-install.exe" #| "http://static.rust-lang.org/dist/rust-nightly-install.exe"
msgid "" msgid ""
"Use [`ToStr`](http://static.rust-lang.org/doc/master/std/to_str/trait.ToStr." "Use [`ToString`](http://static.rust-lang.org/doc/master/std/to_str/trait.ToString."
"html)." "html)."
msgstr "" msgstr ""
"[tarball]: http://static.rust-lang.org/dist/rust-nightly.tar.gz\n" "[tarball]: http://static.rust-lang.org/dist/rust-nightly.tar.gz\n"
@ -34,7 +34,7 @@ msgstr ""
#, fuzzy #, fuzzy
#| msgid "" #| msgid ""
#| "~~~~ let x: f64 = 4.0; let y: uint = x as uint; assert!(y == 4u); ~~~~" #| "~~~~ let x: f64 = 4.0; let y: uint = x as uint; assert!(y == 4u); ~~~~"
msgid "~~~ let x: int = 42; let y: String = x.to_str(); ~~~" msgid "~~~ let x: int = 42; let y: String = x.to_string(); ~~~"
msgstr "" msgstr ""
"~~~~\n" "~~~~\n"
"let x: f64 = 4.0;\n" "let x: f64 = 4.0;\n"

View file

@ -1656,7 +1656,7 @@ msgstr ""
#| msgid "~~~~ {.ignore} // main.rs extern crate world; fn main() { println(~\"hello \" + world::explore()); } ~~~~" #| msgid "~~~~ {.ignore} // main.rs extern crate world; fn main() { println(~\"hello \" + world::explore()); } ~~~~"
msgid "" msgid ""
"impl Printable for int {\n" "impl Printable for int {\n"
" fn to_string(&self) -> String { self.to_str() }\n" " fn to_string(&self) -> String { self.to_string() }\n"
"}\n" "}\n"
msgstr "" msgstr ""
"~~~~ {.ignore}\n" "~~~~ {.ignore}\n"

View file

@ -4410,9 +4410,9 @@ msgstr ""
#. type: Plain text #. type: Plain text
#: src/doc/tutorial.md:2528 #: src/doc/tutorial.md:2528
msgid "#[deriving(Rand, ToStr)] enum ABC { A, B, C } ~~~" msgid "#[deriving(Rand, ToString)] enum ABC { A, B, C } ~~~"
msgstr "" msgstr ""
"#[deriving(Rand, ToStr)]\n" "#[deriving(Rand, ToString)]\n"
"enum ABC { A, B, C }\n" "enum ABC { A, B, C }\n"
"~~~" "~~~"
@ -4422,15 +4422,15 @@ msgstr ""
#| msgid "" #| msgid ""
#| "The full list of derivable traits is `Eq`, `TotalEq`, `Ord`, `TotalOrd`, " #| "The full list of derivable traits is `Eq`, `TotalEq`, `Ord`, `TotalOrd`, "
#| "`Encodable` `Decodable`, `Clone`, `DeepClone`, `Hash`, `Rand`, " #| "`Encodable` `Decodable`, `Clone`, `DeepClone`, `Hash`, `Rand`, "
#| "`Zero`, and `ToStr`." #| "`Zero`, and `ToString`."
msgid "" msgid ""
"The full list of derivable traits is `Eq`, `TotalEq`, `Ord`, `TotalOrd`, " "The full list of derivable traits is `Eq`, `TotalEq`, `Ord`, `TotalOrd`, "
"`Encodable` `Decodable`, `Clone`, `DeepClone`, `Hash`, `Rand`, " "`Encodable` `Decodable`, `Clone`, `DeepClone`, `Hash`, `Rand`, "
"`Default`, `Zero`, and `ToStr`." "`Default`, `Zero`, and `ToString`."
msgstr "" msgstr ""
"実装を自動的に導出可能なトレイトは、 `Eq`, `TotalEq`, `Ord`, `TotalOrd`, " "実装を自動的に導出可能なトレイトは、 `Eq`, `TotalEq`, `Ord`, `TotalOrd`, "
"`Encodable` `Decodable`, `Clone`, `DeepClone`, `Hash`, `Rand`, `Zero`, " "`Encodable` `Decodable`, `Clone`, `DeepClone`, `Hash`, `Rand`, `Zero`, "
"および `ToStr` です。." "および `ToString` です。."
#. type: Plain text #. type: Plain text
#: src/doc/tutorial.md:2534 #: src/doc/tutorial.md:2534

View file

@ -3671,15 +3671,15 @@ An example of an object type:
~~~~ ~~~~
trait Printable { trait Printable {
fn to_string(&self) -> String; fn stringify(&self) -> String;
} }
impl Printable for int { impl Printable for int {
fn to_string(&self) -> String { self.to_str() } fn stringify(&self) -> String { self.to_string() }
} }
fn print(a: Box<Printable>) { fn print(a: Box<Printable>) {
println!("{}", a.to_string()); println!("{}", a.stringify());
} }
fn main() { fn main() {

View file

@ -100,7 +100,7 @@ syn keyword rustTrait RawPtr
syn keyword rustTrait Buffer Writer Reader Seek syn keyword rustTrait Buffer Writer Reader Seek
syn keyword rustTrait Str StrVector StrSlice OwnedStr syn keyword rustTrait Str StrVector StrSlice OwnedStr
syn keyword rustTrait IntoMaybeOwned StrAllocating syn keyword rustTrait IntoMaybeOwned StrAllocating
syn keyword rustTrait ToStr IntoStr syn keyword rustTrait ToString IntoStr
syn keyword rustTrait Tuple1 Tuple2 Tuple3 Tuple4 syn keyword rustTrait Tuple1 Tuple2 Tuple3 Tuple4
syn keyword rustTrait Tuple5 Tuple6 Tuple7 Tuple8 syn keyword rustTrait Tuple5 Tuple6 Tuple7 Tuple8
syn keyword rustTrait Tuple9 Tuple10 Tuple11 Tuple12 syn keyword rustTrait Tuple9 Tuple10 Tuple11 Tuple12

View file

@ -60,17 +60,17 @@ enum BitvVariant { Big(BigBitv), Small(SmallBitv) }
/// bv.set(3, true); /// bv.set(3, true);
/// bv.set(5, true); /// bv.set(5, true);
/// bv.set(7, true); /// bv.set(7, true);
/// println!("{}", bv.to_str()); /// println!("{}", bv.to_string());
/// println!("total bits set to true: {}", bv.iter().filter(|x| *x).count()); /// println!("total bits set to true: {}", bv.iter().filter(|x| *x).count());
/// ///
/// // flip all values in bitvector, producing non-primes less than 10 /// // flip all values in bitvector, producing non-primes less than 10
/// bv.negate(); /// bv.negate();
/// println!("{}", bv.to_str()); /// println!("{}", bv.to_string());
/// println!("total bits set to true: {}", bv.iter().filter(|x| *x).count()); /// println!("total bits set to true: {}", bv.iter().filter(|x| *x).count());
/// ///
/// // reset bitvector to empty /// // reset bitvector to empty
/// bv.clear(); /// bv.clear();
/// println!("{}", bv.to_str()); /// println!("{}", bv.to_string());
/// println!("total bits set to true: {}", bv.iter().filter(|x| *x).count()); /// println!("total bits set to true: {}", bv.iter().filter(|x| *x).count());
/// ``` /// ```
pub struct Bitv { pub struct Bitv {
@ -996,10 +996,10 @@ mod tests {
#[test] #[test]
fn test_to_str() { fn test_to_str() {
let zerolen = Bitv::new(); let zerolen = Bitv::new();
assert_eq!(zerolen.to_str().as_slice(), ""); assert_eq!(zerolen.to_string().as_slice(), "");
let eightbits = Bitv::with_capacity(8u, false); let eightbits = Bitv::with_capacity(8u, false);
assert_eq!(eightbits.to_str().as_slice(), "00000000") assert_eq!(eightbits.to_string().as_slice(), "00000000")
} }
#[test] #[test]
@ -1022,7 +1022,7 @@ mod tests {
let mut b = bitv::Bitv::with_capacity(2, false); let mut b = bitv::Bitv::with_capacity(2, false);
b.set(0, true); b.set(0, true);
b.set(1, false); b.set(1, false);
assert_eq!(b.to_str().as_slice(), "10"); assert_eq!(b.to_string().as_slice(), "10");
} }
#[test] #[test]
@ -1333,7 +1333,7 @@ mod tests {
fn test_from_bytes() { fn test_from_bytes() {
let bitv = from_bytes([0b10110110, 0b00000000, 0b11111111]); let bitv = from_bytes([0b10110110, 0b00000000, 0b11111111]);
let str = format!("{}{}{}", "10110110", "00000000", "11111111"); let str = format!("{}{}{}", "10110110", "00000000", "11111111");
assert_eq!(bitv.to_str().as_slice(), str.as_slice()); assert_eq!(bitv.to_string().as_slice(), str.as_slice());
} }
#[test] #[test]
@ -1352,7 +1352,7 @@ mod tests {
fn test_from_bools() { fn test_from_bools() {
let bools = vec![true, false, true, true]; let bools = vec![true, false, true, true];
let bitv: Bitv = bools.iter().map(|n| *n).collect(); let bitv: Bitv = bools.iter().map(|n| *n).collect();
assert_eq!(bitv.to_str().as_slice(), "1011"); assert_eq!(bitv.to_string().as_slice(), "1011");
} }
#[test] #[test]
@ -1787,7 +1787,7 @@ mod tests {
s.insert(10); s.insert(10);
s.insert(50); s.insert(50);
s.insert(2); s.insert(2);
assert_eq!("{1, 2, 10, 50}".to_string(), s.to_str()); assert_eq!("{1, 2, 10, 50}".to_string(), s.to_string());
} }
fn rng() -> rand::IsaacRng { fn rng() -> rand::IsaacRng {

View file

@ -787,7 +787,6 @@ mod test_btree {
fn insert_test_one() { fn insert_test_one() {
let b = BTree::new(1i, "abc".to_string(), 2); let b = BTree::new(1i, "abc".to_string(), 2);
let is_insert = b.insert(2i, "xyz".to_string()); let is_insert = b.insert(2i, "xyz".to_string());
//println!("{}", is_insert.clone().to_str());
assert!(is_insert.root.is_leaf()); assert!(is_insert.root.is_leaf());
} }
@ -798,7 +797,7 @@ mod test_btree {
let leaf_elt_3 = LeafElt::new(3i, "ccc".to_string()); let leaf_elt_3 = LeafElt::new(3i, "ccc".to_string());
let n = Node::new_leaf(vec!(leaf_elt_1, leaf_elt_2, leaf_elt_3)); let n = Node::new_leaf(vec!(leaf_elt_1, leaf_elt_2, leaf_elt_3));
let b = BTree::new_with_node_len(n, 3, 2); let b = BTree::new_with_node_len(n, 3, 2);
//println!("{}", b.clone().insert(4, "ddd".to_string()).to_str()); //println!("{}", b.clone().insert(4, "ddd".to_string()).to_string());
assert!(b.insert(4, "ddd".to_string()).root.is_leaf()); assert!(b.insert(4, "ddd".to_string()).root.is_leaf());
} }
@ -810,7 +809,7 @@ mod test_btree {
let leaf_elt_4 = LeafElt::new(4i, "ddd".to_string()); let leaf_elt_4 = LeafElt::new(4i, "ddd".to_string());
let n = Node::new_leaf(vec!(leaf_elt_1, leaf_elt_2, leaf_elt_3, leaf_elt_4)); let n = Node::new_leaf(vec!(leaf_elt_1, leaf_elt_2, leaf_elt_3, leaf_elt_4));
let b = BTree::new_with_node_len(n, 3, 2); let b = BTree::new_with_node_len(n, 3, 2);
//println!("{}", b.clone().insert(5, "eee".to_string()).to_str()); //println!("{}", b.clone().insert(5, "eee".to_string()).to_string());
assert!(!b.insert(5, "eee".to_string()).root.is_leaf()); assert!(!b.insert(5, "eee".to_string()).root.is_leaf());
} }
@ -827,7 +826,7 @@ mod test_btree {
b = b.clone().insert(7, "ggg".to_string()); b = b.clone().insert(7, "ggg".to_string());
b = b.clone().insert(8, "hhh".to_string()); b = b.clone().insert(8, "hhh".to_string());
b = b.clone().insert(0, "omg".to_string()); b = b.clone().insert(0, "omg".to_string());
//println!("{}", b.clone().to_str()); //println!("{}", b.clone().to_string());
assert!(!b.root.is_leaf()); assert!(!b.root.is_leaf());
} }
@ -905,11 +904,11 @@ mod test_btree {
assert!(&b2.cmp(&b) == &Greater) assert!(&b2.cmp(&b) == &Greater)
} }
//Tests the BTree's to_str() method. //Tests the BTree's to_string() method.
#[test] #[test]
fn btree_tostr_test() { fn btree_tostr_test() {
let b = BTree::new(1i, "abc".to_string(), 2); let b = BTree::new(1i, "abc".to_string(), 2);
assert_eq!(b.to_str(), "Key: 1, value: abc;".to_string()) assert_eq!(b.to_string(), "Key: 1, value: abc;".to_string())
} }
} }

View file

@ -1041,12 +1041,12 @@ mod tests {
#[test] #[test]
fn test_show() { fn test_show() {
let list: DList<int> = range(0i, 10).collect(); let list: DList<int> = range(0i, 10).collect();
assert!(list.to_str().as_slice() == "[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]"); assert!(list.to_string().as_slice() == "[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]");
let list: DList<&str> = vec!["just", "one", "test", "more"].iter() let list: DList<&str> = vec!["just", "one", "test", "more"].iter()
.map(|&s| s) .map(|&s| s)
.collect(); .collect();
assert!(list.to_str().as_slice() == "[just, one, test, more]"); assert!(list.to_string().as_slice() == "[just, one, test, more]");
} }
#[cfg(test)] #[cfg(test)]

View file

@ -491,7 +491,7 @@ mod test_map {
map.insert(1, 2i); map.insert(1, 2i);
map.insert(3, 4i); map.insert(3, 4i);
let map_str = map.to_str(); let map_str = map.to_string();
let map_str = map_str.as_slice(); let map_str = map_str.as_slice();
assert!(map_str == "{1: 2, 3: 4}" || map_str == "{3: 4, 1: 2}"); assert!(map_str == "{1: 2, 3: 4}" || map_str == "{3: 4, 1: 2}");
assert_eq!(format!("{}", empty), "{}".to_string()); assert_eq!(format!("{}", empty), "{}".to_string());

View file

@ -657,7 +657,7 @@ impl<'a> StrAllocating for MaybeOwned<'a> {
#[inline] #[inline]
fn into_string(self) -> String { fn into_string(self) -> String {
match self { match self {
Slice(s) => s.to_string(), Slice(s) => String::from_str(s),
Owned(s) => s Owned(s) => s
} }
} }
@ -673,7 +673,7 @@ impl<'a> Clone for MaybeOwned<'a> {
fn clone(&self) -> MaybeOwned<'a> { fn clone(&self) -> MaybeOwned<'a> {
match *self { match *self {
Slice(s) => Slice(s), Slice(s) => Slice(s),
Owned(ref s) => Owned(s.to_string()) Owned(ref s) => Owned(String::from_str(s.as_slice()))
} }
} }
} }
@ -762,7 +762,7 @@ pub mod raw {
let a = vec![65u8, 65u8, 65u8, 65u8, 65u8, 65u8, 65u8, 0u8]; let a = vec![65u8, 65u8, 65u8, 65u8, 65u8, 65u8, 65u8, 0u8];
let b = a.as_ptr(); let b = a.as_ptr();
let c = from_buf_len(b, 3u); let c = from_buf_len(b, 3u);
assert_eq!(c, "AAA".to_string()); assert_eq!(c, String::from_str("AAA"));
} }
} }
} }
@ -776,12 +776,6 @@ pub trait StrAllocating: Str {
/// Convert `self` into a `String`, not making a copy if possible. /// Convert `self` into a `String`, not making a copy if possible.
fn into_string(self) -> String; fn into_string(self) -> String;
/// Convert `self` into a `String`.
#[inline]
fn to_string(&self) -> String {
String::from_str(self.as_slice())
}
#[allow(missing_doc)] #[allow(missing_doc)]
#[deprecated = "replaced by .into_string()"] #[deprecated = "replaced by .into_string()"]
fn into_owned(self) -> String { fn into_owned(self) -> String {
@ -933,7 +927,7 @@ pub trait StrAllocating: Str {
impl<'a> StrAllocating for &'a str { impl<'a> StrAllocating for &'a str {
#[inline] #[inline]
fn into_string(self) -> String { fn into_string(self) -> String {
self.to_string() String::from_str(self)
} }
} }
@ -963,11 +957,19 @@ impl OwnedStr for String {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use std::prelude::*;
use std::iter::AdditiveIterator; use std::iter::AdditiveIterator;
use std::default::Default; use std::default::Default;
use std::char::Char;
use std::clone::Clone;
use std::cmp::{Equal, Greater, Less, Ord, Eq, PartialOrd, PartialEq, Equiv};
use std::result::{Ok, Err};
use std::option::{Some, None};
use std::ptr::RawPtr;
use std::iter::{Iterator, DoubleEndedIterator};
use Collection;
use str::*; use super::*;
use std::slice::{Vector, ImmutableVector};
use string::String; use string::String;
use vec::Vec; use vec::Vec;
@ -1028,17 +1030,17 @@ mod tests {
#[test] #[test]
fn test_collect() { fn test_collect() {
let empty = "".to_string(); let empty = String::from_str("");
let s: String = empty.as_slice().chars().collect(); let s: String = empty.as_slice().chars().collect();
assert_eq!(empty, s); assert_eq!(empty, s);
let data = "ประเทศไทย中".to_string(); let data = String::from_str("ประเทศไทย中");
let s: String = data.as_slice().chars().collect(); let s: String = data.as_slice().chars().collect();
assert_eq!(data, s); assert_eq!(data, s);
} }
#[test] #[test]
fn test_into_bytes() { fn test_into_bytes() {
let data = "asdf".to_string(); let data = String::from_str("asdf");
let buf = data.into_bytes(); let buf = data.into_bytes();
assert_eq!(b"asdf", buf.as_slice()); assert_eq!(b"asdf", buf.as_slice());
} }
@ -1055,7 +1057,7 @@ mod tests {
assert!(data.slice(2u, 4u).find_str("ab").is_none()); assert!(data.slice(2u, 4u).find_str("ab").is_none());
let string = "ประเทศไทย中华Việt Nam"; let string = "ประเทศไทย中华Việt Nam";
let mut data = string.to_string(); let mut data = String::from_str(string);
data.push_str(string); data.push_str(string);
assert!(data.as_slice().find_str("ไท华").is_none()); assert!(data.as_slice().find_str("ไท华").is_none());
assert_eq!(data.as_slice().slice(0u, 43u).find_str(""), Some(0u)); assert_eq!(data.as_slice().slice(0u, 43u).find_str(""), Some(0u));
@ -1092,11 +1094,13 @@ mod tests {
fn t(v: &[String], s: &str) { fn t(v: &[String], s: &str) {
assert_eq!(v.concat().as_slice(), s); assert_eq!(v.concat().as_slice(), s);
} }
t(["you".to_string(), "know".to_string(), "I'm".to_string(), t([String::from_str("you"), String::from_str("know"),
"no".to_string(), "good".to_string()], "youknowI'mnogood"); String::from_str("I'm"),
String::from_str("no"), String::from_str("good")],
"youknowI'mnogood");
let v: &[String] = []; let v: &[String] = [];
t(v, ""); t(v, "");
t(["hi".to_string()], "hi"); t([String::from_str("hi")], "hi");
} }
#[test] #[test]
@ -1104,12 +1108,13 @@ mod tests {
fn t(v: &[String], sep: &str, s: &str) { fn t(v: &[String], sep: &str, s: &str) {
assert_eq!(v.connect(sep).as_slice(), s); assert_eq!(v.connect(sep).as_slice(), s);
} }
t(["you".to_string(), "know".to_string(), "I'm".to_string(), t([String::from_str("you"), String::from_str("know"),
"no".to_string(), "good".to_string()], String::from_str("I'm"),
String::from_str("no"), String::from_str("good")],
" ", "you know I'm no good"); " ", "you know I'm no good");
let v: &[String] = []; let v: &[String] = [];
t(v, " ", ""); t(v, " ", "");
t(["hi".to_string()], " ", "hi"); t([String::from_str("hi")], " ", "hi");
} }
#[test] #[test]
@ -1136,11 +1141,11 @@ mod tests {
#[test] #[test]
fn test_repeat() { fn test_repeat() {
assert_eq!("x".repeat(4), "xxxx".to_string()); assert_eq!("x".repeat(4), String::from_str("xxxx"));
assert_eq!("hi".repeat(4), "hihihihi".to_string()); assert_eq!("hi".repeat(4), String::from_str("hihihihi"));
assert_eq!("ไท华".repeat(3), "ไท华ไท华ไท华".to_string()); assert_eq!("ไท华".repeat(3), String::from_str("ไท华ไท华ไท华"));
assert_eq!("".repeat(4), "".to_string()); assert_eq!("".repeat(4), String::from_str(""));
assert_eq!("hi".repeat(0), "".to_string()); assert_eq!("hi".repeat(0), String::from_str(""));
} }
#[test] #[test]
@ -1168,9 +1173,9 @@ mod tests {
} }
let letters = a_million_letter_a(); let letters = a_million_letter_a();
assert!(half_a_million_letter_a() == assert!(half_a_million_letter_a() ==
unsafe {raw::slice_bytes(letters.as_slice(), unsafe {String::from_str(raw::slice_bytes(letters.as_slice(),
0u, 0u,
500000)}.to_string()); 500000))});
} }
#[test] #[test]
@ -1204,13 +1209,13 @@ mod tests {
#[test] #[test]
fn test_replace() { fn test_replace() {
let a = "a"; let a = "a";
assert_eq!("".replace(a, "b"), "".to_string()); assert_eq!("".replace(a, "b"), String::from_str(""));
assert_eq!("a".replace(a, "b"), "b".to_string()); assert_eq!("a".replace(a, "b"), String::from_str("b"));
assert_eq!("ab".replace(a, "b"), "bb".to_string()); assert_eq!("ab".replace(a, "b"), String::from_str("bb"));
let test = "test"; let test = "test";
assert!(" test test ".replace(test, "toast") == assert!(" test test ".replace(test, "toast") ==
" toast toast ".to_string()); String::from_str(" toast toast "));
assert_eq!(" test test ".replace(test, ""), " ".to_string()); assert_eq!(" test test ".replace(test, ""), String::from_str(" "));
} }
#[test] #[test]
@ -1285,7 +1290,7 @@ mod tests {
} }
let letters = a_million_letter_x(); let letters = a_million_letter_x();
assert!(half_a_million_letter_x() == assert!(half_a_million_letter_x() ==
letters.as_slice().slice(0u, 3u * 500000u).to_string()); String::from_str(letters.as_slice().slice(0u, 3u * 500000u)));
} }
#[test] #[test]
@ -1513,7 +1518,7 @@ mod tests {
let a = vec![65, 65, 65, 65, 65, 65, 65, 0]; let a = vec![65, 65, 65, 65, 65, 65, 65, 0];
let b = a.as_ptr(); let b = a.as_ptr();
let c = raw::from_c_str(b); let c = raw::from_c_str(b);
assert_eq!(c, "AAAAAAA".to_string()); assert_eq!(c, String::from_str("AAAAAAA"));
} }
} }
@ -1535,7 +1540,7 @@ mod tests {
fn test_as_bytes_fail() { fn test_as_bytes_fail() {
// Don't double free. (I'm not sure if this exercises the // Don't double free. (I'm not sure if this exercises the
// original problem code path anymore.) // original problem code path anymore.)
let s = "".to_string(); let s = String::from_str("");
let _bytes = s.as_bytes(); let _bytes = s.as_bytes();
fail!(); fail!();
} }
@ -1578,10 +1583,10 @@ mod tests {
#[test] #[test]
fn vec_str_conversions() { fn vec_str_conversions() {
let s1: String = "All mimsy were the borogoves".to_string(); let s1: String = String::from_str("All mimsy were the borogoves");
let v: Vec<u8> = Vec::from_slice(s1.as_bytes()); let v: Vec<u8> = Vec::from_slice(s1.as_bytes());
let s2: String = from_utf8(v.as_slice()).unwrap().to_string(); let s2: String = String::from_str(from_utf8(v.as_slice()).unwrap());
let mut i: uint = 0u; let mut i: uint = 0u;
let n1: uint = s1.len(); let n1: uint = s1.len();
let n2: uint = v.len(); let n2: uint = v.len();
@ -1624,13 +1629,13 @@ mod tests {
#[test] #[test]
fn test_utf16() { fn test_utf16() {
let pairs = let pairs =
[("𐍅𐌿𐌻𐍆𐌹𐌻𐌰\n".to_string(), [(String::from_str("𐍅𐌿𐌻𐍆𐌹𐌻𐌰\n"),
vec![0xd800_u16, 0xdf45_u16, 0xd800_u16, 0xdf3f_u16, vec![0xd800_u16, 0xdf45_u16, 0xd800_u16, 0xdf3f_u16,
0xd800_u16, 0xdf3b_u16, 0xd800_u16, 0xdf46_u16, 0xd800_u16, 0xdf3b_u16, 0xd800_u16, 0xdf46_u16,
0xd800_u16, 0xdf39_u16, 0xd800_u16, 0xdf3b_u16, 0xd800_u16, 0xdf39_u16, 0xd800_u16, 0xdf3b_u16,
0xd800_u16, 0xdf30_u16, 0x000a_u16]), 0xd800_u16, 0xdf30_u16, 0x000a_u16]),
("𐐒𐑉𐐮𐑀𐐲𐑋 𐐏𐐲𐑍\n".to_string(), (String::from_str("𐐒𐑉𐐮𐑀𐐲𐑋 𐐏𐐲𐑍\n"),
vec![0xd801_u16, 0xdc12_u16, 0xd801_u16, vec![0xd801_u16, 0xdc12_u16, 0xd801_u16,
0xdc49_u16, 0xd801_u16, 0xdc2e_u16, 0xd801_u16, 0xdc49_u16, 0xd801_u16, 0xdc2e_u16, 0xd801_u16,
0xdc40_u16, 0xd801_u16, 0xdc32_u16, 0xd801_u16, 0xdc40_u16, 0xd801_u16, 0xdc32_u16, 0xd801_u16,
@ -1638,7 +1643,7 @@ mod tests {
0xd801_u16, 0xdc32_u16, 0xd801_u16, 0xdc4d_u16, 0xd801_u16, 0xdc32_u16, 0xd801_u16, 0xdc4d_u16,
0x000a_u16]), 0x000a_u16]),
("𐌀𐌖𐌋𐌄𐌑𐌉·𐌌𐌄𐌕𐌄𐌋𐌉𐌑\n".to_string(), (String::from_str("𐌀𐌖𐌋𐌄𐌑𐌉·𐌌𐌄𐌕𐌄𐌋𐌉𐌑\n"),
vec![0xd800_u16, 0xdf00_u16, 0xd800_u16, 0xdf16_u16, vec![0xd800_u16, 0xdf00_u16, 0xd800_u16, 0xdf16_u16,
0xd800_u16, 0xdf0b_u16, 0xd800_u16, 0xdf04_u16, 0xd800_u16, 0xdf0b_u16, 0xd800_u16, 0xdf04_u16,
0xd800_u16, 0xdf11_u16, 0xd800_u16, 0xdf09_u16, 0xd800_u16, 0xdf11_u16, 0xd800_u16, 0xdf09_u16,
@ -1647,7 +1652,7 @@ mod tests {
0xdf04_u16, 0xd800_u16, 0xdf0b_u16, 0xd800_u16, 0xdf04_u16, 0xd800_u16, 0xdf0b_u16, 0xd800_u16,
0xdf09_u16, 0xd800_u16, 0xdf11_u16, 0x000a_u16 ]), 0xdf09_u16, 0xd800_u16, 0xdf11_u16, 0x000a_u16 ]),
("𐒋𐒘𐒈𐒑𐒛𐒒 𐒕𐒓 𐒈𐒚𐒍 𐒏𐒜𐒒𐒖𐒆 𐒕𐒆\n".to_string(), (String::from_str("𐒋𐒘𐒈𐒑𐒛𐒒 𐒕𐒓 𐒈𐒚𐒍 𐒏𐒜𐒒𐒖𐒆 𐒕𐒆\n"),
vec![0xd801_u16, 0xdc8b_u16, 0xd801_u16, 0xdc98_u16, vec![0xd801_u16, 0xdc8b_u16, 0xd801_u16, 0xdc98_u16,
0xd801_u16, 0xdc88_u16, 0xd801_u16, 0xdc91_u16, 0xd801_u16, 0xdc88_u16, 0xd801_u16, 0xdc91_u16,
0xd801_u16, 0xdc9b_u16, 0xd801_u16, 0xdc92_u16, 0xd801_u16, 0xdc9b_u16, 0xd801_u16, 0xdc92_u16,
@ -1660,7 +1665,7 @@ mod tests {
0xd801_u16, 0xdc95_u16, 0xd801_u16, 0xdc86_u16, 0xd801_u16, 0xdc95_u16, 0xd801_u16, 0xdc86_u16,
0x000a_u16 ]), 0x000a_u16 ]),
// Issue #12318, even-numbered non-BMP planes // Issue #12318, even-numbered non-BMP planes
("\U00020000".to_string(), (String::from_str("\U00020000"),
vec![0xD840, 0xDC00])]; vec![0xD840, 0xDC00])];
for p in pairs.iter() { for p in pairs.iter() {
@ -1698,16 +1703,16 @@ mod tests {
fn test_utf16_lossy() { fn test_utf16_lossy() {
// completely positive cases tested above. // completely positive cases tested above.
// lead + eof // lead + eof
assert_eq!(from_utf16_lossy([0xD800]), "\uFFFD".to_string()); assert_eq!(from_utf16_lossy([0xD800]), String::from_str("\uFFFD"));
// lead + lead // lead + lead
assert_eq!(from_utf16_lossy([0xD800, 0xD800]), "\uFFFD\uFFFD".to_string()); assert_eq!(from_utf16_lossy([0xD800, 0xD800]), String::from_str("\uFFFD\uFFFD"));
// isolated trail // isolated trail
assert_eq!(from_utf16_lossy([0x0061, 0xDC00]), "a\uFFFD".to_string()); assert_eq!(from_utf16_lossy([0x0061, 0xDC00]), String::from_str("a\uFFFD"));
// general // general
assert_eq!(from_utf16_lossy([0xD800, 0xd801, 0xdc8b, 0xD800]), assert_eq!(from_utf16_lossy([0xD800, 0xd801, 0xdc8b, 0xD800]),
"\uFFFD𐒋\uFFFD".to_string()); String::from_str("\uFFFD𐒋\uFFFD"));
} }
#[test] #[test]
@ -1752,27 +1757,29 @@ mod tests {
#[test] #[test]
fn test_escape_unicode() { fn test_escape_unicode() {
assert_eq!("abc".escape_unicode(), "\\x61\\x62\\x63".to_string()); assert_eq!("abc".escape_unicode(), String::from_str("\\x61\\x62\\x63"));
assert_eq!("a c".escape_unicode(), "\\x61\\x20\\x63".to_string()); assert_eq!("a c".escape_unicode(), String::from_str("\\x61\\x20\\x63"));
assert_eq!("\r\n\t".escape_unicode(), "\\x0d\\x0a\\x09".to_string()); assert_eq!("\r\n\t".escape_unicode(), String::from_str("\\x0d\\x0a\\x09"));
assert_eq!("'\"\\".escape_unicode(), "\\x27\\x22\\x5c".to_string()); assert_eq!("'\"\\".escape_unicode(), String::from_str("\\x27\\x22\\x5c"));
assert_eq!("\x00\x01\xfe\xff".escape_unicode(), "\\x00\\x01\\xfe\\xff".to_string()); assert_eq!("\x00\x01\xfe\xff".escape_unicode(), String::from_str("\\x00\\x01\\xfe\\xff"));
assert_eq!("\u0100\uffff".escape_unicode(), "\\u0100\\uffff".to_string()); assert_eq!("\u0100\uffff".escape_unicode(), String::from_str("\\u0100\\uffff"));
assert_eq!("\U00010000\U0010ffff".escape_unicode(), "\\U00010000\\U0010ffff".to_string()); assert_eq!("\U00010000\U0010ffff".escape_unicode(),
assert_eq!("ab\ufb00".escape_unicode(), "\\x61\\x62\\ufb00".to_string()); String::from_str("\\U00010000\\U0010ffff"));
assert_eq!("\U0001d4ea\r".escape_unicode(), "\\U0001d4ea\\x0d".to_string()); assert_eq!("ab\ufb00".escape_unicode(), String::from_str("\\x61\\x62\\ufb00"));
assert_eq!("\U0001d4ea\r".escape_unicode(), String::from_str("\\U0001d4ea\\x0d"));
} }
#[test] #[test]
fn test_escape_default() { fn test_escape_default() {
assert_eq!("abc".escape_default(), "abc".to_string()); assert_eq!("abc".escape_default(), String::from_str("abc"));
assert_eq!("a c".escape_default(), "a c".to_string()); assert_eq!("a c".escape_default(), String::from_str("a c"));
assert_eq!("\r\n\t".escape_default(), "\\r\\n\\t".to_string()); assert_eq!("\r\n\t".escape_default(), String::from_str("\\r\\n\\t"));
assert_eq!("'\"\\".escape_default(), "\\'\\\"\\\\".to_string()); assert_eq!("'\"\\".escape_default(), String::from_str("\\'\\\"\\\\"));
assert_eq!("\u0100\uffff".escape_default(), "\\u0100\\uffff".to_string()); assert_eq!("\u0100\uffff".escape_default(), String::from_str("\\u0100\\uffff"));
assert_eq!("\U00010000\U0010ffff".escape_default(), "\\U00010000\\U0010ffff".to_string()); assert_eq!("\U00010000\U0010ffff".escape_default(),
assert_eq!("ab\ufb00".escape_default(), "ab\\ufb00".to_string()); String::from_str("\\U00010000\\U0010ffff"));
assert_eq!("\U0001d4ea\r".escape_default(), "\\U0001d4ea\\r".to_string()); assert_eq!("ab\ufb00".escape_default(), String::from_str("ab\\ufb00"));
assert_eq!("\U0001d4ea\r".escape_default(), String::from_str("\\U0001d4ea\\r"));
} }
#[test] #[test]
@ -2013,30 +2020,39 @@ mod tests {
#[test] #[test]
fn test_nfd_chars() { fn test_nfd_chars() {
assert_eq!("abc".nfd_chars().collect::<String>(), "abc".to_string()); assert_eq!("abc".nfd_chars().collect::<String>(), String::from_str("abc"));
assert_eq!("\u1e0b\u01c4".nfd_chars().collect::<String>(), "d\u0307\u01c4".to_string()); assert_eq!("\u1e0b\u01c4".nfd_chars().collect::<String>(),
assert_eq!("\u2026".nfd_chars().collect::<String>(), "\u2026".to_string()); String::from_str("d\u0307\u01c4"));
assert_eq!("\u2126".nfd_chars().collect::<String>(), "\u03a9".to_string()); assert_eq!("\u2026".nfd_chars().collect::<String>(), String::from_str("\u2026"));
assert_eq!("\u1e0b\u0323".nfd_chars().collect::<String>(), "d\u0323\u0307".to_string()); assert_eq!("\u2126".nfd_chars().collect::<String>(), String::from_str("\u03a9"));
assert_eq!("\u1e0d\u0307".nfd_chars().collect::<String>(), "d\u0323\u0307".to_string()); assert_eq!("\u1e0b\u0323".nfd_chars().collect::<String>(),
assert_eq!("a\u0301".nfd_chars().collect::<String>(), "a\u0301".to_string()); String::from_str("d\u0323\u0307"));
assert_eq!("\u0301a".nfd_chars().collect::<String>(), "\u0301a".to_string()); assert_eq!("\u1e0d\u0307".nfd_chars().collect::<String>(),
assert_eq!("\ud4db".nfd_chars().collect::<String>(), "\u1111\u1171\u11b6".to_string()); String::from_str("d\u0323\u0307"));
assert_eq!("\uac1c".nfd_chars().collect::<String>(), "\u1100\u1162".to_string()); assert_eq!("a\u0301".nfd_chars().collect::<String>(), String::from_str("a\u0301"));
assert_eq!("\u0301a".nfd_chars().collect::<String>(), String::from_str("\u0301a"));
assert_eq!("\ud4db".nfd_chars().collect::<String>(),
String::from_str("\u1111\u1171\u11b6"));
assert_eq!("\uac1c".nfd_chars().collect::<String>(), String::from_str("\u1100\u1162"));
} }
#[test] #[test]
fn test_nfkd_chars() { fn test_nfkd_chars() {
assert_eq!("abc".nfkd_chars().collect::<String>(), "abc".to_string()); assert_eq!("abc".nfkd_chars().collect::<String>(), String::from_str("abc"));
assert_eq!("\u1e0b\u01c4".nfkd_chars().collect::<String>(), "d\u0307DZ\u030c".to_string()); assert_eq!("\u1e0b\u01c4".nfkd_chars().collect::<String>(),
assert_eq!("\u2026".nfkd_chars().collect::<String>(), "...".to_string()); String::from_str("d\u0307DZ\u030c"));
assert_eq!("\u2126".nfkd_chars().collect::<String>(), "\u03a9".to_string()); assert_eq!("\u2026".nfkd_chars().collect::<String>(), String::from_str("..."));
assert_eq!("\u1e0b\u0323".nfkd_chars().collect::<String>(), "d\u0323\u0307".to_string()); assert_eq!("\u2126".nfkd_chars().collect::<String>(), String::from_str("\u03a9"));
assert_eq!("\u1e0d\u0307".nfkd_chars().collect::<String>(), "d\u0323\u0307".to_string()); assert_eq!("\u1e0b\u0323".nfkd_chars().collect::<String>(),
assert_eq!("a\u0301".nfkd_chars().collect::<String>(), "a\u0301".to_string()); String::from_str("d\u0323\u0307"));
assert_eq!("\u0301a".nfkd_chars().collect::<String>(), "\u0301a".to_string()); assert_eq!("\u1e0d\u0307".nfkd_chars().collect::<String>(),
assert_eq!("\ud4db".nfkd_chars().collect::<String>(), "\u1111\u1171\u11b6".to_string()); String::from_str("d\u0323\u0307"));
assert_eq!("\uac1c".nfkd_chars().collect::<String>(), "\u1100\u1162".to_string()); assert_eq!("a\u0301".nfkd_chars().collect::<String>(), String::from_str("a\u0301"));
assert_eq!("\u0301a".nfkd_chars().collect::<String>(),
String::from_str("\u0301a"));
assert_eq!("\ud4db".nfkd_chars().collect::<String>(),
String::from_str("\u1111\u1171\u11b6"));
assert_eq!("\uac1c".nfkd_chars().collect::<String>(), String::from_str("\u1100\u1162"));
} }
#[test] #[test]
@ -2090,10 +2106,10 @@ mod tests {
v.iter().map(|x| x.len()).sum() v.iter().map(|x| x.len()).sum()
} }
let s = "01234".to_string(); let s = String::from_str("01234");
assert_eq!(5, sum_len(["012", "", "34"])); assert_eq!(5, sum_len(["012", "", "34"]));
assert_eq!(5, sum_len(["01".to_string(), "2".to_string(), assert_eq!(5, sum_len([String::from_str("01"), String::from_str("2"),
"34".to_string(), "".to_string()])); String::from_str("34"), String::from_str("")]));
assert_eq!(5, sum_len([s.as_slice()])); assert_eq!(5, sum_len([s.as_slice()]));
} }
@ -2112,10 +2128,10 @@ mod tests {
#[test] #[test]
fn test_str_from_utf8_owned() { fn test_str_from_utf8_owned() {
let xs = Vec::from_slice(b"hello"); let xs = Vec::from_slice(b"hello");
assert_eq!(from_utf8_owned(xs), Ok("hello".to_string())); assert_eq!(from_utf8_owned(xs), Ok(String::from_str("hello")));
let xs = Vec::from_slice("ศไทย中华Việt Nam".as_bytes()); let xs = Vec::from_slice("ศไทย中华Việt Nam".as_bytes());
assert_eq!(from_utf8_owned(xs), Ok("ศไทย中华Việt Nam".to_string())); assert_eq!(from_utf8_owned(xs), Ok(String::from_str("ศไทย中华Việt Nam")));
let xs = Vec::from_slice(b"hello\xFF"); let xs = Vec::from_slice(b"hello\xFF");
assert_eq!(from_utf8_owned(xs), assert_eq!(from_utf8_owned(xs),
@ -2131,34 +2147,30 @@ mod tests {
assert_eq!(from_utf8_lossy(xs), Slice("ศไทย中华Việt Nam")); assert_eq!(from_utf8_lossy(xs), Slice("ศไทย中华Việt Nam"));
let xs = b"Hello\xC2 There\xFF Goodbye"; let xs = b"Hello\xC2 There\xFF Goodbye";
assert_eq!(from_utf8_lossy(xs), Owned("Hello\uFFFD There\uFFFD Goodbye".to_string())); assert_eq!(from_utf8_lossy(xs), Owned(String::from_str("Hello\uFFFD There\uFFFD Goodbye")));
let xs = b"Hello\xC0\x80 There\xE6\x83 Goodbye"; let xs = b"Hello\xC0\x80 There\xE6\x83 Goodbye";
assert_eq!(from_utf8_lossy(xs), Owned("Hello\uFFFD\uFFFD There\uFFFD Goodbye".to_string())); assert_eq!(from_utf8_lossy(xs),
Owned(String::from_str("Hello\uFFFD\uFFFD There\uFFFD Goodbye")));
let xs = b"\xF5foo\xF5\x80bar"; let xs = b"\xF5foo\xF5\x80bar";
assert_eq!(from_utf8_lossy(xs), Owned("\uFFFDfoo\uFFFD\uFFFDbar".to_string())); assert_eq!(from_utf8_lossy(xs), Owned(String::from_str("\uFFFDfoo\uFFFD\uFFFDbar")));
let xs = b"\xF1foo\xF1\x80bar\xF1\x80\x80baz"; let xs = b"\xF1foo\xF1\x80bar\xF1\x80\x80baz";
assert_eq!(from_utf8_lossy(xs), Owned("\uFFFDfoo\uFFFDbar\uFFFDbaz".to_string())); assert_eq!(from_utf8_lossy(xs), Owned(String::from_str("\uFFFDfoo\uFFFDbar\uFFFDbaz")));
let xs = b"\xF4foo\xF4\x80bar\xF4\xBFbaz"; let xs = b"\xF4foo\xF4\x80bar\xF4\xBFbaz";
assert_eq!(from_utf8_lossy(xs), Owned("\uFFFDfoo\uFFFDbar\uFFFD\uFFFDbaz".to_string())); assert_eq!(from_utf8_lossy(xs),
Owned(String::from_str("\uFFFDfoo\uFFFDbar\uFFFD\uFFFDbaz")));
let xs = b"\xF0\x80\x80\x80foo\xF0\x90\x80\x80bar"; let xs = b"\xF0\x80\x80\x80foo\xF0\x90\x80\x80bar";
assert_eq!(from_utf8_lossy(xs), Owned("\uFFFD\uFFFD\uFFFD\uFFFD\ assert_eq!(from_utf8_lossy(xs), Owned(String::from_str("\uFFFD\uFFFD\uFFFD\uFFFD\
foo\U00010000bar".to_string())); foo\U00010000bar")));
// surrogates // surrogates
let xs = b"\xED\xA0\x80foo\xED\xBF\xBFbar"; let xs = b"\xED\xA0\x80foo\xED\xBF\xBFbar";
assert_eq!(from_utf8_lossy(xs), Owned("\uFFFD\uFFFD\uFFFDfoo\ assert_eq!(from_utf8_lossy(xs), Owned(String::from_str("\uFFFD\uFFFD\uFFFDfoo\
\uFFFD\uFFFD\uFFFDbar".to_string())); \uFFFD\uFFFD\uFFFDbar")));
}
#[test]
fn test_from_str() {
let owned: Option<::std::string::String> = from_str("string");
assert_eq!(owned.as_ref().map(|s| s.as_slice()), Some("string"));
} }
#[test] #[test]
@ -2166,18 +2178,18 @@ mod tests {
let s = Slice("abcde"); let s = Slice("abcde");
assert_eq!(s.len(), 5); assert_eq!(s.len(), 5);
assert_eq!(s.as_slice(), "abcde"); assert_eq!(s.as_slice(), "abcde");
assert_eq!(s.to_str().as_slice(), "abcde"); assert_eq!(String::from_str(s.as_slice()).as_slice(), "abcde");
assert_eq!(format!("{}", s).as_slice(), "abcde"); assert_eq!(format!("{}", s).as_slice(), "abcde");
assert!(s.lt(&Owned("bcdef".to_string()))); assert!(s.lt(&Owned(String::from_str("bcdef"))));
assert_eq!(Slice(""), Default::default()); assert_eq!(Slice(""), Default::default());
let o = Owned("abcde".to_string()); let o = Owned(String::from_str("abcde"));
assert_eq!(o.len(), 5); assert_eq!(o.len(), 5);
assert_eq!(o.as_slice(), "abcde"); assert_eq!(o.as_slice(), "abcde");
assert_eq!(o.to_str().as_slice(), "abcde"); assert_eq!(String::from_str(o.as_slice()).as_slice(), "abcde");
assert_eq!(format!("{}", o).as_slice(), "abcde"); assert_eq!(format!("{}", o).as_slice(), "abcde");
assert!(o.lt(&Slice("bcdef"))); assert!(o.lt(&Slice("bcdef")));
assert_eq!(Owned("".to_string()), Default::default()); assert_eq!(Owned(String::from_str("")), Default::default());
assert!(s.cmp(&o) == Equal); assert!(s.cmp(&o) == Equal);
assert!(s.equiv(&o)); assert!(s.equiv(&o));
@ -2192,31 +2204,33 @@ mod tests {
assert!(s.is_slice()); assert!(s.is_slice());
assert!(!s.is_owned()); assert!(!s.is_owned());
let o = Owned("abcde".to_string()); let o = Owned(String::from_str("abcde"));
assert!(!o.is_slice()); assert!(!o.is_slice());
assert!(o.is_owned()); assert!(o.is_owned());
} }
#[test] #[test]
fn test_maybe_owned_clone() { fn test_maybe_owned_clone() {
assert_eq!(Owned("abcde".to_string()), Slice("abcde").clone()); assert_eq!(Owned(String::from_str("abcde")), Slice("abcde").clone());
assert_eq!(Owned("abcde".to_string()), Owned("abcde".to_string()).clone()); assert_eq!(Owned(String::from_str("abcde")), Owned(String::from_str("abcde")).clone());
assert_eq!(Slice("abcde"), Slice("abcde").clone()); assert_eq!(Slice("abcde"), Slice("abcde").clone());
assert_eq!(Slice("abcde"), Owned("abcde".to_string()).clone()); assert_eq!(Slice("abcde"), Owned(String::from_str("abcde")).clone());
} }
#[test] #[test]
fn test_maybe_owned_into_string() { fn test_maybe_owned_into_string() {
assert_eq!(Slice("abcde").into_string(), "abcde".to_string()); assert_eq!(Slice("abcde").into_string(), String::from_str("abcde"));
assert_eq!(Owned("abcde".to_string()).into_string(), "abcde".to_string()); assert_eq!(Owned(String::from_str("abcde")).into_string(),
String::from_str("abcde"));
} }
#[test] #[test]
fn test_into_maybe_owned() { fn test_into_maybe_owned() {
assert_eq!("abcde".into_maybe_owned(), Slice("abcde")); assert_eq!("abcde".into_maybe_owned(), Slice("abcde"));
assert_eq!(("abcde".to_string()).into_maybe_owned(), Slice("abcde")); assert_eq!((String::from_str("abcde")).into_maybe_owned(), Slice("abcde"));
assert_eq!("abcde".into_maybe_owned(), Owned("abcde".to_string())); assert_eq!("abcde".into_maybe_owned(), Owned(String::from_str("abcde")));
assert_eq!(("abcde".to_string()).into_maybe_owned(), Owned("abcde".to_string())); assert_eq!((String::from_str("abcde")).into_maybe_owned(),
Owned(String::from_str("abcde")));
} }
} }
@ -2224,7 +2238,10 @@ mod tests {
mod bench { mod bench {
use test::Bencher; use test::Bencher;
use super::*; use super::*;
use std::prelude::*; use vec::Vec;
use std::iter::{Iterator, DoubleEndedIterator};
use std::collections::Collection;
use std::slice::Vector;
#[bench] #[bench]
fn char_iterator(b: &mut Bencher) { fn char_iterator(b: &mut Bencher) {

View file

@ -354,7 +354,7 @@ impl<'a, S: Str> Equiv<S> for String {
impl<S: Str> Add<S, String> for String { impl<S: Str> Add<S, String> for String {
fn add(&self, other: &S) -> String { fn add(&self, other: &S) -> String {
let mut s = self.to_string(); let mut s = String::from_str(self.as_slice());
s.push_str(other.as_slice()); s.push_str(other.as_slice());
return s; return s;
} }
@ -369,6 +369,12 @@ mod tests {
use str::{Str, StrSlice}; use str::{Str, StrSlice};
use super::String; use super::String;
#[test]
fn test_from_str() {
let owned: Option<::std::string::String> = from_str("string");
assert_eq!(owned.as_ref().map(|s| s.as_slice()), Some("string"));
}
#[bench] #[bench]
fn bench_with_capacity(b: &mut Bencher) { fn bench_with_capacity(b: &mut Bencher) {
b.iter(|| { b.iter(|| {

View file

@ -70,7 +70,7 @@ static DIGIT_E_RADIX: uint = ('e' as uint) - ('a' as uint) + 11u;
/** /**
* Converts a number to its string representation as a byte vector. * Converts a number to its string representation as a byte vector.
* This is meant to be a common base implementation for all numeric string * This is meant to be a common base implementation for all numeric string
* conversion functions like `to_str()` or `to_str_radix()`. * conversion functions like `to_string()` or `to_str_radix()`.
* *
* # Arguments * # Arguments
* - `num` - The number to convert. Accepts any number that * - `num` - The number to convert. Accepts any number that

View file

@ -167,12 +167,6 @@ fn test_escape_unicode() {
assert_eq!(s.as_slice(), "\\U0001d4b6"); assert_eq!(s.as_slice(), "\\U0001d4b6");
} }
#[test]
fn test_to_str() {
let s = 't'.to_str();
assert_eq!(s.as_slice(), "t");
}
#[test] #[test]
fn test_encode_utf8() { fn test_encode_utf8() {
fn check(input: char, expect: &[u8]) { fn check(input: char, expect: &[u8]) {

View file

@ -45,7 +45,7 @@ impl<T> Poly for T {
// If we have a specified width for formatting, then we have to make // If we have a specified width for formatting, then we have to make
// this allocation of a new string // this allocation of a new string
_ => { _ => {
let s = repr::repr_to_str(self); let s = repr::repr_to_string(self);
f.pad(s.as_slice()) f.pad(s.as_slice())
} }
} }

View file

@ -73,7 +73,7 @@ int_repr!(u64, "u64")
macro_rules! num_repr(($ty:ident, $suffix:expr) => (impl Repr for $ty { macro_rules! num_repr(($ty:ident, $suffix:expr) => (impl Repr for $ty {
fn write_repr(&self, writer: &mut io::Writer) -> io::IoResult<()> { fn write_repr(&self, writer: &mut io::Writer) -> io::IoResult<()> {
let s = self.to_str(); let s = self.to_string();
writer.write(s.as_bytes()).and_then(|()| { writer.write(s.as_bytes()).and_then(|()| {
writer.write($suffix) writer.write($suffix)
}) })
@ -564,7 +564,7 @@ pub fn write_repr<T>(writer: &mut io::Writer, object: &T) -> io::IoResult<()> {
} }
} }
pub fn repr_to_str<T>(t: &T) -> String { pub fn repr_to_string<T>(t: &T) -> String {
let mut result = io::MemWriter::new(); let mut result = io::MemWriter::new();
write_repr(&mut result as &mut io::Writer, t).unwrap(); write_repr(&mut result as &mut io::Writer, t).unwrap();
String::from_utf8(result.unwrap()).unwrap() String::from_utf8(result.unwrap()).unwrap()

View file

@ -59,7 +59,7 @@
//! ]; //! ];
//! let matches = match getopts(args.tail(), opts) { //! let matches = match getopts(args.tail(), opts) {
//! Ok(m) => { m } //! Ok(m) => { m }
//! Err(f) => { fail!(f.to_str()) } //! Err(f) => { fail!(f.to_string()) }
//! }; //! };
//! if matches.opt_present("h") { //! if matches.opt_present("h") {
//! print_usage(program.as_slice(), opts); //! print_usage(program.as_slice(), opts);
@ -222,9 +222,9 @@ impl Name {
} }
} }
fn to_str(&self) -> String { fn to_string(&self) -> String {
match *self { match *self {
Short(ch) => ch.to_str(), Short(ch) => ch.to_string(),
Long(ref s) => s.to_string() Long(ref s) => s.to_string()
} }
} }
@ -501,7 +501,7 @@ impl Fail_ {
/// Convert a `Fail_` enum into an error string. /// Convert a `Fail_` enum into an error string.
#[deprecated="use `Show` (`{}` format specifier)"] #[deprecated="use `Show` (`{}` format specifier)"]
pub fn to_err_msg(self) -> String { pub fn to_err_msg(self) -> String {
self.to_str() self.to_string()
} }
} }
@ -609,12 +609,12 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result {
name_pos += 1; name_pos += 1;
let optid = match find_opt(opts.as_slice(), (*nm).clone()) { let optid = match find_opt(opts.as_slice(), (*nm).clone()) {
Some(id) => id, Some(id) => id,
None => return Err(UnrecognizedOption(nm.to_str())) None => return Err(UnrecognizedOption(nm.to_string()))
}; };
match opts.get(optid).hasarg { match opts.get(optid).hasarg {
No => { No => {
if !i_arg.is_none() { if !i_arg.is_none() {
return Err(UnexpectedArgument(nm.to_str())); return Err(UnexpectedArgument(nm.to_string()));
} }
vals.get_mut(optid).push(Given); vals.get_mut(optid).push(Given);
} }
@ -635,7 +635,7 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result {
if !i_arg.is_none() { if !i_arg.is_none() {
vals.get_mut(optid).push(Val(i_arg.clone().unwrap())); vals.get_mut(optid).push(Val(i_arg.clone().unwrap()));
} else if i + 1 == l { } else if i + 1 == l {
return Err(ArgumentMissing(nm.to_str())); return Err(ArgumentMissing(nm.to_string()));
} else { } else {
i += 1; i += 1;
vals.get_mut(optid).push(Val(args[i].clone())); vals.get_mut(optid).push(Val(args[i].clone()));
@ -652,12 +652,12 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result {
let occ = opts.get(i).occur; let occ = opts.get(i).occur;
if occ == Req { if occ == Req {
if n == 0 { if n == 0 {
return Err(OptionMissing(opts.get(i).name.to_str())); return Err(OptionMissing(opts.get(i).name.to_string()));
} }
} }
if occ != Multi { if occ != Multi {
if n > 1 { if n > 1 {
return Err(OptionDuplicated(opts.get(i).name.to_str())); return Err(OptionDuplicated(opts.get(i).name.to_string()));
} }
} }
i += 1; i += 1;

View file

@ -703,11 +703,11 @@ mod test {
for &p in pats.iter() { for &p in pats.iter() {
let pat = Pattern::new(p); let pat = Pattern::new(p);
for c in "abcdefghijklmnopqrstuvwxyz".chars() { for c in "abcdefghijklmnopqrstuvwxyz".chars() {
assert!(pat.matches(c.to_str().as_slice())); assert!(pat.matches(c.to_string().as_slice()));
} }
for c in "ABCDEFGHIJKLMNOPQRSTUVWXYZ".chars() { for c in "ABCDEFGHIJKLMNOPQRSTUVWXYZ".chars() {
let options = MatchOptions {case_sensitive: false, .. MatchOptions::new()}; let options = MatchOptions {case_sensitive: false, .. MatchOptions::new()};
assert!(pat.matches_with(c.to_str().as_slice(), options)); assert!(pat.matches_with(c.to_string().as_slice(), options));
} }
assert!(pat.matches("1")); assert!(pat.matches("1"));
assert!(pat.matches("2")); assert!(pat.matches("2"));

View file

@ -666,7 +666,7 @@ mod tests {
let mut writer = MemWriter::new(); let mut writer = MemWriter::new();
render(&g, &mut writer).unwrap(); render(&g, &mut writer).unwrap();
let mut r = BufReader::new(writer.get_ref()); let mut r = BufReader::new(writer.get_ref());
match r.read_to_str() { match r.read_to_string() {
Ok(string) => Ok(string.to_string()), Ok(string) => Ok(string.to_string()),
Err(err) => Err(err), Err(err) => Err(err),
} }
@ -768,7 +768,7 @@ r#"digraph hasse_diagram {
render(&g, &mut writer).unwrap(); render(&g, &mut writer).unwrap();
let mut r = BufReader::new(writer.get_ref()); let mut r = BufReader::new(writer.get_ref());
let r = r.read_to_str(); let r = r.read_to_string();
assert_eq!(r.unwrap().as_slice(), assert_eq!(r.unwrap().as_slice(),
r#"digraph syntax_tree { r#"digraph syntax_tree {

View file

@ -259,7 +259,7 @@ pub fn to_utf16(s: &CString) -> IoResult<Vec<u16>> {
None => Err(IoError { None => Err(IoError {
code: libc::ERROR_INVALID_NAME as uint, code: libc::ERROR_INVALID_NAME as uint,
extra: 0, extra: 0,
detail: Some("valid unicode input required".to_str()), detail: Some("valid unicode input required".to_string()),
}) })
} }
} }

View file

@ -43,7 +43,7 @@ fn addr_to_sockaddr_un(addr: &CString) -> IoResult<(libc::sockaddr_storage, uint
return Err(IoError { return Err(IoError {
code: ERROR as uint, code: ERROR as uint,
extra: 0, extra: 0,
detail: Some("path must be smaller than SUN_LEN".to_str()), detail: Some("path must be smaller than SUN_LEN".to_string()),
}) })
} }
s.sun_family = libc::AF_UNIX as libc::sa_family_t; s.sun_family = libc::AF_UNIX as libc::sa_family_t;

View file

@ -479,7 +479,7 @@ impl rtio::RtioPipe for UnixStream {
Err(IoError { Err(IoError {
code: libc::ERROR_OPERATION_ABORTED as uint, code: libc::ERROR_OPERATION_ABORTED as uint,
extra: amt, extra: amt,
detail: Some("short write during write".to_str()), detail: Some("short write during write".to_string()),
}) })
} else { } else {
Err(util::timeout("write timed out")) Err(util::timeout("write timed out"))

View file

@ -170,7 +170,7 @@ impl rtio::RtioProcess for Process {
Some(..) => return Err(IoError { Some(..) => return Err(IoError {
code: ERROR as uint, code: ERROR as uint,
extra: 0, extra: 0,
detail: Some("can't kill an exited process".to_str()), detail: Some("can't kill an exited process".to_string()),
}), }),
None => {} None => {}
} }
@ -301,7 +301,7 @@ fn spawn_process_os(cfg: ProcessConfig,
return Err(IoError { return Err(IoError {
code: libc::ERROR_CALL_NOT_IMPLEMENTED as uint, code: libc::ERROR_CALL_NOT_IMPLEMENTED as uint,
extra: 0, extra: 0,
detail: Some("unsupported gid/uid requested on windows".to_str()), detail: Some("unsupported gid/uid requested on windows".to_string()),
}) })
} }

View file

@ -30,7 +30,7 @@ pub fn timeout(desc: &'static str) -> IoError {
IoError { IoError {
code: ERROR as uint, code: ERROR as uint,
extra: 0, extra: 0,
detail: Some(desc.to_str()), detail: Some(desc.to_string()),
} }
} }
@ -40,7 +40,7 @@ pub fn short_write(n: uint, desc: &'static str) -> IoError {
IoError { IoError {
code: ERROR as uint, code: ERROR as uint,
extra: n, extra: n,
detail: Some(desc.to_str()), detail: Some(desc.to_string()),
} }
} }

View file

@ -2737,7 +2737,7 @@ mod bigint_tests {
// attempt to allocate a vector of size (-1u) == huge. // attempt to allocate a vector of size (-1u) == huge.
let x: BigInt = let x: BigInt =
from_str(format!("1{}", "0".repeat(36)).as_slice()).unwrap(); from_str(format!("1{}", "0".repeat(36)).as_slice()).unwrap();
let _y = x.to_str(); let _y = x.to_string();
} }
#[test] #[test]
@ -2842,14 +2842,14 @@ mod bench {
} }
#[bench] #[bench]
fn to_str(b: &mut Bencher) { fn to_string(b: &mut Bencher) {
let fac = factorial(100); let fac = factorial(100);
let fib = fib(100); let fib = fib(100);
b.iter(|| { b.iter(|| {
fac.to_str(); fac.to_string();
}); });
b.iter(|| { b.iter(|| {
fib.to_str(); fib.to_string();
}); });
} }

View file

@ -347,9 +347,9 @@ mod test {
} }
#[test] #[test]
fn test_to_str() { fn test_to_string() {
fn test(c : Complex64, s: String) { fn test(c : Complex64, s: String) {
assert_eq!(c.to_str(), s); assert_eq!(c.to_string(), s);
} }
test(_0_0i, "0+0i".to_string()); test(_0_0i, "0+0i".to_string());
test(_1_0i, "1+0i".to_string()); test(_1_0i, "1+0i".to_string());

View file

@ -17,6 +17,7 @@ use std::fmt;
use std::from_str::FromStr; use std::from_str::FromStr;
use std::num; use std::num;
use std::num::{Zero, One, ToStrRadix, FromStrRadix}; use std::num::{Zero, One, ToStrRadix, FromStrRadix};
use bigint::{BigInt, BigUint, Sign, Plus, Minus}; use bigint::{BigInt, BigUint, Sign, Plus, Minus};
/// Represents the ratio between 2 numbers. /// Represents the ratio between 2 numbers.
@ -603,7 +604,7 @@ mod test {
fn test_to_from_str() { fn test_to_from_str() {
fn test(r: Rational, s: String) { fn test(r: Rational, s: String) {
assert_eq!(FromStr::from_str(s.as_slice()), Some(r)); assert_eq!(FromStr::from_str(s.as_slice()), Some(r));
assert_eq!(r.to_str(), s); assert_eq!(r.to_string(), s);
} }
test(_1, "1".to_string()); test(_1, "1".to_string());
test(_0, "0".to_string()); test(_0, "0".to_string());

View file

@ -87,7 +87,7 @@ fn native(cx: &mut ExtCtxt, sp: codemap::Span, tts: &[ast::TokenTree])
let re = match Regex::new(regex.as_slice()) { let re = match Regex::new(regex.as_slice()) {
Ok(re) => re, Ok(re) => re,
Err(err) => { Err(err) => {
cx.span_err(sp, err.to_str().as_slice()); cx.span_err(sp, err.to_string().as_slice());
return DummyResult::any(sp) return DummyResult::any(sp)
} }
}; };
@ -621,11 +621,11 @@ fn parse(cx: &mut ExtCtxt, tts: &[ast::TokenTree]) -> Option<String> {
let regex = match entry.node { let regex = match entry.node {
ast::ExprLit(lit) => { ast::ExprLit(lit) => {
match lit.node { match lit.node {
ast::LitStr(ref s, _) => s.to_str(), ast::LitStr(ref s, _) => s.to_string(),
_ => { _ => {
cx.span_err(entry.span, format!( cx.span_err(entry.span, format!(
"expected string literal but got `{}`", "expected string literal but got `{}`",
pprust::lit_to_str(lit)).as_slice()); pprust::lit_to_string(lit)).as_slice());
return None return None
} }
} }
@ -633,7 +633,7 @@ fn parse(cx: &mut ExtCtxt, tts: &[ast::TokenTree]) -> Option<String> {
_ => { _ => {
cx.span_err(entry.span, format!( cx.span_err(entry.span, format!(
"expected string literal but got `{}`", "expected string literal but got `{}`",
pprust::expr_to_str(entry)).as_slice()); pprust::expr_to_string(entry)).as_slice());
return None return None
} }
}; };

View file

@ -773,7 +773,7 @@ pub fn mangle_exported_name(ccx: &CrateContext, path: PathElems,
pub fn mangle_internal_name_by_type_and_seq(ccx: &CrateContext, pub fn mangle_internal_name_by_type_and_seq(ccx: &CrateContext,
t: ty::t, t: ty::t,
name: &str) -> String { name: &str) -> String {
let s = ppaux::ty_to_str(ccx.tcx(), t); let s = ppaux::ty_to_string(ccx.tcx(), t);
let path = [PathName(token::intern(s.as_slice())), let path = [PathName(token::intern(s.as_slice())),
gensym_name(name)]; gensym_name(name)];
let hash = get_symbol_hash(ccx, t); let hash = get_symbol_hash(ccx, t);

View file

@ -340,13 +340,13 @@ mod svh_visitor {
// trees might be faster. Implementing this is far // trees might be faster. Implementing this is far
// easier in short term. // easier in short term.
let macro_defn_as_string = let macro_defn_as_string =
pprust::to_str(|pp_state| pp_state.print_mac(macro)); pprust::to_string(|pp_state| pp_state.print_mac(macro));
macro_defn_as_string.hash(self.st); macro_defn_as_string.hash(self.st);
} else { } else {
// It is not possible to observe any kind of macro // It is not possible to observe any kind of macro
// invocation at this stage except `macro_rules!`. // invocation at this stage except `macro_rules!`.
fail!("reached macro somehow: {}", fail!("reached macro somehow: {}",
pprust::to_str(|pp_state| pp_state.print_mac(macro))); pprust::to_string(|pp_state| pp_state.print_mac(macro)));
} }
visit::walk_mac(self, macro, e); visit::walk_mac(self, macro, e);

View file

@ -594,7 +594,7 @@ impl pprust::PpAnn for IdentifiedAnnotation {
match node { match node {
pprust::NodeItem(item) => { pprust::NodeItem(item) => {
try!(pp::space(&mut s.s)); try!(pp::space(&mut s.s));
s.synth_comment(item.id.to_str()) s.synth_comment(item.id.to_string())
} }
pprust::NodeBlock(blk) => { pprust::NodeBlock(blk) => {
try!(pp::space(&mut s.s)); try!(pp::space(&mut s.s));
@ -602,7 +602,7 @@ impl pprust::PpAnn for IdentifiedAnnotation {
} }
pprust::NodeExpr(expr) => { pprust::NodeExpr(expr) => {
try!(pp::space(&mut s.s)); try!(pp::space(&mut s.s));
try!(s.synth_comment(expr.id.to_str())); try!(s.synth_comment(expr.id.to_string()));
s.pclose() s.pclose()
} }
pprust::NodePat(pat) => { pprust::NodePat(pat) => {
@ -636,7 +636,7 @@ impl pprust::PpAnn for TypedAnnotation {
try!(pp::word(&mut s.s, "as")); try!(pp::word(&mut s.s, "as"));
try!(pp::space(&mut s.s)); try!(pp::space(&mut s.s));
try!(pp::word(&mut s.s, try!(pp::word(&mut s.s,
ppaux::ty_to_str( ppaux::ty_to_string(
tcx, tcx,
ty::expr_ty(tcx, expr)).as_slice())); ty::expr_ty(tcx, expr)).as_slice()));
s.pclose() s.pclose()

View file

@ -251,7 +251,7 @@ pub fn handle_options(mut args: Vec<String>) -> Option<getopts::Matches> {
match getopts::getopts(args.as_slice(), config::optgroups().as_slice()) { match getopts::getopts(args.as_slice(), config::optgroups().as_slice()) {
Ok(m) => m, Ok(m) => m,
Err(f) => { Err(f) => {
early_error(f.to_str().as_slice()); early_error(f.to_string().as_slice());
} }
}; };
@ -450,7 +450,7 @@ fn monitor(f: proc():Send) {
emitter.emit(None, note.as_slice(), diagnostic::Note) emitter.emit(None, note.as_slice(), diagnostic::Note)
} }
match r.read_to_str() { match r.read_to_string() {
Ok(s) => println!("{}", s), Ok(s) => println!("{}", s),
Err(e) => { Err(e) => {
emitter.emit(None, emitter.emit(None,

View file

@ -347,7 +347,7 @@ fn mk_test_module(cx: &TestCtxt) -> Gc<ast::Item> {
span: DUMMY_SP, span: DUMMY_SP,
}; };
debug!("Synthetic test module:\n{}\n", pprust::item_to_str(&item)); debug!("Synthetic test module:\n{}\n", pprust::item_to_string(&item));
box(GC) item box(GC) item
} }

View file

@ -1883,7 +1883,7 @@ impl TypeNames {
self.named_types.borrow().find_equiv(&s).map(|x| Type::from_ref(*x)) self.named_types.borrow().find_equiv(&s).map(|x| Type::from_ref(*x))
} }
pub fn type_to_str(&self, ty: Type) -> String { pub fn type_to_string(&self, ty: Type) -> String {
unsafe { unsafe {
let s = llvm::LLVMTypeToString(ty.to_ref()); let s = llvm::LLVMTypeToString(ty.to_ref());
let ret = from_c_str(s); let ret = from_c_str(s);
@ -1893,11 +1893,11 @@ impl TypeNames {
} }
pub fn types_to_str(&self, tys: &[Type]) -> String { pub fn types_to_str(&self, tys: &[Type]) -> String {
let strs: Vec<String> = tys.iter().map(|t| self.type_to_str(*t)).collect(); let strs: Vec<String> = tys.iter().map(|t| self.type_to_string(*t)).collect();
format!("[{}]", strs.connect(",")) format!("[{}]", strs.connect(","))
} }
pub fn val_to_str(&self, val: ValueRef) -> String { pub fn val_to_string(&self, val: ValueRef) -> String {
unsafe { unsafe {
let s = llvm::LLVMValueToString(val); let s = llvm::LLVMValueToString(val);
let ret = from_c_str(s); let ret = from_c_str(s);
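One small wrinkle in the hunk above: the plural helper `types_to_str` keeps its old name while its body now calls the renamed singular `type_to_string`. A rough sketch of that map-and-join shape, using a stand-in `Ty` type rather than LLVM's, and today's `join` in place of the 2014-era `connect`:

    // Stand-in type; LLVM's Type wrapper is not reproduced here.
    struct Ty(&'static str);

    fn type_to_string(ty: &Ty) -> String {
        ty.0.to_string()
    }

    // Mirrors the shape of TypeNames::types_to_str: stringify each, then join.
    fn types_to_str(tys: &[Ty]) -> String {
        let strs: Vec<String> = tys.iter().map(type_to_string).collect();
        format!("[{}]", strs.join(","))
    }

    fn main() {
        let tys = [Ty("i32"), Ty("f64")];
        assert_eq!(types_to_str(&tys), "[i32,f64]");
    }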

View file

@ -31,7 +31,7 @@ use middle::trans::adt; // for `adt::is_ffi_safe`
use middle::typeck::astconv::ast_ty_to_ty; use middle::typeck::astconv::ast_ty_to_ty;
use middle::typeck::infer; use middle::typeck::infer;
use middle::{typeck, ty, def, pat_util, stability}; use middle::{typeck, ty, def, pat_util, stability};
use util::ppaux::{ty_to_str}; use util::ppaux::{ty_to_string};
use util::nodemap::NodeSet; use util::nodemap::NodeSet;
use lint::{Context, LintPass, LintArray}; use lint::{Context, LintPass, LintArray};
@ -412,14 +412,14 @@ impl HeapMemory {
}); });
if n_uniq > 0 { if n_uniq > 0 {
let s = ty_to_str(cx.tcx, ty); let s = ty_to_string(cx.tcx, ty);
let m = format!("type uses owned (Box type) pointers: {}", s); let m = format!("type uses owned (Box type) pointers: {}", s);
cx.span_lint(OWNED_HEAP_MEMORY, span, m.as_slice()); cx.span_lint(OWNED_HEAP_MEMORY, span, m.as_slice());
cx.span_lint(HEAP_MEMORY, span, m.as_slice()); cx.span_lint(HEAP_MEMORY, span, m.as_slice());
} }
if n_box > 0 { if n_box > 0 {
let s = ty_to_str(cx.tcx, ty); let s = ty_to_string(cx.tcx, ty);
let m = format!("type uses managed (@ type) pointers: {}", s); let m = format!("type uses managed (@ type) pointers: {}", s);
cx.span_lint(MANAGED_HEAP_MEMORY, span, m.as_slice()); cx.span_lint(MANAGED_HEAP_MEMORY, span, m.as_slice());
cx.span_lint(HEAP_MEMORY, span, m.as_slice()); cx.span_lint(HEAP_MEMORY, span, m.as_slice());

View file

@ -666,7 +666,7 @@ pub fn check_crate(tcx: &ty::ctxt,
for &(lint, span, ref msg) in v.iter() { for &(lint, span, ref msg) in v.iter() {
tcx.sess.span_bug(span, tcx.sess.span_bug(span,
format!("unprocessed lint {} at {}: {}", format!("unprocessed lint {} at {}: {}",
lint.as_str(), tcx.map.node_to_str(*id), *msg).as_slice()) lint.as_str(), tcx.map.node_to_string(*id), *msg).as_slice())
} }
} }

View file

@ -148,12 +148,12 @@ fn extract_crate_info(e: &Env, i: &ast::ViewItem) -> Option<CrateInfo> {
ident, path_opt); ident, path_opt);
let name = match *path_opt { let name = match *path_opt {
Some((ref path_str, _)) => { Some((ref path_str, _)) => {
let name = path_str.get().to_str(); let name = path_str.get().to_string();
validate_crate_name(Some(e.sess), name.as_slice(), validate_crate_name(Some(e.sess), name.as_slice(),
Some(i.span)); Some(i.span));
name name
} }
None => ident.get().to_str(), None => ident.get().to_string(),
}; };
Some(CrateInfo { Some(CrateInfo {
ident: ident.get().to_string(), ident: ident.get().to_string(),

View file

@ -1067,7 +1067,7 @@ fn list_crate_attributes(md: ebml::Doc, hash: &Svh,
let r = get_attributes(md); let r = get_attributes(md);
for attr in r.iter() { for attr in r.iter() {
try!(write!(out, "{}\n", pprust::attribute_to_str(attr))); try!(write!(out, "{}\n", pprust::attribute_to_string(attr)));
} }
write!(out, "\n\n") write!(out, "\n\n")

View file

@ -101,7 +101,7 @@ fn encode_impl_type_basename(ebml_w: &mut Encoder, name: Ident) {
} }
pub fn encode_def_id(ebml_w: &mut Encoder, id: DefId) { pub fn encode_def_id(ebml_w: &mut Encoder, id: DefId) {
ebml_w.wr_tagged_str(tag_def_id, def_to_str(id).as_slice()); ebml_w.wr_tagged_str(tag_def_id, def_to_string(id).as_slice());
} }
#[deriving(Clone)] #[deriving(Clone)]
@ -116,7 +116,7 @@ fn encode_trait_ref(ebml_w: &mut Encoder,
tag: uint) { tag: uint) {
let ty_str_ctxt = &tyencode::ctxt { let ty_str_ctxt = &tyencode::ctxt {
diag: ecx.diag, diag: ecx.diag,
ds: def_to_str, ds: def_to_string,
tcx: ecx.tcx, tcx: ecx.tcx,
abbrevs: &ecx.type_abbrevs abbrevs: &ecx.type_abbrevs
}; };
@ -141,7 +141,7 @@ fn encode_family(ebml_w: &mut Encoder, c: char) {
ebml_w.end_tag(); ebml_w.end_tag();
} }
pub fn def_to_str(did: DefId) -> String { pub fn def_to_string(did: DefId) -> String {
format!("{}:{}", did.krate, did.node) format!("{}:{}", did.krate, did.node)
} }
@ -151,7 +151,7 @@ fn encode_ty_type_param_defs(ebml_w: &mut Encoder,
tag: uint) { tag: uint) {
let ty_str_ctxt = &tyencode::ctxt { let ty_str_ctxt = &tyencode::ctxt {
diag: ecx.diag, diag: ecx.diag,
ds: def_to_str, ds: def_to_string,
tcx: ecx.tcx, tcx: ecx.tcx,
abbrevs: &ecx.type_abbrevs abbrevs: &ecx.type_abbrevs
}; };
@ -172,7 +172,7 @@ fn encode_region_param_defs(ebml_w: &mut Encoder,
ebml_w.end_tag(); ebml_w.end_tag();
ebml_w.wr_tagged_str(tag_region_param_def_def_id, ebml_w.wr_tagged_str(tag_region_param_def_def_id,
def_to_str(param.def_id).as_slice()); def_to_string(param.def_id).as_slice());
ebml_w.wr_tagged_u64(tag_region_param_def_space, ebml_w.wr_tagged_u64(tag_region_param_def_space,
param.space.to_uint() as u64); param.space.to_uint() as u64);
@ -204,7 +204,7 @@ fn encode_bounds_and_type(ebml_w: &mut Encoder,
fn encode_variant_id(ebml_w: &mut Encoder, vid: DefId) { fn encode_variant_id(ebml_w: &mut Encoder, vid: DefId) {
ebml_w.start_tag(tag_items_data_item_variant); ebml_w.start_tag(tag_items_data_item_variant);
let s = def_to_str(vid); let s = def_to_string(vid);
ebml_w.writer.write(s.as_bytes()); ebml_w.writer.write(s.as_bytes());
ebml_w.end_tag(); ebml_w.end_tag();
} }
@ -214,7 +214,7 @@ pub fn write_type(ecx: &EncodeContext,
typ: ty::t) { typ: ty::t) {
let ty_str_ctxt = &tyencode::ctxt { let ty_str_ctxt = &tyencode::ctxt {
diag: ecx.diag, diag: ecx.diag,
ds: def_to_str, ds: def_to_string,
tcx: ecx.tcx, tcx: ecx.tcx,
abbrevs: &ecx.type_abbrevs abbrevs: &ecx.type_abbrevs
}; };
@ -236,7 +236,7 @@ fn encode_method_fty(ecx: &EncodeContext,
let ty_str_ctxt = &tyencode::ctxt { let ty_str_ctxt = &tyencode::ctxt {
diag: ecx.diag, diag: ecx.diag,
ds: def_to_str, ds: def_to_string,
tcx: ecx.tcx, tcx: ecx.tcx,
abbrevs: &ecx.type_abbrevs abbrevs: &ecx.type_abbrevs
}; };
@ -266,14 +266,14 @@ fn encode_disr_val(_: &EncodeContext,
ebml_w: &mut Encoder, ebml_w: &mut Encoder,
disr_val: ty::Disr) { disr_val: ty::Disr) {
ebml_w.start_tag(tag_disr_val); ebml_w.start_tag(tag_disr_val);
let s = disr_val.to_str(); let s = disr_val.to_string();
ebml_w.writer.write(s.as_bytes()); ebml_w.writer.write(s.as_bytes());
ebml_w.end_tag(); ebml_w.end_tag();
} }
fn encode_parent_item(ebml_w: &mut Encoder, id: DefId) { fn encode_parent_item(ebml_w: &mut Encoder, id: DefId) {
ebml_w.start_tag(tag_items_data_parent_item); ebml_w.start_tag(tag_items_data_parent_item);
let s = def_to_str(id); let s = def_to_string(id);
ebml_w.writer.write(s.as_bytes()); ebml_w.writer.write(s.as_bytes());
ebml_w.end_tag(); ebml_w.end_tag();
} }
@ -291,7 +291,7 @@ fn encode_struct_fields(ebml_w: &mut Encoder,
encode_struct_field_family(ebml_w, f.vis); encode_struct_field_family(ebml_w, f.vis);
encode_def_id(ebml_w, f.id); encode_def_id(ebml_w, f.id);
ebml_w.start_tag(tag_item_field_origin); ebml_w.start_tag(tag_item_field_origin);
let s = def_to_str(origin); let s = def_to_string(origin);
ebml_w.writer.write(s.as_bytes()); ebml_w.writer.write(s.as_bytes());
ebml_w.end_tag(); ebml_w.end_tag();
ebml_w.end_tag(); ebml_w.end_tag();
@ -382,7 +382,7 @@ fn encode_reexported_static_method(ebml_w: &mut Encoder,
exp.name, token::get_ident(method_ident)); exp.name, token::get_ident(method_ident));
ebml_w.start_tag(tag_items_data_item_reexport); ebml_w.start_tag(tag_items_data_item_reexport);
ebml_w.start_tag(tag_items_data_item_reexport_def_id); ebml_w.start_tag(tag_items_data_item_reexport_def_id);
ebml_w.wr_str(def_to_str(method_def_id).as_slice()); ebml_w.wr_str(def_to_string(method_def_id).as_slice());
ebml_w.end_tag(); ebml_w.end_tag();
ebml_w.start_tag(tag_items_data_item_reexport_name); ebml_w.start_tag(tag_items_data_item_reexport_name);
ebml_w.wr_str(format!("{}::{}", ebml_w.wr_str(format!("{}::{}",
@ -529,7 +529,7 @@ fn encode_reexports(ecx: &EncodeContext,
id); id);
ebml_w.start_tag(tag_items_data_item_reexport); ebml_w.start_tag(tag_items_data_item_reexport);
ebml_w.start_tag(tag_items_data_item_reexport_def_id); ebml_w.start_tag(tag_items_data_item_reexport_def_id);
ebml_w.wr_str(def_to_str(exp.def_id).as_slice()); ebml_w.wr_str(def_to_string(exp.def_id).as_slice());
ebml_w.end_tag(); ebml_w.end_tag();
ebml_w.start_tag(tag_items_data_item_reexport_name); ebml_w.start_tag(tag_items_data_item_reexport_name);
ebml_w.wr_str(exp.name.as_slice()); ebml_w.wr_str(exp.name.as_slice());
@ -562,12 +562,12 @@ fn encode_info_for_mod(ecx: &EncodeContext,
// Encode info about all the module children. // Encode info about all the module children.
for item in md.items.iter() { for item in md.items.iter() {
ebml_w.start_tag(tag_mod_child); ebml_w.start_tag(tag_mod_child);
ebml_w.wr_str(def_to_str(local_def(item.id)).as_slice()); ebml_w.wr_str(def_to_string(local_def(item.id)).as_slice());
ebml_w.end_tag(); ebml_w.end_tag();
each_auxiliary_node_id(*item, |auxiliary_node_id| { each_auxiliary_node_id(*item, |auxiliary_node_id| {
ebml_w.start_tag(tag_mod_child); ebml_w.start_tag(tag_mod_child);
ebml_w.wr_str(def_to_str(local_def( ebml_w.wr_str(def_to_string(local_def(
auxiliary_node_id)).as_slice()); auxiliary_node_id)).as_slice());
ebml_w.end_tag(); ebml_w.end_tag();
true true
@ -579,10 +579,10 @@ fn encode_info_for_mod(ecx: &EncodeContext,
debug!("(encoding info for module) ... encoding impl {} \ debug!("(encoding info for module) ... encoding impl {} \
({:?}/{:?})", ({:?}/{:?})",
token::get_ident(ident), token::get_ident(ident),
did, ecx.tcx.map.node_to_str(did)); did, ecx.tcx.map.node_to_string(did));
ebml_w.start_tag(tag_mod_impl); ebml_w.start_tag(tag_mod_impl);
ebml_w.wr_str(def_to_str(local_def(did)).as_slice()); ebml_w.wr_str(def_to_string(local_def(did)).as_slice());
ebml_w.end_tag(); ebml_w.end_tag();
} }
_ => {} _ => {}
@ -659,7 +659,7 @@ fn encode_provided_source(ebml_w: &mut Encoder,
source_opt: Option<DefId>) { source_opt: Option<DefId>) {
for source in source_opt.iter() { for source in source_opt.iter() {
ebml_w.start_tag(tag_item_method_provided_source); ebml_w.start_tag(tag_item_method_provided_source);
let s = def_to_str(*source); let s = def_to_string(*source);
ebml_w.writer.write(s.as_bytes()); ebml_w.writer.write(s.as_bytes());
ebml_w.end_tag(); ebml_w.end_tag();
} }
@ -906,7 +906,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
} }
debug!("encoding info for item at {}", debug!("encoding info for item at {}",
tcx.sess.codemap().span_to_str(item.span)); tcx.sess.codemap().span_to_string(item.span));
let def_id = local_def(item.id); let def_id = local_def(item.id);
let stab = stability::lookup(tcx, ast_util::local_def(item.id)); let stab = stability::lookup(tcx, ast_util::local_def(item.id));
@ -977,7 +977,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
// Encode all the items in this module. // Encode all the items in this module.
for foreign_item in fm.items.iter() { for foreign_item in fm.items.iter() {
ebml_w.start_tag(tag_mod_child); ebml_w.start_tag(tag_mod_child);
ebml_w.wr_str(def_to_str(local_def(foreign_item.id)).as_slice()); ebml_w.wr_str(def_to_string(local_def(foreign_item.id)).as_slice());
ebml_w.end_tag(); ebml_w.end_tag();
} }
encode_visibility(ebml_w, vis); encode_visibility(ebml_w, vis);
@ -1101,7 +1101,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
} }
for &method_def_id in methods.iter() { for &method_def_id in methods.iter() {
ebml_w.start_tag(tag_item_impl_method); ebml_w.start_tag(tag_item_impl_method);
let s = def_to_str(method_def_id); let s = def_to_string(method_def_id);
ebml_w.writer.write(s.as_bytes()); ebml_w.writer.write(s.as_bytes());
ebml_w.end_tag(); ebml_w.end_tag();
} }
@ -1161,7 +1161,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
ebml_w.end_tag(); ebml_w.end_tag();
ebml_w.start_tag(tag_mod_child); ebml_w.start_tag(tag_mod_child);
ebml_w.wr_str(def_to_str(method_def_id).as_slice()); ebml_w.wr_str(def_to_string(method_def_id).as_slice());
ebml_w.end_tag(); ebml_w.end_tag();
} }
encode_path(ebml_w, path.clone()); encode_path(ebml_w, path.clone());
@ -1314,7 +1314,7 @@ fn my_visit_foreign_item(ni: &ForeignItem,
// See above // See above
let ecx: &EncodeContext = unsafe { mem::transmute(ecx_ptr) }; let ecx: &EncodeContext = unsafe { mem::transmute(ecx_ptr) };
debug!("writing foreign item {}::{}", debug!("writing foreign item {}::{}",
ecx.tcx.map.path_to_str(ni.id), ecx.tcx.map.path_to_string(ni.id),
token::get_ident(ni.ident)); token::get_ident(ni.ident));
let mut ebml_w = unsafe { let mut ebml_w = unsafe {
@ -1680,12 +1680,12 @@ fn encode_misc_info(ecx: &EncodeContext,
ebml_w.start_tag(tag_misc_info_crate_items); ebml_w.start_tag(tag_misc_info_crate_items);
for &item in krate.module.items.iter() { for &item in krate.module.items.iter() {
ebml_w.start_tag(tag_mod_child); ebml_w.start_tag(tag_mod_child);
ebml_w.wr_str(def_to_str(local_def(item.id)).as_slice()); ebml_w.wr_str(def_to_string(local_def(item.id)).as_slice());
ebml_w.end_tag(); ebml_w.end_tag();
each_auxiliary_node_id(item, |auxiliary_node_id| { each_auxiliary_node_id(item, |auxiliary_node_id| {
ebml_w.start_tag(tag_mod_child); ebml_w.start_tag(tag_mod_child);
ebml_w.wr_str(def_to_str(local_def( ebml_w.wr_str(def_to_string(local_def(
auxiliary_node_id)).as_slice()); auxiliary_node_id)).as_slice());
ebml_w.end_tag(); ebml_w.end_tag();
true true
@ -1926,7 +1926,7 @@ pub fn encoded_ty(tcx: &ty::ctxt, t: ty::t) -> String {
let mut wr = MemWriter::new(); let mut wr = MemWriter::new();
tyencode::enc_ty(&mut wr, &tyencode::ctxt { tyencode::enc_ty(&mut wr, &tyencode::ctxt {
diag: tcx.sess.diagnostic(), diag: tcx.sess.diagnostic(),
ds: def_to_str, ds: def_to_string,
tcx: tcx, tcx: tcx,
abbrevs: &RefCell::new(HashMap::new()) abbrevs: &RefCell::new(HashMap::new())
}, t); }, t);
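The encoder changes above all funnel through the same small pattern: render an id or discriminant to a `String`, then write its bytes into the tagged output. A compressed sketch of that shape, with hypothetical `DefId`/`Encoder` stand-ins (the real metadata encoder's API is not reproduced here):

    // Hypothetical stand-ins for illustration; not rustc's metadata types.
    struct DefId { krate: u32, node: u32 }

    struct Encoder { buf: Vec<u8> }

    impl Encoder {
        fn write_str(&mut self, s: &str) {
            self.buf.extend_from_slice(s.as_bytes());
        }
    }

    // Same "krate:node" formatting as def_to_string in the hunk above.
    fn def_to_string(did: &DefId) -> String {
        format!("{}:{}", did.krate, did.node)
    }

    fn main() {
        let mut w = Encoder { buf: Vec::new() };
        let did = DefId { krate: 0, node: 17 };
        w.write_str(&def_to_string(&did));
        assert_eq!(&w.buf[..], b"0:17");
    }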

View file

@ -28,7 +28,7 @@ use middle::subst;
use middle::subst::VecPerParamSpace; use middle::subst::VecPerParamSpace;
use middle::typeck::{MethodCall, MethodCallee, MethodOrigin}; use middle::typeck::{MethodCall, MethodCallee, MethodOrigin};
use middle::{ty, typeck}; use middle::{ty, typeck};
use util::ppaux::ty_to_str; use util::ppaux::ty_to_string;
use syntax::{ast, ast_map, ast_util, codemap, fold}; use syntax::{ast, ast_map, ast_util, codemap, fold};
use syntax::codemap::Span; use syntax::codemap::Span;
@ -86,7 +86,7 @@ pub fn encode_inlined_item(ecx: &e::EncodeContext,
e::IIMethodRef(_, _, m) => m.id, e::IIMethodRef(_, _, m) => m.id,
}; };
debug!("> Encoding inlined item: {} ({})", debug!("> Encoding inlined item: {} ({})",
ecx.tcx.map.path_to_str(id), ecx.tcx.map.path_to_string(id),
ebml_w.writer.tell()); ebml_w.writer.tell());
let ii = simplify_ast(ii); let ii = simplify_ast(ii);
@ -99,7 +99,7 @@ pub fn encode_inlined_item(ecx: &e::EncodeContext,
ebml_w.end_tag(); ebml_w.end_tag();
debug!("< Encoded inlined fn: {} ({})", debug!("< Encoded inlined fn: {} ({})",
ecx.tcx.map.path_to_str(id), ecx.tcx.map.path_to_string(id),
ebml_w.writer.tell()); ebml_w.writer.tell());
} }
@ -119,7 +119,7 @@ pub fn decode_inlined_item(cdata: &cstore::crate_metadata,
debug!("> Decoding inlined fn: {}::?", debug!("> Decoding inlined fn: {}::?",
{ {
// Do an Option dance to use the path after it is moved below. // Do an Option dance to use the path after it is moved below.
let s = ast_map::path_to_str(ast_map::Values(path.iter())); let s = ast_map::path_to_string(ast_map::Values(path.iter()));
path_as_str = Some(s); path_as_str = Some(s);
path_as_str.as_ref().map(|x| x.as_slice()) path_as_str.as_ref().map(|x| x.as_slice())
}); });
@ -147,7 +147,7 @@ pub fn decode_inlined_item(cdata: &cstore::crate_metadata,
match ii { match ii {
ast::IIItem(i) => { ast::IIItem(i) => {
debug!(">>> DECODED ITEM >>>\n{}\n<<< DECODED ITEM <<<", debug!(">>> DECODED ITEM >>>\n{}\n<<< DECODED ITEM <<<",
syntax::print::pprust::item_to_str(&*i)); syntax::print::pprust::item_to_string(&*i));
} }
_ => { } _ => { }
} }
@ -826,7 +826,7 @@ impl<'a> get_ty_str_ctxt for e::EncodeContext<'a> {
fn ty_str_ctxt<'a>(&'a self) -> tyencode::ctxt<'a> { fn ty_str_ctxt<'a>(&'a self) -> tyencode::ctxt<'a> {
tyencode::ctxt { tyencode::ctxt {
diag: self.tcx.sess.diagnostic(), diag: self.tcx.sess.diagnostic(),
ds: e::def_to_str, ds: e::def_to_string,
tcx: self.tcx, tcx: self.tcx,
abbrevs: &self.type_abbrevs abbrevs: &self.type_abbrevs
} }
@ -1391,7 +1391,7 @@ fn decode_side_tables(xcx: &ExtendedDecodeContext,
c::tag_table_node_type => { c::tag_table_node_type => {
let ty = val_dsr.read_ty(xcx); let ty = val_dsr.read_ty(xcx);
debug!("inserting ty for node {:?}: {}", debug!("inserting ty for node {:?}: {}",
id, ty_to_str(dcx.tcx, ty)); id, ty_to_string(dcx.tcx, ty));
dcx.tcx.node_types.borrow_mut().insert(id as uint, ty); dcx.tcx.node_types.borrow_mut().insert(id as uint, ty);
} }
c::tag_table_item_subst => { c::tag_table_item_subst => {
@ -1561,7 +1561,7 @@ fn test_simplification() {
).unwrap()); ).unwrap());
match (item_out, item_exp) { match (item_out, item_exp) {
(ast::IIItem(item_out), ast::IIItem(item_exp)) => { (ast::IIItem(item_out), ast::IIItem(item_exp)) => {
assert!(pprust::item_to_str(item_out) == pprust::item_to_str(item_exp)); assert!(pprust::item_to_string(item_out) == pprust::item_to_string(item_exp));
} }
_ => fail!() _ => fail!()
} }

View file

@ -351,7 +351,7 @@ impl<'a> CheckLoanCtxt<'a> {
"it".to_string() "it".to_string()
} else { } else {
format!("`{}`", format!("`{}`",
self.bccx.loan_path_to_str(&*old_loan.loan_path)) self.bccx.loan_path_to_string(&*old_loan.loan_path))
}; };
match (new_loan.kind, old_loan.kind) { match (new_loan.kind, old_loan.kind) {
@ -360,7 +360,7 @@ impl<'a> CheckLoanCtxt<'a> {
new_loan.span, new_loan.span,
format!("cannot borrow `{}` as mutable \ format!("cannot borrow `{}` as mutable \
more than once at a time", more than once at a time",
self.bccx.loan_path_to_str( self.bccx.loan_path_to_string(
&*new_loan.loan_path)).as_slice()); &*new_loan.loan_path)).as_slice());
} }
@ -369,7 +369,7 @@ impl<'a> CheckLoanCtxt<'a> {
new_loan.span, new_loan.span,
format!("closure requires unique access to `{}` \ format!("closure requires unique access to `{}` \
but {} is already borrowed", but {} is already borrowed",
self.bccx.loan_path_to_str(&*new_loan.loan_path), self.bccx.loan_path_to_string(&*new_loan.loan_path),
old_pronoun).as_slice()); old_pronoun).as_slice());
} }
@ -378,7 +378,7 @@ impl<'a> CheckLoanCtxt<'a> {
new_loan.span, new_loan.span,
format!("cannot borrow `{}` as {} because \ format!("cannot borrow `{}` as {} because \
previous closure requires unique access", previous closure requires unique access",
self.bccx.loan_path_to_str(&*new_loan.loan_path), self.bccx.loan_path_to_string(&*new_loan.loan_path),
new_loan.kind.to_user_str()).as_slice()); new_loan.kind.to_user_str()).as_slice());
} }
@ -387,7 +387,7 @@ impl<'a> CheckLoanCtxt<'a> {
new_loan.span, new_loan.span,
format!("cannot borrow `{}` as {} because \ format!("cannot borrow `{}` as {} because \
{} is also borrowed as {}", {} is also borrowed as {}",
self.bccx.loan_path_to_str(&*new_loan.loan_path), self.bccx.loan_path_to_string(&*new_loan.loan_path),
new_loan.kind.to_user_str(), new_loan.kind.to_user_str(),
old_pronoun, old_pronoun,
old_loan.kind.to_user_str()).as_slice()); old_loan.kind.to_user_str()).as_slice());
@ -399,7 +399,7 @@ impl<'a> CheckLoanCtxt<'a> {
self.bccx.span_note( self.bccx.span_note(
span, span,
format!("borrow occurs due to use of `{}` in closure", format!("borrow occurs due to use of `{}` in closure",
self.bccx.loan_path_to_str( self.bccx.loan_path_to_string(
&*new_loan.loan_path)).as_slice()); &*new_loan.loan_path)).as_slice());
} }
_ => { } _ => { }
@ -410,7 +410,7 @@ impl<'a> CheckLoanCtxt<'a> {
format!("the mutable borrow prevents subsequent \ format!("the mutable borrow prevents subsequent \
moves, borrows, or modification of `{0}` \ moves, borrows, or modification of `{0}` \
until the borrow ends", until the borrow ends",
self.bccx.loan_path_to_str( self.bccx.loan_path_to_string(
&*old_loan.loan_path)) &*old_loan.loan_path))
} }
@ -418,14 +418,14 @@ impl<'a> CheckLoanCtxt<'a> {
format!("the immutable borrow prevents subsequent \ format!("the immutable borrow prevents subsequent \
moves or mutable borrows of `{0}` \ moves or mutable borrows of `{0}` \
until the borrow ends", until the borrow ends",
self.bccx.loan_path_to_str(&*old_loan.loan_path)) self.bccx.loan_path_to_string(&*old_loan.loan_path))
} }
ty::UniqueImmBorrow => { ty::UniqueImmBorrow => {
format!("the unique capture prevents subsequent \ format!("the unique capture prevents subsequent \
moves or borrows of `{0}` \ moves or borrows of `{0}` \
until the borrow ends", until the borrow ends",
self.bccx.loan_path_to_str(&*old_loan.loan_path)) self.bccx.loan_path_to_string(&*old_loan.loan_path))
} }
}; };
@ -433,7 +433,7 @@ impl<'a> CheckLoanCtxt<'a> {
euv::ClosureCapture(_) => { euv::ClosureCapture(_) => {
format!("previous borrow of `{}` occurs here due to \ format!("previous borrow of `{}` occurs here due to \
use in closure", use in closure",
self.bccx.loan_path_to_str(&*old_loan.loan_path)) self.bccx.loan_path_to_string(&*old_loan.loan_path))
} }
euv::OverloadedOperator(..) | euv::OverloadedOperator(..) |
@ -442,7 +442,7 @@ impl<'a> CheckLoanCtxt<'a> {
euv::ClosureInvocation(..) | euv::ClosureInvocation(..) |
euv::RefBinding(..) => { euv::RefBinding(..) => {
format!("previous borrow of `{}` occurs here", format!("previous borrow of `{}` occurs here",
self.bccx.loan_path_to_str(&*old_loan.loan_path)) self.bccx.loan_path_to_string(&*old_loan.loan_path))
} }
}; };
@ -518,12 +518,12 @@ impl<'a> CheckLoanCtxt<'a> {
self.bccx.span_err( self.bccx.span_err(
span, span,
format!("cannot use `{}` because it was mutably borrowed", format!("cannot use `{}` because it was mutably borrowed",
self.bccx.loan_path_to_str(copy_path).as_slice()) self.bccx.loan_path_to_string(copy_path).as_slice())
.as_slice()); .as_slice());
self.bccx.span_note( self.bccx.span_note(
loan_span, loan_span,
format!("borrow of `{}` occurs here", format!("borrow of `{}` occurs here",
self.bccx.loan_path_to_str(&*loan_path).as_slice()) self.bccx.loan_path_to_string(&*loan_path).as_slice())
.as_slice()); .as_slice());
} }
} }
@ -543,19 +543,19 @@ impl<'a> CheckLoanCtxt<'a> {
let err_message = match move_kind { let err_message = match move_kind {
move_data::Captured => move_data::Captured =>
format!("cannot move `{}` into closure because it is borrowed", format!("cannot move `{}` into closure because it is borrowed",
self.bccx.loan_path_to_str(move_path).as_slice()), self.bccx.loan_path_to_string(move_path).as_slice()),
move_data::Declared | move_data::Declared |
move_data::MoveExpr | move_data::MoveExpr |
move_data::MovePat => move_data::MovePat =>
format!("cannot move out of `{}` because it is borrowed", format!("cannot move out of `{}` because it is borrowed",
self.bccx.loan_path_to_str(move_path).as_slice()) self.bccx.loan_path_to_string(move_path).as_slice())
}; };
self.bccx.span_err(span, err_message.as_slice()); self.bccx.span_err(span, err_message.as_slice());
self.bccx.span_note( self.bccx.span_note(
loan_span, loan_span,
format!("borrow of `{}` occurs here", format!("borrow of `{}` occurs here",
self.bccx.loan_path_to_str(&*loan_path).as_slice()) self.bccx.loan_path_to_string(&*loan_path).as_slice())
.as_slice()); .as_slice());
} }
} }
@ -567,7 +567,7 @@ impl<'a> CheckLoanCtxt<'a> {
borrow_kind: ty::BorrowKind) borrow_kind: ty::BorrowKind)
-> UseError { -> UseError {
debug!("analyze_restrictions_on_use(expr_id={:?}, use_path={})", debug!("analyze_restrictions_on_use(expr_id={:?}, use_path={})",
self.tcx().map.node_to_str(expr_id), self.tcx().map.node_to_string(expr_id),
use_path.repr(self.tcx())); use_path.repr(self.tcx()));
let mut ret = UseOk; let mut ret = UseOk;
@ -690,15 +690,15 @@ impl<'a> CheckLoanCtxt<'a> {
assignment_span, assignment_span,
format!("cannot assign to {} {} `{}`", format!("cannot assign to {} {} `{}`",
assignee_cmt.mutbl.to_user_str(), assignee_cmt.mutbl.to_user_str(),
self.bccx.cmt_to_str(&*assignee_cmt), self.bccx.cmt_to_string(&*assignee_cmt),
self.bccx.loan_path_to_str(&*lp)).as_slice()); self.bccx.loan_path_to_string(&*lp)).as_slice());
} }
None => { None => {
self.bccx.span_err( self.bccx.span_err(
assignment_span, assignment_span,
format!("cannot assign to {} {}", format!("cannot assign to {} {}",
assignee_cmt.mutbl.to_user_str(), assignee_cmt.mutbl.to_user_str(),
self.bccx.cmt_to_str(&*assignee_cmt)).as_slice()); self.bccx.cmt_to_string(&*assignee_cmt)).as_slice());
} }
} }
return; return;
@ -824,10 +824,10 @@ impl<'a> CheckLoanCtxt<'a> {
self.bccx.span_err( self.bccx.span_err(
span, span,
format!("cannot assign to `{}` because it is borrowed", format!("cannot assign to `{}` because it is borrowed",
self.bccx.loan_path_to_str(loan_path)).as_slice()); self.bccx.loan_path_to_string(loan_path)).as_slice());
self.bccx.span_note( self.bccx.span_note(
loan.span, loan.span,
format!("borrow of `{}` occurs here", format!("borrow of `{}` occurs here",
self.bccx.loan_path_to_str(loan_path)).as_slice()); self.bccx.loan_path_to_string(loan_path)).as_slice());
} }
} }

View file

@ -120,7 +120,7 @@ fn report_cannot_move_out_of(bccx: &BorrowckCtxt, move_from: mc::cmt) {
bccx.span_err( bccx.span_err(
move_from.span, move_from.span,
format!("cannot move out of {}", format!("cannot move out of {}",
bccx.cmt_to_str(&*move_from)).as_slice()); bccx.cmt_to_string(&*move_from)).as_slice());
} }
mc::cat_downcast(ref b) | mc::cat_downcast(ref b) |
@ -145,7 +145,7 @@ fn note_move_destination(bccx: &BorrowckCtxt,
move_to_span: codemap::Span, move_to_span: codemap::Span,
pat_ident: &ast::Ident, pat_ident: &ast::Ident,
is_first_note: bool) { is_first_note: bool) {
let pat_name = pprust::ident_to_str(pat_ident); let pat_name = pprust::ident_to_string(pat_ident);
if is_first_note { if is_first_note {
bccx.span_note( bccx.span_note(
move_to_span, move_to_span,

View file

@ -418,7 +418,7 @@ impl<'a> BorrowckCtxt<'a> {
pub fn report(&self, err: BckError) { pub fn report(&self, err: BckError) {
self.span_err( self.span_err(
err.span, err.span,
self.bckerr_to_str(&err).as_slice()); self.bckerr_to_string(&err).as_slice());
self.note_and_explain_bckerr(err); self.note_and_explain_bckerr(err);
} }
@ -439,7 +439,7 @@ impl<'a> BorrowckCtxt<'a> {
use_span, use_span,
format!("{} of possibly uninitialized variable: `{}`", format!("{} of possibly uninitialized variable: `{}`",
verb, verb,
self.loan_path_to_str(lp)).as_slice()); self.loan_path_to_string(lp)).as_slice());
} }
_ => { _ => {
let partially = if lp == moved_lp {""} else {"partially "}; let partially = if lp == moved_lp {""} else {"partially "};
@ -448,7 +448,7 @@ impl<'a> BorrowckCtxt<'a> {
format!("{} of {}moved value: `{}`", format!("{} of {}moved value: `{}`",
verb, verb,
partially, partially,
self.loan_path_to_str(lp)).as_slice()); self.loan_path_to_string(lp)).as_slice());
} }
} }
@ -472,7 +472,7 @@ impl<'a> BorrowckCtxt<'a> {
self.tcx.sess.span_note( self.tcx.sess.span_note(
expr_span, expr_span,
format!("`{}` moved here because it has type `{}`, which is {}", format!("`{}` moved here because it has type `{}`, which is {}",
self.loan_path_to_str(moved_lp), self.loan_path_to_string(moved_lp),
expr_ty.user_string(self.tcx), expr_ty.user_string(self.tcx),
suggestion).as_slice()); suggestion).as_slice());
} }
@ -483,7 +483,7 @@ impl<'a> BorrowckCtxt<'a> {
format!("`{}` moved here because it has type `{}`, \ format!("`{}` moved here because it has type `{}`, \
which is moved by default (use `ref` to \ which is moved by default (use `ref` to \
override)", override)",
self.loan_path_to_str(moved_lp), self.loan_path_to_string(moved_lp),
pat_ty.user_string(self.tcx)).as_slice()); pat_ty.user_string(self.tcx)).as_slice());
} }
@ -506,7 +506,7 @@ impl<'a> BorrowckCtxt<'a> {
expr_span, expr_span,
format!("`{}` moved into closure environment here because it \ format!("`{}` moved into closure environment here because it \
has type `{}`, which is {}", has type `{}`, which is {}",
self.loan_path_to_str(moved_lp), self.loan_path_to_string(moved_lp),
expr_ty.user_string(self.tcx), expr_ty.user_string(self.tcx),
suggestion).as_slice()); suggestion).as_slice());
} }
@ -536,7 +536,7 @@ impl<'a> BorrowckCtxt<'a> {
self.tcx.sess.span_err( self.tcx.sess.span_err(
span, span,
format!("re-assignment of immutable variable `{}`", format!("re-assignment of immutable variable `{}`",
self.loan_path_to_str(lp)).as_slice()); self.loan_path_to_string(lp)).as_slice());
self.tcx.sess.span_note(assign.span, "prior assignment occurs here"); self.tcx.sess.span_note(assign.span, "prior assignment occurs here");
} }
@ -552,20 +552,20 @@ impl<'a> BorrowckCtxt<'a> {
self.tcx.sess.span_end_note(s, m); self.tcx.sess.span_end_note(s, m);
} }
pub fn bckerr_to_str(&self, err: &BckError) -> String { pub fn bckerr_to_string(&self, err: &BckError) -> String {
match err.code { match err.code {
err_mutbl => { err_mutbl => {
let descr = match opt_loan_path(&err.cmt) { let descr = match opt_loan_path(&err.cmt) {
None => { None => {
format!("{} {}", format!("{} {}",
err.cmt.mutbl.to_user_str(), err.cmt.mutbl.to_user_str(),
self.cmt_to_str(&*err.cmt)) self.cmt_to_string(&*err.cmt))
} }
Some(lp) => { Some(lp) => {
format!("{} {} `{}`", format!("{} {} `{}`",
err.cmt.mutbl.to_user_str(), err.cmt.mutbl.to_user_str(),
self.cmt_to_str(&*err.cmt), self.cmt_to_string(&*err.cmt),
self.loan_path_to_str(&*lp)) self.loan_path_to_string(&*lp))
} }
}; };
@ -589,7 +589,7 @@ impl<'a> BorrowckCtxt<'a> {
let msg = match opt_loan_path(&err.cmt) { let msg = match opt_loan_path(&err.cmt) {
None => "borrowed value".to_string(), None => "borrowed value".to_string(),
Some(lp) => { Some(lp) => {
format!("`{}`", self.loan_path_to_str(&*lp)) format!("`{}`", self.loan_path_to_string(&*lp))
} }
}; };
format!("{} does not live long enough", msg) format!("{} does not live long enough", msg)
@ -597,9 +597,9 @@ impl<'a> BorrowckCtxt<'a> {
err_borrowed_pointer_too_short(..) => { err_borrowed_pointer_too_short(..) => {
let descr = match opt_loan_path(&err.cmt) { let descr = match opt_loan_path(&err.cmt) {
Some(lp) => { Some(lp) => {
format!("`{}`", self.loan_path_to_str(&*lp)) format!("`{}`", self.loan_path_to_string(&*lp))
} }
None => self.cmt_to_str(&*err.cmt), None => self.cmt_to_string(&*err.cmt),
}; };
format!("lifetime of {} is too short to guarantee \ format!("lifetime of {} is too short to guarantee \
@ -691,9 +691,9 @@ impl<'a> BorrowckCtxt<'a> {
err_borrowed_pointer_too_short(loan_scope, ptr_scope) => { err_borrowed_pointer_too_short(loan_scope, ptr_scope) => {
let descr = match opt_loan_path(&err.cmt) { let descr = match opt_loan_path(&err.cmt) {
Some(lp) => { Some(lp) => {
format!("`{}`", self.loan_path_to_str(&*lp)) format!("`{}`", self.loan_path_to_string(&*lp))
} }
None => self.cmt_to_str(&*err.cmt), None => self.cmt_to_string(&*err.cmt),
}; };
note_and_explain_region( note_and_explain_region(
self.tcx, self.tcx,
@ -710,7 +710,7 @@ impl<'a> BorrowckCtxt<'a> {
} }
} }
pub fn append_loan_path_to_str(&self, pub fn append_loan_path_to_string(&self,
loan_path: &LoanPath, loan_path: &LoanPath,
out: &mut String) { out: &mut String) {
match *loan_path { match *loan_path {
@ -720,7 +720,7 @@ impl<'a> BorrowckCtxt<'a> {
} }
LpExtend(ref lp_base, _, LpInterior(mc::InteriorField(fname))) => { LpExtend(ref lp_base, _, LpInterior(mc::InteriorField(fname))) => {
self.append_autoderefd_loan_path_to_str(&**lp_base, out); self.append_autoderefd_loan_path_to_string(&**lp_base, out);
match fname { match fname {
mc::NamedField(fname) => { mc::NamedField(fname) => {
out.push_char('.'); out.push_char('.');
@ -728,24 +728,24 @@ impl<'a> BorrowckCtxt<'a> {
} }
mc::PositionalField(idx) => { mc::PositionalField(idx) => {
out.push_char('#'); // invent a notation here out.push_char('#'); // invent a notation here
out.push_str(idx.to_str().as_slice()); out.push_str(idx.to_string().as_slice());
} }
} }
} }
LpExtend(ref lp_base, _, LpInterior(mc::InteriorElement(_))) => { LpExtend(ref lp_base, _, LpInterior(mc::InteriorElement(_))) => {
self.append_autoderefd_loan_path_to_str(&**lp_base, out); self.append_autoderefd_loan_path_to_string(&**lp_base, out);
out.push_str("[..]"); out.push_str("[..]");
} }
LpExtend(ref lp_base, _, LpDeref(_)) => { LpExtend(ref lp_base, _, LpDeref(_)) => {
out.push_char('*'); out.push_char('*');
self.append_loan_path_to_str(&**lp_base, out); self.append_loan_path_to_string(&**lp_base, out);
} }
} }
} }
pub fn append_autoderefd_loan_path_to_str(&self, pub fn append_autoderefd_loan_path_to_string(&self,
loan_path: &LoanPath, loan_path: &LoanPath,
out: &mut String) { out: &mut String) {
match *loan_path { match *loan_path {
@ -753,23 +753,23 @@ impl<'a> BorrowckCtxt<'a> {
// For a path like `(*x).f` or `(*x)[3]`, autoderef // For a path like `(*x).f` or `(*x)[3]`, autoderef
// rules would normally allow users to omit the `*x`. // rules would normally allow users to omit the `*x`.
// So just serialize such paths to `x.f` or x[3]` respectively. // So just serialize such paths to `x.f` or x[3]` respectively.
self.append_autoderefd_loan_path_to_str(&**lp_base, out) self.append_autoderefd_loan_path_to_string(&**lp_base, out)
} }
LpVar(..) | LpUpvar(..) | LpExtend(_, _, LpInterior(..)) => { LpVar(..) | LpUpvar(..) | LpExtend(_, _, LpInterior(..)) => {
self.append_loan_path_to_str(loan_path, out) self.append_loan_path_to_string(loan_path, out)
} }
} }
} }
pub fn loan_path_to_str(&self, loan_path: &LoanPath) -> String { pub fn loan_path_to_string(&self, loan_path: &LoanPath) -> String {
let mut result = String::new(); let mut result = String::new();
self.append_loan_path_to_str(loan_path, &mut result); self.append_loan_path_to_string(loan_path, &mut result);
result result
} }
pub fn cmt_to_str(&self, cmt: &mc::cmt_) -> String { pub fn cmt_to_string(&self, cmt: &mc::cmt_) -> String {
self.mc().cmt_to_str(cmt) self.mc().cmt_to_string(cmt)
} }
} }
@ -815,11 +815,11 @@ impl Repr for LoanPath {
fn repr(&self, tcx: &ty::ctxt) -> String { fn repr(&self, tcx: &ty::ctxt) -> String {
match self { match self {
&LpVar(id) => { &LpVar(id) => {
format!("$({})", tcx.map.node_to_str(id)) format!("$({})", tcx.map.node_to_string(id))
} }
&LpUpvar(ty::UpvarId{ var_id, closure_expr_id }) => { &LpUpvar(ty::UpvarId{ var_id, closure_expr_id }) => {
let s = tcx.map.node_to_str(var_id); let s = tcx.map.node_to_string(var_id);
format!("$({} captured by id={})", s, closure_expr_id) format!("$({} captured by id={})", s, closure_expr_id)
} }
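The `append_*_to_string` pair renamed above is a small builder: the recursive helper pushes path segments into one shared `String`, and `loan_path_to_string` just seeds the buffer. A simplified, self-contained sketch of that pattern (this `LoanPath` is a toy stand-in, not borrowck's real type):

    enum LoanPath {
        Var(String),
        Field(Box<LoanPath>, String),
        Deref(Box<LoanPath>),
    }

    // Recursively extend one String instead of allocating per segment.
    fn append_loan_path_to_string(lp: &LoanPath, out: &mut String) {
        match lp {
            LoanPath::Var(name) => out.push_str(name),
            LoanPath::Field(base, fname) => {
                append_loan_path_to_string(base, out);
                out.push('.');
                out.push_str(fname);
            }
            LoanPath::Deref(base) => {
                out.push('*');
                append_loan_path_to_string(base, out);
            }
        }
    }

    fn loan_path_to_string(lp: &LoanPath) -> String {
        let mut result = String::new();
        append_loan_path_to_string(lp, &mut result);
        result
    }

    fn main() {
        let lp = LoanPath::Deref(Box::new(LoanPath::Field(
            Box::new(LoanPath::Var("x".to_string())),
            "f".to_string(),
        )));
        assert_eq!(loan_path_to_string(&lp), "*x.f");
    }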

View file

@ -64,7 +64,7 @@ impl<'a> dot::Labeller<'a, Node<'a>, Edge<'a>> for LabelledCFG<'a> {
} else if n.data.id == ast::DUMMY_NODE_ID { } else if n.data.id == ast::DUMMY_NODE_ID {
dot::LabelStr("(dummy_node)".into_maybe_owned()) dot::LabelStr("(dummy_node)".into_maybe_owned())
} else { } else {
let s = self.ast_map.node_to_str(n.data.id); let s = self.ast_map.node_to_string(n.data.id);
// left-aligns the lines // left-aligns the lines
let s = replace_newline_with_backslash_l(s); let s = replace_newline_with_backslash_l(s);
dot::EscStr(s.into_maybe_owned()) dot::EscStr(s.into_maybe_owned())
@ -80,7 +80,7 @@ impl<'a> dot::Labeller<'a, Node<'a>, Edge<'a>> for LabelledCFG<'a> {
} else { } else {
put_one = true; put_one = true;
} }
let s = self.ast_map.node_to_str(node_id); let s = self.ast_map.node_to_string(node_id);
// left-aligns the lines // left-aligns the lines
let s = replace_newline_with_backslash_l(s); let s = replace_newline_with_backslash_l(s);
label = label.append(format!("exiting scope_{} {}", label = label.append(format!("exiting scope_{} {}",

View file

@ -108,7 +108,7 @@ fn check_expr(v: &mut CheckCrateVisitor, e: &Expr, is_const: bool) {
.span_err(e.span, .span_err(e.span,
format!("can not cast to `{}` in a constant \ format!("can not cast to `{}` in a constant \
expression", expression",
ppaux::ty_to_str(v.tcx, ety)).as_slice()) ppaux::ty_to_string(v.tcx, ety)).as_slice())
} }
} }
ExprPath(ref pth) => { ExprPath(ref pth) => {

View file

@ -22,10 +22,10 @@ use syntax::ast::*;
use syntax::ast_util::{is_unguarded, walk_pat}; use syntax::ast_util::{is_unguarded, walk_pat};
use syntax::codemap::{Span, Spanned, DUMMY_SP}; use syntax::codemap::{Span, Spanned, DUMMY_SP};
use syntax::owned_slice::OwnedSlice; use syntax::owned_slice::OwnedSlice;
use syntax::print::pprust::pat_to_str; use syntax::print::pprust::pat_to_string;
use syntax::visit; use syntax::visit;
use syntax::visit::{Visitor, FnKind}; use syntax::visit::{Visitor, FnKind};
use util::ppaux::ty_to_str; use util::ppaux::ty_to_string;
struct Matrix(Vec<Vec<Gc<Pat>>>); struct Matrix(Vec<Vec<Gc<Pat>>>);
@ -47,7 +47,7 @@ impl fmt::Show for Matrix {
let &Matrix(ref m) = self; let &Matrix(ref m) = self;
let pretty_printed_matrix: Vec<Vec<String>> = m.iter().map(|row| { let pretty_printed_matrix: Vec<Vec<String>> = m.iter().map(|row| {
row.iter().map(|&pat| pat_to_str(pat)).collect::<Vec<String>>() row.iter().map(|&pat| pat_to_string(pat)).collect::<Vec<String>>()
}).collect(); }).collect();
let column_count = m.iter().map(|row| row.len()).max().unwrap_or(0u); let column_count = m.iter().map(|row| row.len()).max().unwrap_or(0u);
@ -147,7 +147,7 @@ fn check_expr(cx: &mut MatchCheckCtxt, ex: &Expr) {
// We know the type is inhabited, so this must be wrong // We know the type is inhabited, so this must be wrong
cx.tcx.sess.span_err(ex.span, format!("non-exhaustive patterns: \ cx.tcx.sess.span_err(ex.span, format!("non-exhaustive patterns: \
type {} is non-empty", type {} is non-empty",
ty_to_str(cx.tcx, pat_ty)).as_slice()); ty_to_string(cx.tcx, pat_ty)).as_slice());
} }
// If the type *is* empty, it's vacuously exhaustive // If the type *is* empty, it's vacuously exhaustive
return; return;
@ -222,7 +222,8 @@ fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, m: &Matrix) {
[] => wild(), [] => wild(),
_ => unreachable!() _ => unreachable!()
}; };
let msg = format!("non-exhaustive patterns: `{0}` not covered", pat_to_str(&*witness)); let msg = format!("non-exhaustive patterns: `{0}` not covered",
pat_to_string(&*witness));
cx.tcx.sess.span_err(sp, msg.as_slice()); cx.tcx.sess.span_err(sp, msg.as_slice());
} }
NotUseful => { NotUseful => {
@ -780,7 +781,7 @@ fn check_local(cx: &mut MatchCheckCtxt, loc: &Local) {
Some(pat) => { Some(pat) => {
let msg = format!( let msg = format!(
"refutable pattern in {} binding: `{}` not covered", "refutable pattern in {} binding: `{}` not covered",
name, pat_to_str(&*pat) name, pat_to_string(&*pat)
); );
cx.tcx.sess.span_err(loc.pat.span, msg.as_slice()); cx.tcx.sess.span_err(loc.pat.span, msg.as_slice());
}, },
@ -802,7 +803,7 @@ fn check_fn(cx: &mut MatchCheckCtxt,
Some(pat) => { Some(pat) => {
let msg = format!( let msg = format!(
"refutable pattern in function argument: `{}` not covered", "refutable pattern in function argument: `{}` not covered",
pat_to_str(&*pat) pat_to_string(&*pat)
); );
cx.tcx.sess.span_err(input.pat.span, msg.as_slice()); cx.tcx.sess.span_err(input.pat.span, msg.as_slice());
}, },

View file

@ -75,7 +75,7 @@ impl<'a> CheckStaticVisitor<'a> {
impl<'a> Visitor<bool> for CheckStaticVisitor<'a> { impl<'a> Visitor<bool> for CheckStaticVisitor<'a> {
fn visit_item(&mut self, i: &ast::Item, _is_const: bool) { fn visit_item(&mut self, i: &ast::Item, _is_const: bool) {
debug!("visit_item(item={})", pprust::item_to_str(i)); debug!("visit_item(item={})", pprust::item_to_string(i));
match i.node { match i.node {
ast::ItemStatic(_, mutability, ref expr) => { ast::ItemStatic(_, mutability, ref expr) => {
match mutability { match mutability {
@ -99,7 +99,7 @@ impl<'a> Visitor<bool> for CheckStaticVisitor<'a> {
/// of a static item, this method does nothing but walking /// of a static item, this method does nothing but walking
/// down through it. /// down through it.
fn visit_expr(&mut self, e: &ast::Expr, is_const: bool) { fn visit_expr(&mut self, e: &ast::Expr, is_const: bool) {
debug!("visit_expr(expr={})", pprust::expr_to_str(e)); debug!("visit_expr(expr={})", pprust::expr_to_string(e));
if !is_const { if !is_const {
return visit::walk_expr(self, e, is_const); return visit::walk_expr(self, e, is_const);

View file

@ -164,18 +164,18 @@ impl<'a, O:DataFlowOperator> pprust::PpAnn for DataFlowContext<'a, O> {
let cfgidx = to_cfgidx_or_die(id, &self.nodeid_to_index); let cfgidx = to_cfgidx_or_die(id, &self.nodeid_to_index);
let (start, end) = self.compute_id_range_frozen(cfgidx); let (start, end) = self.compute_id_range_frozen(cfgidx);
let on_entry = self.on_entry.slice(start, end); let on_entry = self.on_entry.slice(start, end);
let entry_str = bits_to_str(on_entry); let entry_str = bits_to_string(on_entry);
let gens = self.gens.slice(start, end); let gens = self.gens.slice(start, end);
let gens_str = if gens.iter().any(|&u| u != 0) { let gens_str = if gens.iter().any(|&u| u != 0) {
format!(" gen: {}", bits_to_str(gens)) format!(" gen: {}", bits_to_string(gens))
} else { } else {
"".to_string() "".to_string()
}; };
let kills = self.kills.slice(start, end); let kills = self.kills.slice(start, end);
let kills_str = if kills.iter().any(|&u| u != 0) { let kills_str = if kills.iter().any(|&u| u != 0) {
format!(" kill: {}", bits_to_str(kills)) format!(" kill: {}", bits_to_string(kills))
} else { } else {
"".to_string() "".to_string()
}; };
@ -289,7 +289,7 @@ impl<'a, O:DataFlowOperator> DataFlowContext<'a, O> {
fn apply_gen_kill(&mut self, cfgidx: CFGIndex, bits: &mut [uint]) { fn apply_gen_kill(&mut self, cfgidx: CFGIndex, bits: &mut [uint]) {
//! Applies the gen and kill sets for `id` to `bits` //! Applies the gen and kill sets for `id` to `bits`
debug!("{:s} apply_gen_kill(cfgidx={}, bits={}) [before]", debug!("{:s} apply_gen_kill(cfgidx={}, bits={}) [before]",
self.analysis_name, cfgidx, mut_bits_to_str(bits)); self.analysis_name, cfgidx, mut_bits_to_string(bits));
let (start, end) = self.compute_id_range(cfgidx); let (start, end) = self.compute_id_range(cfgidx);
let gens = self.gens.slice(start, end); let gens = self.gens.slice(start, end);
bitwise(bits, gens, &Union); bitwise(bits, gens, &Union);
@ -297,7 +297,7 @@ impl<'a, O:DataFlowOperator> DataFlowContext<'a, O> {
bitwise(bits, kills, &Subtract); bitwise(bits, kills, &Subtract);
debug!("{:s} apply_gen_kill(cfgidx={}, bits={}) [after]", debug!("{:s} apply_gen_kill(cfgidx={}, bits={}) [after]",
self.analysis_name, cfgidx, mut_bits_to_str(bits)); self.analysis_name, cfgidx, mut_bits_to_string(bits));
} }
fn compute_id_range_frozen(&self, cfgidx: CFGIndex) -> (uint, uint) { fn compute_id_range_frozen(&self, cfgidx: CFGIndex) -> (uint, uint) {
@ -334,7 +334,7 @@ impl<'a, O:DataFlowOperator> DataFlowContext<'a, O> {
let (start, end) = self.compute_id_range_frozen(cfgidx); let (start, end) = self.compute_id_range_frozen(cfgidx);
let on_entry = self.on_entry.slice(start, end); let on_entry = self.on_entry.slice(start, end);
debug!("{:s} each_bit_on_entry_frozen(id={:?}, on_entry={})", debug!("{:s} each_bit_on_entry_frozen(id={:?}, on_entry={})",
self.analysis_name, id, bits_to_str(on_entry)); self.analysis_name, id, bits_to_string(on_entry));
self.each_bit(on_entry, f) self.each_bit(on_entry, f)
} }
@ -348,7 +348,7 @@ impl<'a, O:DataFlowOperator> DataFlowContext<'a, O> {
let (start, end) = self.compute_id_range_frozen(cfgidx); let (start, end) = self.compute_id_range_frozen(cfgidx);
let gens = self.gens.slice(start, end); let gens = self.gens.slice(start, end);
debug!("{:s} each_gen_bit(id={:?}, gens={})", debug!("{:s} each_gen_bit(id={:?}, gens={})",
self.analysis_name, id, bits_to_str(gens)); self.analysis_name, id, bits_to_string(gens));
self.each_bit(gens, f) self.each_bit(gens, f)
} }
@ -426,10 +426,10 @@ impl<'a, O:DataFlowOperator> DataFlowContext<'a, O> {
if changed { if changed {
let bits = self.kills.mut_slice(start, end); let bits = self.kills.mut_slice(start, end);
debug!("{:s} add_kills_from_flow_exits flow_exit={} bits={} [before]", debug!("{:s} add_kills_from_flow_exits flow_exit={} bits={} [before]",
self.analysis_name, flow_exit, mut_bits_to_str(bits)); self.analysis_name, flow_exit, mut_bits_to_string(bits));
bits.copy_from(orig_kills.as_slice()); bits.copy_from(orig_kills.as_slice());
debug!("{:s} add_kills_from_flow_exits flow_exit={} bits={} [after]", debug!("{:s} add_kills_from_flow_exits flow_exit={} bits={} [after]",
self.analysis_name, flow_exit, mut_bits_to_str(bits)); self.analysis_name, flow_exit, mut_bits_to_string(bits));
} }
true true
}); });
@ -483,10 +483,10 @@ impl<'a, 'b, O:DataFlowOperator> PropagationContext<'a, 'b, O> {
cfg: &cfg::CFG, cfg: &cfg::CFG,
in_out: &mut [uint]) { in_out: &mut [uint]) {
debug!("DataFlowContext::walk_cfg(in_out={}) {:s}", debug!("DataFlowContext::walk_cfg(in_out={}) {:s}",
bits_to_str(in_out), self.dfcx.analysis_name); bits_to_string(in_out), self.dfcx.analysis_name);
cfg.graph.each_node(|node_index, node| { cfg.graph.each_node(|node_index, node| {
debug!("DataFlowContext::walk_cfg idx={} id={} begin in_out={}", debug!("DataFlowContext::walk_cfg idx={} id={} begin in_out={}",
node_index, node.data.id, bits_to_str(in_out)); node_index, node.data.id, bits_to_string(in_out));
let (start, end) = self.dfcx.compute_id_range(node_index); let (start, end) = self.dfcx.compute_id_range(node_index);
@ -526,7 +526,7 @@ impl<'a, 'b, O:DataFlowOperator> PropagationContext<'a, 'b, O> {
let source = edge.source(); let source = edge.source();
let cfgidx = edge.target(); let cfgidx = edge.target();
debug!("{:s} propagate_bits_into_entry_set_for(pred_bits={}, {} to {})", debug!("{:s} propagate_bits_into_entry_set_for(pred_bits={}, {} to {})",
self.dfcx.analysis_name, bits_to_str(pred_bits), source, cfgidx); self.dfcx.analysis_name, bits_to_string(pred_bits), source, cfgidx);
let (start, end) = self.dfcx.compute_id_range(cfgidx); let (start, end) = self.dfcx.compute_id_range(cfgidx);
let changed = { let changed = {
// (scoping mutable borrow of self.dfcx.on_entry) // (scoping mutable borrow of self.dfcx.on_entry)
@ -536,17 +536,17 @@ impl<'a, 'b, O:DataFlowOperator> PropagationContext<'a, 'b, O> {
if changed { if changed {
debug!("{:s} changed entry set for {:?} to {}", debug!("{:s} changed entry set for {:?} to {}",
self.dfcx.analysis_name, cfgidx, self.dfcx.analysis_name, cfgidx,
bits_to_str(self.dfcx.on_entry.slice(start, end))); bits_to_string(self.dfcx.on_entry.slice(start, end)));
self.changed = true; self.changed = true;
} }
} }
} }
fn mut_bits_to_str(words: &mut [uint]) -> String { fn mut_bits_to_string(words: &mut [uint]) -> String {
bits_to_str(words) bits_to_string(words)
} }
fn bits_to_str(words: &[uint]) -> String { fn bits_to_string(words: &[uint]) -> String {
let mut result = String::new(); let mut result = String::new();
let mut sep = '['; let mut sep = '[';
@ -582,7 +582,7 @@ fn bitwise<Op:BitwiseOperator>(out_vec: &mut [uint],
fn set_bit(words: &mut [uint], bit: uint) -> bool { fn set_bit(words: &mut [uint], bit: uint) -> bool {
debug!("set_bit: words={} bit={}", debug!("set_bit: words={} bit={}",
mut_bits_to_str(words), bit_str(bit)); mut_bits_to_string(words), bit_str(bit));
let word = bit / uint::BITS; let word = bit / uint::BITS;
let bit_in_word = bit % uint::BITS; let bit_in_word = bit % uint::BITS;
let bit_mask = 1 << bit_in_word; let bit_mask = 1 << bit_in_word;
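`bits_to_string` and `mut_bits_to_string` above are purely cosmetic helpers for the dataflow `debug!` output. A minimal sketch of the same shape (simplified: it prints each whole word in hex, whereas the real helper walks individual bytes):

    fn bits_to_string(words: &[usize]) -> String {
        let mut result = String::new();
        let mut sep = '[';
        for &word in words {
            result.push(sep);
            result.push_str(&format!("{:x}", word));
            sep = '|';
        }
        result.push(']');
        result
    }

    // Convenience wrapper mirroring mut_bits_to_string in the hunk above.
    fn mut_bits_to_string(words: &mut [usize]) -> String {
        bits_to_string(words)
    }

    fn main() {
        let mut words = [0b1010usize, 0];
        assert_eq!(bits_to_string(&words), "[a|0]");
        assert_eq!(mut_bits_to_string(&mut words), "[a|0]");
    }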

View file

@ -68,7 +68,7 @@ impl<'a> EffectCheckVisitor<'a> {
_ => return _ => return
}; };
debug!("effect: checking index with base type {}", debug!("effect: checking index with base type {}",
ppaux::ty_to_str(self.tcx, base_type)); ppaux::ty_to_string(self.tcx, base_type));
match ty::get(base_type).sty { match ty::get(base_type).sty {
ty::ty_uniq(ty) | ty::ty_rptr(_, ty::mt{ty, ..}) => match ty::get(ty).sty { ty::ty_uniq(ty) | ty::ty_rptr(_, ty::mt{ty, ..}) => match ty::get(ty).sty {
ty::ty_str => { ty::ty_str => {
@ -147,7 +147,7 @@ impl<'a> Visitor<()> for EffectCheckVisitor<'a> {
let method_call = MethodCall::expr(expr.id); let method_call = MethodCall::expr(expr.id);
let base_type = self.tcx.method_map.borrow().get(&method_call).ty; let base_type = self.tcx.method_map.borrow().get(&method_call).ty;
debug!("effect: method call case, base type is {}", debug!("effect: method call case, base type is {}",
ppaux::ty_to_str(self.tcx, base_type)); ppaux::ty_to_string(self.tcx, base_type));
if type_is_unsafe_function(base_type) { if type_is_unsafe_function(base_type) {
self.require_unsafe(expr.span, self.require_unsafe(expr.span,
"invocation of unsafe method") "invocation of unsafe method")
@ -156,7 +156,7 @@ impl<'a> Visitor<()> for EffectCheckVisitor<'a> {
ast::ExprCall(base, _) => { ast::ExprCall(base, _) => {
let base_type = ty::node_id_to_type(self.tcx, base.id); let base_type = ty::node_id_to_type(self.tcx, base.id);
debug!("effect: call case, base type is {}", debug!("effect: call case, base type is {}",
ppaux::ty_to_str(self.tcx, base_type)); ppaux::ty_to_string(self.tcx, base_type));
if type_is_unsafe_function(base_type) { if type_is_unsafe_function(base_type) {
self.require_unsafe(expr.span, "call to unsafe function") self.require_unsafe(expr.span, "call to unsafe function")
} }
@ -164,7 +164,7 @@ impl<'a> Visitor<()> for EffectCheckVisitor<'a> {
ast::ExprUnary(ast::UnDeref, base) => { ast::ExprUnary(ast::UnDeref, base) => {
let base_type = ty::node_id_to_type(self.tcx, base.id); let base_type = ty::node_id_to_type(self.tcx, base.id);
debug!("effect: unary case, base type is {}", debug!("effect: unary case, base type is {}",
ppaux::ty_to_str(self.tcx, base_type)); ppaux::ty_to_string(self.tcx, base_type));
match ty::get(base_type).sty { match ty::get(base_type).sty {
ty::ty_ptr(_) => { ty::ty_ptr(_) => {
self.require_unsafe(expr.span, self.require_unsafe(expr.span,

View file

@ -15,13 +15,13 @@ use middle::subst;
use middle::ty; use middle::ty;
use middle::typeck::{MethodCall, NoAdjustment}; use middle::typeck::{MethodCall, NoAdjustment};
use middle::typeck; use middle::typeck;
use util::ppaux::{Repr, ty_to_str}; use util::ppaux::{Repr, ty_to_string};
use util::ppaux::UserString; use util::ppaux::UserString;
use syntax::ast::*; use syntax::ast::*;
use syntax::attr; use syntax::attr;
use syntax::codemap::Span; use syntax::codemap::Span;
use syntax::print::pprust::{expr_to_str, ident_to_str}; use syntax::print::pprust::{expr_to_string, ident_to_string};
use syntax::{visit}; use syntax::{visit};
use syntax::visit::Visitor; use syntax::visit::Visitor;
@ -126,7 +126,7 @@ fn check_impl_of_trait(cx: &mut Context, it: &Item, trait_ref: &TraitRef, self_t
cx.tcx.sess.span_err(self_type.span, cx.tcx.sess.span_err(self_type.span,
format!("the type `{}', which does not fulfill `{}`, cannot implement this \ format!("the type `{}', which does not fulfill `{}`, cannot implement this \
trait", trait",
ty_to_str(cx.tcx, self_ty), ty_to_string(cx.tcx, self_ty),
missing.user_string(cx.tcx)).as_slice()); missing.user_string(cx.tcx)).as_slice());
cx.tcx.sess.span_note(self_type.span, cx.tcx.sess.span_note(self_type.span,
format!("types implementing this trait must fulfill `{}`", format!("types implementing this trait must fulfill `{}`",
@ -246,7 +246,7 @@ fn check_fn(
} }
pub fn check_expr(cx: &mut Context, e: &Expr) { pub fn check_expr(cx: &mut Context, e: &Expr) {
debug!("kind::check_expr({})", expr_to_str(e)); debug!("kind::check_expr({})", expr_to_string(e));
// Handle any kind bounds on type parameters // Handle any kind bounds on type parameters
check_bounds_on_type_parameters(cx, e); check_bounds_on_type_parameters(cx, e);
@ -492,7 +492,7 @@ pub fn check_typaram_bounds(cx: &Context,
sp, sp,
format!("instantiating a type parameter with an incompatible type \ format!("instantiating a type parameter with an incompatible type \
`{}`, which does not fulfill `{}`", `{}`, which does not fulfill `{}`",
ty_to_str(cx.tcx, ty), ty_to_string(cx.tcx, ty),
missing.user_string(cx.tcx)).as_slice()); missing.user_string(cx.tcx)).as_slice());
}); });
} }
@ -509,14 +509,14 @@ pub fn check_freevar_bounds(cx: &Context, sp: Span, ty: ty::t,
format!("cannot implicitly borrow variable of type `{}` in a \ format!("cannot implicitly borrow variable of type `{}` in a \
bounded stack closure (implicit reference does not \ bounded stack closure (implicit reference does not \
fulfill `{}`)", fulfill `{}`)",
ty_to_str(cx.tcx, rty), ty_to_string(cx.tcx, rty),
missing.user_string(cx.tcx)).as_slice()) missing.user_string(cx.tcx)).as_slice())
} }
None => { None => {
cx.tcx.sess.span_err(sp, cx.tcx.sess.span_err(sp,
format!("cannot capture variable of type `{}`, which does \ format!("cannot capture variable of type `{}`, which does \
not fulfill `{}`, in a bounded closure", not fulfill `{}`, in a bounded closure",
ty_to_str(cx.tcx, ty), ty_to_string(cx.tcx, ty),
missing.user_string(cx.tcx)).as_slice()) missing.user_string(cx.tcx)).as_slice())
} }
} }
@ -533,20 +533,20 @@ pub fn check_trait_cast_bounds(cx: &Context, sp: Span, ty: ty::t,
cx.tcx.sess.span_err(sp, cx.tcx.sess.span_err(sp,
format!("cannot pack type `{}`, which does not fulfill \ format!("cannot pack type `{}`, which does not fulfill \
`{}`, as a trait bounded by {}", `{}`, as a trait bounded by {}",
ty_to_str(cx.tcx, ty), missing.user_string(cx.tcx), ty_to_string(cx.tcx, ty), missing.user_string(cx.tcx),
bounds.user_string(cx.tcx)).as_slice()); bounds.user_string(cx.tcx)).as_slice());
}); });
} }
fn check_copy(cx: &Context, ty: ty::t, sp: Span, reason: &str) { fn check_copy(cx: &Context, ty: ty::t, sp: Span, reason: &str) {
debug!("type_contents({})={}", debug!("type_contents({})={}",
ty_to_str(cx.tcx, ty), ty_to_string(cx.tcx, ty),
ty::type_contents(cx.tcx, ty).to_str()); ty::type_contents(cx.tcx, ty).to_string());
if ty::type_moves_by_default(cx.tcx, ty) { if ty::type_moves_by_default(cx.tcx, ty) {
cx.tcx.sess.span_err( cx.tcx.sess.span_err(
sp, sp,
format!("copying a value of non-copyable type `{}`", format!("copying a value of non-copyable type `{}`",
ty_to_str(cx.tcx, ty)).as_slice()); ty_to_string(cx.tcx, ty)).as_slice());
cx.tcx.sess.span_note(sp, format!("{}", reason).as_slice()); cx.tcx.sess.span_note(sp, format!("{}", reason).as_slice());
} }
} }
@ -558,7 +558,7 @@ pub fn check_static(tcx: &ty::ctxt, ty: ty::t, sp: Span) -> bool {
tcx.sess.span_err(sp, tcx.sess.span_err(sp,
format!("value may contain references; \ format!("value may contain references; \
add `'static` bound to `{}`", add `'static` bound to `{}`",
ty_to_str(tcx, ty)).as_slice()); ty_to_string(tcx, ty)).as_slice());
} }
_ => { _ => {
tcx.sess.span_err(sp, "value may contain references"); tcx.sess.span_err(sp, "value may contain references");
@ -643,7 +643,7 @@ pub fn check_cast_for_escaping_regions(
// source_span, // source_span,
// format!("source contains reference with lifetime \ // format!("source contains reference with lifetime \
// not found in the target type `{}`", // not found in the target type `{}`",
// ty_to_str(cx.tcx, target_ty))); // ty_to_string(cx.tcx, target_ty)));
// note_and_explain_region( // note_and_explain_region(
// cx.tcx, "source data is only valid for ", r, ""); // cx.tcx, "source data is only valid for ", r, "");
// } // }
@ -683,7 +683,7 @@ fn check_sized(tcx: &ty::ctxt, ty: ty::t, name: String, sp: Span) {
format!("variable `{}` has dynamically sized type \ format!("variable `{}` has dynamically sized type \
`{}`", `{}`",
name, name,
ty_to_str(tcx, ty)).as_slice()); ty_to_string(tcx, ty)).as_slice());
} }
} }
@ -691,7 +691,7 @@ fn check_sized(tcx: &ty::ctxt, ty: ty::t, name: String, sp: Span) {
fn check_pat(cx: &mut Context, pat: &Pat) { fn check_pat(cx: &mut Context, pat: &Pat) {
let var_name = match pat.node { let var_name = match pat.node {
PatWild => Some("_".to_string()), PatWild => Some("_".to_string()),
PatIdent(_, ref path1, _) => Some(ident_to_str(&path1.node).to_string()), PatIdent(_, ref path1, _) => Some(ident_to_string(&path1.node).to_string()),
_ => None _ => None
}; };
@ -702,7 +702,7 @@ fn check_pat(cx: &mut Context, pat: &Pat) {
match ty { match ty {
Some(ty) => { Some(ty) => {
debug!("kind: checking sized-ness of variable {}: {}", debug!("kind: checking sized-ness of variable {}: {}",
name, ty_to_str(cx.tcx, *ty)); name, ty_to_string(cx.tcx, *ty));
check_sized(cx.tcx, *ty, name, pat.span); check_sized(cx.tcx, *ty, name, pat.span);
} }
None => {} // extern fn args None => {} // extern fn args
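The hunks above apply the rename mechanically: helpers such as `ty_to_str` become `ty_to_string`, and value conversions such as `ty::type_contents(cx.tcx, ty).to_str()` become `.to_string()`. A minimal sketch of the new spelling at an ordinary call site, using a plain `uint` rather than rustc's internal types:

fn main() {
    let x = 42u;
    // Before this patch the same conversion was written `x.to_str()`.
    let s = x.to_string();
    assert_eq!(s.as_slice(), "42");
}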


@ -121,7 +121,7 @@ use syntax::ast::*;
use syntax::codemap::{BytePos, original_sp, Span}; use syntax::codemap::{BytePos, original_sp, Span};
use syntax::parse::token::special_idents; use syntax::parse::token::special_idents;
use syntax::parse::token; use syntax::parse::token;
use syntax::print::pprust::{expr_to_str, block_to_str}; use syntax::print::pprust::{expr_to_string, block_to_string};
use syntax::{visit, ast_util}; use syntax::{visit, ast_util};
use syntax::visit::{Visitor, FnKind}; use syntax::visit::{Visitor, FnKind};
@ -152,17 +152,17 @@ enum LiveNodeKind {
ExitNode ExitNode
} }
fn live_node_kind_to_str(lnk: LiveNodeKind, cx: &ty::ctxt) -> String { fn live_node_kind_to_string(lnk: LiveNodeKind, cx: &ty::ctxt) -> String {
let cm = cx.sess.codemap(); let cm = cx.sess.codemap();
match lnk { match lnk {
FreeVarNode(s) => { FreeVarNode(s) => {
format!("Free var node [{}]", cm.span_to_str(s)) format!("Free var node [{}]", cm.span_to_string(s))
} }
ExprNode(s) => { ExprNode(s) => {
format!("Expr node [{}]", cm.span_to_str(s)) format!("Expr node [{}]", cm.span_to_string(s))
} }
VarDefNode(s) => { VarDefNode(s) => {
format!("Var def node [{}]", cm.span_to_str(s)) format!("Var def node [{}]", cm.span_to_string(s))
} }
ExitNode => "Exit node".to_string(), ExitNode => "Exit node".to_string(),
} }
@ -272,8 +272,8 @@ impl<'a> IrMaps<'a> {
self.lnks.push(lnk); self.lnks.push(lnk);
self.num_live_nodes += 1; self.num_live_nodes += 1;
debug!("{} is of kind {}", ln.to_str(), debug!("{} is of kind {}", ln.to_string(),
live_node_kind_to_str(lnk, self.tcx)); live_node_kind_to_string(lnk, self.tcx));
ln ln
} }
@ -282,7 +282,7 @@ impl<'a> IrMaps<'a> {
let ln = self.add_live_node(lnk); let ln = self.add_live_node(lnk);
self.live_node_map.insert(node_id, ln); self.live_node_map.insert(node_id, ln);
debug!("{} is node {}", ln.to_str(), node_id); debug!("{} is node {}", ln.to_string(), node_id);
} }
fn add_variable(&mut self, vk: VarKind) -> Variable { fn add_variable(&mut self, vk: VarKind) -> Variable {
@ -297,7 +297,7 @@ impl<'a> IrMaps<'a> {
ImplicitRet => {} ImplicitRet => {}
} }
debug!("{} is {:?}", v.to_str(), vk); debug!("{} is {:?}", v.to_string(), vk);
v v
} }
@ -317,7 +317,7 @@ impl<'a> IrMaps<'a> {
fn variable_name(&self, var: Variable) -> String { fn variable_name(&self, var: Variable) -> String {
match self.var_kinds.get(var.get()) { match self.var_kinds.get(var.get()) {
&Local(LocalInfo { ident: nm, .. }) | &Arg(_, nm) => { &Local(LocalInfo { ident: nm, .. }) | &Arg(_, nm) => {
token::get_ident(nm).get().to_str() token::get_ident(nm).get().to_string()
}, },
&ImplicitRet => "<implicit-ret>".to_string() &ImplicitRet => "<implicit-ret>".to_string()
} }
@ -675,7 +675,7 @@ impl<'a> Liveness<'a> {
for var_idx in range(0u, self.ir.num_vars) { for var_idx in range(0u, self.ir.num_vars) {
let idx = node_base_idx + var_idx; let idx = node_base_idx + var_idx;
if test(idx).is_valid() { if test(idx).is_valid() {
try!(write!(wr, " {}", Variable(var_idx).to_str())); try!(write!(wr, " {}", Variable(var_idx).to_string()));
} }
} }
Ok(()) Ok(())
@ -717,7 +717,7 @@ impl<'a> Liveness<'a> {
self.write_vars(wr, ln, |idx| self.users.get(idx).reader); self.write_vars(wr, ln, |idx| self.users.get(idx).reader);
write!(wr, " writes"); write!(wr, " writes");
self.write_vars(wr, ln, |idx| self.users.get(idx).writer); self.write_vars(wr, ln, |idx| self.users.get(idx).writer);
write!(wr, " precedes {}]", self.successors.get(ln.get()).to_str()); write!(wr, " precedes {}]", self.successors.get(ln.get()).to_string());
} }
str::from_utf8(wr.unwrap().as_slice()).unwrap().to_string() str::from_utf8(wr.unwrap().as_slice()).unwrap().to_string()
} }
@ -766,7 +766,7 @@ impl<'a> Liveness<'a> {
}); });
debug!("merge_from_succ(ln={}, succ={}, first_merge={}, changed={})", debug!("merge_from_succ(ln={}, succ={}, first_merge={}, changed={})",
ln.to_str(), self.ln_str(succ_ln), first_merge, changed); ln.to_string(), self.ln_str(succ_ln), first_merge, changed);
return changed; return changed;
fn copy_if_invalid(src: LiveNode, dst: &mut LiveNode) -> bool { fn copy_if_invalid(src: LiveNode, dst: &mut LiveNode) -> bool {
@ -787,14 +787,14 @@ impl<'a> Liveness<'a> {
self.users.get_mut(idx).reader = invalid_node(); self.users.get_mut(idx).reader = invalid_node();
self.users.get_mut(idx).writer = invalid_node(); self.users.get_mut(idx).writer = invalid_node();
debug!("{} defines {} (idx={}): {}", writer.to_str(), var.to_str(), debug!("{} defines {} (idx={}): {}", writer.to_string(), var.to_string(),
idx, self.ln_str(writer)); idx, self.ln_str(writer));
} }
// Either read, write, or both depending on the acc bitset // Either read, write, or both depending on the acc bitset
fn acc(&mut self, ln: LiveNode, var: Variable, acc: uint) { fn acc(&mut self, ln: LiveNode, var: Variable, acc: uint) {
debug!("{} accesses[{:x}] {}: {}", debug!("{} accesses[{:x}] {}: {}",
ln.to_str(), acc, var.to_str(), self.ln_str(ln)); ln.to_string(), acc, var.to_string(), self.ln_str(ln));
let idx = self.idx(ln, var); let idx = self.idx(ln, var);
let user = self.users.get_mut(idx); let user = self.users.get_mut(idx);
@ -822,7 +822,7 @@ impl<'a> Liveness<'a> {
// effectively a return---this only occurs in `for` loops, // effectively a return---this only occurs in `for` loops,
// where the body is really a closure. // where the body is really a closure.
debug!("compute: using id for block, {}", block_to_str(body)); debug!("compute: using id for block, {}", block_to_string(body));
let exit_ln = self.s.exit_ln; let exit_ln = self.s.exit_ln;
let entry_ln: LiveNode = let entry_ln: LiveNode =
@ -837,7 +837,7 @@ impl<'a> Liveness<'a> {
} }
body.id body.id
}, },
entry_ln.to_str()); entry_ln.to_string());
entry_ln entry_ln
} }
@ -928,7 +928,7 @@ impl<'a> Liveness<'a> {
fn propagate_through_expr(&mut self, expr: &Expr, succ: LiveNode) fn propagate_through_expr(&mut self, expr: &Expr, succ: LiveNode)
-> LiveNode { -> LiveNode {
debug!("propagate_through_expr: {}", expr_to_str(expr)); debug!("propagate_through_expr: {}", expr_to_string(expr));
match expr.node { match expr.node {
// Interesting cases with control flow or which gen/kill // Interesting cases with control flow or which gen/kill
@ -942,7 +942,7 @@ impl<'a> Liveness<'a> {
} }
ExprFnBlock(_, ref blk) | ExprProc(_, ref blk) => { ExprFnBlock(_, ref blk) | ExprProc(_, ref blk) => {
debug!("{} is an ExprFnBlock or ExprProc", expr_to_str(expr)); debug!("{} is an ExprFnBlock or ExprProc", expr_to_string(expr));
/* /*
The next-node for a break is the successor of the entire The next-node for a break is the successor of the entire
@ -1314,7 +1314,7 @@ impl<'a> Liveness<'a> {
first_merge = false; first_merge = false;
} }
debug!("propagate_through_loop: using id for loop body {} {}", debug!("propagate_through_loop: using id for loop body {} {}",
expr.id, block_to_str(body)); expr.id, block_to_string(body));
let cond_ln = self.propagate_through_opt_expr(cond, ln); let cond_ln = self.propagate_through_opt_expr(cond, ln);
let body_ln = self.with_loop_nodes(expr.id, succ, ln, |this| { let body_ln = self.with_loop_nodes(expr.id, succ, ln, |this| {


@ -66,7 +66,7 @@ use middle::def;
use middle::ty; use middle::ty;
use middle::typeck; use middle::typeck;
use util::nodemap::NodeMap; use util::nodemap::NodeMap;
use util::ppaux::{ty_to_str, Repr}; use util::ppaux::{ty_to_string, Repr};
use syntax::ast::{MutImmutable, MutMutable}; use syntax::ast::{MutImmutable, MutMutable};
use syntax::ast; use syntax::ast;
@ -217,7 +217,7 @@ pub fn deref_kind(tcx: &ty::ctxt, t: ty::t) -> deref_kind {
None => { None => {
tcx.sess.bug( tcx.sess.bug(
format!("deref_cat() invoked on non-derefable type {}", format!("deref_cat() invoked on non-derefable type {}",
ty_to_str(tcx, t)).as_slice()); ty_to_string(tcx, t)).as_slice());
} }
} }
} }
@ -980,7 +980,7 @@ impl<'t,TYPER:Typer> MemCategorizationContext<'t,TYPER> {
// get the type of the *subpattern* and use that. // get the type of the *subpattern* and use that.
debug!("cat_pattern: id={} pat={} cmt={}", debug!("cat_pattern: id={} pat={} cmt={}",
pat.id, pprust::pat_to_str(pat), pat.id, pprust::pat_to_string(pat),
cmt.repr(self.tcx())); cmt.repr(self.tcx()));
op(self, cmt.clone(), pat); op(self, cmt.clone(), pat);
@ -1105,7 +1105,7 @@ impl<'t,TYPER:Typer> MemCategorizationContext<'t,TYPER> {
Ok(()) Ok(())
} }
pub fn cmt_to_str(&self, cmt: &cmt_) -> String { pub fn cmt_to_string(&self, cmt: &cmt_) -> String {
match cmt.cat { match cmt.cat {
cat_static_item => { cat_static_item => {
"static item".to_string() "static item".to_string()
@ -1151,10 +1151,10 @@ impl<'t,TYPER:Typer> MemCategorizationContext<'t,TYPER> {
"captured outer variable".to_string() "captured outer variable".to_string()
} }
cat_discr(ref cmt, _) => { cat_discr(ref cmt, _) => {
self.cmt_to_str(&**cmt) self.cmt_to_string(&**cmt)
} }
cat_downcast(ref cmt) => { cat_downcast(ref cmt) => {
self.cmt_to_str(&**cmt) self.cmt_to_string(&**cmt)
} }
} }
} }
@ -1311,7 +1311,7 @@ impl Repr for InteriorKind {
fn repr(&self, _tcx: &ty::ctxt) -> String { fn repr(&self, _tcx: &ty::ctxt) -> String {
match *self { match *self {
InteriorField(NamedField(fld)) => { InteriorField(NamedField(fld)) => {
token::get_name(fld).get().to_str() token::get_name(fld).get().to_string()
} }
InteriorField(PositionalField(i)) => format!("#{:?}", i), InteriorField(PositionalField(i)) => format!("#{:?}", i),
InteriorElement(_) => "[]".to_string(), InteriorElement(_) => "[]".to_string(),


@ -375,7 +375,7 @@ enum FieldName {
impl<'a> PrivacyVisitor<'a> { impl<'a> PrivacyVisitor<'a> {
// used when debugging // used when debugging
fn nodestr(&self, id: ast::NodeId) -> String { fn nodestr(&self, id: ast::NodeId) -> String {
self.tcx.map.node_to_str(id).to_string() self.tcx.map.node_to_string(id).to_string()
} }
// Determines whether the given definition is public from the point of view // Determines whether the given definition is public from the point of view
@ -423,7 +423,7 @@ impl<'a> PrivacyVisitor<'a> {
} }
debug!("privacy - local {} not public all the way down", debug!("privacy - local {} not public all the way down",
self.tcx.map.node_to_str(did.node)); self.tcx.map.node_to_string(did.node));
// return quickly for things in the same module // return quickly for things in the same module
if self.parents.find(&did.node) == self.parents.find(&self.curitem) { if self.parents.find(&did.node) == self.parents.find(&self.curitem) {
debug!("privacy - same parent, we're done here"); debug!("privacy - same parent, we're done here");


@ -336,7 +336,7 @@ impl<'a> ReachableContext<'a> {
.bug(format!("found unexpected thingy in worklist: {}", .bug(format!("found unexpected thingy in worklist: {}",
self.tcx self.tcx
.map .map
.node_to_str(search_item)).as_slice()) .node_to_string(search_item)).as_slice())
} }
} }
} }


@ -821,7 +821,7 @@ fn resolve_fn(visitor: &mut RegionResolutionVisitor,
body.id={}, \ body.id={}, \
cx.parent={})", cx.parent={})",
id, id,
visitor.sess.codemap().span_to_str(sp), visitor.sess.codemap().span_to_string(sp),
body.id, body.id,
cx.parent); cx.parent);


@ -790,7 +790,7 @@ impl PrimitiveTypeTable {
} }
fn namespace_error_to_str(ns: NamespaceError) -> &'static str { fn namespace_error_to_string(ns: NamespaceError) -> &'static str {
match ns { match ns {
NoError => "", NoError => "",
ModuleError | TypeError => "type or module", ModuleError | TypeError => "type or module",
@ -1071,14 +1071,14 @@ impl<'a> Resolver<'a> {
let ns = ns.unwrap(); let ns = ns.unwrap();
self.resolve_error(sp, self.resolve_error(sp,
format!("duplicate definition of {} `{}`", format!("duplicate definition of {} `{}`",
namespace_error_to_str(duplicate_type), namespace_error_to_string(duplicate_type),
token::get_ident(name)).as_slice()); token::get_ident(name)).as_slice());
{ {
let r = child.span_for_namespace(ns); let r = child.span_for_namespace(ns);
for sp in r.iter() { for sp in r.iter() {
self.session.span_note(*sp, self.session.span_note(*sp,
format!("first definition of {} `{}` here", format!("first definition of {} `{}` here",
namespace_error_to_str(duplicate_type), namespace_error_to_string(duplicate_type),
token::get_ident(name)).as_slice()); token::get_ident(name)).as_slice());
} }
} }
@ -1508,7 +1508,7 @@ impl<'a> Resolver<'a> {
false, false,
true)); true));
debug!("(build reduced graph for item) found extern `{}`", debug!("(build reduced graph for item) found extern `{}`",
self.module_to_str(&*external_module)); self.module_to_string(&*external_module));
parent.module().external_module_children.borrow_mut() parent.module().external_module_children.borrow_mut()
.insert(name.name, external_module.clone()); .insert(name.name, external_module.clone());
self.build_reduced_graph_for_external_crate(external_module); self.build_reduced_graph_for_external_crate(external_module);
@ -1862,7 +1862,7 @@ impl<'a> Resolver<'a> {
/// Builds the reduced graph rooted at the given external module. /// Builds the reduced graph rooted at the given external module.
fn populate_external_module(&mut self, module: Rc<Module>) { fn populate_external_module(&mut self, module: Rc<Module>) {
debug!("(populating external module) attempting to populate {}", debug!("(populating external module) attempting to populate {}",
self.module_to_str(&*module)); self.module_to_string(&*module));
let def_id = match module.def_id.get() { let def_id = match module.def_id.get() {
None => { None => {
@ -1930,7 +1930,7 @@ impl<'a> Resolver<'a> {
SingleImport(target, _) => { SingleImport(target, _) => {
debug!("(building import directive) building import \ debug!("(building import directive) building import \
directive: {}::{}", directive: {}::{}",
self.idents_to_str(module_.imports.borrow().last().unwrap() self.idents_to_string(module_.imports.borrow().last().unwrap()
.module_path.as_slice()), .module_path.as_slice()),
token::get_ident(target)); token::get_ident(target));
@ -2003,7 +2003,7 @@ impl<'a> Resolver<'a> {
/// submodules. /// submodules.
fn resolve_imports_for_module_subtree(&mut self, module_: Rc<Module>) { fn resolve_imports_for_module_subtree(&mut self, module_: Rc<Module>) {
debug!("(resolving imports for module subtree) resolving {}", debug!("(resolving imports for module subtree) resolving {}",
self.module_to_str(&*module_)); self.module_to_string(&*module_));
let orig_module = replace(&mut self.current_module, module_.clone()); let orig_module = replace(&mut self.current_module, module_.clone());
self.resolve_imports_for_module(module_.clone()); self.resolve_imports_for_module(module_.clone());
self.current_module = orig_module; self.current_module = orig_module;
@ -2030,7 +2030,7 @@ impl<'a> Resolver<'a> {
if module.all_imports_resolved() { if module.all_imports_resolved() {
debug!("(resolving imports for module) all imports resolved for \ debug!("(resolving imports for module) all imports resolved for \
{}", {}",
self.module_to_str(&*module)); self.module_to_string(&*module));
return; return;
} }
@ -2047,7 +2047,7 @@ impl<'a> Resolver<'a> {
None => (import_directive.span, String::new()) None => (import_directive.span, String::new())
}; };
let msg = format!("unresolved import `{}`{}", let msg = format!("unresolved import `{}`{}",
self.import_path_to_str( self.import_path_to_string(
import_directive.module_path import_directive.module_path
.as_slice(), .as_slice(),
import_directive.subclass), import_directive.subclass),
@ -2063,7 +2063,7 @@ impl<'a> Resolver<'a> {
} }
} }
fn idents_to_str(&self, idents: &[Ident]) -> String { fn idents_to_string(&self, idents: &[Ident]) -> String {
let mut first = true; let mut first = true;
let mut result = String::new(); let mut result = String::new();
for ident in idents.iter() { for ident in idents.iter() {
@ -2077,15 +2077,15 @@ impl<'a> Resolver<'a> {
result result
} }
fn path_idents_to_str(&self, path: &Path) -> String { fn path_idents_to_string(&self, path: &Path) -> String {
let identifiers: Vec<ast::Ident> = path.segments let identifiers: Vec<ast::Ident> = path.segments
.iter() .iter()
.map(|seg| seg.identifier) .map(|seg| seg.identifier)
.collect(); .collect();
self.idents_to_str(identifiers.as_slice()) self.idents_to_string(identifiers.as_slice())
} }
fn import_directive_subclass_to_str(&mut self, fn import_directive_subclass_to_string(&mut self,
subclass: ImportDirectiveSubclass) subclass: ImportDirectiveSubclass)
-> String { -> String {
match subclass { match subclass {
@ -2096,16 +2096,16 @@ impl<'a> Resolver<'a> {
} }
} }
fn import_path_to_str(&mut self, fn import_path_to_string(&mut self,
idents: &[Ident], idents: &[Ident],
subclass: ImportDirectiveSubclass) subclass: ImportDirectiveSubclass)
-> String { -> String {
if idents.is_empty() { if idents.is_empty() {
self.import_directive_subclass_to_str(subclass) self.import_directive_subclass_to_string(subclass)
} else { } else {
(format!("{}::{}", (format!("{}::{}",
self.idents_to_str(idents), self.idents_to_string(idents),
self.import_directive_subclass_to_str( self.import_directive_subclass_to_string(
subclass))).to_string() subclass))).to_string()
} }
} }
@ -2124,8 +2124,8 @@ impl<'a> Resolver<'a> {
debug!("(resolving import for module) resolving import `{}::...` in \ debug!("(resolving import for module) resolving import `{}::...` in \
`{}`", `{}`",
self.idents_to_str(module_path.as_slice()), self.idents_to_string(module_path.as_slice()),
self.module_to_str(&*module_)); self.module_to_string(&*module_));
// First, resolve the module path for the directive, if necessary. // First, resolve the module path for the directive, if necessary.
let container = if module_path.len() == 0 { let container = if module_path.len() == 0 {
@ -2231,9 +2231,9 @@ impl<'a> Resolver<'a> {
debug!("(resolving single import) resolving `{}` = `{}::{}` from \ debug!("(resolving single import) resolving `{}` = `{}::{}` from \
`{}` id {}, last private {:?}", `{}` id {}, last private {:?}",
token::get_ident(target), token::get_ident(target),
self.module_to_str(&*containing_module), self.module_to_string(&*containing_module),
token::get_ident(source), token::get_ident(source),
self.module_to_str(module_), self.module_to_string(module_),
directive.id, directive.id,
lp); lp);
@ -2420,7 +2420,7 @@ impl<'a> Resolver<'a> {
if value_result.is_unbound() && type_result.is_unbound() { if value_result.is_unbound() && type_result.is_unbound() {
let msg = format!("There is no `{}` in `{}`", let msg = format!("There is no `{}` in `{}`",
token::get_ident(source), token::get_ident(source),
self.module_to_str(&*containing_module)); self.module_to_string(&*containing_module));
return Failed(Some((directive.span, msg))); return Failed(Some((directive.span, msg)));
} }
let value_used_public = value_used_reexport || value_used_public; let value_used_public = value_used_reexport || value_used_public;
@ -2494,7 +2494,7 @@ impl<'a> Resolver<'a> {
debug!("(resolving glob import) writing module resolution \ debug!("(resolving glob import) writing module resolution \
{:?} into `{}`", {:?} into `{}`",
target_import_resolution.type_target.is_none(), target_import_resolution.type_target.is_none(),
self.module_to_str(module_)); self.module_to_string(module_));
if !target_import_resolution.is_public { if !target_import_resolution.is_public {
debug!("(resolving glob import) nevermind, just kidding"); debug!("(resolving glob import) nevermind, just kidding");
@ -2590,9 +2590,9 @@ impl<'a> Resolver<'a> {
debug!("(resolving glob import) writing resolution `{}` in `{}` \ debug!("(resolving glob import) writing resolution `{}` in `{}` \
to `{}`", to `{}`",
token::get_name(name).get().to_str(), token::get_name(name).get().to_string(),
self.module_to_str(&*containing_module), self.module_to_string(&*containing_module),
self.module_to_str(module_)); self.module_to_string(module_));
// Merge the child item into the import resolution. // Merge the child item into the import resolution.
if name_bindings.defined_in_public_namespace(ValueNS) { if name_bindings.defined_in_public_namespace(ValueNS) {
@ -2652,7 +2652,7 @@ impl<'a> Resolver<'a> {
false) { false) {
Failed(None) => { Failed(None) => {
let segment_name = token::get_ident(name); let segment_name = token::get_ident(name);
let module_name = self.module_to_str(&*search_module); let module_name = self.module_to_string(&*search_module);
let mut span = span; let mut span = span;
let msg = if "???" == module_name.as_slice() { let msg = if "???" == module_name.as_slice() {
span.hi = span.lo + Pos::from_uint(segment_name.get().len()); span.hi = span.lo + Pos::from_uint(segment_name.get().len());
@ -2660,10 +2660,10 @@ impl<'a> Resolver<'a> {
match search_parent_externals(name.name, match search_parent_externals(name.name,
&self.current_module) { &self.current_module) {
Some(module) => { Some(module) => {
let path_str = self.idents_to_str(module_path); let path_str = self.idents_to_string(module_path);
let target_mod_str = self.module_to_str(&*module); let target_mod_str = self.module_to_string(&*module);
let current_mod_str = let current_mod_str =
self.module_to_str(&*self.current_module); self.module_to_string(&*self.current_module);
let prefix = if target_mod_str == current_mod_str { let prefix = if target_mod_str == current_mod_str {
"self::".to_string() "self::".to_string()
@ -2771,8 +2771,8 @@ impl<'a> Resolver<'a> {
debug!("(resolving module path for import) processing `{}` rooted at \ debug!("(resolving module path for import) processing `{}` rooted at \
`{}`", `{}`",
self.idents_to_str(module_path), self.idents_to_string(module_path),
self.module_to_str(&*module_)); self.module_to_string(&*module_));
// Resolve the module prefix, if any. // Resolve the module prefix, if any.
let module_prefix_result = self.resolve_module_prefix(module_.clone(), let module_prefix_result = self.resolve_module_prefix(module_.clone(),
@ -2783,7 +2783,7 @@ impl<'a> Resolver<'a> {
let last_private; let last_private;
match module_prefix_result { match module_prefix_result {
Failed(None) => { Failed(None) => {
let mpath = self.idents_to_str(module_path); let mpath = self.idents_to_string(module_path);
let mpath = mpath.as_slice(); let mpath = mpath.as_slice();
match mpath.rfind(':') { match mpath.rfind(':') {
Some(idx) => { Some(idx) => {
@ -2865,7 +2865,7 @@ impl<'a> Resolver<'a> {
namespace {:?} in `{}`", namespace {:?} in `{}`",
token::get_ident(name), token::get_ident(name),
namespace, namespace,
self.module_to_str(&*module_)); self.module_to_string(&*module_));
// The current module node is handled specially. First, check for // The current module node is handled specially. First, check for
// its immediate children. // its immediate children.
@ -3098,7 +3098,7 @@ impl<'a> Resolver<'a> {
break break
} }
debug!("(resolving module prefix) resolving `super` at {}", debug!("(resolving module prefix) resolving `super` at {}",
self.module_to_str(&*containing_module)); self.module_to_string(&*containing_module));
match self.get_nearest_normal_module_parent(containing_module) { match self.get_nearest_normal_module_parent(containing_module) {
None => return Failed(None), None => return Failed(None),
Some(new_module) => { Some(new_module) => {
@ -3109,7 +3109,7 @@ impl<'a> Resolver<'a> {
} }
debug!("(resolving module prefix) finished resolving prefix at {}", debug!("(resolving module prefix) finished resolving prefix at {}",
self.module_to_str(&*containing_module)); self.module_to_string(&*containing_module));
return Success(PrefixFound(containing_module, i)); return Success(PrefixFound(containing_module, i));
} }
@ -3129,7 +3129,7 @@ impl<'a> Resolver<'a> {
-> ResolveResult<(Target, bool)> { -> ResolveResult<(Target, bool)> {
debug!("(resolving name in module) resolving `{}` in `{}`", debug!("(resolving name in module) resolving `{}` in `{}`",
token::get_name(name).get(), token::get_name(name).get(),
self.module_to_str(&*module_)); self.module_to_string(&*module_));
// First, check the direct children of the module. // First, check the direct children of the module.
self.populate_module_if_necessary(&module_); self.populate_module_if_necessary(&module_);
@ -3262,19 +3262,19 @@ impl<'a> Resolver<'a> {
// OK. Continue. // OK. Continue.
debug!("(recording exports for module subtree) recording \ debug!("(recording exports for module subtree) recording \
exports for local module `{}`", exports for local module `{}`",
self.module_to_str(&*module_)); self.module_to_string(&*module_));
} }
None => { None => {
// Record exports for the root module. // Record exports for the root module.
debug!("(recording exports for module subtree) recording \ debug!("(recording exports for module subtree) recording \
exports for root module `{}`", exports for root module `{}`",
self.module_to_str(&*module_)); self.module_to_string(&*module_));
} }
Some(_) => { Some(_) => {
// Bail out. // Bail out.
debug!("(recording exports for module subtree) not recording \ debug!("(recording exports for module subtree) not recording \
exports for `{}`", exports for `{}`",
self.module_to_str(&*module_)); self.module_to_string(&*module_));
return; return;
} }
} }
@ -3390,7 +3390,7 @@ impl<'a> Resolver<'a> {
None => { None => {
debug!("!!! (with scope) didn't find `{}` in `{}`", debug!("!!! (with scope) didn't find `{}` in `{}`",
token::get_ident(name), token::get_ident(name),
self.module_to_str(&*orig_module)); self.module_to_string(&*orig_module));
} }
Some(name_bindings) => { Some(name_bindings) => {
match (*name_bindings).get_module_if_available() { match (*name_bindings).get_module_if_available() {
@ -3398,7 +3398,7 @@ impl<'a> Resolver<'a> {
debug!("!!! (with scope) didn't find module \ debug!("!!! (with scope) didn't find module \
for `{}` in `{}`", for `{}` in `{}`",
token::get_ident(name), token::get_ident(name),
self.module_to_str(&*orig_module)); self.module_to_string(&*orig_module));
} }
Some(module_) => { Some(module_) => {
self.current_module = module_; self.current_module = module_;
@ -3903,7 +3903,7 @@ impl<'a> Resolver<'a> {
reference_type: TraitReferenceType) { reference_type: TraitReferenceType) {
match self.resolve_path(id, &trait_reference.path, TypeNS, true) { match self.resolve_path(id, &trait_reference.path, TypeNS, true) {
None => { None => {
let path_str = self.path_idents_to_str(&trait_reference.path); let path_str = self.path_idents_to_string(&trait_reference.path);
let usage_str = match reference_type { let usage_str = match reference_type {
TraitBoundingTypeParameter => "bound type parameter with", TraitBoundingTypeParameter => "bound type parameter with",
TraitImplementation => "implement", TraitImplementation => "implement",
@ -3922,7 +3922,7 @@ impl<'a> Resolver<'a> {
(def, _) => { (def, _) => {
self.resolve_error(trait_reference.path.span, self.resolve_error(trait_reference.path.span,
format!("`{}` is not a trait", format!("`{}` is not a trait",
self.path_idents_to_str( self.path_idents_to_string(
&trait_reference.path))); &trait_reference.path)));
// If it's a typedef, give a note // If it's a typedef, give a note
@ -3970,7 +3970,7 @@ impl<'a> Resolver<'a> {
.identifier), .identifier),
def); def);
debug!("(resolving struct) writing resolution for `{}` (id {})", debug!("(resolving struct) writing resolution for `{}` (id {})",
this.path_idents_to_str(path), this.path_idents_to_string(path),
path_id); path_id);
this.record_def(path_id, (def, lp)); this.record_def(path_id, (def, lp));
} }
@ -4082,7 +4082,7 @@ impl<'a> Resolver<'a> {
let method_name = method.ident.name; let method_name = method.ident.name;
if self.method_map.borrow().find(&(method_name, did)).is_none() { if self.method_map.borrow().find(&(method_name, did)).is_none() {
let path_str = self.path_idents_to_str(&trait_ref.path); let path_str = self.path_idents_to_string(&trait_ref.path);
self.resolve_error(method.span, self.resolve_error(method.span,
format!("method `{}` is not a member of trait `{}`", format!("method `{}` is not a member of trait `{}`",
token::get_name(method_name), token::get_name(method_name),
@ -4292,13 +4292,13 @@ impl<'a> Resolver<'a> {
// Write the result into the def map. // Write the result into the def map.
debug!("(resolving type) writing resolution for `{}` \ debug!("(resolving type) writing resolution for `{}` \
(id {})", (id {})",
self.path_idents_to_str(path), self.path_idents_to_string(path),
path_id); path_id);
self.record_def(path_id, def); self.record_def(path_id, def);
} }
None => { None => {
let msg = format!("use of undeclared type name `{}`", let msg = format!("use of undeclared type name `{}`",
self.path_idents_to_str(path)); self.path_idents_to_string(path));
self.resolve_error(ty.span, msg.as_slice()); self.resolve_error(ty.span, msg.as_slice());
} }
} }
@ -4499,7 +4499,7 @@ impl<'a> Resolver<'a> {
debug!("(resolving pattern) didn't find struct \ debug!("(resolving pattern) didn't find struct \
def: {:?}", result); def: {:?}", result);
let msg = format!("`{}` does not name a structure", let msg = format!("`{}` does not name a structure",
self.path_idents_to_str(path)); self.path_idents_to_string(path));
self.resolve_error(path.span, msg.as_slice()); self.resolve_error(path.span, msg.as_slice());
} }
} }
@ -4729,7 +4729,7 @@ impl<'a> Resolver<'a> {
Some((span, msg)) => (span, msg), Some((span, msg)) => (span, msg),
None => { None => {
let msg = format!("Use of undeclared module `{}`", let msg = format!("Use of undeclared module `{}`",
self.idents_to_str( self.idents_to_string(
module_path_idents.as_slice())); module_path_idents.as_slice()));
(path.span, msg) (path.span, msg)
} }
@ -4805,7 +4805,7 @@ impl<'a> Resolver<'a> {
Some((span, msg)) => (span, msg), Some((span, msg)) => (span, msg),
None => { None => {
let msg = format!("Use of undeclared module `::{}`", let msg = format!("Use of undeclared module `::{}`",
self.idents_to_str( self.idents_to_string(
module_path_idents.as_slice())); module_path_idents.as_slice()));
(path.span, msg) (path.span, msg)
} }
@ -5010,7 +5010,7 @@ impl<'a> Resolver<'a> {
match get_module(self, path.span, ident_path.as_slice()) { match get_module(self, path.span, ident_path.as_slice()) {
Some(module) => match module.children.borrow().find(&name) { Some(module) => match module.children.borrow().find(&name) {
Some(binding) => { Some(binding) => {
let p_str = self.path_idents_to_str(&path); let p_str = self.path_idents_to_string(&path);
match binding.def_for_namespace(ValueNS) { match binding.def_for_namespace(ValueNS) {
Some(DefStaticMethod(_, provenance, _)) => { Some(DefStaticMethod(_, provenance, _)) => {
match provenance { match provenance {
@ -5032,7 +5032,7 @@ impl<'a> Resolver<'a> {
let method_map = self.method_map.borrow(); let method_map = self.method_map.borrow();
match self.current_trait_ref { match self.current_trait_ref {
Some((did, ref trait_ref)) => { Some((did, ref trait_ref)) => {
let path_str = self.path_idents_to_str(&trait_ref.path); let path_str = self.path_idents_to_string(&trait_ref.path);
match method_map.find(&(name, did)) { match method_map.find(&(name, did)) {
Some(&SelfStatic) => return StaticTraitMethod(path_str), Some(&SelfStatic) => return StaticTraitMethod(path_str),
@ -5105,7 +5105,7 @@ impl<'a> Resolver<'a> {
Some(def) => { Some(def) => {
// Write the result into the def map. // Write the result into the def map.
debug!("(resolving expr) resolved `{}`", debug!("(resolving expr) resolved `{}`",
self.path_idents_to_str(path)); self.path_idents_to_string(path));
// First-class methods are not supported yet; error // First-class methods are not supported yet; error
// out here. // out here.
@ -5125,7 +5125,7 @@ impl<'a> Resolver<'a> {
self.record_def(expr.id, def); self.record_def(expr.id, def);
} }
None => { None => {
let wrong_name = self.path_idents_to_str(path); let wrong_name = self.path_idents_to_string(path);
// Be helpful if the name refers to a struct // Be helpful if the name refers to a struct
// (The pattern matching def_tys where the id is in self.structs // (The pattern matching def_tys where the id is in self.structs
// matches on regular structs while excluding tuple- and enum-like // matches on regular structs while excluding tuple- and enum-like
@ -5221,7 +5221,7 @@ impl<'a> Resolver<'a> {
debug!("(resolving expression) didn't find struct \ debug!("(resolving expression) didn't find struct \
def: {:?}", result); def: {:?}", result);
let msg = format!("`{}` does not name a structure", let msg = format!("`{}` does not name a structure",
self.path_idents_to_str(path)); self.path_idents_to_string(path));
self.resolve_error(path.span, msg.as_slice()); self.resolve_error(path.span, msg.as_slice());
} }
} }
@ -5521,7 +5521,7 @@ impl<'a> Resolver<'a> {
// //
/// A somewhat inefficient routine to obtain the name of a module. /// A somewhat inefficient routine to obtain the name of a module.
fn module_to_str(&self, module: &Module) -> String { fn module_to_string(&self, module: &Module) -> String {
let mut idents = Vec::new(); let mut idents = Vec::new();
fn collect_mod(idents: &mut Vec<ast::Ident>, module: &Module) { fn collect_mod(idents: &mut Vec<ast::Ident>, module: &Module) {
@ -5542,14 +5542,14 @@ impl<'a> Resolver<'a> {
if idents.len() == 0 { if idents.len() == 0 {
return "???".to_string(); return "???".to_string();
} }
self.idents_to_str(idents.move_iter().rev() self.idents_to_string(idents.move_iter().rev()
.collect::<Vec<ast::Ident>>() .collect::<Vec<ast::Ident>>()
.as_slice()) .as_slice())
} }
#[allow(dead_code)] // useful for debugging #[allow(dead_code)] // useful for debugging
fn dump_module(&mut self, module_: Rc<Module>) { fn dump_module(&mut self, module_: Rc<Module>) {
debug!("Dump of module `{}`:", self.module_to_str(&*module_)); debug!("Dump of module `{}`:", self.module_to_string(&*module_));
debug!("Children:"); debug!("Children:");
self.populate_module_if_necessary(&module_); self.populate_module_if_necessary(&module_);
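The resolver hunks above rename a family of private helpers, among them `idents_to_str`, `path_idents_to_str`, `module_to_str` and `import_path_to_str`, and update every caller in the same commit; behaviour is unchanged. A compressed sketch of that pattern, with a made-up `Resolver` far simpler than rustc's:

// Invented stand-in for illustration only.
struct Resolver {
    idents: Vec<String>,
}

impl Resolver {
    // Formerly `idents_to_str`.
    fn idents_to_string(&self) -> String {
        self.idents.connect("::")
    }

    // Formerly `module_to_str`; its callers switch in the same patch.
    fn module_to_string(&self) -> String {
        format!("module `{}`", self.idents_to_string())
    }
}

fn main() {
    let r = Resolver { idents: vec!["std".to_string(), "io".to_string()] };
    assert_eq!(r.module_to_string().as_slice(), "module `std::io`");
}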


@ -24,7 +24,7 @@ use syntax::codemap::Span;
use syntax::owned_slice::OwnedSlice; use syntax::owned_slice::OwnedSlice;
use syntax::parse::token::special_idents; use syntax::parse::token::special_idents;
use syntax::parse::token; use syntax::parse::token;
use syntax::print::pprust::{lifetime_to_str}; use syntax::print::pprust::{lifetime_to_string};
use syntax::visit; use syntax::visit;
use syntax::visit::Visitor; use syntax::visit::Visitor;
use util::nodemap::NodeMap; use util::nodemap::NodeMap;
@ -372,7 +372,7 @@ impl<'a> LifetimeContext<'a> {
} }
debug!("lifetime_ref={} id={} resolved to {:?}", debug!("lifetime_ref={} id={} resolved to {:?}",
lifetime_to_str(lifetime_ref), lifetime_to_string(lifetime_ref),
lifetime_ref.id, lifetime_ref.id,
def); def);
self.named_region_map.insert(lifetime_ref.id, def); self.named_region_map.insert(lifetime_ref.id, def);


@ -51,7 +51,7 @@ use syntax::parse::token;
use syntax::parse::token::{get_ident,keywords}; use syntax::parse::token::{get_ident,keywords};
use syntax::visit; use syntax::visit;
use syntax::visit::Visitor; use syntax::visit::Visitor;
use syntax::print::pprust::{path_to_str,ty_to_str}; use syntax::print::pprust::{path_to_string,ty_to_string};
use middle::save::span_utils::SpanUtils; use middle::save::span_utils::SpanUtils;
use middle::save::recorder::Recorder; use middle::save::recorder::Recorder;
@ -108,7 +108,7 @@ impl <'l> DxrVisitor<'l> {
if spans.len() < path.segments.len() { if spans.len() < path.segments.len() {
error!("Mis-calculated spans for path '{}'. \ error!("Mis-calculated spans for path '{}'. \
Found {} spans, expected {}. Found spans:", Found {} spans, expected {}. Found spans:",
path_to_str(path), spans.len(), path.segments.len()); path_to_string(path), spans.len(), path.segments.len());
for s in spans.iter() { for s in spans.iter() {
let loc = self.sess.codemap().lookup_char_pos(s.lo); let loc = self.sess.codemap().lookup_char_pos(s.lo);
error!(" '{}' in {}, line {}", error!(" '{}' in {}, line {}",
@ -126,7 +126,7 @@ impl <'l> DxrVisitor<'l> {
let sub_path = ast::Path{span: *span, // span for the last segment let sub_path = ast::Path{span: *span, // span for the last segment
global: path.global, global: path.global,
segments: segs}; segments: segs};
let qualname = path_to_str(&sub_path); let qualname = path_to_string(&sub_path);
result.push((*span, qualname)); result.push((*span, qualname));
segs = sub_path.segments; segs = sub_path.segments;
} }
@ -249,7 +249,7 @@ impl <'l> DxrVisitor<'l> {
self.collecting = false; self.collecting = false;
let span_utils = self.span; let span_utils = self.span;
for &(id, ref p, _, _) in self.collected_paths.iter() { for &(id, ref p, _, _) in self.collected_paths.iter() {
let typ = ppaux::ty_to_str(&self.analysis.ty_cx, let typ = ppaux::ty_to_string(&self.analysis.ty_cx,
*self.analysis.ty_cx.node_types.borrow().get(&(id as uint))); *self.analysis.ty_cx.node_types.borrow().get(&(id as uint)));
// get the span only for the name of the variable (I hope the path is only ever a // get the span only for the name of the variable (I hope the path is only ever a
// variable name, but who knows?) // variable name, but who knows?)
@ -257,7 +257,7 @@ impl <'l> DxrVisitor<'l> {
span_utils.span_for_last_ident(p.span), span_utils.span_for_last_ident(p.span),
id, id,
qualname, qualname,
path_to_str(p).as_slice(), path_to_string(p).as_slice(),
typ.as_slice()); typ.as_slice());
} }
self.collected_paths.clear(); self.collected_paths.clear();
@ -280,7 +280,7 @@ impl <'l> DxrVisitor<'l> {
match item.node { match item.node {
ast::ItemImpl(_, _, ty, _) => { ast::ItemImpl(_, _, ty, _) => {
let mut result = String::from_str("<"); let mut result = String::from_str("<");
result.push_str(ty_to_str(&*ty).as_slice()); result.push_str(ty_to_string(&*ty).as_slice());
match ty::trait_of_method(&self.analysis.ty_cx, match ty::trait_of_method(&self.analysis.ty_cx,
ast_util::local_def(method.id)) { ast_util::local_def(method.id)) {
@ -400,7 +400,7 @@ impl <'l> DxrVisitor<'l> {
ast::NamedField(ident, _) => { ast::NamedField(ident, _) => {
let name = get_ident(ident); let name = get_ident(ident);
let qualname = format!("{}::{}", qualname, name); let qualname = format!("{}::{}", qualname, name);
let typ = ppaux::ty_to_str(&self.analysis.ty_cx, let typ = ppaux::ty_to_string(&self.analysis.ty_cx,
*self.analysis.ty_cx.node_types.borrow().get(&(field.node.id as uint))); *self.analysis.ty_cx.node_types.borrow().get(&(field.node.id as uint)));
match self.span.sub_span_before_token(field.span, token::COLON) { match self.span.sub_span_before_token(field.span, token::COLON) {
Some(sub_span) => self.fmt.field_str(field.span, Some(sub_span) => self.fmt.field_str(field.span,
@ -452,7 +452,7 @@ impl <'l> DxrVisitor<'l> {
decl: ast::P<ast::FnDecl>, decl: ast::P<ast::FnDecl>,
ty_params: &ast::Generics, ty_params: &ast::Generics,
body: ast::P<ast::Block>) { body: ast::P<ast::Block>) {
let qualname = self.analysis.ty_cx.map.path_to_str(item.id); let qualname = self.analysis.ty_cx.map.path_to_string(item.id);
let sub_span = self.span.sub_span_after_keyword(item.span, keywords::Fn); let sub_span = self.span.sub_span_after_keyword(item.span, keywords::Fn);
self.fmt.fn_str(item.span, self.fmt.fn_str(item.span,
@ -482,7 +482,7 @@ impl <'l> DxrVisitor<'l> {
mt: ast::Mutability, mt: ast::Mutability,
expr: &ast::Expr) expr: &ast::Expr)
{ {
let qualname = self.analysis.ty_cx.map.path_to_str(item.id); let qualname = self.analysis.ty_cx.map.path_to_string(item.id);
// If the variable is immutable, save the initialising expression. // If the variable is immutable, save the initialising expression.
let value = match mt { let value = match mt {
@ -497,7 +497,7 @@ impl <'l> DxrVisitor<'l> {
get_ident(item.ident).get(), get_ident(item.ident).get(),
qualname.as_slice(), qualname.as_slice(),
value.as_slice(), value.as_slice(),
ty_to_str(&*typ).as_slice(), ty_to_string(&*typ).as_slice(),
e.cur_scope); e.cur_scope);
// walk type and init value // walk type and init value
@ -510,7 +510,7 @@ impl <'l> DxrVisitor<'l> {
e: DxrVisitorEnv, e: DxrVisitorEnv,
def: &ast::StructDef, def: &ast::StructDef,
ty_params: &ast::Generics) { ty_params: &ast::Generics) {
let qualname = self.analysis.ty_cx.map.path_to_str(item.id); let qualname = self.analysis.ty_cx.map.path_to_string(item.id);
let ctor_id = match def.ctor_id { let ctor_id = match def.ctor_id {
Some(node_id) => node_id, Some(node_id) => node_id,
@ -538,7 +538,7 @@ impl <'l> DxrVisitor<'l> {
e: DxrVisitorEnv, e: DxrVisitorEnv,
enum_definition: &ast::EnumDef, enum_definition: &ast::EnumDef,
ty_params: &ast::Generics) { ty_params: &ast::Generics) {
let qualname = self.analysis.ty_cx.map.path_to_str(item.id); let qualname = self.analysis.ty_cx.map.path_to_string(item.id);
match self.span.sub_span_after_keyword(item.span, keywords::Enum) { match self.span.sub_span_after_keyword(item.span, keywords::Enum) {
Some(sub_span) => self.fmt.enum_str(item.span, Some(sub_span) => self.fmt.enum_str(item.span,
Some(sub_span), Some(sub_span),
@ -639,7 +639,7 @@ impl <'l> DxrVisitor<'l> {
generics: &ast::Generics, generics: &ast::Generics,
trait_refs: &Vec<ast::TraitRef>, trait_refs: &Vec<ast::TraitRef>,
methods: &Vec<ast::TraitMethod>) { methods: &Vec<ast::TraitMethod>) {
let qualname = self.analysis.ty_cx.map.path_to_str(item.id); let qualname = self.analysis.ty_cx.map.path_to_string(item.id);
let sub_span = self.span.sub_span_after_keyword(item.span, keywords::Trait); let sub_span = self.span.sub_span_after_keyword(item.span, keywords::Trait);
self.fmt.trait_str(item.span, self.fmt.trait_str(item.span,
@ -678,7 +678,7 @@ impl <'l> DxrVisitor<'l> {
item: &ast::Item, // The module in question, represented as an item. item: &ast::Item, // The module in question, represented as an item.
e: DxrVisitorEnv, e: DxrVisitorEnv,
m: &ast::Mod) { m: &ast::Mod) {
let qualname = self.analysis.ty_cx.map.path_to_str(item.id); let qualname = self.analysis.ty_cx.map.path_to_string(item.id);
let cm = self.sess.codemap(); let cm = self.sess.codemap();
let filename = cm.span_to_filename(m.inner); let filename = cm.span_to_filename(m.inner);
@ -971,8 +971,8 @@ impl<'l> Visitor<DxrVisitorEnv> for DxrVisitor<'l> {
self.process_trait(item, e, generics, trait_refs, methods), self.process_trait(item, e, generics, trait_refs, methods),
ast::ItemMod(ref m) => self.process_mod(item, e, m), ast::ItemMod(ref m) => self.process_mod(item, e, m),
ast::ItemTy(ty, ref ty_params) => { ast::ItemTy(ty, ref ty_params) => {
let qualname = self.analysis.ty_cx.map.path_to_str(item.id); let qualname = self.analysis.ty_cx.map.path_to_string(item.id);
let value = ty_to_str(&*ty); let value = ty_to_string(&*ty);
let sub_span = self.span.sub_span_after_keyword(item.span, keywords::Type); let sub_span = self.span.sub_span_after_keyword(item.span, keywords::Type);
self.fmt.typedef_str(item.span, self.fmt.typedef_str(item.span,
sub_span, sub_span,
@ -1231,7 +1231,7 @@ impl<'l> Visitor<DxrVisitorEnv> for DxrVisitor<'l> {
return return
} }
let id = String::from_str("$").append(ex.id.to_str().as_slice()); let id = String::from_str("$").append(ex.id.to_string().as_slice());
self.process_formals(&decl.inputs, id.as_slice(), e); self.process_formals(&decl.inputs, id.as_slice(), e);
// walk arg and return types // walk arg and return types
@ -1288,7 +1288,7 @@ impl<'l> Visitor<DxrVisitorEnv> for DxrVisitor<'l> {
def::DefBinding(id, _) => self.fmt.variable_str(p.span, def::DefBinding(id, _) => self.fmt.variable_str(p.span,
sub_span, sub_span,
id, id,
path_to_str(p).as_slice(), path_to_string(p).as_slice(),
value.as_slice(), value.as_slice(),
""), ""),
def::DefVariant(_,id,_) => self.fmt.ref_str(ref_kind, def::DefVariant(_,id,_) => self.fmt.ref_str(ref_kind,
@ -1331,7 +1331,7 @@ impl<'l> Visitor<DxrVisitorEnv> for DxrVisitor<'l> {
for &(id, ref p, ref immut, _) in self.collected_paths.iter() { for &(id, ref p, ref immut, _) in self.collected_paths.iter() {
let value = if *immut { value.to_owned() } else { "<mutable>".to_owned() }; let value = if *immut { value.to_owned() } else { "<mutable>".to_owned() };
let types = self.analysis.ty_cx.node_types.borrow(); let types = self.analysis.ty_cx.node_types.borrow();
let typ = ppaux::ty_to_str(&self.analysis.ty_cx, *types.get(&(id as uint))); let typ = ppaux::ty_to_string(&self.analysis.ty_cx, *types.get(&(id as uint)));
// Get the span only for the name of the variable (I hope the path // Get the span only for the name of the variable (I hope the path
// is only ever a variable name, but who knows?). // is only ever a variable name, but who knows?).
let sub_span = self.span.span_for_last_ident(p.span); let sub_span = self.span.span_for_last_ident(p.span);
@ -1339,7 +1339,7 @@ impl<'l> Visitor<DxrVisitorEnv> for DxrVisitor<'l> {
self.fmt.variable_str(p.span, self.fmt.variable_str(p.span,
sub_span, sub_span,
id, id,
path_to_str(p).as_slice(), path_to_string(p).as_slice(),
value.as_slice(), value.as_slice(),
typ.as_slice()); typ.as_slice());
} }


@ -252,7 +252,7 @@ impl<'a> FmtStrs<'a> {
// the local case they can be overridden in one block and there is no nice way // the local case they can be overridden in one block and there is no nice way
// to refer to such a scope in english, so we just hack it by appending the // to refer to such a scope in english, so we just hack it by appending the
// variable def's node id // variable def's node id
let qualname = String::from_str(name).append("$").append(id.to_str().as_slice()); let qualname = String::from_str(name).append("$").append(id.to_string().as_slice());
self.check_and_record(Variable, self.check_and_record(Variable,
span, span,
sub_span, sub_span,


@ -214,7 +214,7 @@ use middle::trans::type_of;
use middle::trans::debuginfo; use middle::trans::debuginfo;
use middle::ty; use middle::ty;
use util::common::indenter; use util::common::indenter;
use util::ppaux::{Repr, vec_map_to_str}; use util::ppaux::{Repr, vec_map_to_string};
use std; use std;
use std::collections::HashMap; use std::collections::HashMap;
@ -409,7 +409,7 @@ fn expand_nested_bindings<'a, 'b>(
bcx.to_str(), bcx.to_str(),
m.repr(bcx.tcx()), m.repr(bcx.tcx()),
col, col,
bcx.val_to_str(val)); bcx.val_to_string(val));
let _indenter = indenter(); let _indenter = indenter();
m.iter().map(|br| { m.iter().map(|br| {
@ -449,7 +449,7 @@ fn enter_match<'a, 'b>(
bcx.to_str(), bcx.to_str(),
m.repr(bcx.tcx()), m.repr(bcx.tcx()),
col, col,
bcx.val_to_str(val)); bcx.val_to_string(val));
let _indenter = indenter(); let _indenter = indenter();
m.iter().filter_map(|br| { m.iter().filter_map(|br| {
@ -485,7 +485,7 @@ fn enter_default<'a, 'b>(
bcx.to_str(), bcx.to_str(),
m.repr(bcx.tcx()), m.repr(bcx.tcx()),
col, col,
bcx.val_to_str(val)); bcx.val_to_string(val));
let _indenter = indenter(); let _indenter = indenter();
// Collect all of the matches that can match against anything. // Collect all of the matches that can match against anything.
@ -541,7 +541,7 @@ fn enter_opt<'a, 'b>(
m.repr(bcx.tcx()), m.repr(bcx.tcx()),
*opt, *opt,
col, col,
bcx.val_to_str(val)); bcx.val_to_string(val));
let _indenter = indenter(); let _indenter = indenter();
let ctor = match opt { let ctor = match opt {
@ -922,7 +922,7 @@ fn compare_values<'a>(
let did = langcall(cx, let did = langcall(cx,
None, None,
format!("comparison of `{}`", format!("comparison of `{}`",
cx.ty_to_str(rhs_t)).as_slice(), cx.ty_to_string(rhs_t)).as_slice(),
StrEqFnLangItem); StrEqFnLangItem);
callee::trans_lang_call(cx, did, [lhs, rhs], None) callee::trans_lang_call(cx, did, [lhs, rhs], None)
} }
@ -988,7 +988,7 @@ fn insert_lllocals<'a>(mut bcx: &'a Block<'a>, bindings_map: &BindingsMap,
debug!("binding {:?} to {}", debug!("binding {:?} to {}",
binding_info.id, binding_info.id,
bcx.val_to_str(llval)); bcx.val_to_string(llval));
bcx.fcx.lllocals.borrow_mut().insert(binding_info.id, datum); bcx.fcx.lllocals.borrow_mut().insert(binding_info.id, datum);
if bcx.sess().opts.debuginfo == FullDebugInfo { if bcx.sess().opts.debuginfo == FullDebugInfo {
@ -1011,9 +1011,9 @@ fn compile_guard<'a, 'b>(
-> &'b Block<'b> { -> &'b Block<'b> {
debug!("compile_guard(bcx={}, guard_expr={}, m={}, vals={})", debug!("compile_guard(bcx={}, guard_expr={}, m={}, vals={})",
bcx.to_str(), bcx.to_str(),
bcx.expr_to_str(guard_expr), bcx.expr_to_string(guard_expr),
m.repr(bcx.tcx()), m.repr(bcx.tcx()),
vec_map_to_str(vals, |v| bcx.val_to_str(*v))); vec_map_to_string(vals, |v| bcx.val_to_string(*v)));
let _indenter = indenter(); let _indenter = indenter();
let mut bcx = insert_lllocals(bcx, &data.bindings_map, None); let mut bcx = insert_lllocals(bcx, &data.bindings_map, None);
@ -1050,7 +1050,7 @@ fn compile_submatch<'a, 'b>(
debug!("compile_submatch(bcx={}, m={}, vals={})", debug!("compile_submatch(bcx={}, m={}, vals={})",
bcx.to_str(), bcx.to_str(),
m.repr(bcx.tcx()), m.repr(bcx.tcx()),
vec_map_to_str(vals, |v| bcx.val_to_str(*v))); vec_map_to_string(vals, |v| bcx.val_to_string(*v)));
let _indenter = indenter(); let _indenter = indenter();
let _icx = push_ctxt("match::compile_submatch"); let _icx = push_ctxt("match::compile_submatch");
let mut bcx = bcx; let mut bcx = bcx;
@ -1155,7 +1155,7 @@ fn compile_submatch_continue<'a, 'b>(
debug!("options={:?}", opts); debug!("options={:?}", opts);
let mut kind = no_branch; let mut kind = no_branch;
let mut test_val = val; let mut test_val = val;
debug!("test_val={}", bcx.val_to_str(test_val)); debug!("test_val={}", bcx.val_to_string(test_val));
if opts.len() > 0u { if opts.len() > 0u {
match *opts.get(0) { match *opts.get(0) {
var(_, ref repr, _) => { var(_, ref repr, _) => {


@ -63,7 +63,7 @@ use syntax::abi::{X86, X86_64, Arm, Mips, Mipsel};
use syntax::ast; use syntax::ast;
use syntax::attr; use syntax::attr;
use syntax::attr::IntType; use syntax::attr::IntType;
use util::ppaux::ty_to_str; use util::ppaux::ty_to_string;
type Hint = attr::ReprAttr; type Hint = attr::ReprAttr;
@ -135,7 +135,7 @@ pub fn represent_node(bcx: &Block, node: ast::NodeId) -> Rc<Repr> {
/// Decides how to represent a given type. /// Decides how to represent a given type.
pub fn represent_type(cx: &CrateContext, t: ty::t) -> Rc<Repr> { pub fn represent_type(cx: &CrateContext, t: ty::t) -> Rc<Repr> {
debug!("Representing: {}", ty_to_str(cx.tcx(), t)); debug!("Representing: {}", ty_to_string(cx.tcx(), t));
match cx.adt_reprs.borrow().find(&t) { match cx.adt_reprs.borrow().find(&t) {
Some(repr) => return repr.clone(), Some(repr) => return repr.clone(),
None => {} None => {}


@ -71,7 +71,7 @@ use middle::trans::value::Value;
use middle::ty; use middle::ty;
use middle::typeck; use middle::typeck;
use util::common::indenter; use util::common::indenter;
use util::ppaux::{Repr, ty_to_str}; use util::ppaux::{Repr, ty_to_string};
use util::sha2::Sha256; use util::sha2::Sha256;
use util::nodemap::NodeMap; use util::nodemap::NodeMap;
@ -301,7 +301,7 @@ fn require_alloc_fn(bcx: &Block, info_ty: ty::t, it: LangItem) -> ast::DefId {
Ok(id) => id, Ok(id) => id,
Err(s) => { Err(s) => {
bcx.sess().fatal(format!("allocation of `{}` {}", bcx.sess().fatal(format!("allocation of `{}` {}",
bcx.ty_to_str(info_ty), bcx.ty_to_string(info_ty),
s).as_slice()); s).as_slice());
} }
} }
@ -706,7 +706,7 @@ pub fn iter_structural_ty<'r,
let variant_cx = let variant_cx =
fcx.new_temp_block( fcx.new_temp_block(
format!("enum-iter-variant-{}", format!("enum-iter-variant-{}",
variant.disr_val.to_str().as_slice()) variant.disr_val.to_string().as_slice())
.as_slice()); .as_slice());
match adt::trans_case(cx, &*repr, variant.disr_val) { match adt::trans_case(cx, &*repr, variant.disr_val) {
_match::single_result(r) => { _match::single_result(r) => {
@ -809,7 +809,7 @@ pub fn fail_if_zero_or_overflows<'a>(
} }
_ => { _ => {
cx.sess().bug(format!("fail-if-zero on unexpected type: {}", cx.sess().bug(format!("fail-if-zero on unexpected type: {}",
ty_to_str(cx.tcx(), rhs_t)).as_slice()); ty_to_string(cx.tcx(), rhs_t)).as_slice());
} }
}; };
let bcx = with_cond(cx, is_zero, |bcx| { let bcx = with_cond(cx, is_zero, |bcx| {
@ -903,7 +903,7 @@ pub fn invoke<'a>(
debug!("invoke at ???"); debug!("invoke at ???");
} }
Some(id) => { Some(id) => {
debug!("invoke at {}", bcx.tcx().map.node_to_str(id)); debug!("invoke at {}", bcx.tcx().map.node_to_string(id));
} }
} }
@ -1173,7 +1173,7 @@ pub fn new_fn_ctxt<'a>(ccx: &'a CrateContext,
if id == -1 { if id == -1 {
"".to_string() "".to_string()
} else { } else {
ccx.tcx.map.path_to_str(id).to_string() ccx.tcx.map.path_to_string(id).to_string()
}, },
id, param_substs.repr(ccx.tcx())); id, param_substs.repr(ccx.tcx()));
@ -1474,7 +1474,7 @@ pub fn trans_fn(ccx: &CrateContext,
param_substs: &param_substs, param_substs: &param_substs,
id: ast::NodeId, id: ast::NodeId,
attrs: &[ast::Attribute]) { attrs: &[ast::Attribute]) {
let _s = StatRecorder::new(ccx, ccx.tcx.map.path_to_str(id).to_string()); let _s = StatRecorder::new(ccx, ccx.tcx.map.path_to_string(id).to_string());
debug!("trans_fn(param_substs={})", param_substs.repr(ccx.tcx())); debug!("trans_fn(param_substs={})", param_substs.repr(ccx.tcx()));
let _icx = push_ctxt("trans_fn"); let _icx = push_ctxt("trans_fn");
let output_type = ty::ty_fn_ret(ty::node_id_to_type(ccx.tcx(), id)); let output_type = ty::ty_fn_ret(ty::node_id_to_type(ccx.tcx(), id));
@ -1527,7 +1527,7 @@ fn trans_enum_variant_or_tuple_like_struct(ccx: &CrateContext,
_ => ccx.sess().bug( _ => ccx.sess().bug(
format!("trans_enum_variant_or_tuple_like_struct: \ format!("trans_enum_variant_or_tuple_like_struct: \
unexpected ctor return type {}", unexpected ctor return type {}",
ty_to_str(ccx.tcx(), ctor_ty)).as_slice()) ty_to_string(ccx.tcx(), ctor_ty)).as_slice())
}; };
let arena = TypedArena::new(); let arena = TypedArena::new();
@ -2010,7 +2010,7 @@ fn exported_name(ccx: &CrateContext, id: ast::NodeId,
_ => ccx.tcx.map.with_path(id, |mut path| { _ => ccx.tcx.map.with_path(id, |mut path| {
if attr::contains_name(attrs, "no_mangle") { if attr::contains_name(attrs, "no_mangle") {
// Don't mangle // Don't mangle
path.last().unwrap().to_str() path.last().unwrap().to_string()
} else { } else {
match weak_lang_items::link_name(attrs) { match weak_lang_items::link_name(attrs) {
Some(name) => name.get().to_string(), Some(name) => name.get().to_string(),


@ -122,8 +122,8 @@ pub fn Invoke(cx: &Block,
check_not_terminated(cx); check_not_terminated(cx);
terminate(cx, "Invoke"); terminate(cx, "Invoke");
debug!("Invoke({} with arguments ({}))", debug!("Invoke({} with arguments ({}))",
cx.val_to_str(fn_), cx.val_to_string(fn_),
args.iter().map(|a| cx.val_to_str(*a)).collect::<Vec<String>>().connect(", ")); args.iter().map(|a| cx.val_to_string(*a)).collect::<Vec<String>>().connect(", "));
B(cx).invoke(fn_, args, then, catch, attributes) B(cx).invoke(fn_, args, then, catch, attributes)
} }


@ -161,9 +161,9 @@ impl<'a> Builder<'a> {
self.count_insn("invoke"); self.count_insn("invoke");
debug!("Invoke {} with args ({})", debug!("Invoke {} with args ({})",
self.ccx.tn.val_to_str(llfn), self.ccx.tn.val_to_string(llfn),
args.iter() args.iter()
.map(|&v| self.ccx.tn.val_to_str(v)) .map(|&v| self.ccx.tn.val_to_string(v))
.collect::<Vec<String>>() .collect::<Vec<String>>()
.connect(", ")); .connect(", "));
@ -497,8 +497,8 @@ impl<'a> Builder<'a> {
pub fn store(&self, val: ValueRef, ptr: ValueRef) { pub fn store(&self, val: ValueRef, ptr: ValueRef) {
debug!("Store {} -> {}", debug!("Store {} -> {}",
self.ccx.tn.val_to_str(val), self.ccx.tn.val_to_string(val),
self.ccx.tn.val_to_str(ptr)); self.ccx.tn.val_to_string(ptr));
assert!(self.llbuilder.is_not_null()); assert!(self.llbuilder.is_not_null());
self.count_insn("store"); self.count_insn("store");
unsafe { unsafe {
@ -508,8 +508,8 @@ impl<'a> Builder<'a> {
pub fn volatile_store(&self, val: ValueRef, ptr: ValueRef) { pub fn volatile_store(&self, val: ValueRef, ptr: ValueRef) {
debug!("Store {} -> {}", debug!("Store {} -> {}",
self.ccx.tn.val_to_str(val), self.ccx.tn.val_to_string(val),
self.ccx.tn.val_to_str(ptr)); self.ccx.tn.val_to_string(ptr));
assert!(self.llbuilder.is_not_null()); assert!(self.llbuilder.is_not_null());
self.count_insn("store.volatile"); self.count_insn("store.volatile");
unsafe { unsafe {
@ -520,8 +520,8 @@ impl<'a> Builder<'a> {
pub fn atomic_store(&self, val: ValueRef, ptr: ValueRef, order: AtomicOrdering) { pub fn atomic_store(&self, val: ValueRef, ptr: ValueRef, order: AtomicOrdering) {
debug!("Store {} -> {}", debug!("Store {} -> {}",
self.ccx.tn.val_to_str(val), self.ccx.tn.val_to_string(val),
self.ccx.tn.val_to_str(ptr)); self.ccx.tn.val_to_string(ptr));
self.count_insn("store.atomic"); self.count_insn("store.atomic");
unsafe { unsafe {
let ty = Type::from_ref(llvm::LLVMTypeOf(ptr)); let ty = Type::from_ref(llvm::LLVMTypeOf(ptr));
@ -760,7 +760,7 @@ impl<'a> Builder<'a> {
if self.ccx.sess().asm_comments() { if self.ccx.sess().asm_comments() {
let s = format!("{} ({})", let s = format!("{} ({})",
text, text,
self.ccx.sess().codemap().span_to_str(sp)); self.ccx.sess().codemap().span_to_string(sp));
debug!("{}", s.as_slice()); debug!("{}", s.as_slice());
self.add_comment(s.as_slice()); self.add_comment(s.as_slice());
} }
@ -794,11 +794,11 @@ impl<'a> Builder<'a> {
else { lib::llvm::False }; else { lib::llvm::False };
let argtys = inputs.iter().map(|v| { let argtys = inputs.iter().map(|v| {
debug!("Asm Input Type: {:?}", self.ccx.tn.val_to_str(*v)); debug!("Asm Input Type: {:?}", self.ccx.tn.val_to_string(*v));
val_ty(*v) val_ty(*v)
}).collect::<Vec<_>>(); }).collect::<Vec<_>>();
debug!("Asm Output Type: {:?}", self.ccx.tn.type_to_str(output)); debug!("Asm Output Type: {:?}", self.ccx.tn.type_to_string(output));
let fty = Type::func(argtys.as_slice(), &output); let fty = Type::func(argtys.as_slice(), &output);
unsafe { unsafe {
let v = llvm::LLVMInlineAsm( let v = llvm::LLVMInlineAsm(
@ -812,9 +812,9 @@ impl<'a> Builder<'a> {
self.count_insn("call"); self.count_insn("call");
debug!("Call {} with args ({})", debug!("Call {} with args ({})",
self.ccx.tn.val_to_str(llfn), self.ccx.tn.val_to_string(llfn),
args.iter() args.iter()
.map(|&v| self.ccx.tn.val_to_str(v)) .map(|&v| self.ccx.tn.val_to_string(v))
.collect::<Vec<String>>() .collect::<Vec<String>>()
.connect(", ")); .connect(", "));


@ -108,7 +108,7 @@ fn trans<'a>(bcx: &'a Block<'a>, expr: &ast::Expr) -> Callee<'a> {
expr.span, expr.span,
format!("type of callee is neither bare-fn nor closure: \ format!("type of callee is neither bare-fn nor closure: \
{}", {}",
bcx.ty_to_str(datum.ty)).as_slice()); bcx.ty_to_string(datum.ty)).as_slice());
} }
} }
} }
@ -905,7 +905,7 @@ pub fn trans_arg_datum<'a>(
let arg_datum_ty = arg_datum.ty; let arg_datum_ty = arg_datum.ty;
debug!(" arg datum: {}", arg_datum.to_str(bcx.ccx())); debug!(" arg datum: {}", arg_datum.to_string(bcx.ccx()));
let mut val; let mut val;
if ty::type_is_bot(arg_datum_ty) { if ty::type_is_bot(arg_datum_ty) {
@ -949,11 +949,11 @@ pub fn trans_arg_datum<'a>(
// this could happen due to e.g. subtyping // this could happen due to e.g. subtyping
let llformal_arg_ty = type_of::type_of_explicit_arg(ccx, formal_arg_ty); let llformal_arg_ty = type_of::type_of_explicit_arg(ccx, formal_arg_ty);
debug!("casting actual type ({}) to match formal ({})", debug!("casting actual type ({}) to match formal ({})",
bcx.val_to_str(val), bcx.llty_str(llformal_arg_ty)); bcx.val_to_string(val), bcx.llty_str(llformal_arg_ty));
val = PointerCast(bcx, val, llformal_arg_ty); val = PointerCast(bcx, val, llformal_arg_ty);
} }
} }
debug!("--- trans_arg_datum passing {}", bcx.val_to_str(val)); debug!("--- trans_arg_datum passing {}", bcx.val_to_string(val));
Result::new(bcx, val) Result::new(bcx, val)
} }


@ -85,7 +85,7 @@ impl<'a> CleanupMethods<'a> for FunctionContext<'a> {
*/ */
debug!("push_ast_cleanup_scope({})", debug!("push_ast_cleanup_scope({})",
self.ccx.tcx.map.node_to_str(id)); self.ccx.tcx.map.node_to_string(id));
// FIXME(#2202) -- currently closure bodies have a parent // FIXME(#2202) -- currently closure bodies have a parent
// region, which messes up the assertion below, since there // region, which messes up the assertion below, since there
@ -109,7 +109,7 @@ impl<'a> CleanupMethods<'a> for FunctionContext<'a> {
id: ast::NodeId, id: ast::NodeId,
exits: [&'a Block<'a>, ..EXIT_MAX]) { exits: [&'a Block<'a>, ..EXIT_MAX]) {
debug!("push_loop_cleanup_scope({})", debug!("push_loop_cleanup_scope({})",
self.ccx.tcx.map.node_to_str(id)); self.ccx.tcx.map.node_to_string(id));
assert_eq!(Some(id), self.top_ast_scope()); assert_eq!(Some(id), self.top_ast_scope());
self.push_scope(CleanupScope::new(LoopScopeKind(id, exits))); self.push_scope(CleanupScope::new(LoopScopeKind(id, exits)));
@ -133,7 +133,7 @@ impl<'a> CleanupMethods<'a> for FunctionContext<'a> {
*/ */
debug!("pop_and_trans_ast_cleanup_scope({})", debug!("pop_and_trans_ast_cleanup_scope({})",
self.ccx.tcx.map.node_to_str(cleanup_scope)); self.ccx.tcx.map.node_to_string(cleanup_scope));
assert!(self.top_scope(|s| s.kind.is_ast_with_id(cleanup_scope))); assert!(self.top_scope(|s| s.kind.is_ast_with_id(cleanup_scope)));
@ -152,7 +152,7 @@ impl<'a> CleanupMethods<'a> for FunctionContext<'a> {
*/ */
debug!("pop_loop_cleanup_scope({})", debug!("pop_loop_cleanup_scope({})",
self.ccx.tcx.map.node_to_str(cleanup_scope)); self.ccx.tcx.map.node_to_string(cleanup_scope));
assert!(self.top_scope(|s| s.kind.is_loop_with_id(cleanup_scope))); assert!(self.top_scope(|s| s.kind.is_loop_with_id(cleanup_scope)));
@ -246,7 +246,7 @@ impl<'a> CleanupMethods<'a> for FunctionContext<'a> {
debug!("schedule_drop_mem({:?}, val={}, ty={})", debug!("schedule_drop_mem({:?}, val={}, ty={})",
cleanup_scope, cleanup_scope,
self.ccx.tn.val_to_str(val), self.ccx.tn.val_to_string(val),
ty.repr(self.ccx.tcx())); ty.repr(self.ccx.tcx()));
self.schedule_clean(cleanup_scope, drop as Box<Cleanup>); self.schedule_clean(cleanup_scope, drop as Box<Cleanup>);
@ -272,7 +272,7 @@ impl<'a> CleanupMethods<'a> for FunctionContext<'a> {
debug!("schedule_drop_and_zero_mem({:?}, val={}, ty={}, zero={})", debug!("schedule_drop_and_zero_mem({:?}, val={}, ty={}, zero={})",
cleanup_scope, cleanup_scope,
self.ccx.tn.val_to_str(val), self.ccx.tn.val_to_string(val),
ty.repr(self.ccx.tcx()), ty.repr(self.ccx.tcx()),
true); true);
@ -298,7 +298,7 @@ impl<'a> CleanupMethods<'a> for FunctionContext<'a> {
debug!("schedule_drop_immediate({:?}, val={}, ty={})", debug!("schedule_drop_immediate({:?}, val={}, ty={})",
cleanup_scope, cleanup_scope,
self.ccx.tn.val_to_str(val), self.ccx.tn.val_to_string(val),
ty.repr(self.ccx.tcx())); ty.repr(self.ccx.tcx()));
self.schedule_clean(cleanup_scope, drop as Box<Cleanup>); self.schedule_clean(cleanup_scope, drop as Box<Cleanup>);
@ -318,7 +318,7 @@ impl<'a> CleanupMethods<'a> for FunctionContext<'a> {
debug!("schedule_free_value({:?}, val={}, heap={:?})", debug!("schedule_free_value({:?}, val={}, heap={:?})",
cleanup_scope, cleanup_scope,
self.ccx.tn.val_to_str(val), self.ccx.tn.val_to_string(val),
heap); heap);
self.schedule_clean(cleanup_scope, drop as Box<Cleanup>); self.schedule_clean(cleanup_scope, drop as Box<Cleanup>);
@ -358,7 +358,7 @@ impl<'a> CleanupMethods<'a> for FunctionContext<'a> {
self.ccx.sess().bug( self.ccx.sess().bug(
format!("no cleanup scope {} found", format!("no cleanup scope {} found",
self.ccx.tcx.map.node_to_str(cleanup_scope)).as_slice()); self.ccx.tcx.map.node_to_string(cleanup_scope)).as_slice());
} }
fn schedule_clean_in_custom_scope(&self, fn schedule_clean_in_custom_scope(&self,


@ -27,7 +27,7 @@ use middle::trans::type_of::*;
use middle::trans::type_::Type; use middle::trans::type_::Type;
use middle::ty; use middle::ty;
use util::ppaux::Repr; use util::ppaux::Repr;
use util::ppaux::ty_to_str; use util::ppaux::ty_to_string;
use arena::TypedArena; use arena::TypedArena;
use syntax::ast; use syntax::ast;
@ -104,8 +104,8 @@ pub struct EnvValue {
} }
impl EnvValue { impl EnvValue {
pub fn to_str(&self, ccx: &CrateContext) -> String { pub fn to_string(&self, ccx: &CrateContext) -> String {
format!("{}({})", self.action, self.datum.to_str(ccx)) format!("{}({})", self.action, self.datum.to_string(ccx))
} }
} }
@ -124,7 +124,7 @@ pub fn mk_closure_tys(tcx: &ty::ctxt,
} }
}).collect(); }).collect();
let cdata_ty = ty::mk_tup(tcx, bound_tys); let cdata_ty = ty::mk_tup(tcx, bound_tys);
debug!("cdata_ty={}", ty_to_str(tcx, cdata_ty)); debug!("cdata_ty={}", ty_to_string(tcx, cdata_ty));
return cdata_ty; return cdata_ty;
} }
@ -196,16 +196,16 @@ pub fn store_environment<'a>(
let Result {bcx: bcx, val: llbox} = allocate_cbox(bcx, store, cdata_ty); let Result {bcx: bcx, val: llbox} = allocate_cbox(bcx, store, cdata_ty);
let llbox = PointerCast(bcx, llbox, llboxptr_ty); let llbox = PointerCast(bcx, llbox, llboxptr_ty);
debug!("tuplify_box_ty = {}", ty_to_str(tcx, cbox_ty)); debug!("tuplify_box_ty = {}", ty_to_string(tcx, cbox_ty));
// Copy expr values into boxed bindings. // Copy expr values into boxed bindings.
let mut bcx = bcx; let mut bcx = bcx;
for (i, bv) in bound_values.move_iter().enumerate() { for (i, bv) in bound_values.move_iter().enumerate() {
debug!("Copy {} into closure", bv.to_str(ccx)); debug!("Copy {} into closure", bv.to_string(ccx));
if ccx.sess().asm_comments() { if ccx.sess().asm_comments() {
add_comment(bcx, format!("Copy {} into closure", add_comment(bcx, format!("Copy {} into closure",
bv.to_str(ccx)).as_slice()); bv.to_string(ccx)).as_slice());
} }
let bound_data = GEPi(bcx, llbox, [0u, abi::box_field_body, i]); let bound_data = GEPi(bcx, llbox, [0u, abi::box_field_body, i]);


@ -196,13 +196,13 @@ impl param_substs {
} }
} }
fn param_substs_to_str(this: &param_substs, tcx: &ty::ctxt) -> String { fn param_substs_to_string(this: &param_substs, tcx: &ty::ctxt) -> String {
format!("param_substs({})", this.substs.repr(tcx)) format!("param_substs({})", this.substs.repr(tcx))
} }
impl Repr for param_substs { impl Repr for param_substs {
fn repr(&self, tcx: &ty::ctxt) -> String { fn repr(&self, tcx: &ty::ctxt) -> String {
param_substs_to_str(self, tcx) param_substs_to_string(self, tcx)
} }
} }
@ -436,11 +436,11 @@ impl<'a> Block<'a> {
token::get_ident(ident).get().to_string() token::get_ident(ident).get().to_string()
} }
pub fn node_id_to_str(&self, id: ast::NodeId) -> String { pub fn node_id_to_string(&self, id: ast::NodeId) -> String {
self.tcx().map.node_to_str(id).to_string() self.tcx().map.node_to_string(id).to_string()
} }
pub fn expr_to_str(&self, e: &ast::Expr) -> String { pub fn expr_to_string(&self, e: &ast::Expr) -> String {
e.repr(self.tcx()) e.repr(self.tcx())
} }
@ -454,15 +454,15 @@ impl<'a> Block<'a> {
} }
} }
pub fn val_to_str(&self, val: ValueRef) -> String { pub fn val_to_string(&self, val: ValueRef) -> String {
self.ccx().tn.val_to_str(val) self.ccx().tn.val_to_string(val)
} }
pub fn llty_str(&self, ty: Type) -> String { pub fn llty_str(&self, ty: Type) -> String {
self.ccx().tn.type_to_str(ty) self.ccx().tn.type_to_string(ty)
} }
pub fn ty_to_str(&self, t: ty::t) -> String { pub fn ty_to_string(&self, t: ty::t) -> String {
t.repr(self.tcx()) t.repr(self.tcx())
} }
@ -645,7 +645,7 @@ pub fn const_get_elt(cx: &CrateContext, v: ValueRef, us: &[c_uint])
let r = llvm::LLVMConstExtractValue(v, us.as_ptr(), us.len() as c_uint); let r = llvm::LLVMConstExtractValue(v, us.as_ptr(), us.len() as c_uint);
debug!("const_get_elt(v={}, us={:?}, r={})", debug!("const_get_elt(v={}, us={:?}, r={})",
cx.tn.val_to_str(v), us, cx.tn.val_to_str(r)); cx.tn.val_to_string(v), us, cx.tn.val_to_string(r));
return r; return r;
} }
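The Block helpers renamed in this hunk share one shape: small context methods that build an owned String so the result can be dropped straight into a format!/debug! call. A hedged stand-alone sketch of that shape (Ctxt and both methods are invented for illustration and are not the rustc API):

```rust
// Hypothetical context type mirroring the shape of Block's debug helpers.
struct Ctxt {
    crate_name: String,
}

impl Ctxt {
    // Post-rename convention: helpers returning an owned String are
    // named *_to_string rather than *_to_str.
    fn node_id_to_string(&self, id: u32) -> String {
        format!("{}::node{}", self.crate_name, id)
    }

    fn val_to_string(&self, val: i64) -> String {
        format!("llval({:#x})", val)
    }
}

fn main() {
    let cx = Ctxt { crate_name: "demo".to_string() };
    // The helpers slot directly into format strings, as in the debug!
    // lines throughout this diff.
    println!("store {} -> {}", cx.val_to_string(0xdead), cx.node_id_to_string(7));
}
```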


@ -31,7 +31,7 @@ use middle::trans::type_::Type;
use middle::trans::type_of; use middle::trans::type_of;
use middle::trans::debuginfo; use middle::trans::debuginfo;
use middle::ty; use middle::ty;
use util::ppaux::{Repr, ty_to_str}; use util::ppaux::{Repr, ty_to_string};
use std::c_str::ToCStr; use std::c_str::ToCStr;
use std::gc::Gc; use std::gc::Gc;
@ -59,7 +59,7 @@ pub fn const_lit(cx: &CrateContext, e: &ast::Expr, lit: ast::Lit)
_ => cx.sess().span_bug(lit.span, _ => cx.sess().span_bug(lit.span,
format!("integer literal has type {} (expected int \ format!("integer literal has type {} (expected int \
or uint)", or uint)",
ty_to_str(cx.tcx(), lit_int_ty)).as_slice()) ty_to_string(cx.tcx(), lit_int_ty)).as_slice())
} }
} }
ast::LitFloat(ref fs, t) => { ast::LitFloat(ref fs, t) => {
@ -155,14 +155,14 @@ fn const_deref(cx: &CrateContext, v: ValueRef, t: ty::t, explicit: bool)
} }
_ => { _ => {
cx.sess().bug(format!("unexpected dereferenceable type {}", cx.sess().bug(format!("unexpected dereferenceable type {}",
ty_to_str(cx.tcx(), t)).as_slice()) ty_to_string(cx.tcx(), t)).as_slice())
} }
}; };
(dv, mt.ty) (dv, mt.ty)
} }
None => { None => {
cx.sess().bug(format!("can't dereference const of type {}", cx.sess().bug(format!("can't dereference const of type {}",
ty_to_str(cx.tcx(), t)).as_slice()) ty_to_string(cx.tcx(), t)).as_slice())
} }
} }
} }
@ -285,7 +285,7 @@ pub fn const_expr(cx: &CrateContext, e: &ast::Expr, is_local: bool) -> (ValueRef
llvm::LLVMDumpValue(C_undef(llty)); llvm::LLVMDumpValue(C_undef(llty));
} }
cx.sess().bug(format!("const {} of type {} has size {} instead of {}", cx.sess().bug(format!("const {} of type {} has size {} instead of {}",
e.repr(cx.tcx()), ty_to_str(cx.tcx(), ety), e.repr(cx.tcx()), ty_to_string(cx.tcx(), ety),
csize, tsize).as_slice()); csize, tsize).as_slice());
} }
(llconst, inlineable) (llconst, inlineable)


@ -126,8 +126,8 @@ pub fn trans_if<'a>(bcx: &'a Block<'a>,
dest: expr::Dest) dest: expr::Dest)
-> &'a Block<'a> { -> &'a Block<'a> {
debug!("trans_if(bcx={}, if_id={}, cond={}, thn={:?}, dest={})", debug!("trans_if(bcx={}, if_id={}, cond={}, thn={:?}, dest={})",
bcx.to_str(), if_id, bcx.expr_to_str(cond), thn.id, bcx.to_str(), if_id, bcx.expr_to_string(cond), thn.id,
dest.to_str(bcx.ccx())); dest.to_string(bcx.ccx()));
let _icx = push_ctxt("trans_if"); let _icx = push_ctxt("trans_if");
let mut bcx = bcx; let mut bcx = bcx;


@ -23,7 +23,7 @@ use middle::trans::glue;
use middle::trans::tvec; use middle::trans::tvec;
use middle::trans::type_of; use middle::trans::type_of;
use middle::ty; use middle::ty;
use util::ppaux::{ty_to_str}; use util::ppaux::{ty_to_string};
use syntax::ast; use syntax::ast;
@ -596,10 +596,10 @@ impl<K:KindOps> Datum<K> {
} }
#[allow(dead_code)] // useful for debugging #[allow(dead_code)] // useful for debugging
pub fn to_str(&self, ccx: &CrateContext) -> String { pub fn to_string(&self, ccx: &CrateContext) -> String {
format!("Datum({}, {}, {:?})", format!("Datum({}, {}, {:?})",
ccx.tn.val_to_str(self.val), ccx.tn.val_to_string(self.val),
ty_to_str(ccx.tcx(), self.ty), ty_to_string(ccx.tcx(), self.ty),
self.kind) self.kind)
} }
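Datum's renamed helper above still takes a CrateContext, but the name it now matches is the standard one in which an owned String falls out of the formatting trait. A minimal sketch of that standard path in current Rust, using Display where this era of the compiler used Show; DatumLike and its fields are made up for illustration:

```rust
use std::fmt;

// Illustrative stand-in for a value/type/kind triple; not the rustc Datum.
struct DatumLike {
    val: u64,           // pretend LLVM value handle
    ty: String,         // pretend pretty-printed type
    kind: &'static str, // pretend rvalue/lvalue kind
}

impl fmt::Display for DatumLike {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // One canonical rendering; to_string() then comes for free from
        // the blanket impl of ToString for Display types.
        write!(f, "Datum({}, {}, {})", self.val, self.ty, self.kind)
    }
}

fn main() {
    let d = DatumLike { val: 42, ty: "uint".into(), kind: "ByValue" };
    let s: String = d.to_string();
    println!("{}", s);
}
```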


@ -270,7 +270,7 @@ impl TypeMap {
metadata: DIType) { metadata: DIType) {
if !self.type_to_metadata.insert(ty::type_id(type_), metadata) { if !self.type_to_metadata.insert(ty::type_id(type_), metadata) {
cx.sess().bug(format!("Type metadata for ty::t '{}' is already in the TypeMap!", cx.sess().bug(format!("Type metadata for ty::t '{}' is already in the TypeMap!",
ppaux::ty_to_str(cx.tcx(), type_)).as_slice()); ppaux::ty_to_string(cx.tcx(), type_)).as_slice());
} }
} }
@ -504,7 +504,7 @@ impl TypeMap {
}, },
_ => { _ => {
cx.sess().bug(format!("get_unique_type_id_of_type() - unexpected type: {}, {:?}", cx.sess().bug(format!("get_unique_type_id_of_type() - unexpected type: {}, {:?}",
ppaux::ty_to_str(cx.tcx(), type_).as_slice(), ppaux::ty_to_string(cx.tcx(), type_).as_slice(),
ty::get(type_).sty).as_slice()) ty::get(type_).sty).as_slice())
} }
}; };
@ -808,7 +808,7 @@ pub fn create_global_var_metadata(cx: &CrateContext,
let type_metadata = type_metadata(cx, variable_type, span); let type_metadata = type_metadata(cx, variable_type, span);
let namespace_node = namespace_for_item(cx, ast_util::local_def(node_id)); let namespace_node = namespace_for_item(cx, ast_util::local_def(node_id));
let var_name = token::get_ident(ident).get().to_str(); let var_name = token::get_ident(ident).get().to_string();
let linkage_name = let linkage_name =
namespace_node.mangled_name_of_contained_item(var_name.as_slice()); namespace_node.mangled_name_of_contained_item(var_name.as_slice());
let var_scope = namespace_node.scope; let var_scope = namespace_node.scope;
@ -1056,7 +1056,7 @@ pub fn set_source_location(fcx: &FunctionContext,
FunctionDebugContext(box ref function_debug_context) => { FunctionDebugContext(box ref function_debug_context) => {
let cx = fcx.ccx; let cx = fcx.ccx;
debug!("set_source_location: {}", cx.sess().codemap().span_to_str(span)); debug!("set_source_location: {}", cx.sess().codemap().span_to_string(span));
if function_debug_context.source_locations_enabled.get() { if function_debug_context.source_locations_enabled.get() {
let loc = span_start(cx, span); let loc = span_start(cx, span);
@ -1812,7 +1812,7 @@ impl RecursiveTypeDescription {
type_map.find_metadata_for_type(unfinished_type).is_none() { type_map.find_metadata_for_type(unfinished_type).is_none() {
cx.sess().bug(format!("Forward declaration of potentially recursive type \ cx.sess().bug(format!("Forward declaration of potentially recursive type \
'{}' was not found in TypeMap!", '{}' was not found in TypeMap!",
ppaux::ty_to_str(cx.tcx(), unfinished_type)) ppaux::ty_to_string(cx.tcx(), unfinished_type))
.as_slice()); .as_slice());
} }
} }
@ -2245,7 +2245,7 @@ fn describe_enum_variant(cx: &CrateContext,
Some(ref names) => { Some(ref names) => {
names.iter() names.iter()
.map(|ident| { .map(|ident| {
token::get_ident(*ident).get().to_str().into_string() token::get_ident(*ident).get().to_string().into_string()
}).collect() }).collect()
} }
None => variant_info.args.iter().map(|_| "".to_string()).collect() None => variant_info.args.iter().map(|_| "".to_string()).collect()
@ -2872,7 +2872,7 @@ fn trait_pointer_metadata(cx: &CrateContext,
ty::ty_uniq(pointee_type) => pointee_type, ty::ty_uniq(pointee_type) => pointee_type,
ty::ty_rptr(_, ty::mt { ty, .. }) => ty, ty::ty_rptr(_, ty::mt { ty, .. }) => ty,
_ => { _ => {
let pp_type_name = ppaux::ty_to_str(cx.tcx(), trait_pointer_type); let pp_type_name = ppaux::ty_to_string(cx.tcx(), trait_pointer_type);
cx.sess().bug(format!("debuginfo: Unexpected trait-pointer type in \ cx.sess().bug(format!("debuginfo: Unexpected trait-pointer type in \
trait_pointer_metadata(): {}", trait_pointer_metadata(): {}",
pp_type_name.as_slice()).as_slice()); pp_type_name.as_slice()).as_slice());
@ -2882,7 +2882,7 @@ fn trait_pointer_metadata(cx: &CrateContext,
let def_id = match ty::get(trait_object_type).sty { let def_id = match ty::get(trait_object_type).sty {
ty::ty_trait(box ty::TyTrait { def_id, .. }) => def_id, ty::ty_trait(box ty::TyTrait { def_id, .. }) => def_id,
_ => { _ => {
let pp_type_name = ppaux::ty_to_str(cx.tcx(), trait_object_type); let pp_type_name = ppaux::ty_to_string(cx.tcx(), trait_object_type);
cx.sess().bug(format!("debuginfo: Unexpected trait-object type in \ cx.sess().bug(format!("debuginfo: Unexpected trait-object type in \
trait_pointer_metadata(): {}", trait_pointer_metadata(): {}",
pp_type_name.as_slice()).as_slice()); pp_type_name.as_slice()).as_slice());
@ -3064,7 +3064,7 @@ fn type_metadata(cx: &CrateContext,
the debuginfo::TypeMap but it \ the debuginfo::TypeMap but it \
was not. (ty::t = {})", was not. (ty::t = {})",
unique_type_id_str.as_slice(), unique_type_id_str.as_slice(),
ppaux::ty_to_str(cx.tcx(), t)); ppaux::ty_to_string(cx.tcx(), t));
cx.sess().span_bug(usage_site_span, error_message.as_slice()); cx.sess().span_bug(usage_site_span, error_message.as_slice());
} }
}; };
@ -3079,7 +3079,7 @@ fn type_metadata(cx: &CrateContext,
debuginfo::TypeMap. \ debuginfo::TypeMap. \
UniqueTypeId={}, ty::t={}", UniqueTypeId={}, ty::t={}",
unique_type_id_str.as_slice(), unique_type_id_str.as_slice(),
ppaux::ty_to_str(cx.tcx(), t)); ppaux::ty_to_string(cx.tcx(), t));
cx.sess().span_bug(usage_site_span, error_message.as_slice()); cx.sess().span_bug(usage_site_span, error_message.as_slice());
} }
} }
@ -3879,7 +3879,7 @@ fn push_debuginfo_type_name(cx: &CrateContext,
ty::ty_infer(_) | ty::ty_infer(_) |
ty::ty_param(_) => { ty::ty_param(_) => {
cx.sess().bug(format!("debuginfo: Trying to create type name for \ cx.sess().bug(format!("debuginfo: Trying to create type name for \
unexpected type: {}", ppaux::ty_to_str(cx.tcx(), t)).as_slice()); unexpected type: {}", ppaux::ty_to_string(cx.tcx(), t)).as_slice());
} }
} }


@ -75,7 +75,7 @@ use middle::trans::type_::Type;
use syntax::ast; use syntax::ast;
use syntax::codemap; use syntax::codemap;
use syntax::print::pprust::{expr_to_str}; use syntax::print::pprust::{expr_to_string};
use std::gc::Gc; use std::gc::Gc;
@ -91,9 +91,9 @@ pub enum Dest {
} }
impl Dest { impl Dest {
pub fn to_str(&self, ccx: &CrateContext) -> String { pub fn to_string(&self, ccx: &CrateContext) -> String {
match *self { match *self {
SaveIn(v) => format!("SaveIn({})", ccx.tn.val_to_str(v)), SaveIn(v) => format!("SaveIn({})", ccx.tn.val_to_string(v)),
Ignore => "Ignore".to_string() Ignore => "Ignore".to_string()
} }
} }
@ -148,7 +148,7 @@ pub fn trans<'a>(bcx: &'a Block<'a>,
* the stack. * the stack.
*/ */
debug!("trans(expr={})", bcx.expr_to_str(expr)); debug!("trans(expr={})", bcx.expr_to_string(expr));
let mut bcx = bcx; let mut bcx = bcx;
let fcx = bcx.fcx; let fcx = bcx.fcx;
@ -178,7 +178,7 @@ fn apply_adjustments<'a>(bcx: &'a Block<'a>,
Some(adj) => { adj } Some(adj) => { adj }
}; };
debug!("unadjusted datum for expr {}: {}", debug!("unadjusted datum for expr {}: {}",
expr.id, datum.to_str(bcx.ccx())); expr.id, datum.to_string(bcx.ccx()));
match adjustment { match adjustment {
AutoAddEnv(..) => { AutoAddEnv(..) => {
datum = unpack_datum!(bcx, add_env(bcx, expr, datum)); datum = unpack_datum!(bcx, add_env(bcx, expr, datum));
@ -216,7 +216,7 @@ fn apply_adjustments<'a>(bcx: &'a Block<'a>,
datum = scratch.to_expr_datum(); datum = scratch.to_expr_datum();
} }
} }
debug!("after adjustments, datum={}", datum.to_str(bcx.ccx())); debug!("after adjustments, datum={}", datum.to_string(bcx.ccx()));
return DatumBlock {bcx: bcx, datum: datum}; return DatumBlock {bcx: bcx, datum: datum};
fn auto_slice<'a>( fn auto_slice<'a>(
@ -325,7 +325,7 @@ fn trans_unadjusted<'a>(bcx: &'a Block<'a>,
let mut bcx = bcx; let mut bcx = bcx;
debug!("trans_unadjusted(expr={})", bcx.expr_to_str(expr)); debug!("trans_unadjusted(expr={})", bcx.expr_to_string(expr));
let _indenter = indenter(); let _indenter = indenter();
debuginfo::set_source_location(bcx.fcx, expr.id, expr.span); debuginfo::set_source_location(bcx.fcx, expr.id, expr.span);
@ -545,8 +545,8 @@ fn trans_index<'a>(bcx: &'a Block<'a>,
let (base, len) = base_datum.get_vec_base_and_len(bcx); let (base, len) = base_datum.get_vec_base_and_len(bcx);
debug!("trans_index: base {}", bcx.val_to_str(base)); debug!("trans_index: base {}", bcx.val_to_string(base));
debug!("trans_index: len {}", bcx.val_to_str(len)); debug!("trans_index: len {}", bcx.val_to_string(len));
let bounds_check = ICmp(bcx, lib::llvm::IntUGE, ix_val, len); let bounds_check = ICmp(bcx, lib::llvm::IntUGE, ix_val, len);
let expect = ccx.get_intrinsic(&("llvm.expect.i1")); let expect = ccx.get_intrinsic(&("llvm.expect.i1"));
@ -780,7 +780,7 @@ fn trans_rvalue_dps_unadjusted<'a>(bcx: &'a Block<'a>,
let expr_ty = expr_ty(bcx, expr); let expr_ty = expr_ty(bcx, expr);
let store = ty::ty_closure_store(expr_ty); let store = ty::ty_closure_store(expr_ty);
debug!("translating block function {} with type {}", debug!("translating block function {} with type {}",
expr_to_str(expr), expr_ty.repr(tcx)); expr_to_string(expr), expr_ty.repr(tcx));
closure::trans_expr_fn(bcx, store, &**decl, &**body, expr.id, dest) closure::trans_expr_fn(bcx, store, &**decl, &**body, expr.id, dest)
} }
ast::ExprCall(ref f, ref args) => { ast::ExprCall(ref f, ref args) => {
@ -893,7 +893,7 @@ fn trans_def_dps_unadjusted<'a>(
_ => { _ => {
bcx.tcx().sess.span_bug(ref_expr.span, format!( bcx.tcx().sess.span_bug(ref_expr.span, format!(
"Non-DPS def {:?} referened by {}", "Non-DPS def {:?} referened by {}",
def, bcx.node_id_to_str(ref_expr.id)).as_slice()); def, bcx.node_id_to_string(ref_expr.id)).as_slice());
} }
} }
} }
@ -974,7 +974,7 @@ pub fn trans_local_var<'a>(bcx: &'a Block<'a>,
} }
}; };
debug!("take_local(nid={:?}, v={}, ty={})", debug!("take_local(nid={:?}, v={}, ty={})",
nid, bcx.val_to_str(datum.val), bcx.ty_to_str(datum.ty)); nid, bcx.val_to_string(datum.val), bcx.ty_to_string(datum.ty));
datum datum
} }
} }
@ -1462,13 +1462,13 @@ fn trans_binary<'a>(bcx: &'a Block<'a>,
debug!("trans_binary (expr {}): lhs_datum={}", debug!("trans_binary (expr {}): lhs_datum={}",
expr.id, expr.id,
lhs_datum.to_str(ccx)); lhs_datum.to_string(ccx));
let lhs_ty = lhs_datum.ty; let lhs_ty = lhs_datum.ty;
let lhs = lhs_datum.to_llscalarish(bcx); let lhs = lhs_datum.to_llscalarish(bcx);
debug!("trans_binary (expr {}): rhs_datum={}", debug!("trans_binary (expr {}): rhs_datum={}",
expr.id, expr.id,
rhs_datum.to_str(ccx)); rhs_datum.to_string(ccx));
let rhs_ty = rhs_datum.ty; let rhs_ty = rhs_datum.ty;
let rhs = rhs_datum.to_llscalarish(bcx); let rhs = rhs_datum.to_llscalarish(bcx);
trans_eager_binop(bcx, expr, binop_ty, op, trans_eager_binop(bcx, expr, binop_ty, op,
@ -1729,7 +1729,7 @@ fn trans_assign_op<'a>(
let _icx = push_ctxt("trans_assign_op"); let _icx = push_ctxt("trans_assign_op");
let mut bcx = bcx; let mut bcx = bcx;
debug!("trans_assign_op(expr={})", bcx.expr_to_str(expr)); debug!("trans_assign_op(expr={})", bcx.expr_to_string(expr));
// User-defined operator methods cannot be used with `+=` etc right now // User-defined operator methods cannot be used with `+=` etc right now
assert!(!bcx.tcx().method_map.borrow().contains_key(&MethodCall::expr(expr.id))); assert!(!bcx.tcx().method_map.borrow().contains_key(&MethodCall::expr(expr.id)));
@ -1799,7 +1799,7 @@ fn deref_once<'a>(bcx: &'a Block<'a>,
debug!("deref_once(expr={}, datum={}, method_call={})", debug!("deref_once(expr={}, datum={}, method_call={})",
expr.repr(bcx.tcx()), expr.repr(bcx.tcx()),
datum.to_str(ccx), datum.to_string(ccx),
method_call); method_call);
let mut bcx = bcx; let mut bcx = bcx;
@ -1877,7 +1877,7 @@ fn deref_once<'a>(bcx: &'a Block<'a>,
}; };
debug!("deref_once(expr={}, method_call={}, result={})", debug!("deref_once(expr={}, method_call={}, result={})",
expr.id, method_call, r.datum.to_str(ccx)); expr.id, method_call, r.datum.to_string(ccx));
return r; return r;


@ -266,8 +266,8 @@ pub fn trans_native_call<'a>(
llfn={}, \ llfn={}, \
llretptr={})", llretptr={})",
callee_ty.repr(tcx), callee_ty.repr(tcx),
ccx.tn.val_to_str(llfn), ccx.tn.val_to_string(llfn),
ccx.tn.val_to_str(llretptr)); ccx.tn.val_to_string(llretptr));
let (fn_abi, fn_sig) = match ty::get(callee_ty).sty { let (fn_abi, fn_sig) = match ty::get(callee_ty).sty {
ty::ty_bare_fn(ref fn_ty) => (fn_ty.abi, fn_ty.sig.clone()), ty::ty_bare_fn(ref fn_ty) => (fn_ty.abi, fn_ty.sig.clone()),
@ -314,9 +314,9 @@ pub fn trans_native_call<'a>(
debug!("argument {}, llarg_rust={}, rust_indirect={}, arg_ty={}", debug!("argument {}, llarg_rust={}, rust_indirect={}, arg_ty={}",
i, i,
ccx.tn.val_to_str(llarg_rust), ccx.tn.val_to_string(llarg_rust),
rust_indirect, rust_indirect,
ccx.tn.type_to_str(arg_tys[i].ty)); ccx.tn.type_to_string(arg_tys[i].ty));
// Ensure that we always have the Rust value indirectly, // Ensure that we always have the Rust value indirectly,
// because it makes bitcasting easier. // because it makes bitcasting easier.
@ -330,7 +330,7 @@ pub fn trans_native_call<'a>(
} }
debug!("llarg_rust={} (after indirection)", debug!("llarg_rust={} (after indirection)",
ccx.tn.val_to_str(llarg_rust)); ccx.tn.val_to_string(llarg_rust));
// Check whether we need to do any casting // Check whether we need to do any casting
match arg_tys[i].cast { match arg_tys[i].cast {
@ -339,7 +339,7 @@ pub fn trans_native_call<'a>(
} }
debug!("llarg_rust={} (after casting)", debug!("llarg_rust={} (after casting)",
ccx.tn.val_to_str(llarg_rust)); ccx.tn.val_to_string(llarg_rust));
// Finally, load the value if needed for the foreign ABI // Finally, load the value if needed for the foreign ABI
let foreign_indirect = arg_tys[i].is_indirect(); let foreign_indirect = arg_tys[i].is_indirect();
@ -355,7 +355,7 @@ pub fn trans_native_call<'a>(
}; };
debug!("argument {}, llarg_foreign={}", debug!("argument {}, llarg_foreign={}",
i, ccx.tn.val_to_str(llarg_foreign)); i, ccx.tn.val_to_string(llarg_foreign));
// fill padding with undef value // fill padding with undef value
match arg_tys[i].pad { match arg_tys[i].pad {
@ -430,10 +430,10 @@ pub fn trans_native_call<'a>(
None => fn_type.ret_ty.ty None => fn_type.ret_ty.ty
}; };
debug!("llretptr={}", ccx.tn.val_to_str(llretptr)); debug!("llretptr={}", ccx.tn.val_to_string(llretptr));
debug!("llforeign_retval={}", ccx.tn.val_to_str(llforeign_retval)); debug!("llforeign_retval={}", ccx.tn.val_to_string(llforeign_retval));
debug!("llrust_ret_ty={}", ccx.tn.type_to_str(llrust_ret_ty)); debug!("llrust_ret_ty={}", ccx.tn.type_to_string(llrust_ret_ty));
debug!("llforeign_ret_ty={}", ccx.tn.type_to_str(llforeign_ret_ty)); debug!("llforeign_ret_ty={}", ccx.tn.type_to_string(llforeign_ret_ty));
if llrust_ret_ty == llforeign_ret_ty { if llrust_ret_ty == llforeign_ret_ty {
base::store_ty(bcx, llforeign_retval, llretptr, fn_sig.output) base::store_ty(bcx, llforeign_retval, llretptr, fn_sig.output)
@ -538,7 +538,7 @@ pub fn register_rust_fn_with_foreign_abi(ccx: &CrateContext,
let llfn = base::register_fn_llvmty(ccx, sp, sym, node_id, cconv, llfn_ty); let llfn = base::register_fn_llvmty(ccx, sp, sym, node_id, cconv, llfn_ty);
add_argument_attributes(&tys, llfn); add_argument_attributes(&tys, llfn);
debug!("register_rust_fn_with_foreign_abi(node_id={:?}, llfn_ty={}, llfn={})", debug!("register_rust_fn_with_foreign_abi(node_id={:?}, llfn_ty={}, llfn={})",
node_id, ccx.tn.type_to_str(llfn_ty), ccx.tn.val_to_str(llfn)); node_id, ccx.tn.type_to_string(llfn_ty), ccx.tn.val_to_string(llfn));
llfn llfn
} }
@ -583,13 +583,13 @@ pub fn trans_rust_fn_with_foreign_abi(ccx: &CrateContext,
_ => { _ => {
ccx.sess().bug(format!("build_rust_fn: extern fn {} has ty {}, \ ccx.sess().bug(format!("build_rust_fn: extern fn {} has ty {}, \
expected a bare fn ty", expected a bare fn ty",
ccx.tcx.map.path_to_str(id), ccx.tcx.map.path_to_string(id),
t.repr(tcx)).as_slice()); t.repr(tcx)).as_slice());
} }
}; };
debug!("build_rust_fn: path={} id={} t={}", debug!("build_rust_fn: path={} id={} t={}",
ccx.tcx.map.path_to_str(id), ccx.tcx.map.path_to_string(id),
id, t.repr(tcx)); id, t.repr(tcx));
let llfn = base::decl_internal_rust_fn(ccx, t, ps.as_slice()); let llfn = base::decl_internal_rust_fn(ccx, t, ps.as_slice());
@ -610,8 +610,8 @@ pub fn trans_rust_fn_with_foreign_abi(ccx: &CrateContext,
let t = ty::node_id_to_type(tcx, id); let t = ty::node_id_to_type(tcx, id);
debug!("build_wrap_fn(llrustfn={}, llwrapfn={}, t={})", debug!("build_wrap_fn(llrustfn={}, llwrapfn={}, t={})",
ccx.tn.val_to_str(llrustfn), ccx.tn.val_to_string(llrustfn),
ccx.tn.val_to_str(llwrapfn), ccx.tn.val_to_string(llwrapfn),
t.repr(ccx.tcx())); t.repr(ccx.tcx()));
// Avoid all the Rust generation stuff and just generate raw // Avoid all the Rust generation stuff and just generate raw
@ -668,11 +668,11 @@ pub fn trans_rust_fn_with_foreign_abi(ccx: &CrateContext,
match foreign_outptr { match foreign_outptr {
Some(llforeign_outptr) => { Some(llforeign_outptr) => {
debug!("out pointer, foreign={}", debug!("out pointer, foreign={}",
ccx.tn.val_to_str(llforeign_outptr)); ccx.tn.val_to_string(llforeign_outptr));
let llrust_retptr = let llrust_retptr =
builder.bitcast(llforeign_outptr, llrust_ret_ty.ptr_to()); builder.bitcast(llforeign_outptr, llrust_ret_ty.ptr_to());
debug!("out pointer, foreign={} (casted)", debug!("out pointer, foreign={} (casted)",
ccx.tn.val_to_str(llrust_retptr)); ccx.tn.val_to_string(llrust_retptr));
llrust_args.push(llrust_retptr); llrust_args.push(llrust_retptr);
return_alloca = None; return_alloca = None;
} }
@ -683,8 +683,8 @@ pub fn trans_rust_fn_with_foreign_abi(ccx: &CrateContext,
allocad={}, \ allocad={}, \
llrust_ret_ty={}, \ llrust_ret_ty={}, \
return_ty={}", return_ty={}",
ccx.tn.val_to_str(slot), ccx.tn.val_to_string(slot),
ccx.tn.type_to_str(llrust_ret_ty), ccx.tn.type_to_string(llrust_ret_ty),
tys.fn_sig.output.repr(tcx)); tys.fn_sig.output.repr(tcx));
llrust_args.push(slot); llrust_args.push(slot);
return_alloca = Some(slot); return_alloca = Some(slot);
@ -712,7 +712,7 @@ pub fn trans_rust_fn_with_foreign_abi(ccx: &CrateContext,
let mut llforeign_arg = llvm::LLVMGetParam(llwrapfn, foreign_index); let mut llforeign_arg = llvm::LLVMGetParam(llwrapfn, foreign_index);
debug!("llforeign_arg {}{}: {}", "#", debug!("llforeign_arg {}{}: {}", "#",
i, ccx.tn.val_to_str(llforeign_arg)); i, ccx.tn.val_to_string(llforeign_arg));
debug!("rust_indirect = {}, foreign_indirect = {}", debug!("rust_indirect = {}, foreign_indirect = {}",
rust_indirect, foreign_indirect); rust_indirect, foreign_indirect);
@ -751,12 +751,12 @@ pub fn trans_rust_fn_with_foreign_abi(ccx: &CrateContext,
}; };
debug!("llrust_arg {}{}: {}", "#", debug!("llrust_arg {}{}: {}", "#",
i, ccx.tn.val_to_str(llrust_arg)); i, ccx.tn.val_to_string(llrust_arg));
llrust_args.push(llrust_arg); llrust_args.push(llrust_arg);
} }
// Perform the call itself // Perform the call itself
debug!("calling llrustfn = {}, t = {}", ccx.tn.val_to_str(llrustfn), t.repr(ccx.tcx())); debug!("calling llrustfn = {}, t = {}", ccx.tn.val_to_string(llrustfn), t.repr(ccx.tcx()));
let attributes = base::get_fn_llvm_attributes(ccx, t); let attributes = base::get_fn_llvm_attributes(ccx, t);
let llrust_ret_val = builder.call(llrustfn, llrust_args.as_slice(), attributes.as_slice()); let llrust_ret_val = builder.call(llrustfn, llrust_args.as_slice(), attributes.as_slice());
@ -876,9 +876,9 @@ fn foreign_types_for_fn_ty(ccx: &CrateContext,
ret_def={}", ret_def={}",
ty.repr(ccx.tcx()), ty.repr(ccx.tcx()),
ccx.tn.types_to_str(llsig.llarg_tys.as_slice()), ccx.tn.types_to_str(llsig.llarg_tys.as_slice()),
ccx.tn.type_to_str(llsig.llret_ty), ccx.tn.type_to_string(llsig.llret_ty),
ccx.tn.types_to_str(fn_ty.arg_tys.iter().map(|t| t.ty).collect::<Vec<_>>().as_slice()), ccx.tn.types_to_str(fn_ty.arg_tys.iter().map(|t| t.ty).collect::<Vec<_>>().as_slice()),
ccx.tn.type_to_str(fn_ty.ret_ty.ty), ccx.tn.type_to_string(fn_ty.ret_ty.ty),
ret_def); ret_def);
ForeignTypes { ForeignTypes {


@ -167,11 +167,11 @@ pub fn lazily_emit_visit_glue(ccx: &CrateContext, ti: &tydesc_info) -> ValueRef
match ti.visit_glue.get() { match ti.visit_glue.get() {
Some(visit_glue) => visit_glue, Some(visit_glue) => visit_glue,
None => { None => {
debug!("+++ lazily_emit_tydesc_glue VISIT {}", ppaux::ty_to_str(ccx.tcx(), ti.ty)); debug!("+++ lazily_emit_tydesc_glue VISIT {}", ppaux::ty_to_string(ccx.tcx(), ti.ty));
let glue_fn = declare_generic_glue(ccx, ti.ty, llfnty, "visit"); let glue_fn = declare_generic_glue(ccx, ti.ty, llfnty, "visit");
ti.visit_glue.set(Some(glue_fn)); ti.visit_glue.set(Some(glue_fn));
make_generic_glue(ccx, ti.ty, glue_fn, make_visit_glue, "visit"); make_generic_glue(ccx, ti.ty, glue_fn, make_visit_glue, "visit");
debug!("--- lazily_emit_tydesc_glue VISIT {}", ppaux::ty_to_str(ccx.tcx(), ti.ty)); debug!("--- lazily_emit_tydesc_glue VISIT {}", ppaux::ty_to_string(ccx.tcx(), ti.ty));
glue_fn glue_fn
} }
} }
@ -432,13 +432,13 @@ pub fn declare_tydesc(ccx: &CrateContext, t: ty::t) -> tydesc_info {
if ccx.sess().count_type_sizes() { if ccx.sess().count_type_sizes() {
println!("{}\t{}", llsize_of_real(ccx, llty), println!("{}\t{}", llsize_of_real(ccx, llty),
ppaux::ty_to_str(ccx.tcx(), t)); ppaux::ty_to_string(ccx.tcx(), t));
} }
let llsize = llsize_of(ccx, llty); let llsize = llsize_of(ccx, llty);
let llalign = llalign_of(ccx, llty); let llalign = llalign_of(ccx, llty);
let name = mangle_internal_name_by_type_and_seq(ccx, t, "tydesc"); let name = mangle_internal_name_by_type_and_seq(ccx, t, "tydesc");
debug!("+++ declare_tydesc {} {}", ppaux::ty_to_str(ccx.tcx(), t), name); debug!("+++ declare_tydesc {} {}", ppaux::ty_to_string(ccx.tcx(), t), name);
let gvar = name.as_slice().with_c_str(|buf| { let gvar = name.as_slice().with_c_str(|buf| {
unsafe { unsafe {
llvm::LLVMAddGlobal(ccx.llmod, ccx.tydesc_type().to_ref(), buf) llvm::LLVMAddGlobal(ccx.llmod, ccx.tydesc_type().to_ref(), buf)
@ -447,10 +447,10 @@ pub fn declare_tydesc(ccx: &CrateContext, t: ty::t) -> tydesc_info {
note_unique_llvm_symbol(ccx, name); note_unique_llvm_symbol(ccx, name);
let ty_name = token::intern_and_get_ident( let ty_name = token::intern_and_get_ident(
ppaux::ty_to_str(ccx.tcx(), t).as_slice()); ppaux::ty_to_string(ccx.tcx(), t).as_slice());
let ty_name = C_str_slice(ccx, ty_name); let ty_name = C_str_slice(ccx, ty_name);
debug!("--- declare_tydesc {}", ppaux::ty_to_str(ccx.tcx(), t)); debug!("--- declare_tydesc {}", ppaux::ty_to_string(ccx.tcx(), t));
tydesc_info { tydesc_info {
ty: t, ty: t,
tydesc: gvar, tydesc: gvar,
@ -468,7 +468,7 @@ fn declare_generic_glue(ccx: &CrateContext, t: ty::t, llfnty: Type,
ccx, ccx,
t, t,
format!("glue_{}", name).as_slice()); format!("glue_{}", name).as_slice());
debug!("{} is for type {}", fn_nm, ppaux::ty_to_str(ccx.tcx(), t)); debug!("{} is for type {}", fn_nm, ppaux::ty_to_string(ccx.tcx(), t));
let llfn = decl_cdecl_fn(ccx, fn_nm.as_slice(), llfnty, ty::mk_nil()); let llfn = decl_cdecl_fn(ccx, fn_nm.as_slice(), llfnty, ty::mk_nil());
note_unique_llvm_symbol(ccx, fn_nm); note_unique_llvm_symbol(ccx, fn_nm);
return llfn; return llfn;


@ -29,7 +29,7 @@ use middle::ty;
use syntax::ast; use syntax::ast;
use syntax::ast_map; use syntax::ast_map;
use syntax::parse::token; use syntax::parse::token;
use util::ppaux::ty_to_str; use util::ppaux::ty_to_string;
pub fn get_simple_intrinsic(ccx: &CrateContext, item: &ast::ForeignItem) -> Option<ValueRef> { pub fn get_simple_intrinsic(ccx: &CrateContext, item: &ast::ForeignItem) -> Option<ValueRef> {
let name = match token::get_ident(item.ident).get() { let name = match token::get_ident(item.ident).get() {
@ -398,10 +398,10 @@ pub fn trans_intrinsic(ccx: &CrateContext,
format!("transmute called on types with different sizes: \ format!("transmute called on types with different sizes: \
{} ({} bit{}) to \ {} ({} bit{}) to \
{} ({} bit{})", {} ({} bit{})",
ty_to_str(ccx.tcx(), in_type), ty_to_string(ccx.tcx(), in_type),
in_type_size, in_type_size,
if in_type_size == 1 {""} else {"s"}, if in_type_size == 1 {""} else {"s"},
ty_to_str(ccx.tcx(), out_type), ty_to_string(ccx.tcx(), out_type),
out_type_size, out_type_size,
if out_type_size == 1 {""} else {"s"}).as_slice()); if out_type_size == 1 {""} else {"s"}).as_slice());
} }
@ -587,14 +587,14 @@ pub fn check_intrinsics(ccx: &CrateContext) {
.span_err(transmute_restriction.span, .span_err(transmute_restriction.span,
format!("transmute called on types with different sizes: \ format!("transmute called on types with different sizes: \
{} ({} bit{}) to {} ({} bit{})", {} ({} bit{}) to {} ({} bit{})",
ty_to_str(ccx.tcx(), transmute_restriction.from), ty_to_string(ccx.tcx(), transmute_restriction.from),
from_type_size as uint, from_type_size as uint,
if from_type_size == 1 { if from_type_size == 1 {
"" ""
} else { } else {
"s" "s"
}, },
ty_to_str(ccx.tcx(), transmute_restriction.to), ty_to_string(ccx.tcx(), transmute_restriction.to),
to_type_size as uint, to_type_size as uint,
if to_type_size == 1 { if to_type_size == 1 {
"" ""


@ -25,13 +25,13 @@ impl<'a, T:LlvmRepr> LlvmRepr for &'a [T] {
impl LlvmRepr for Type { impl LlvmRepr for Type {
fn llrepr(&self, ccx: &CrateContext) -> String { fn llrepr(&self, ccx: &CrateContext) -> String {
ccx.tn.type_to_str(*self) ccx.tn.type_to_string(*self)
} }
} }
impl LlvmRepr for ValueRef { impl LlvmRepr for ValueRef {
fn llrepr(&self, ccx: &CrateContext) -> String { fn llrepr(&self, ccx: &CrateContext) -> String {
ccx.tn.val_to_str(*self) ccx.tn.val_to_string(*self)
} }
} }
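The helpers in this hunk (llrepr, and the val_to_string/type_to_string it calls) take a CrateContext argument, so they presumably cannot ride on the std ToString trait, whose method takes no extra arguments; the rename only aligns their names with the to_string convention. A small sketch contrasting the two shapes, with every name below hypothetical:

```rust
use std::fmt;

// A rendering context and a trait whose helper needs that context,
// so it cannot be ToString (which takes no extra arguments).
struct RenderCtxt {
    verbose: bool,
}

trait CtxtRepr {
    fn repr_to_string(&self, cx: &RenderCtxt) -> String;
}

struct TypeRef(&'static str);

impl CtxtRepr for TypeRef {
    fn repr_to_string(&self, cx: &RenderCtxt) -> String {
        if cx.verbose {
            format!("type `{}` (verbose)", self.0)
        } else {
            self.0.to_string()
        }
    }
}

// Plain Display types still get ToString::to_string for free.
struct NodeId(u32);

impl fmt::Display for NodeId {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "node{}", self.0)
    }
}

fn main() {
    let cx = RenderCtxt { verbose: true };
    println!("{}", TypeRef("uint").repr_to_string(&cx));
    println!("{}", NodeId(3).to_string());
}
```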


@ -23,7 +23,7 @@ use middle::trans::meth;
use middle::trans::type_::Type; use middle::trans::type_::Type;
use middle::trans::type_of::*; use middle::trans::type_of::*;
use middle::ty; use middle::ty;
use util::ppaux::ty_to_str; use util::ppaux::ty_to_string;
use std::rc::Rc; use std::rc::Rc;
use arena::TypedArena; use arena::TypedArena;
@ -98,7 +98,7 @@ impl<'a, 'b> Reflector<'a, 'b> {
debug!("passing {} args:", args.len()); debug!("passing {} args:", args.len());
let mut bcx = self.bcx; let mut bcx = self.bcx;
for (i, a) in args.iter().enumerate() { for (i, a) in args.iter().enumerate() {
debug!("arg {}: {}", i, bcx.val_to_str(*a)); debug!("arg {}: {}", i, bcx.val_to_string(*a));
} }
let result = unpack_result!(bcx, callee::trans_call_inner( let result = unpack_result!(bcx, callee::trans_call_inner(
self.bcx, None, mth_ty, self.bcx, None, mth_ty,
@ -129,7 +129,7 @@ impl<'a, 'b> Reflector<'a, 'b> {
pub fn visit_ty(&mut self, t: ty::t) { pub fn visit_ty(&mut self, t: ty::t) {
let bcx = self.bcx; let bcx = self.bcx;
let tcx = bcx.tcx(); let tcx = bcx.tcx();
debug!("reflect::visit_ty {}", ty_to_str(bcx.tcx(), t)); debug!("reflect::visit_ty {}", ty_to_string(bcx.tcx(), t));
match ty::get(t).sty { match ty::get(t).sty {
ty::ty_bot => self.leaf("bot"), ty::ty_bot => self.leaf("bot"),
@ -175,7 +175,7 @@ impl<'a, 'b> Reflector<'a, 'b> {
ty::ty_trait(..) => { ty::ty_trait(..) => {
let extra = [ let extra = [
self.c_slice(token::intern_and_get_ident( self.c_slice(token::intern_and_get_ident(
ty_to_str(tcx, t).as_slice())) ty_to_string(tcx, t).as_slice()))
]; ];
self.visit("trait", extra); self.visit("trait", extra);
} }
@ -204,7 +204,7 @@ impl<'a, 'b> Reflector<'a, 'b> {
ty::ty_trait(..) => { ty::ty_trait(..) => {
let extra = [ let extra = [
self.c_slice(token::intern_and_get_ident( self.c_slice(token::intern_and_get_ident(
ty_to_str(tcx, t).as_slice())) ty_to_string(tcx, t).as_slice()))
]; ];
self.visit("trait", extra); self.visit("trait", extra);
} }
@ -269,7 +269,7 @@ impl<'a, 'b> Reflector<'a, 'b> {
let extra = (vec!( let extra = (vec!(
self.c_slice( self.c_slice(
token::intern_and_get_ident(ty_to_str(tcx, token::intern_and_get_ident(ty_to_string(tcx,
t).as_slice())), t).as_slice())),
self.c_bool(named_fields), self.c_bool(named_fields),
self.c_uint(fields.len()) self.c_uint(fields.len())


@ -29,7 +29,7 @@ use middle::trans::machine::{llsize_of, nonzero_llsize_of, llsize_of_alloc};
use middle::trans::type_::Type; use middle::trans::type_::Type;
use middle::trans::type_of; use middle::trans::type_of;
use middle::ty; use middle::ty;
use util::ppaux::ty_to_str; use util::ppaux::ty_to_string;
use syntax::ast; use syntax::ast;
use syntax::parse::token::InternedString; use syntax::parse::token::InternedString;
@ -73,12 +73,12 @@ pub struct VecTypes {
} }
impl VecTypes { impl VecTypes {
pub fn to_str(&self, ccx: &CrateContext) -> String { pub fn to_string(&self, ccx: &CrateContext) -> String {
format!("VecTypes {{unit_ty={}, llunit_ty={}, \ format!("VecTypes {{unit_ty={}, llunit_ty={}, \
llunit_size={}, llunit_alloc_size={}}}", llunit_size={}, llunit_alloc_size={}}}",
ty_to_str(ccx.tcx(), self.unit_ty), ty_to_string(ccx.tcx(), self.unit_ty),
ccx.tn.type_to_str(self.llunit_ty), ccx.tn.type_to_string(self.llunit_ty),
ccx.tn.val_to_str(self.llunit_size), ccx.tn.val_to_string(self.llunit_size),
self.llunit_alloc_size) self.llunit_alloc_size)
} }
} }
@ -97,7 +97,7 @@ pub fn trans_fixed_vstore<'a>(
// generate the content. // generate the content.
debug!("trans_fixed_vstore(vstore_expr={}, dest={:?})", debug!("trans_fixed_vstore(vstore_expr={}, dest={:?})",
bcx.expr_to_str(vstore_expr), dest.to_str(bcx.ccx())); bcx.expr_to_string(vstore_expr), dest.to_string(bcx.ccx()));
let vt = vec_types_from_expr(bcx, vstore_expr); let vt = vec_types_from_expr(bcx, vstore_expr);
@ -129,7 +129,7 @@ pub fn trans_slice_vstore<'a>(
let mut bcx = bcx; let mut bcx = bcx;
debug!("trans_slice_vstore(vstore_expr={}, dest={})", debug!("trans_slice_vstore(vstore_expr={}, dest={})",
bcx.expr_to_str(vstore_expr), dest.to_str(ccx)); bcx.expr_to_string(vstore_expr), dest.to_string(ccx));
// Handle the &"..." case: // Handle the &"..." case:
match content_expr.node { match content_expr.node {
@ -150,7 +150,7 @@ pub fn trans_slice_vstore<'a>(
// Handle the &[...] case: // Handle the &[...] case:
let vt = vec_types_from_expr(bcx, vstore_expr); let vt = vec_types_from_expr(bcx, vstore_expr);
let count = elements_required(bcx, content_expr); let count = elements_required(bcx, content_expr);
debug!("vt={}, count={:?}", vt.to_str(ccx), count); debug!("vt={}, count={:?}", vt.to_string(ccx), count);
let llcount = C_uint(ccx, count); let llcount = C_uint(ccx, count);
let llfixed; let llfixed;
@ -202,8 +202,8 @@ pub fn trans_lit_str<'a>(
*/ */
debug!("trans_lit_str(lit_expr={}, dest={})", debug!("trans_lit_str(lit_expr={}, dest={})",
bcx.expr_to_str(lit_expr), bcx.expr_to_string(lit_expr),
dest.to_str(bcx.ccx())); dest.to_string(bcx.ccx()));
match dest { match dest {
Ignore => bcx, Ignore => bcx,
@ -233,7 +233,7 @@ pub fn trans_uniq_vstore<'a>(bcx: &'a Block<'a>,
* the array elements into them. * the array elements into them.
*/ */
debug!("trans_uniq_vstore(vstore_expr={})", bcx.expr_to_str(vstore_expr)); debug!("trans_uniq_vstore(vstore_expr={})", bcx.expr_to_string(vstore_expr));
let fcx = bcx.fcx; let fcx = bcx.fcx;
let ccx = fcx.ccx; let ccx = fcx.ccx;
@ -297,7 +297,7 @@ pub fn trans_uniq_vstore<'a>(bcx: &'a Block<'a>,
let dataptr = get_dataptr(bcx, val); let dataptr = get_dataptr(bcx, val);
debug!("alloc_uniq_vec() returned val={}, dataptr={}", debug!("alloc_uniq_vec() returned val={}, dataptr={}",
bcx.val_to_str(val), bcx.val_to_str(dataptr)); bcx.val_to_string(val), bcx.val_to_string(dataptr));
let bcx = write_content(bcx, &vt, vstore_expr, let bcx = write_content(bcx, &vt, vstore_expr,
content_expr, SaveIn(dataptr)); content_expr, SaveIn(dataptr));
@ -319,9 +319,9 @@ pub fn write_content<'a>(
let mut bcx = bcx; let mut bcx = bcx;
debug!("write_content(vt={}, dest={}, vstore_expr={:?})", debug!("write_content(vt={}, dest={}, vstore_expr={:?})",
vt.to_str(bcx.ccx()), vt.to_string(bcx.ccx()),
dest.to_str(bcx.ccx()), dest.to_string(bcx.ccx()),
bcx.expr_to_str(vstore_expr)); bcx.expr_to_string(vstore_expr));
match content_expr.node { match content_expr.node {
ast::ExprLit(lit) => { ast::ExprLit(lit) => {
@ -361,7 +361,7 @@ pub fn write_content<'a>(
for (i, element) in elements.iter().enumerate() { for (i, element) in elements.iter().enumerate() {
let lleltptr = GEPi(bcx, lldest, [i]); let lleltptr = GEPi(bcx, lldest, [i]);
debug!("writing index {:?} with lleltptr={:?}", debug!("writing index {:?} with lleltptr={:?}",
i, bcx.val_to_str(lleltptr)); i, bcx.val_to_string(lleltptr));
bcx = expr::trans_into(bcx, &**element, bcx = expr::trans_into(bcx, &**element,
SaveIn(lleltptr)); SaveIn(lleltptr));
fcx.schedule_drop_mem( fcx.schedule_drop_mem(


@ -199,7 +199,7 @@ pub fn type_of(cx: &CrateContext, t: ty::t) -> Type {
t, t,
t_norm.repr(cx.tcx()), t_norm.repr(cx.tcx()),
t_norm, t_norm,
cx.tn.type_to_str(llty)); cx.tn.type_to_string(llty));
cx.lltypes.borrow_mut().insert(t, llty); cx.lltypes.borrow_mut().insert(t, llty);
return llty; return llty;
} }
@ -291,7 +291,7 @@ pub fn type_of(cx: &CrateContext, t: ty::t) -> Type {
debug!("--> mapped t={} {:?} to llty={}", debug!("--> mapped t={} {:?} to llty={}",
t.repr(cx.tcx()), t.repr(cx.tcx()),
t, t,
cx.tn.type_to_str(llty)); cx.tn.type_to_string(llty));
cx.lltypes.borrow_mut().insert(t, llty); cx.lltypes.borrow_mut().insert(t, llty);


@ -32,8 +32,8 @@ use middle::typeck::MethodCall;
use middle::ty_fold; use middle::ty_fold;
use middle::ty_fold::{TypeFoldable,TypeFolder}; use middle::ty_fold::{TypeFoldable,TypeFolder};
use middle; use middle;
use util::ppaux::{note_and_explain_region, bound_region_ptr_to_str}; use util::ppaux::{note_and_explain_region, bound_region_ptr_to_string};
use util::ppaux::{trait_store_to_str, ty_to_str}; use util::ppaux::{trait_store_to_string, ty_to_string};
use util::ppaux::{Repr, UserString}; use util::ppaux::{Repr, UserString};
use util::common::{indenter}; use util::common::{indenter};
use util::nodemap::{NodeMap, NodeSet, DefIdMap, DefIdSet, FnvHashMap}; use util::nodemap::{NodeMap, NodeSet, DefIdMap, DefIdSet, FnvHashMap};
@ -2243,8 +2243,8 @@ pub fn is_instantiable(cx: &ctxt, r_ty: t) -> bool {
fn type_requires(cx: &ctxt, seen: &mut Vec<DefId>, fn type_requires(cx: &ctxt, seen: &mut Vec<DefId>,
r_ty: t, ty: t) -> bool { r_ty: t, ty: t) -> bool {
debug!("type_requires({}, {})?", debug!("type_requires({}, {})?",
::util::ppaux::ty_to_str(cx, r_ty), ::util::ppaux::ty_to_string(cx, r_ty),
::util::ppaux::ty_to_str(cx, ty)); ::util::ppaux::ty_to_string(cx, ty));
let r = { let r = {
get(r_ty).sty == get(ty).sty || get(r_ty).sty == get(ty).sty ||
@ -2252,8 +2252,8 @@ pub fn is_instantiable(cx: &ctxt, r_ty: t) -> bool {
}; };
debug!("type_requires({}, {})? {}", debug!("type_requires({}, {})? {}",
::util::ppaux::ty_to_str(cx, r_ty), ::util::ppaux::ty_to_string(cx, r_ty),
::util::ppaux::ty_to_str(cx, ty), ::util::ppaux::ty_to_string(cx, ty),
r); r);
return r; return r;
} }
@ -2261,8 +2261,8 @@ pub fn is_instantiable(cx: &ctxt, r_ty: t) -> bool {
fn subtypes_require(cx: &ctxt, seen: &mut Vec<DefId>, fn subtypes_require(cx: &ctxt, seen: &mut Vec<DefId>,
r_ty: t, ty: t) -> bool { r_ty: t, ty: t) -> bool {
debug!("subtypes_require({}, {})?", debug!("subtypes_require({}, {})?",
::util::ppaux::ty_to_str(cx, r_ty), ::util::ppaux::ty_to_string(cx, r_ty),
::util::ppaux::ty_to_str(cx, ty)); ::util::ppaux::ty_to_string(cx, ty));
let r = match get(ty).sty { let r = match get(ty).sty {
// fixed length vectors need special treatment compared to // fixed length vectors need special treatment compared to
@ -2337,8 +2337,8 @@ pub fn is_instantiable(cx: &ctxt, r_ty: t) -> bool {
}; };
debug!("subtypes_require({}, {})? {}", debug!("subtypes_require({}, {})? {}",
::util::ppaux::ty_to_str(cx, r_ty), ::util::ppaux::ty_to_string(cx, r_ty),
::util::ppaux::ty_to_str(cx, ty), ::util::ppaux::ty_to_string(cx, ty),
r); r);
return r; return r;
@ -2381,7 +2381,7 @@ pub fn is_type_representable(cx: &ctxt, sp: Span, ty: t) -> Representability {
fn type_structurally_recursive(cx: &ctxt, sp: Span, seen: &mut Vec<DefId>, fn type_structurally_recursive(cx: &ctxt, sp: Span, seen: &mut Vec<DefId>,
ty: t) -> Representability { ty: t) -> Representability {
debug!("type_structurally_recursive: {}", debug!("type_structurally_recursive: {}",
::util::ppaux::ty_to_str(cx, ty)); ::util::ppaux::ty_to_string(cx, ty));
// Compare current type to previously seen types // Compare current type to previously seen types
match get(ty).sty { match get(ty).sty {
@ -2441,7 +2441,7 @@ pub fn is_type_representable(cx: &ctxt, sp: Span, ty: t) -> Representability {
} }
debug!("is_type_representable: {}", debug!("is_type_representable: {}",
::util::ppaux::ty_to_str(cx, ty)); ::util::ppaux::ty_to_string(cx, ty));
// To avoid a stack overflow when checking an enum variant or struct that // To avoid a stack overflow when checking an enum variant or struct that
// contains a different, structurally recursive type, maintain a stack // contains a different, structurally recursive type, maintain a stack
@ -2595,7 +2595,7 @@ pub fn node_id_to_trait_ref(cx: &ctxt, id: ast::NodeId) -> Rc<ty::TraitRef> {
Some(t) => t.clone(), Some(t) => t.clone(),
None => cx.sess.bug( None => cx.sess.bug(
format!("node_id_to_trait_ref: no trait ref for node `{}`", format!("node_id_to_trait_ref: no trait ref for node `{}`",
cx.map.node_to_str(id)).as_slice()) cx.map.node_to_string(id)).as_slice())
} }
} }
@ -2608,7 +2608,7 @@ pub fn node_id_to_type(cx: &ctxt, id: ast::NodeId) -> t {
Some(t) => t, Some(t) => t,
None => cx.sess.bug( None => cx.sess.bug(
format!("node_id_to_type: no type for node `{}`", format!("node_id_to_type: no type for node `{}`",
cx.map.node_to_str(id)).as_slice()) cx.map.node_to_string(id)).as_slice())
} }
} }
@ -2842,7 +2842,7 @@ pub fn adjust_ty(cx: &ctxt,
format!("the {}th autoderef failed: \ format!("the {}th autoderef failed: \
{}", {}",
i, i,
ty_to_str(cx, adjusted_ty)) ty_to_string(cx, adjusted_ty))
.as_slice()); .as_slice());
} }
} }
@ -3220,11 +3220,11 @@ pub fn param_tys_in_type(ty: t) -> Vec<ParamTy> {
rslt rslt
} }
pub fn ty_sort_str(cx: &ctxt, t: t) -> String { pub fn ty_sort_string(cx: &ctxt, t: t) -> String {
match get(t).sty { match get(t).sty {
ty_nil | ty_bot | ty_bool | ty_char | ty_int(_) | ty_nil | ty_bot | ty_bool | ty_char | ty_int(_) |
ty_uint(_) | ty_float(_) | ty_str => { ty_uint(_) | ty_float(_) | ty_str => {
::util::ppaux::ty_to_str(cx, t) ::util::ppaux::ty_to_string(cx, t)
} }
ty_enum(id, _) => format!("enum {}", item_path_str(cx, id)), ty_enum(id, _) => format!("enum {}", item_path_str(cx, id)),
@ -3277,18 +3277,18 @@ pub fn type_err_to_str(cx: &ctxt, err: &type_err) -> String {
terr_mismatch => "types differ".to_string(), terr_mismatch => "types differ".to_string(),
terr_fn_style_mismatch(values) => { terr_fn_style_mismatch(values) => {
format!("expected {} fn but found {} fn", format!("expected {} fn but found {} fn",
values.expected.to_str(), values.expected.to_string(),
values.found.to_str()) values.found.to_string())
} }
terr_abi_mismatch(values) => { terr_abi_mismatch(values) => {
format!("expected {} fn but found {} fn", format!("expected {} fn but found {} fn",
values.expected.to_str(), values.expected.to_string(),
values.found.to_str()) values.found.to_string())
} }
terr_onceness_mismatch(values) => { terr_onceness_mismatch(values) => {
format!("expected {} fn but found {} fn", format!("expected {} fn but found {} fn",
values.expected.to_str(), values.expected.to_string(),
values.found.to_str()) values.found.to_string())
} }
terr_sigil_mismatch(values) => { terr_sigil_mismatch(values) => {
format!("expected {}, found {}", format!("expected {}, found {}",
@ -3344,22 +3344,22 @@ pub fn type_err_to_str(cx: &ctxt, err: &type_err) -> String {
terr_regions_insufficiently_polymorphic(br, _) => { terr_regions_insufficiently_polymorphic(br, _) => {
format!("expected bound lifetime parameter {}, \ format!("expected bound lifetime parameter {}, \
but found concrete lifetime", but found concrete lifetime",
bound_region_ptr_to_str(cx, br)) bound_region_ptr_to_string(cx, br))
} }
terr_regions_overly_polymorphic(br, _) => { terr_regions_overly_polymorphic(br, _) => {
format!("expected concrete lifetime, \ format!("expected concrete lifetime, \
but found bound lifetime parameter {}", but found bound lifetime parameter {}",
bound_region_ptr_to_str(cx, br)) bound_region_ptr_to_string(cx, br))
} }
terr_trait_stores_differ(_, ref values) => { terr_trait_stores_differ(_, ref values) => {
format!("trait storage differs: expected `{}` but found `{}`", format!("trait storage differs: expected `{}` but found `{}`",
trait_store_to_str(cx, (*values).expected), trait_store_to_string(cx, (*values).expected),
trait_store_to_str(cx, (*values).found)) trait_store_to_string(cx, (*values).found))
} }
terr_sorts(values) => { terr_sorts(values) => {
format!("expected {} but found {}", format!("expected {} but found {}",
ty_sort_str(cx, values.expected), ty_sort_string(cx, values.expected),
ty_sort_str(cx, values.found)) ty_sort_string(cx, values.found))
} }
terr_traits(values) => { terr_traits(values) => {
format!("expected trait `{}` but found trait `{}`", format!("expected trait `{}` but found trait `{}`",
@ -3384,13 +3384,13 @@ pub fn type_err_to_str(cx: &ctxt, err: &type_err) -> String {
} }
terr_int_mismatch(ref values) => { terr_int_mismatch(ref values) => {
format!("expected `{}` but found `{}`", format!("expected `{}` but found `{}`",
values.expected.to_str(), values.expected.to_string(),
values.found.to_str()) values.found.to_string())
} }
terr_float_mismatch(ref values) => { terr_float_mismatch(ref values) => {
format!("expected `{}` but found `{}`", format!("expected `{}` but found `{}`",
values.expected.to_str(), values.expected.to_string(),
values.found.to_str()) values.found.to_string())
} }
terr_variadic_mismatch(ref values) => { terr_variadic_mismatch(ref values) => {
format!("expected {} fn but found {} function", format!("expected {} fn but found {} function",
@ -3701,7 +3701,7 @@ pub fn substd_enum_variants(cx: &ctxt,
} }
pub fn item_path_str(cx: &ctxt, id: ast::DefId) -> String { pub fn item_path_str(cx: &ctxt, id: ast::DefId) -> String {
with_path(cx, id, |path| ast_map::path_to_str(path)).to_string() with_path(cx, id, |path| ast_map::path_to_string(path)).to_string()
} }
pub enum DtorKind { pub enum DtorKind {
@ -3973,7 +3973,7 @@ fn each_super_struct(cx: &ctxt, mut did: ast::DefId, f: |ast::DefId|) {
None => { None => {
cx.sess.bug( cx.sess.bug(
format!("ID not mapped to super-struct: {}", format!("ID not mapped to super-struct: {}",
cx.map.node_to_str(did.node)).as_slice()); cx.map.node_to_string(did.node)).as_slice());
} }
} }
} }
@ -3995,7 +3995,7 @@ pub fn lookup_struct_fields(cx: &ctxt, did: ast::DefId) -> Vec<field_ty> {
_ => { _ => {
cx.sess.bug( cx.sess.bug(
format!("ID not mapped to struct fields: {}", format!("ID not mapped to struct fields: {}",
cx.map.node_to_str(did.node)).as_slice()); cx.map.node_to_string(did.node)).as_slice());
} }
} }
}); });
@ -4621,7 +4621,7 @@ pub fn hash_crate_independent(tcx: &ctxt, t: t, svh: &Svh) -> u64 {
} }
impl Variance { impl Variance {
pub fn to_str(self) -> &'static str { pub fn to_string(self) -> &'static str {
match self { match self {
Covariant => "+", Covariant => "+",
Contravariant => "-", Contravariant => "-",


@ -66,7 +66,7 @@ use syntax::abi;
use syntax::{ast, ast_util}; use syntax::{ast, ast_util};
use syntax::codemap::Span; use syntax::codemap::Span;
use syntax::owned_slice::OwnedSlice; use syntax::owned_slice::OwnedSlice;
use syntax::print::pprust::{lifetime_to_str, path_to_str}; use syntax::print::pprust::{lifetime_to_string, path_to_string};
pub trait AstConv { pub trait AstConv {
fn tcx<'a>(&'a self) -> &'a ty::ctxt; fn tcx<'a>(&'a self) -> &'a ty::ctxt;
@ -108,7 +108,7 @@ pub fn ast_region_to_region(tcx: &ty::ctxt, lifetime: &ast::Lifetime)
}; };
debug!("ast_region_to_region(lifetime={} id={}) yields {}", debug!("ast_region_to_region(lifetime={} id={}) yields {}",
lifetime_to_str(lifetime), lifetime_to_string(lifetime),
lifetime.id, r.repr(tcx)); lifetime.id, r.repr(tcx));
r r
@ -142,7 +142,7 @@ pub fn opt_ast_region_to_region<AC:AstConv,RS:RegionScope>(
}; };
debug!("opt_ast_region_to_region(opt_lifetime={:?}) yields {}", debug!("opt_ast_region_to_region(opt_lifetime={:?}) yields {}",
opt_lifetime.as_ref().map(|e| lifetime_to_str(e)), opt_lifetime.as_ref().map(|e| lifetime_to_string(e)),
r.repr(this.tcx())); r.repr(this.tcx()));
r r
@ -331,7 +331,7 @@ pub fn ast_ty_to_prim_ty(tcx: &ty::ctxt, ast_ty: &ast::Ty) -> Option<ty::t> {
None => { None => {
tcx.sess.span_bug(ast_ty.span, tcx.sess.span_bug(ast_ty.span,
format!("unbound path {}", format!("unbound path {}",
path_to_str(path)).as_slice()) path_to_string(path)).as_slice())
} }
Some(&d) => d Some(&d) => d
}; };
@ -394,7 +394,7 @@ pub fn ast_ty_to_builtin_ty<AC:AstConv,
.sess .sess
.span_bug(ast_ty.span, .span_bug(ast_ty.span,
format!("unbound path {}", format!("unbound path {}",
path_to_str(path)).as_slice()) path_to_string(path)).as_slice())
} }
Some(&d) => d Some(&d) => d
}; };
@ -793,7 +793,7 @@ pub fn ast_ty_to_ty<AC:AstConv, RS:RegionScope>(
tcx.sess tcx.sess
.span_bug(ast_ty.span, .span_bug(ast_ty.span,
format!("unbound path {}", format!("unbound path {}",
path_to_str(path)).as_slice()) path_to_string(path)).as_slice())
} }
Some(&d) => d Some(&d) => d
}; };
@ -808,7 +808,7 @@ pub fn ast_ty_to_ty<AC:AstConv, RS:RegionScope>(
} }
match a_def { match a_def {
def::DefTrait(_) => { def::DefTrait(_) => {
let path_str = path_to_str(path); let path_str = path_to_string(path);
tcx.sess.span_err( tcx.sess.span_err(
ast_ty.span, ast_ty.span,
format!("reference to trait `{name}` where a \ format!("reference to trait `{name}` where a \
@ -835,7 +835,7 @@ pub fn ast_ty_to_ty<AC:AstConv, RS:RegionScope>(
def::DefMod(id) => { def::DefMod(id) => {
tcx.sess.span_fatal(ast_ty.span, tcx.sess.span_fatal(ast_ty.span,
format!("found module name used as a type: {}", format!("found module name used as a type: {}",
tcx.map.node_to_str(id.node)).as_slice()); tcx.map.node_to_string(id.node)).as_slice());
} }
def::DefPrimTy(_) => { def::DefPrimTy(_) => {
fail!("DefPrimTy arm missed in previous ast_ty_to_prim_ty call"); fail!("DefPrimTy arm missed in previous ast_ty_to_prim_ty call");

View file

@ -399,11 +399,11 @@ pub fn check_struct_like_enum_variant_pat(pcx: &pat_ctxt,
variant_id, substitutions, etc); variant_id, substitutions, etc);
} }
Some(&def::DefStruct(..)) | Some(&def::DefVariant(..)) => { Some(&def::DefStruct(..)) | Some(&def::DefVariant(..)) => {
let name = pprust::path_to_str(path); let name = pprust::path_to_string(path);
tcx.sess.span_err(span, tcx.sess.span_err(span,
format!("mismatched types: expected `{}` but \ format!("mismatched types: expected `{}` but \
found `{}`", found `{}`",
fcx.infcx().ty_to_str(expected), fcx.infcx().ty_to_string(expected),
name).as_slice()); name).as_slice());
} }
_ => { _ => {
@ -525,9 +525,9 @@ pub fn check_pat(pcx: &pat_ctxt, pat: &ast::Pat, expected: ty::t) {
.span_err(path.span, .span_err(path.span,
format!("`{}` does not name the \ format!("`{}` does not name the \
structure `{}`", structure `{}`",
pprust::path_to_str(path), pprust::path_to_string(path),
fcx.infcx() fcx.infcx()
.ty_to_str(expected)).as_slice()) .ty_to_string(expected)).as_slice())
} }
check_struct_pat(pcx, pat.id, pat.span, expected, path, check_struct_pat(pcx, pat.id, pat.span, expected, path,
@ -747,7 +747,7 @@ fn check_pointer_pat(pcx: &pat_ctxt,
tcx.sess.span_err( tcx.sess.span_err(
span, span,
format!("type `{}` cannot be dereferenced", format!("type `{}` cannot be dereferenced",
fcx.infcx().ty_to_str(expected)).as_slice()); fcx.infcx().ty_to_string(expected)).as_slice());
fcx.write_error(pat_id); fcx.write_error(pat_id);
} }
_ => { _ => {

View file

@ -30,14 +30,14 @@ itself (note that inherent impls can only be defined in the same
module as the type itself). module as the type itself).
Inherent candidates are not always derived from impls. If you have a Inherent candidates are not always derived from impls. If you have a
trait instance, such as a value of type `Box<ToStr>`, then the trait trait instance, such as a value of type `Box<ToString>`, then the trait
methods (`to_str()`, in this case) are inherently associated with it. methods (`to_string()`, in this case) are inherently associated with it.
Another case is type parameters, in which case the methods of their Another case is type parameters, in which case the methods of their
bounds are inherent. bounds are inherent.
Extension candidates are derived from imported traits. If I have the Extension candidates are derived from imported traits. If I have the
trait `ToStr` imported, and I call `to_str()` on a value of type `T`, trait `ToString` imported, and I call `to_string()` on a value of type `T`,
then we will go off to find out whether there is an impl of `ToStr` then we will go off to find out whether there is an impl of `ToString`
for `T`. These kinds of method calls are called "extension methods". for `T`. These kinds of method calls are called "extension methods".
They can be defined in any module, not only the one that defined `T`. They can be defined in any module, not only the one that defined `T`.
Furthermore, you must import the trait to call such a method. Furthermore, you must import the trait to call such a method.
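The doc comment above distinguishes inherent candidates (methods that come with a trait object or with a type parameter's bounds) from extension candidates (methods that only become callable once their defining trait is imported). A minimal, self-contained sketch of that distinction, written in present-day Rust with made-up trait and type names (`Describe`, `Point`, `Double` are illustrative, not part of the compiler):

// Inherent-style candidate: the method travels with the trait object itself.
trait Describe {
    fn describe(&self) -> String;
}

struct Point { x: i32, y: i32 }

impl Describe for Point {
    fn describe(&self) -> String {
        format!("({}, {})", self.x, self.y)
    }
}

// Extension candidate: `Double` stands in for an imported trait; code in another
// module must `use` it before `doubled()` will resolve on an i32.
trait Double {
    fn doubled(&self) -> Self;
}

impl Double for i32 {
    fn doubled(&self) -> i32 { self * 2 }
}

fn main() {
    // Method found on the trait object itself (inherent candidate).
    let obj: Box<dyn Describe> = Box::new(Point { x: 1, y: 2 });
    println!("{}", obj.describe());

    // Method found only because the trait is in scope (extension candidate).
    println!("{}", 3i32.doubled());
}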
@ -376,7 +376,7 @@ impl<'a> LookupContext<'a> {
autoderefs: uint) autoderefs: uint)
-> Option<Option<MethodCallee>> { -> Option<Option<MethodCallee>> {
debug!("search_step: self_ty={} autoderefs={}", debug!("search_step: self_ty={} autoderefs={}",
self.ty_to_str(self_ty), autoderefs); self.ty_to_string(self_ty), autoderefs);
match self.deref_args { match self.deref_args {
check::DontDerefArgs => { check::DontDerefArgs => {
@ -508,7 +508,7 @@ impl<'a> LookupContext<'a> {
did: DefId, did: DefId,
substs: &subst::Substs) { substs: &subst::Substs) {
debug!("push_inherent_candidates_from_object(did={}, substs={})", debug!("push_inherent_candidates_from_object(did={}, substs={})",
self.did_to_str(did), self.did_to_string(did),
substs.repr(self.tcx())); substs.repr(self.tcx()));
let _indenter = indenter(); let _indenter = indenter();
let tcx = self.tcx(); let tcx = self.tcx();
@ -733,7 +733,7 @@ impl<'a> LookupContext<'a> {
None => None, None => None,
Some(method) => { Some(method) => {
debug!("(searching for autoderef'd method) writing \ debug!("(searching for autoderef'd method) writing \
adjustment {:?} for {}", adjustment, self.ty_to_str( self_ty)); adjustment {:?} for {}", adjustment, self.ty_to_string( self_ty));
match adjustment { match adjustment {
Some((self_expr_id, adj)) => { Some((self_expr_id, adj)) => {
self.fcx.write_adjustment(self_expr_id, adj); self.fcx.write_adjustment(self_expr_id, adj);
@ -809,7 +809,7 @@ impl<'a> LookupContext<'a> {
fn auto_slice_vec(&self, mt: ty::mt, autoderefs: uint) -> Option<MethodCallee> { fn auto_slice_vec(&self, mt: ty::mt, autoderefs: uint) -> Option<MethodCallee> {
let tcx = self.tcx(); let tcx = self.tcx();
debug!("auto_slice_vec {}", ppaux::ty_to_str(tcx, mt.ty)); debug!("auto_slice_vec {}", ppaux::ty_to_string(tcx, mt.ty));
// First try to borrow to a slice // First try to borrow to a slice
let entry = self.search_for_some_kind_of_autorefd_method( let entry = self.search_for_some_kind_of_autorefd_method(
@ -886,7 +886,7 @@ impl<'a> LookupContext<'a> {
* `~[]` to `&[]`. * `~[]` to `&[]`.
*/ */
debug!("search_for_autosliced_method {}", ppaux::ty_to_str(self.tcx(), self_ty)); debug!("search_for_autosliced_method {}", ppaux::ty_to_string(self.tcx(), self_ty));
let sty = ty::get(self_ty).sty.clone(); let sty = ty::get(self_ty).sty.clone();
match sty { match sty {
@ -939,7 +939,7 @@ impl<'a> LookupContext<'a> {
ty_infer(TyVar(_)) => { ty_infer(TyVar(_)) => {
self.bug(format!("unexpected type: {}", self.bug(format!("unexpected type: {}",
self.ty_to_str(self_ty)).as_slice()); self.ty_to_string(self_ty)).as_slice());
} }
} }
} }
@ -993,7 +993,7 @@ impl<'a> LookupContext<'a> {
} }
fn search_for_method(&self, rcvr_ty: ty::t) -> Option<MethodCallee> { fn search_for_method(&self, rcvr_ty: ty::t) -> Option<MethodCallee> {
debug!("search_for_method(rcvr_ty={})", self.ty_to_str(rcvr_ty)); debug!("search_for_method(rcvr_ty={})", self.ty_to_string(rcvr_ty));
let _indenter = indenter(); let _indenter = indenter();
// I am not sure that inherent methods should have higher // I am not sure that inherent methods should have higher
@ -1094,7 +1094,7 @@ impl<'a> LookupContext<'a> {
let tcx = self.tcx(); let tcx = self.tcx();
debug!("confirm_candidate(rcvr_ty={}, candidate={})", debug!("confirm_candidate(rcvr_ty={}, candidate={})",
self.ty_to_str(rcvr_ty), self.ty_to_string(rcvr_ty),
candidate.repr(self.tcx())); candidate.repr(self.tcx()));
self.enforce_object_limitations(candidate); self.enforce_object_limitations(candidate);
@ -1177,7 +1177,7 @@ impl<'a> LookupContext<'a> {
fn_style: bare_fn_ty.fn_style, fn_style: bare_fn_ty.fn_style,
abi: bare_fn_ty.abi.clone(), abi: bare_fn_ty.abi.clone(),
}); });
debug!("after replacing bound regions, fty={}", self.ty_to_str(fty)); debug!("after replacing bound regions, fty={}", self.ty_to_string(fty));
// Before, we only checked whether self_ty could be a subtype // Before, we only checked whether self_ty could be a subtype
// of rcvr_ty; now we actually make it so (this may cause // of rcvr_ty; now we actually make it so (this may cause
@ -1191,8 +1191,8 @@ impl<'a> LookupContext<'a> {
Err(_) => { Err(_) => {
self.bug(format!( self.bug(format!(
"{} was a subtype of {} but now is not?", "{} was a subtype of {} but now is not?",
self.ty_to_str(rcvr_ty), self.ty_to_string(rcvr_ty),
self.ty_to_str(transformed_self_ty)).as_slice()); self.ty_to_string(transformed_self_ty)).as_slice());
} }
} }
@ -1288,7 +1288,7 @@ impl<'a> LookupContext<'a> {
// candidate method's `self_ty`. // candidate method's `self_ty`.
fn is_relevant(&self, rcvr_ty: ty::t, candidate: &Candidate) -> bool { fn is_relevant(&self, rcvr_ty: ty::t, candidate: &Candidate) -> bool {
debug!("is_relevant(rcvr_ty={}, candidate={})", debug!("is_relevant(rcvr_ty={}, candidate={})",
self.ty_to_str(rcvr_ty), candidate.repr(self.tcx())); self.ty_to_string(rcvr_ty), candidate.repr(self.tcx()));
return match candidate.method_ty.explicit_self { return match candidate.method_ty.explicit_self {
SelfStatic => { SelfStatic => {
@ -1457,11 +1457,11 @@ impl<'a> LookupContext<'a> {
self.fcx.tcx() self.fcx.tcx()
} }
fn ty_to_str(&self, t: ty::t) -> String { fn ty_to_string(&self, t: ty::t) -> String {
self.fcx.infcx().ty_to_str(t) self.fcx.infcx().ty_to_string(t)
} }
fn did_to_str(&self, did: DefId) -> String { fn did_to_string(&self, did: DefId) -> String {
ty::item_path_str(self.tcx(), did) ty::item_path_str(self.tcx(), did)
} }

View file

@ -397,8 +397,8 @@ impl<'a> Visitor<()> for GatherLocalsVisitor<'a> {
}; };
self.assign(local.id, o_ty); self.assign(local.id, o_ty);
debug!("Local variable {} is assigned type {}", debug!("Local variable {} is assigned type {}",
self.fcx.pat_to_str(&*local.pat), self.fcx.pat_to_string(&*local.pat),
self.fcx.infcx().ty_to_str( self.fcx.infcx().ty_to_string(
self.fcx.inh.locals.borrow().get_copy(&local.id))); self.fcx.inh.locals.borrow().get_copy(&local.id)));
visit::walk_local(self, local, ()); visit::walk_local(self, local, ());
} }
@ -411,7 +411,7 @@ impl<'a> Visitor<()> for GatherLocalsVisitor<'a> {
self.assign(p.id, None); self.assign(p.id, None);
debug!("Pattern binding {} is assigned to {}", debug!("Pattern binding {} is assigned to {}",
token::get_ident(path1.node), token::get_ident(path1.node),
self.fcx.infcx().ty_to_str( self.fcx.infcx().ty_to_string(
self.fcx.inh.locals.borrow().get_copy(&p.id))); self.fcx.inh.locals.borrow().get_copy(&p.id)));
} }
_ => {} _ => {}
@ -534,7 +534,7 @@ fn span_for_field(tcx: &ty::ctxt, field: &ty::field_ty, struct_id: ast::DefId) -
let item = match tcx.map.find(struct_id.node) { let item = match tcx.map.find(struct_id.node) {
Some(ast_map::NodeItem(item)) => item, Some(ast_map::NodeItem(item)) => item,
None => fail!("node not in ast map: {}", struct_id.node), None => fail!("node not in ast map: {}", struct_id.node),
_ => fail!("expected item, found {}", tcx.map.node_to_str(struct_id.node)) _ => fail!("expected item, found {}", tcx.map.node_to_string(struct_id.node))
}; };
match item.node { match item.node {
@ -803,7 +803,7 @@ fn check_impl_methods_against_trait(ccx: &CrateCtxt,
format!( format!(
"method `{}` is not a member of trait `{}`", "method `{}` is not a member of trait `{}`",
token::get_ident(impl_method_ty.ident), token::get_ident(impl_method_ty.ident),
pprust::path_to_str(&ast_trait_ref.path)).as_slice()); pprust::path_to_string(&ast_trait_ref.path)).as_slice());
} }
} }
} }
@ -870,7 +870,7 @@ fn compare_impl_method(tcx: &ty::ctxt,
format!("method `{}` has a `{}` declaration in the impl, \ format!("method `{}` has a `{}` declaration in the impl, \
but not in the trait", but not in the trait",
token::get_ident(trait_m.ident), token::get_ident(trait_m.ident),
pprust::explicit_self_to_str( pprust::explicit_self_to_string(
impl_m.explicit_self)).as_slice()); impl_m.explicit_self)).as_slice());
return; return;
} }
@ -880,7 +880,7 @@ fn compare_impl_method(tcx: &ty::ctxt,
format!("method `{}` has a `{}` declaration in the trait, \ format!("method `{}` has a `{}` declaration in the trait, \
but not in the impl", but not in the impl",
token::get_ident(trait_m.ident), token::get_ident(trait_m.ident),
pprust::explicit_self_to_str( pprust::explicit_self_to_string(
trait_m.explicit_self)).as_slice()); trait_m.explicit_self)).as_slice());
return; return;
} }
@ -1051,7 +1051,7 @@ fn compare_impl_method(tcx: &ty::ctxt,
declaration", declaration",
token::get_ident(trait_m.ident), token::get_ident(trait_m.ident),
i, i,
ppaux::trait_ref_to_str( ppaux::trait_ref_to_string(
tcx, tcx,
&*impl_trait_bound)).as_slice()) &*impl_trait_bound)).as_slice())
} }
@ -1101,8 +1101,8 @@ fn check_cast(fcx: &FnCtxt,
let t_e = fcx.expr_ty(e); let t_e = fcx.expr_ty(e);
debug!("t_1={}", fcx.infcx().ty_to_str(t_1)); debug!("t_1={}", fcx.infcx().ty_to_string(t_1));
debug!("t_e={}", fcx.infcx().ty_to_str(t_e)); debug!("t_e={}", fcx.infcx().ty_to_string(t_e));
if ty::type_is_error(t_e) { if ty::type_is_error(t_e) {
fcx.write_error(id); fcx.write_error(id);
@ -1126,13 +1126,13 @@ fn check_cast(fcx: &FnCtxt,
fcx.type_error_message(span, |actual| { fcx.type_error_message(span, |actual| {
format!("cast from nil: `{}` as `{}`", format!("cast from nil: `{}` as `{}`",
actual, actual,
fcx.infcx().ty_to_str(t_1)) fcx.infcx().ty_to_string(t_1))
}, t_e, None); }, t_e, None);
} else if ty::type_is_nil(t_1) { } else if ty::type_is_nil(t_1) {
fcx.type_error_message(span, |actual| { fcx.type_error_message(span, |actual| {
format!("cast to nil: `{}` as `{}`", format!("cast to nil: `{}` as `{}`",
actual, actual,
fcx.infcx().ty_to_str(t_1)) fcx.infcx().ty_to_string(t_1))
}, t_e, None); }, t_e, None);
} }
@ -1149,7 +1149,7 @@ fn check_cast(fcx: &FnCtxt,
format!("illegal cast; cast through an \ format!("illegal cast; cast through an \
integer first: `{}` as `{}`", integer first: `{}` as `{}`",
actual, actual,
fcx.infcx().ty_to_str(t_1)) fcx.infcx().ty_to_string(t_1))
}, t_e, None); }, t_e, None);
} }
// casts from C-like enums are allowed // casts from C-like enums are allowed
@ -1217,7 +1217,7 @@ fn check_cast(fcx: &FnCtxt,
fcx.type_error_message(span, |actual| { fcx.type_error_message(span, |actual| {
format!("non-scalar cast: `{}` as `{}`", format!("non-scalar cast: `{}` as `{}`",
actual, actual,
fcx.infcx().ty_to_str(t_1)) fcx.infcx().ty_to_string(t_1))
}, t_e, None); }, t_e, None);
} }
@ -1286,7 +1286,7 @@ impl<'a> FnCtxt<'a> {
#[inline] #[inline]
pub fn write_ty(&self, node_id: ast::NodeId, ty: ty::t) { pub fn write_ty(&self, node_id: ast::NodeId, ty: ty::t) {
debug!("write_ty({}, {}) in fcx {}", debug!("write_ty({}, {}) in fcx {}",
node_id, ppaux::ty_to_str(self.tcx(), ty), self.tag()); node_id, ppaux::ty_to_string(self.tcx(), ty), self.tag());
self.inh.node_types.borrow_mut().insert(node_id, ty); self.inh.node_types.borrow_mut().insert(node_id, ty);
} }
@ -1343,7 +1343,7 @@ impl<'a> FnCtxt<'a> {
ast_ty_to_ty(self, self.infcx(), ast_t) ast_ty_to_ty(self, self.infcx(), ast_t)
} }
pub fn pat_to_str(&self, pat: &ast::Pat) -> String { pub fn pat_to_string(&self, pat: &ast::Pat) -> String {
pat.repr(self.tcx()) pat.repr(self.tcx())
} }
@ -1363,7 +1363,7 @@ impl<'a> FnCtxt<'a> {
None => { None => {
self.tcx().sess.bug( self.tcx().sess.bug(
format!("no type for node {}: {} in fcx {}", format!("no type for node {}: {} in fcx {}",
id, self.tcx().map.node_to_str(id), id, self.tcx().map.node_to_string(id),
self.tag()).as_slice()); self.tag()).as_slice());
} }
} }
@ -1375,7 +1375,7 @@ impl<'a> FnCtxt<'a> {
None => { None => {
self.tcx().sess.bug( self.tcx().sess.bug(
format!("no method entry for node {}: {} in fcx {}", format!("no method entry for node {}: {} in fcx {}",
id, self.tcx().map.node_to_str(id), id, self.tcx().map.node_to_string(id),
self.tag()).as_slice()); self.tag()).as_slice());
} }
} }
@ -1842,7 +1842,7 @@ fn check_argument_types(fcx: &FnCtxt,
}; };
debug!("check_argument_types: formal_tys={:?}", debug!("check_argument_types: formal_tys={:?}",
formal_tys.iter().map(|t| fcx.infcx().ty_to_str(*t)).collect::<Vec<String>>()); formal_tys.iter().map(|t| fcx.infcx().ty_to_string(*t)).collect::<Vec<String>>());
// Check the arguments. // Check the arguments.
// We do this in a pretty awful way: first we typecheck any arguments // We do this in a pretty awful way: first we typecheck any arguments
@ -2410,7 +2410,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
operation `{}` not \ operation `{}` not \
supported for floating \ supported for floating \
point SIMD vector `{}`", point SIMD vector `{}`",
ast_util::binop_to_str(op), ast_util::binop_to_string(op),
actual) actual)
}, },
lhs_t, lhs_t,
@ -2440,7 +2440,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
|actual| { |actual| {
format!("binary operation `{}` cannot be applied \ format!("binary operation `{}` cannot be applied \
to type `{}`", to type `{}`",
ast_util::binop_to_str(op), ast_util::binop_to_string(op),
actual) actual)
}, },
lhs_t, lhs_t,
@ -2457,7 +2457,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
operation `{}=` \ operation `{}=` \
cannot be applied to \ cannot be applied to \
type `{}`", type `{}`",
ast_util::binop_to_str(op), ast_util::binop_to_string(op),
actual) actual)
}, },
lhs_t, lhs_t,
@ -2506,7 +2506,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
trait_did, [lhs_expr, rhs], DontAutoderefReceiver, || { trait_did, [lhs_expr, rhs], DontAutoderefReceiver, || {
fcx.type_error_message(ex.span, |actual| { fcx.type_error_message(ex.span, |actual| {
format!("binary operation `{}` cannot be applied to type `{}`", format!("binary operation `{}` cannot be applied to type `{}`",
ast_util::binop_to_str(op), ast_util::binop_to_string(op),
actual) actual)
}, lhs_resolved_t, None) }, lhs_resolved_t, None)
}) })
@ -2594,7 +2594,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
expected_sig); expected_sig);
let fty_sig = fn_ty.sig.clone(); let fty_sig = fn_ty.sig.clone();
let fty = ty::mk_closure(tcx, fn_ty); let fty = ty::mk_closure(tcx, fn_ty);
debug!("check_expr_fn fty={}", fcx.infcx().ty_to_str(fty)); debug!("check_expr_fn fty={}", fcx.infcx().ty_to_string(fty));
fcx.write_ty(expr.id, fty); fcx.write_ty(expr.id, fty);
@ -2628,7 +2628,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
autoderef(fcx, expr.span, expr_t, Some(base.id), lvalue_pref, |base_t, _| { autoderef(fcx, expr.span, expr_t, Some(base.id), lvalue_pref, |base_t, _| {
match ty::get(base_t).sty { match ty::get(base_t).sty {
ty::ty_struct(base_id, ref substs) => { ty::ty_struct(base_id, ref substs) => {
debug!("struct named {}", ppaux::ty_to_str(tcx, base_t)); debug!("struct named {}", ppaux::ty_to_string(tcx, base_t));
let fields = ty::lookup_struct_fields(tcx, base_id); let fields = ty::lookup_struct_fields(tcx, base_id);
lookup_field_ty(tcx, base_id, fields.as_slice(), lookup_field_ty(tcx, base_id, fields.as_slice(),
field.node.name, &(*substs)) field.node.name, &(*substs))
@ -3386,7 +3386,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
tcx.sess tcx.sess
.span_err(path.span, .span_err(path.span,
format!("`{}` does not name a structure", format!("`{}` does not name a structure",
pprust::path_to_str( pprust::path_to_string(
path)).as_slice()) path)).as_slice())
} }
} }
@ -3454,10 +3454,10 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
} }
debug!("type of expr({}) {} is...", expr.id, debug!("type of expr({}) {} is...", expr.id,
syntax::print::pprust::expr_to_str(expr)); syntax::print::pprust::expr_to_string(expr));
debug!("... {}, expected is {}", debug!("... {}, expected is {}",
ppaux::ty_to_str(tcx, fcx.expr_ty(expr)), ppaux::ty_to_string(tcx, fcx.expr_ty(expr)),
expected.repr(tcx)) expected.repr(tcx));
unifier(); unifier();
} }
@ -3792,7 +3792,7 @@ pub fn check_instantiable(tcx: &ty::ctxt,
format!("this type cannot be instantiated without an \ format!("this type cannot be instantiated without an \
instance of itself; consider using \ instance of itself; consider using \
`Option<{}>`", `Option<{}>`",
ppaux::ty_to_str(tcx, item_ty)).as_slice()); ppaux::ty_to_string(tcx, item_ty)).as_slice());
false false
} else { } else {
true true
@ -3853,7 +3853,7 @@ pub fn check_enum_variants_sized(ccx: &CrateCtxt,
dynamically sized types may only \ dynamically sized types may only \
appear as the final type in a \ appear as the final type in a \
variant", variant",
ppaux::ty_to_str(ccx.tcx, ppaux::ty_to_string(ccx.tcx,
*t)).as_slice()); *t)).as_slice());
} }
} }
@ -3918,7 +3918,7 @@ pub fn check_enum_variants(ccx: &CrateCtxt,
match v.node.disr_expr { match v.node.disr_expr {
Some(e) => { Some(e) => {
debug!("disr expr, checking {}", pprust::expr_to_str(&*e)); debug!("disr expr, checking {}", pprust::expr_to_string(&*e));
let inh = blank_inherited_fields(ccx); let inh = blank_inherited_fields(ccx);
let fcx = blank_fn_ctxt(ccx, &inh, rty, e.id); let fcx = blank_fn_ctxt(ccx, &inh, rty, e.id);
@ -4522,7 +4522,7 @@ pub fn check_bounds_are_used(ccx: &CrateCtxt,
tps: &OwnedSlice<ast::TyParam>, tps: &OwnedSlice<ast::TyParam>,
ty: ty::t) { ty: ty::t) {
debug!("check_bounds_are_used(n_tps={}, ty={})", debug!("check_bounds_are_used(n_tps={}, ty={})",
tps.len(), ppaux::ty_to_str(ccx.tcx, ty)); tps.len(), ppaux::ty_to_string(ccx.tcx, ty));
// make a vector of booleans initially false, set to true when used // make a vector of booleans initially false, set to true when used
if tps.len() == 0u { return; } if tps.len() == 0u { return; }
@ -4840,7 +4840,7 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &ast::ForeignItem) {
fty, fty,
|| { || {
format!("intrinsic has wrong type: expected `{}`", format!("intrinsic has wrong type: expected `{}`",
ppaux::ty_to_str(ccx.tcx, fty)) ppaux::ty_to_string(ccx.tcx, fty))
}); });
} }
} }

View file

@ -133,7 +133,7 @@ use middle::typeck::infer;
use middle::typeck::MethodCall; use middle::typeck::MethodCall;
use middle::pat_util; use middle::pat_util;
use util::nodemap::NodeMap; use util::nodemap::NodeMap;
use util::ppaux::{ty_to_str, region_to_str, Repr}; use util::ppaux::{ty_to_string, region_to_string, Repr};
use syntax::ast; use syntax::ast;
use syntax::codemap::Span; use syntax::codemap::Span;
@ -876,7 +876,7 @@ fn constrain_autoderefs(rcx: &mut Rcx,
let r_deref_expr = ty::ReScope(deref_expr.id); let r_deref_expr = ty::ReScope(deref_expr.id);
for i in range(0u, derefs) { for i in range(0u, derefs) {
debug!("constrain_autoderefs(deref_expr=?, derefd_ty={}, derefs={:?}/{:?}", debug!("constrain_autoderefs(deref_expr=?, derefd_ty={}, derefs={:?}/{:?}",
rcx.fcx.infcx().ty_to_str(derefd_ty), rcx.fcx.infcx().ty_to_string(derefd_ty),
i, derefs); i, derefs);
let method_call = MethodCall::autoderef(deref_expr.id, i); let method_call = MethodCall::autoderef(deref_expr.id, i);
@ -948,7 +948,7 @@ fn constrain_index(rcx: &mut Rcx,
*/ */
debug!("constrain_index(index_expr=?, indexed_ty={}", debug!("constrain_index(index_expr=?, indexed_ty={}",
rcx.fcx.infcx().ty_to_str(indexed_ty)); rcx.fcx.infcx().ty_to_string(indexed_ty));
let r_index_expr = ty::ReScope(index_expr.id); let r_index_expr = ty::ReScope(index_expr.id);
match ty::get(indexed_ty).sty { match ty::get(indexed_ty).sty {
@ -984,7 +984,7 @@ fn constrain_regions_in_type_of_node(
|method_call| rcx.resolve_method_type(method_call)); |method_call| rcx.resolve_method_type(method_call));
debug!("constrain_regions_in_type_of_node(\ debug!("constrain_regions_in_type_of_node(\
ty={}, ty0={}, id={}, minimum_lifetime={:?})", ty={}, ty0={}, id={}, minimum_lifetime={:?})",
ty_to_str(tcx, ty), ty_to_str(tcx, ty0), ty_to_string(tcx, ty), ty_to_string(tcx, ty0),
id, minimum_lifetime); id, minimum_lifetime);
constrain_regions_in_type(rcx, minimum_lifetime, origin, ty); constrain_regions_in_type(rcx, minimum_lifetime, origin, ty);
} }
@ -1011,8 +1011,8 @@ fn constrain_regions_in_type(
let tcx = rcx.fcx.ccx.tcx; let tcx = rcx.fcx.ccx.tcx;
debug!("constrain_regions_in_type(minimum_lifetime={}, ty={})", debug!("constrain_regions_in_type(minimum_lifetime={}, ty={})",
region_to_str(tcx, "", false, minimum_lifetime), region_to_string(tcx, "", false, minimum_lifetime),
ty_to_str(tcx, ty)); ty_to_string(tcx, ty));
relate_nested_regions(tcx, Some(minimum_lifetime), ty, |r_sub, r_sup| { relate_nested_regions(tcx, Some(minimum_lifetime), ty, |r_sub, r_sup| {
debug!("relate_nested_regions(r_sub={}, r_sup={})", debug!("relate_nested_regions(r_sub={}, r_sup={})",
@ -1190,7 +1190,7 @@ fn link_region_from_node_type(rcx: &Rcx,
let rptr_ty = rcx.resolve_node_type(id); let rptr_ty = rcx.resolve_node_type(id);
if !ty::type_is_bot(rptr_ty) && !ty::type_is_error(rptr_ty) { if !ty::type_is_bot(rptr_ty) && !ty::type_is_error(rptr_ty) {
let tcx = rcx.fcx.ccx.tcx; let tcx = rcx.fcx.ccx.tcx;
debug!("rptr_ty={}", ty_to_str(tcx, rptr_ty)); debug!("rptr_ty={}", ty_to_string(tcx, rptr_ty));
let r = ty::ty_region(tcx, span, rptr_ty); let r = ty::ty_region(tcx, span, rptr_ty);
link_region(rcx, span, r, ty::BorrowKind::from_mutbl(mutbl), link_region(rcx, span, r, ty::BorrowKind::from_mutbl(mutbl),
cmt_borrowed); cmt_borrowed);

View file

@ -30,7 +30,7 @@ pub fn replace_late_bound_regions_in_fn_sig(
let mut map = HashMap::new(); let mut map = HashMap::new();
let fn_sig = { let fn_sig = {
let mut f = ty_fold::RegionFolder::regions(tcx, |r| { let mut f = ty_fold::RegionFolder::regions(tcx, |r| {
debug!("region r={}", r.to_str()); debug!("region r={}", r.to_string());
match r { match r {
ty::ReLateBound(s, br) if s == fn_sig.binder_id => { ty::ReLateBound(s, br) if s == fn_sig.binder_id => {
*map.find_or_insert_with(br, |_| mapf(br)) *map.find_or_insert_with(br, |_| mapf(br))
@ -153,7 +153,7 @@ pub fn relate_free_regions(tcx: &ty::ctxt, fn_sig: &ty::FnSig) {
} }
for &t in all_tys.iter() { for &t in all_tys.iter() {
debug!("relate_free_regions(t={})", ppaux::ty_to_str(tcx, t)); debug!("relate_free_regions(t={})", ppaux::ty_to_string(tcx, t));
relate_nested_regions(tcx, None, t, |a, b| { relate_nested_regions(tcx, None, t, |a, b| {
match (&a, &b) { match (&a, &b) {
(&ty::ReFree(free_a), &ty::ReFree(free_b)) => { (&ty::ReFree(free_a), &ty::ReFree(free_b)) => {

View file

@ -16,7 +16,7 @@ use middle::typeck::astconv::AstConv;
use middle::typeck::check::{FnCtxt, impl_self_ty}; use middle::typeck::check::{FnCtxt, impl_self_ty};
use middle::typeck::check::{structurally_resolved_type}; use middle::typeck::check::{structurally_resolved_type};
use middle::typeck::check::writeback; use middle::typeck::check::writeback;
use middle::typeck::infer::fixup_err_to_str; use middle::typeck::infer::fixup_err_to_string;
use middle::typeck::infer::{resolve_and_force_all_but_regions, resolve_type}; use middle::typeck::infer::{resolve_and_force_all_but_regions, resolve_type};
use middle::typeck::infer; use middle::typeck::infer;
use middle::typeck::{vtable_origin, vtable_res, vtable_param_res}; use middle::typeck::{vtable_origin, vtable_res, vtable_param_res};
@ -35,7 +35,7 @@ use std::collections::HashSet;
use syntax::ast; use syntax::ast;
use syntax::ast_util; use syntax::ast_util;
use syntax::codemap::Span; use syntax::codemap::Span;
use syntax::print::pprust::expr_to_str; use syntax::print::pprust::expr_to_string;
use syntax::visit; use syntax::visit;
use syntax::visit::Visitor; use syntax::visit::Visitor;
@ -154,8 +154,8 @@ fn lookup_vtables_for_param(vcx: &VtableContext,
vcx.tcx().sess.span_fatal(span, vcx.tcx().sess.span_fatal(span,
format!("failed to find an implementation of \ format!("failed to find an implementation of \
trait {} for {}", trait {} for {}",
vcx.infcx.trait_ref_to_str(&*trait_ref), vcx.infcx.trait_ref_to_string(&*trait_ref),
vcx.infcx.ty_to_str(ty)).as_slice()); vcx.infcx.ty_to_string(ty)).as_slice());
} }
} }
true true
@ -205,8 +205,8 @@ fn relate_trait_refs(vcx: &VtableContext,
let tcx = vcx.tcx(); let tcx = vcx.tcx();
tcx.sess.span_err(span, tcx.sess.span_err(span,
format!("expected {}, but found {} ({})", format!("expected {}, but found {} ({})",
ppaux::trait_ref_to_str(tcx, &r_exp_trait_ref), ppaux::trait_ref_to_string(tcx, &r_exp_trait_ref),
ppaux::trait_ref_to_str(tcx, &r_act_trait_ref), ppaux::trait_ref_to_string(tcx, &r_act_trait_ref),
ty::type_err_to_str(tcx, err)).as_slice()); ty::type_err_to_str(tcx, err)).as_slice());
} }
} }
@ -385,8 +385,8 @@ fn search_for_vtable(vcx: &VtableContext,
debug!("(checking vtable) num 2 relating trait \ debug!("(checking vtable) num 2 relating trait \
ty {} to of_trait_ref {}", ty {} to of_trait_ref {}",
vcx.infcx.trait_ref_to_str(&*trait_ref), vcx.infcx.trait_ref_to_string(&*trait_ref),
vcx.infcx.trait_ref_to_str(&*of_trait_ref)); vcx.infcx.trait_ref_to_string(&*of_trait_ref));
relate_trait_refs(vcx, span, of_trait_ref, trait_ref.clone()); relate_trait_refs(vcx, span, of_trait_ref, trait_ref.clone());
@ -488,7 +488,7 @@ fn fixup_ty(vcx: &VtableContext,
tcx.sess.span_fatal(span, tcx.sess.span_fatal(span,
format!("cannot determine a type for this bounded type \ format!("cannot determine a type for this bounded type \
parameter: {}", parameter: {}",
fixup_err_to_str(e)).as_slice()) fixup_err_to_string(e)).as_slice())
} }
Err(_) => { Err(_) => {
None None
@ -527,7 +527,7 @@ pub fn early_resolve_expr(ex: &ast::Expr, fcx: &FnCtxt, is_early: bool) {
} }
debug!("vtable: early_resolve_expr() ex with id {:?} (early: {}): {}", debug!("vtable: early_resolve_expr() ex with id {:?} (early: {}): {}",
ex.id, is_early, expr_to_str(ex)); ex.id, is_early, expr_to_string(ex));
let _indent = indenter(); let _indent = indenter();
let cx = fcx.ccx; let cx = fcx.ccx;
@ -626,7 +626,7 @@ pub fn early_resolve_expr(ex: &ast::Expr, fcx: &FnCtxt, is_early: bool) {
ex.span, ex.span,
format!("can only cast an boxed pointer \ format!("can only cast an boxed pointer \
to a boxed object, not a {}", to a boxed object, not a {}",
ty::ty_sort_str(fcx.tcx(), src_ty)).as_slice()); ty::ty_sort_string(fcx.tcx(), src_ty)).as_slice());
} }
_ => {} _ => {}
} }
@ -639,7 +639,7 @@ pub fn early_resolve_expr(ex: &ast::Expr, fcx: &FnCtxt, is_early: bool) {
ex.span, ex.span,
format!("can only cast an &-pointer \ format!("can only cast an &-pointer \
to an &-object, not a {}", to an &-object, not a {}",
ty::ty_sort_str(fcx.tcx(), src_ty)).as_slice()); ty::ty_sort_string(fcx.tcx(), src_ty)).as_slice());
} }
_ => {} _ => {}
} }
@ -657,7 +657,7 @@ pub fn early_resolve_expr(ex: &ast::Expr, fcx: &FnCtxt, is_early: bool) {
let did = def.def_id(); let did = def.def_id();
let item_ty = ty::lookup_item_type(cx.tcx, did); let item_ty = ty::lookup_item_type(cx.tcx, did);
debug!("early resolve expr: def {:?} {:?}, {:?}, {}", ex.id, did, def, debug!("early resolve expr: def {:?} {:?}, {:?}, {}", ex.id, did, def,
fcx.infcx().ty_to_str(item_ty.ty)); fcx.infcx().ty_to_string(item_ty.ty));
debug!("early_resolve_expr: looking up vtables for type params {}", debug!("early_resolve_expr: looking up vtables for type params {}",
item_ty.generics.types.repr(fcx.tcx())); item_ty.generics.types.repr(fcx.tcx()));
let vcx = fcx.vtable_context(); let vcx = fcx.vtable_context();

View file

@ -31,7 +31,7 @@ use std::cell::Cell;
use syntax::ast; use syntax::ast;
use syntax::codemap::Span; use syntax::codemap::Span;
use syntax::print::pprust::pat_to_str; use syntax::print::pprust::pat_to_string;
use syntax::visit; use syntax::visit;
use syntax::visit::Visitor; use syntax::visit::Visitor;
@ -159,7 +159,7 @@ impl<'cx> Visitor<()> for WritebackCx<'cx> {
self.visit_node_id(ResolvingPattern(p.span), p.id); self.visit_node_id(ResolvingPattern(p.span), p.id);
debug!("Type for pattern binding {} (id {}) resolved to {}", debug!("Type for pattern binding {} (id {}) resolved to {}",
pat_to_str(p), pat_to_string(p),
p.id, p.id,
ty::node_id_to_type(self.tcx(), p.id).repr(self.tcx())); ty::node_id_to_type(self.tcx(), p.id).repr(self.tcx()));
@ -403,7 +403,7 @@ impl<'cx> Resolver<'cx> {
span, span,
format!("cannot determine a type for \ format!("cannot determine a type for \
this expression: {}", this expression: {}",
infer::fixup_err_to_str(e)).as_slice()) infer::fixup_err_to_string(e)).as_slice())
} }
ResolvingLocal(span) => { ResolvingLocal(span) => {
@ -411,7 +411,7 @@ impl<'cx> Resolver<'cx> {
span, span,
format!("cannot determine a type for \ format!("cannot determine a type for \
this local variable: {}", this local variable: {}",
infer::fixup_err_to_str(e)).as_slice()) infer::fixup_err_to_string(e)).as_slice())
} }
ResolvingPattern(span) => { ResolvingPattern(span) => {
@ -419,7 +419,7 @@ impl<'cx> Resolver<'cx> {
span, span,
format!("cannot determine a type for \ format!("cannot determine a type for \
this pattern binding: {}", this pattern binding: {}",
infer::fixup_err_to_str(e)).as_slice()) infer::fixup_err_to_string(e)).as_slice())
} }
ResolvingUpvar(upvar_id) => { ResolvingUpvar(upvar_id) => {
@ -430,8 +430,8 @@ impl<'cx> Resolver<'cx> {
captured variable `{}`: {}", captured variable `{}`: {}",
ty::local_var_name_str( ty::local_var_name_str(
self.tcx, self.tcx,
upvar_id.var_id).get().to_str(), upvar_id.var_id).get().to_string(),
infer::fixup_err_to_str(e)).as_slice()); infer::fixup_err_to_string(e)).as_slice());
} }
ResolvingImplRes(span) => { ResolvingImplRes(span) => {

View file

@ -63,7 +63,7 @@ use syntax::codemap;
use syntax::owned_slice::OwnedSlice; use syntax::owned_slice::OwnedSlice;
use syntax::parse::token::special_idents; use syntax::parse::token::special_idents;
use syntax::parse::token; use syntax::parse::token;
use syntax::print::pprust::{path_to_str}; use syntax::print::pprust::{path_to_string};
use syntax::visit; use syntax::visit;
struct CollectItemTypesVisitor<'a> { struct CollectItemTypesVisitor<'a> {
@ -665,7 +665,7 @@ pub fn instantiate_trait_ref(ccx: &CrateCtxt,
ccx.tcx.sess.span_fatal( ccx.tcx.sess.span_fatal(
ast_trait_ref.path.span, ast_trait_ref.path.span,
format!("`{}` is not a trait", format!("`{}` is not a trait",
path_to_str(&ast_trait_ref.path)).as_slice()); path_to_string(&ast_trait_ref.path)).as_slice());
} }
} }
} }
@ -836,7 +836,7 @@ pub fn ty_of_item(ccx: &CrateCtxt, it: &ast::Item)
debug!("type of {} (id {}) is {}", debug!("type of {} (id {}) is {}",
token::get_ident(it.ident), token::get_ident(it.ident),
it.id, it.id,
ppaux::ty_to_str(tcx, pty.ty)); ppaux::ty_to_string(tcx, pty.ty));
ccx.tcx.tcache.borrow_mut().insert(local_def(it.id), pty.clone()); ccx.tcx.tcache.borrow_mut().insert(local_def(it.id), pty.clone());
return pty; return pty;
@ -1169,7 +1169,7 @@ fn ty_generics(ccx: &CrateCtxt,
format!("incompatible bounds on type parameter {}, \ format!("incompatible bounds on type parameter {}, \
bound {} does not allow unsized type", bound {} does not allow unsized type",
token::get_ident(ident), token::get_ident(ident),
ppaux::trait_ref_to_str(tcx, ppaux::trait_ref_to_string(tcx,
&*trait_ref)).as_slice()); &*trait_ref)).as_slice());
} }
true true

View file

@ -90,7 +90,7 @@ use syntax::codemap;
use syntax::parse::token; use syntax::parse::token;
use syntax::print::pprust; use syntax::print::pprust;
use util::ppaux::UserString; use util::ppaux::UserString;
use util::ppaux::bound_region_to_str; use util::ppaux::bound_region_to_string;
use util::ppaux::note_and_explain_region; use util::ppaux::note_and_explain_region;
pub trait ErrorReporting { pub trait ErrorReporting {
@ -442,7 +442,7 @@ impl<'a> ErrorReporting for InferCtxt<'a> {
ty::local_var_name_str(self.tcx, ty::local_var_name_str(self.tcx,
upvar_id.var_id) upvar_id.var_id)
.get() .get()
.to_str()).as_slice()); .to_string()).as_slice());
note_and_explain_region( note_and_explain_region(
self.tcx, self.tcx,
"...the borrowed pointer is valid for ", "...the borrowed pointer is valid for ",
@ -454,7 +454,7 @@ impl<'a> ErrorReporting for InferCtxt<'a> {
ty::local_var_name_str(self.tcx, ty::local_var_name_str(self.tcx,
upvar_id.var_id) upvar_id.var_id)
.get() .get()
.to_str()).as_slice(), .to_string()).as_slice(),
sup, sup,
""); "");
} }
@ -500,7 +500,7 @@ impl<'a> ErrorReporting for InferCtxt<'a> {
outlive the enclosing closure", outlive the enclosing closure",
ty::local_var_name_str(self.tcx, ty::local_var_name_str(self.tcx,
id).get() id).get()
.to_str()).as_slice()); .to_string()).as_slice());
note_and_explain_region( note_and_explain_region(
self.tcx, self.tcx,
"captured variable is valid for ", "captured variable is valid for ",
@ -1046,7 +1046,7 @@ impl<'a> Rebuilder<'a> {
.sess .sess
.fatal(format!( .fatal(format!(
"unbound path {}", "unbound path {}",
pprust::path_to_str(path)).as_slice()) pprust::path_to_string(path)).as_slice())
} }
Some(&d) => d Some(&d) => d
}; };
@ -1231,7 +1231,7 @@ impl<'a> ErrorReportingHelpers for InferCtxt<'a> {
opt_explicit_self: Option<ast::ExplicitSelf_>, opt_explicit_self: Option<ast::ExplicitSelf_>,
generics: &ast::Generics, generics: &ast::Generics,
span: codemap::Span) { span: codemap::Span) {
let suggested_fn = pprust::fun_to_str(decl, fn_style, ident, let suggested_fn = pprust::fun_to_string(decl, fn_style, ident,
opt_explicit_self, generics); opt_explicit_self, generics);
let msg = format!("consider using an explicit lifetime \ let msg = format!("consider using an explicit lifetime \
parameter as shown: {}", suggested_fn); parameter as shown: {}", suggested_fn);
@ -1249,11 +1249,11 @@ impl<'a> ErrorReportingHelpers for InferCtxt<'a> {
infer::Coercion(_) => " for automatic coercion".to_string(), infer::Coercion(_) => " for automatic coercion".to_string(),
infer::LateBoundRegion(_, br) => { infer::LateBoundRegion(_, br) => {
format!(" for {}in function call", format!(" for {}in function call",
bound_region_to_str(self.tcx, "lifetime parameter ", true, br)) bound_region_to_string(self.tcx, "lifetime parameter ", true, br))
} }
infer::BoundRegionInFnType(_, br) => { infer::BoundRegionInFnType(_, br) => {
format!(" for {}in function type", format!(" for {}in function type",
bound_region_to_str(self.tcx, "lifetime parameter ", true, br)) bound_region_to_string(self.tcx, "lifetime parameter ", true, br))
} }
infer::EarlyBoundRegion(_, name) => { infer::EarlyBoundRegion(_, name) => {
format!(" for lifetime parameter `{}", format!(" for lifetime parameter `{}",
@ -1265,7 +1265,7 @@ impl<'a> ErrorReportingHelpers for InferCtxt<'a> {
} }
infer::UpvarRegion(ref upvar_id, _) => { infer::UpvarRegion(ref upvar_id, _) => {
format!(" for capture of `{}` by closure", format!(" for capture of `{}` by closure",
ty::local_var_name_str(self.tcx, upvar_id.var_id).get().to_str()) ty::local_var_name_str(self.tcx, upvar_id.var_id).get().to_string())
} }
}; };
@ -1334,7 +1334,7 @@ impl<'a> ErrorReportingHelpers for InferCtxt<'a> {
"...so that closure can access `{}`", "...so that closure can access `{}`",
ty::local_var_name_str(self.tcx, upvar_id.var_id) ty::local_var_name_str(self.tcx, upvar_id.var_id)
.get() .get()
.to_str()).as_slice()) .to_string()).as_slice())
} }
infer::InfStackClosure(span) => { infer::InfStackClosure(span) => {
self.tcx.sess.span_note( self.tcx.sess.span_note(
@ -1359,7 +1359,7 @@ impl<'a> ErrorReportingHelpers for InferCtxt<'a> {
does not outlive the enclosing closure", does not outlive the enclosing closure",
ty::local_var_name_str( ty::local_var_name_str(
self.tcx, self.tcx,
id).get().to_str()).as_slice()); id).get().to_string()).as_slice());
} }
infer::IndexSlice(span) => { infer::IndexSlice(span) => {
self.tcx.sess.span_note( self.tcx.sess.span_note(
@ -1508,7 +1508,7 @@ impl LifeGiver {
let mut lifetime; let mut lifetime;
loop { loop {
let mut s = String::from_str("'"); let mut s = String::from_str("'");
s.push_str(num_to_str(self.counter.get()).as_slice()); s.push_str(num_to_string(self.counter.get()).as_slice());
if !self.taken.contains(&s) { if !self.taken.contains(&s) {
lifetime = name_to_dummy_lifetime( lifetime = name_to_dummy_lifetime(
token::str_to_ident(s.as_slice()).name); token::str_to_ident(s.as_slice()).name);
@ -1521,7 +1521,7 @@ impl LifeGiver {
return lifetime; return lifetime;
// 0 .. 25 generates a .. z, 26 .. 51 generates aa .. zz, and so on // 0 .. 25 generates a .. z, 26 .. 51 generates aa .. zz, and so on
fn num_to_str(counter: uint) -> String { fn num_to_string(counter: uint) -> String {
let mut s = String::new(); let mut s = String::new();
let (n, r) = (counter/26 + 1, counter % 26); let (n, r) = (counter/26 + 1, counter % 26);
let letter: char = from_u32((r+97) as u32).unwrap(); let letter: char = from_u32((r+97) as u32).unwrap();
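The hunk above renames `num_to_str` to `num_to_string`; the comment spells out the naming scheme it implements (counter 0..25 yields a..z, 26..51 yields aa..zz, and so on). A standalone sketch of just that letter scheme in present-day Rust, as a hypothetical free function rather than the compiler's `LifeGiver` helper (the real code additionally prepends the `'` that starts a lifetime name):

// counter 0 -> "a", 25 -> "z", 26 -> "aa", 51 -> "zz", 52 -> "aaa", ...
fn counter_to_name(counter: u32) -> String {
    let repeats = counter / 26 + 1;            // how many copies of the letter
    let offset = counter % 26;                 // which letter of the alphabet
    let letter = char::from_u32('a' as u32 + offset).unwrap();
    std::iter::repeat(letter).take(repeats as usize).collect()
}

fn main() {
    assert_eq!(counter_to_name(0), "a");
    assert_eq!(counter_to_name(25), "z");
    assert_eq!(counter_to_name(26), "aa");
    assert_eq!(counter_to_name(51), "zz");
}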

View file

@ -26,7 +26,7 @@ use syntax::ast::{NormalFn, UnsafeFn, NodeId};
use syntax::ast::{Onceness, FnStyle}; use syntax::ast::{Onceness, FnStyle};
use std::collections::HashMap; use std::collections::HashMap;
use util::common::{indenter}; use util::common::{indenter};
use util::ppaux::mt_to_str; use util::ppaux::mt_to_string;
use util::ppaux::Repr; use util::ppaux::Repr;
pub struct Glb<'f>(pub CombineFields<'f>); // "greatest lower bound" (common subtype) pub struct Glb<'f>(pub CombineFields<'f>); // "greatest lower bound" (common subtype)
@ -50,8 +50,8 @@ impl<'f> Combine for Glb<'f> {
debug!("{}.mts({}, {})", debug!("{}.mts({}, {})",
self.tag(), self.tag(),
mt_to_str(tcx, a), mt_to_string(tcx, a),
mt_to_str(tcx, b)); mt_to_string(tcx, b));
match (a.mutbl, b.mutbl) { match (a.mutbl, b.mutbl) {
// If one side or both is mut, then the GLB must use // If one side or both is mut, then the GLB must use

View file

@ -39,7 +39,7 @@ use middle::typeck::infer::*;
use middle::typeck::infer::combine::*; use middle::typeck::infer::combine::*;
use middle::typeck::infer::glb::Glb; use middle::typeck::infer::glb::Glb;
use middle::typeck::infer::lub::Lub; use middle::typeck::infer::lub::Lub;
use middle::typeck::infer::unify::{Root, UnifyKey}; use middle::typeck::infer::unify::*;
use middle::typeck::infer::sub::Sub; use middle::typeck::infer::sub::Sub;
use util::ppaux::Repr; use util::ppaux::Repr;
@ -436,8 +436,7 @@ pub enum LatticeVarResult<K,T> {
* - If the variables do not both have an upper bound, we will unify * - If the variables do not both have an upper bound, we will unify
* the variables and return the unified variable, in which case the * the variables and return the unified variable, in which case the
* result is a variable. This is indicated with a `VarResult` * result is a variable. This is indicated with a `VarResult`
* return. * return. */
*/
pub fn lattice_vars<L:LatticeDir+Combine, pub fn lattice_vars<L:LatticeDir+Combine,
T:LatticeValue, T:LatticeValue,
K:UnifyKey<Bounds<T>>>( K:UnifyKey<Bounds<T>>>(

View file

@ -25,7 +25,7 @@ use syntax::ast::{Many, Once, NodeId};
use syntax::ast::{NormalFn, UnsafeFn}; use syntax::ast::{NormalFn, UnsafeFn};
use syntax::ast::{Onceness, FnStyle}; use syntax::ast::{Onceness, FnStyle};
use syntax::ast::{MutMutable, MutImmutable}; use syntax::ast::{MutMutable, MutImmutable};
use util::ppaux::mt_to_str; use util::ppaux::mt_to_string;
use util::ppaux::Repr; use util::ppaux::Repr;
pub struct Lub<'f>(pub CombineFields<'f>); // least-upper-bound: common supertype pub struct Lub<'f>(pub CombineFields<'f>); // least-upper-bound: common supertype
@ -49,8 +49,8 @@ impl<'f> Combine for Lub<'f> {
debug!("{}.mts({}, {})", debug!("{}.mts({}, {})",
self.tag(), self.tag(),
mt_to_str(tcx, a), mt_to_string(tcx, a),
mt_to_str(tcx, b)); mt_to_string(tcx, b));
if a.mutbl != b.mutbl { if a.mutbl != b.mutbl {
return Err(ty::terr_mutability) return Err(ty::terr_mutability)

View file

@ -44,7 +44,7 @@ use syntax::ast;
use syntax::codemap; use syntax::codemap;
use syntax::codemap::Span; use syntax::codemap::Span;
use util::common::indent; use util::common::indent;
use util::ppaux::{bound_region_to_str, ty_to_str, trait_ref_to_str, Repr}; use util::ppaux::{bound_region_to_string, ty_to_string, trait_ref_to_string, Repr};
pub mod doc; pub mod doc;
pub mod macros; pub mod macros;
@ -245,7 +245,7 @@ pub enum fixup_err {
region_var_bound_by_region_var(RegionVid, RegionVid) region_var_bound_by_region_var(RegionVid, RegionVid)
} }
pub fn fixup_err_to_str(f: fixup_err) -> String { pub fn fixup_err_to_string(f: fixup_err) -> String {
match f { match f {
unresolved_int_ty(_) => { unresolved_int_ty(_) => {
"cannot determine the type of this integer; add a suffix to \ "cannot determine the type of this integer; add a suffix to \
@ -662,19 +662,19 @@ impl<'a> InferCtxt<'a> {
self.report_region_errors(&errors); // see error_reporting.rs self.report_region_errors(&errors); // see error_reporting.rs
} }
pub fn ty_to_str(&self, t: ty::t) -> String { pub fn ty_to_string(&self, t: ty::t) -> String {
ty_to_str(self.tcx, ty_to_string(self.tcx,
self.resolve_type_vars_if_possible(t)) self.resolve_type_vars_if_possible(t))
} }
pub fn tys_to_str(&self, ts: &[ty::t]) -> String { pub fn tys_to_string(&self, ts: &[ty::t]) -> String {
let tstrs: Vec<String> = ts.iter().map(|t| self.ty_to_str(*t)).collect(); let tstrs: Vec<String> = ts.iter().map(|t| self.ty_to_string(*t)).collect();
format!("({})", tstrs.connect(", ")) format!("({})", tstrs.connect(", "))
} }
pub fn trait_ref_to_str(&self, t: &ty::TraitRef) -> String { pub fn trait_ref_to_string(&self, t: &ty::TraitRef) -> String {
let t = self.resolve_type_vars_in_trait_ref_if_possible(t); let t = self.resolve_type_vars_in_trait_ref_if_possible(t);
trait_ref_to_str(self.tcx, &t) trait_ref_to_string(self.tcx, &t)
} }
pub fn resolve_type_vars_if_possible(&self, typ: ty::t) -> ty::t { pub fn resolve_type_vars_if_possible(&self, typ: ty::t) -> ty::t {
@ -707,8 +707,8 @@ impl<'a> InferCtxt<'a> {
self.tcx.sess.bug( self.tcx.sess.bug(
format!("resolve_type_vars_if_possible() yielded {} \ format!("resolve_type_vars_if_possible() yielded {} \
when supplied with {}", when supplied with {}",
self.ty_to_str(dummy0), self.ty_to_string(dummy0),
self.ty_to_str(dummy1)).as_slice()); self.ty_to_string(dummy1)).as_slice());
} }
} }
} }
@ -761,7 +761,7 @@ impl<'a> InferCtxt<'a> {
Some(e) => { Some(e) => {
self.tcx.sess.span_err(sp, self.tcx.sess.span_err(sp,
format!("{}{}", format!("{}{}",
mk_msg(Some(self.ty_to_str(e)), actual_ty), mk_msg(Some(self.ty_to_string(e)), actual_ty),
error_str).as_slice()); error_str).as_slice());
} }
} }
@ -783,7 +783,7 @@ impl<'a> InferCtxt<'a> {
return; return;
} }
self.type_error_message_str(sp, |_e, a| { mk_msg(a) }, self.ty_to_str(actual_ty), err); self.type_error_message_str(sp, |_e, a| { mk_msg(a) }, self.ty_to_string(actual_ty), err);
} }
pub fn report_mismatched_types(&self, pub fn report_mismatched_types(&self,
@ -800,7 +800,7 @@ impl<'a> InferCtxt<'a> {
// if I leave out : String, it infers &str and complains // if I leave out : String, it infers &str and complains
|actual: String| { |actual: String| {
format!("mismatched types: expected `{}` but found `{}`", format!("mismatched types: expected `{}` but found `{}`",
self.ty_to_str(resolved_expected), self.ty_to_string(resolved_expected),
actual) actual)
} }
} }
@ -819,7 +819,7 @@ impl<'a> InferCtxt<'a> {
let rvar = self.next_region_var( let rvar = self.next_region_var(
BoundRegionInFnType(trace.origin.span(), br)); BoundRegionInFnType(trace.origin.span(), br));
debug!("Bound region {} maps to {:?}", debug!("Bound region {} maps to {:?}",
bound_region_to_str(self.tcx, "", false, br), bound_region_to_string(self.tcx, "", false, br),
rvar); rvar);
rvar rvar
}); });

View file

@ -56,7 +56,7 @@ use middle::typeck::infer::{unresolved_float_ty, unresolved_int_ty};
use middle::typeck::infer::{unresolved_ty}; use middle::typeck::infer::{unresolved_ty};
use syntax::codemap::Span; use syntax::codemap::Span;
use util::common::indent; use util::common::indent;
use util::ppaux::{Repr, ty_to_str}; use util::ppaux::{Repr, ty_to_string};
pub static resolve_nested_tvar: uint = 0b0000000001; pub static resolve_nested_tvar: uint = 0b0000000001;
pub static resolve_rvar: uint = 0b0000000010; pub static resolve_rvar: uint = 0b0000000010;
@ -121,7 +121,7 @@ impl<'a> ResolveState<'a> {
self.err = None; self.err = None;
debug!("Resolving {} (modes={:x})", debug!("Resolving {} (modes={:x})",
ty_to_str(self.infcx.tcx, typ), ty_to_string(self.infcx.tcx, typ),
self.modes); self.modes);
// n.b. This is a hokey mess because the current fold doesn't // n.b. This is a hokey mess because the current fold doesn't
@ -133,8 +133,8 @@ impl<'a> ResolveState<'a> {
match self.err { match self.err {
None => { None => {
debug!("Resolved to {} + {} (modes={:x})", debug!("Resolved to {} + {} (modes={:x})",
ty_to_str(self.infcx.tcx, rty), ty_to_string(self.infcx.tcx, rty),
ty_to_str(self.infcx.tcx, rty), ty_to_string(self.infcx.tcx, rty),
self.modes); self.modes);
return Ok(rty); return Ok(rty);
} }

View file

@ -22,7 +22,7 @@ use middle::typeck::infer::lub::Lub;
use middle::typeck::infer::then; use middle::typeck::infer::then;
use middle::typeck::infer::{TypeTrace, Subtype}; use middle::typeck::infer::{TypeTrace, Subtype};
use util::common::{indenter}; use util::common::{indenter};
use util::ppaux::{bound_region_to_str, Repr}; use util::ppaux::{bound_region_to_string, Repr};
use syntax::ast::{Onceness, FnStyle, MutImmutable, MutMutable}; use syntax::ast::{Onceness, FnStyle, MutImmutable, MutMutable};
@ -176,7 +176,7 @@ impl<'f> Combine for Sub<'f> {
replace_late_bound_regions_in_fn_sig(self.get_ref().infcx.tcx, b, |br| { replace_late_bound_regions_in_fn_sig(self.get_ref().infcx.tcx, b, |br| {
let skol = self.get_ref().infcx.region_vars.new_skolemized(br); let skol = self.get_ref().infcx.region_vars.new_skolemized(br);
debug!("Bound region {} skolemized to {:?}", debug!("Bound region {} skolemized to {:?}",
bound_region_to_str(self.get_ref().infcx.tcx, "", false, br), bound_region_to_string(self.get_ref().infcx.tcx, "", false, br),
skol); skol);
skol skol
}) })

Some files were not shown because too many files have changed in this diff.