
auto merge of #13877 : thestinger/rust/de-tilde-str-vec, r=alexcrichton

Merged by bors on 2014-05-01 16:06:48 -07:00 · commit 9f836d5a53
31 changed files with 186 additions and 197 deletions
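The pattern throughout the diff below is mechanical: the owned-string literal `~"..."` becomes `"...".to_owned()`, and the borrowed form `&"..."` simply drops the `&`, since a string literal is already a `&'static str`. A minimal sketch of the two rewrites, written in the pre-1.0 Rust of this commit (the `~str` type and `to_owned()` match the diff, not today's `String`):

    // Sketch of the migration (pre-1.0 Rust, ~str era).
    fn main() {
        // Owned string: `~"hello"` is gone; allocate explicitly from the literal.
        let owned: ~str = "hello".to_owned();

        // Borrowed string: `&"hello"` is gone; a literal is already a &'static str.
        let borrowed: &str = "hello";

        assert_eq!(owned.as_slice(), borrowed);
    }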


@@ -170,7 +170,7 @@ fn parse_compile_flags(line: &str) -> Option<~str> {
 }
 fn parse_run_flags(line: &str) -> Option<~str> {
-parse_name_value_directive(line, ~"run-flags")
+parse_name_value_directive(line, "run-flags".to_owned())
 }
 fn parse_debugger_cmd(line: &str) -> Option<~str> {


@@ -698,13 +698,13 @@ static INITIAL_LOAD_FACTOR: Fraction = (9, 10);
 /// book_reviews.insert("The Adventures of Sherlock Holmes", "Eye lyked it alot.");
 ///
 /// // check for a specific one.
-/// if !book_reviews.contains_key(& &"Les Misérables") {
+/// if !book_reviews.contains_key(&("Les Misérables")) {
 /// println!("We've got {} reviews, but Les Misérables ain't one.",
 /// book_reviews.len());
 /// }
 ///
 /// // oops, this review has a lot of spelling mistakes, let's delete it.
-/// book_reviews.remove(& &"The Adventures of Sherlock Holmes");
+/// book_reviews.remove(&("The Adventures of Sherlock Holmes"));
 ///
 /// // look up the values associated with some keys.
 /// let to_find = ["Pride and Prejudice", "Alice's Adventure in Wonderland"];
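With plain `&'static str` keys, `contains_key` and `remove` take a reference to the key, now written `&("...")` instead of the old `& &"..."` spelling. A small sketch of the same lookups, assuming the libcollections layout of this era (hypothetical standalone example, not part of the commit):

    // Hypothetical standalone example of the &("key") lookup pattern
    // (assumes HashMap still lives in libcollections, as it did at this commit).
    extern crate collections;
    use collections::HashMap;

    fn main() {
        let mut book_reviews = HashMap::new();
        book_reviews.insert("Les Misérables", "A long one.");
        // The key type is &'static str, so lookups take a & &str:
        if book_reviews.contains_key(&("Les Misérables")) {
            println!("got a review");
        }
        book_reviews.remove(&("Les Misérables"));
    }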


@@ -1651,10 +1651,10 @@ mod test_set {
 // FIXME: #5801: this needs a type hint to compile...
 let result: Option<(&uint, & &'static str)> = z.next();
-assert_eq!(result.unwrap(), (&5u, & &"bar"));
+assert_eq!(result.unwrap(), (&5u, &("bar")));
 let result: Option<(&uint, & &'static str)> = z.next();
-assert_eq!(result.unwrap(), (&11u, & &"foo"));
+assert_eq!(result.unwrap(), (&11u, &("foo")));
 let result: Option<(&uint, & &'static str)> = z.next();
 assert!(result.is_none());


@@ -1441,7 +1441,7 @@ mod tests {
 optmulti("l", "", "Desc", "VAL"));
 let expected =
-~"Usage: fruits
+"Usage: fruits
 Options:
 -b --banana VAL Desc
@@ -1450,7 +1450,7 @@ Options:
 -k --kiwi Desc
 -p [VAL] Desc
 -l VAL Desc
-";
+".to_owned();
 let generated_usage = usage("Usage: fruits", optgroups.as_slice());
@@ -1471,13 +1471,13 @@ Options:
 "This is a long description which _will_ be wrapped..+.."));
 let expected =
-~"Usage: fruits
+"Usage: fruits
 Options:
 -k --kiwi This is a long description which won't be wrapped..+..
 -a --apple This is a long description which _will_ be
 wrapped..+..
-";
+".to_owned();
 let usage = usage("Usage: fruits", optgroups.as_slice());
@@ -1496,14 +1496,14 @@ Options:
 confuse the line wrapping; an apple costs 0.51 in some parts of Europe."));
 let expected =
-~"Usage: fruits
+"Usage: fruits
 Options:
 -k --kw The word kiwi is normally spelled with two i's
 -a --apple This description has some characters that could
 confuse the line wrapping; an apple costs 0.51 in
 some parts of Europe.
-";
+".to_owned();
 let usage = usage("Usage: fruits", optgroups.as_slice());


@@ -220,7 +220,9 @@ impl<'a> Parser<'a> {
 try!(self.parse_group_opts())
 } else {
 self.caps += 1;
-self.stack.push(Paren(self.flags, self.caps, ~""))
+self.stack.push(Paren(self.flags,
+self.caps,
+"".to_owned()))
 }
 }
 ')' => {
@@ -769,7 +771,7 @@ impl<'a> Parser<'a> {
 }
 if self.cur() == ':' {
 // Save the old flags with the opening paren.
-self.stack.push(Paren(self.flags, 0, ~""));
+self.stack.push(Paren(self.flags, 0, "".to_owned()));
 }
 self.flags = flags;
 return Ok(())


@@ -116,7 +116,7 @@ impl<'a> NfaGen<'a> {
 |cx, name| match name {
 &Some(ref name) => {
 let name = name.as_slice();
-quote_expr!(cx, Some(~$name))
+quote_expr!(cx, Some($name.to_owned()))
 }
 &None => quote_expr!(cx, None),
 }
@@ -306,7 +306,7 @@ fn exec<'t>(which: ::regex::native::MatchKind, input: &'t str,
 }
 ::regex::Regex {
-original: ~$regex,
+original: $regex.to_owned(),
 names: vec!$cap_names,
 p: ::regex::native::Native(exec),
 }


@@ -1059,11 +1059,11 @@ pub fn build_session_(sopts: session::Options,
 pub fn parse_pretty(sess: &Session, name: &str) -> PpMode {
 match name {
-&"normal" => PpmNormal,
-&"expanded" => PpmExpanded,
-&"typed" => PpmTyped,
-&"expanded,identified" => PpmExpandedIdentified,
-&"identified" => PpmIdentified,
+"normal" => PpmNormal,
+"expanded" => PpmExpanded,
+"typed" => PpmTyped,
+"expanded,identified" => PpmExpandedIdentified,
+"identified" => PpmIdentified,
 _ => {
 sess.fatal("argument to `pretty` must be one of `normal`, \
 `expanded`, `typed`, `identified`, \
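Dropping the borrowed-literal form also simplifies matching: a `&str` such as `name` above is compared directly against string-literal patterns. A stripped-down sketch of that shape, in the same pre-1.0 Rust:

    // Sketch: matching a &str directly against literal patterns, as parse_pretty now does.
    fn classify(name: &str) -> uint {
        match name {
            "normal" => 0u,
            "expanded" => 1u,
            "typed" => 2u,
            _ => 3u,
        }
    }

    fn main() {
        assert_eq!(classify("typed"), 2u);
    }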


@@ -300,7 +300,7 @@ pub fn run_compiler(args: &[~str]) {
 None::<d::PpMode> => {/* continue */ }
 }
-if r.contains(&~"ls") {
+if r.contains(&("ls".to_owned())) {
 match input {
 d::FileInput(ref ifile) => {
 let mut stdout = io::stdout();


@@ -1036,7 +1036,7 @@ fn check_crate_attrs_usage(cx: &Context, attrs: &[ast::Attribute]) {
 if !iter.any(|other_attr| { name.equiv(other_attr) }) {
 cx.span_lint(AttributeUsage, attr.span, "unknown crate attribute");
 }
-if name.equiv(& &"link") {
+if name.equiv(&("link")) {
 cx.tcx.sess.span_err(attr.span,
 "obsolete crate `link` attribute");
 cx.tcx.sess.note("the link attribute has been superceded by the crate_id \


@@ -189,9 +189,9 @@ impl<'a, 'b> Reflector<'a, 'b> {
 ty::ty_rptr(_, ref mt) => {
 match ty::get(mt.ty).sty {
 ty::ty_vec(ref mt, None) => {
-let (name, extra) = (~"slice", Vec::new());
+let (name, extra) = ("slice".to_owned(), Vec::new());
 let extra = extra.append(self.c_mt(mt).as_slice());
-self.visit(~"evec_" + name, extra.as_slice())
+self.visit("evec_".to_owned() + name, extra.as_slice())
 }
 ty::ty_str => self.visit("estr_slice".to_owned(), &[]),
 _ => {


@@ -647,8 +647,8 @@ pub fn check_pat(pcx: &pat_ctxt, pat: &ast::Pat, expected: ty::t) {
 ty::ty_vec(mt, None) => {
 fcx.type_error_message(pat.span,
 |_| {
-~"unique vector patterns are no \
-longer supported"
+"unique vector patterns are no \
+longer supported".to_owned()
 },
 expected,
 None);


@@ -2566,10 +2566,6 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
 match expr.node {
 ast::ExprVstore(ev, vst) => {
 let typ = match ev.node {
-ast::ExprLit(lit) if ast_util::lit_is_str(lit) => {
-ast_expr_vstore_to_ty(fcx, ev, vst, || ty::mt{ ty: ty::mk_str(tcx),
-mutbl: ast::MutImmutable })
-}
 ast::ExprVec(ref args) => {
 let mutability = match vst {
 ast::ExprVstoreMutSlice => ast::MutMutable,
@@ -2622,8 +2618,16 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
 mutbl: mutability})
 }
 }
-_ =>
-tcx.sess.span_bug(expr.span, "vstore modifier on non-sequence")
+ast::ExprLit(_) => {
+let error = if vst == ast::ExprVstoreSlice {
+"`&\"string\"` has been removed; use `\"string\"` instead"
+} else {
+"`~\"string\"` has been removed; use `\"string\".to_owned()` instead"
+};
+tcx.sess.span_err(expr.span, error);
+ty::mk_err()
+}
+_ => tcx.sess.span_bug(expr.span, "vstore modifier on non-sequence"),
 };
 fcx.write_ty(ev.id, typ);
 fcx.write_ty(id, typ);
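The new arm picks between two hints based on the vstore sigil: the slice form `&"..."` gets one message, the owned forms get the other, and the expression's type is set to `ty::mk_err()` so checking can continue. A tiny standalone sketch of that selection (hypothetical helper, not code from the commit):

    // Hypothetical helper mirroring the message selection in the hunk above.
    fn removal_hint(is_slice_vstore: bool) -> &'static str {
        if is_slice_vstore {
            "`&\"string\"` has been removed; use `\"string\"` instead"
        } else {
            "`~\"string\"` has been removed; use `\"string\".to_owned()` instead"
        }
    }

    fn main() {
        println!("{}", removal_hint(true));  // the &"..." case
        println!("{}", removal_hint(false)); // the ~"..." case
    }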


@@ -1740,7 +1740,7 @@ impl<T: Iterator<char>> Builder<T> {
 Some(NumberValue(n)) => { Ok(Number(n)) }
 Some(BooleanValue(b)) => { Ok(Boolean(b)) }
 Some(StringValue(ref mut s)) => {
-let mut temp = ~"";
+let mut temp = "".to_owned();
 swap(s, &mut temp);
 Ok(String(temp))
 }
@@ -2633,16 +2633,16 @@ mod tests {
 assert_eq!(from_str("\""), Err(SyntaxError(EOFWhileParsingString, 1, 2)));
 assert_eq!(from_str("\"lol"), Err(SyntaxError(EOFWhileParsingString, 1, 5)));
-assert_eq!(from_str("\"\""), Ok(String(~"")));
-assert_eq!(from_str("\"foo\""), Ok(String(~"foo")));
-assert_eq!(from_str("\"\\\"\""), Ok(String(~"\"")));
-assert_eq!(from_str("\"\\b\""), Ok(String(~"\x08")));
-assert_eq!(from_str("\"\\n\""), Ok(String(~"\n")));
-assert_eq!(from_str("\"\\r\""), Ok(String(~"\r")));
-assert_eq!(from_str("\"\\t\""), Ok(String(~"\t")));
-assert_eq!(from_str(" \"foo\" "), Ok(String(~"foo")));
-assert_eq!(from_str("\"\\u12ab\""), Ok(String(~"\u12ab")));
-assert_eq!(from_str("\"\\uAB12\""), Ok(String(~"\uAB12")));
+assert_eq!(from_str("\"\""), Ok(String("".to_owned())));
+assert_eq!(from_str("\"foo\""), Ok(String("foo".to_owned())));
+assert_eq!(from_str("\"\\\"\""), Ok(String("\"".to_owned())));
+assert_eq!(from_str("\"\\b\""), Ok(String("\x08".to_owned())));
+assert_eq!(from_str("\"\\n\""), Ok(String("\n".to_owned())));
+assert_eq!(from_str("\"\\r\""), Ok(String("\r".to_owned())));
+assert_eq!(from_str("\"\\t\""), Ok(String("\t".to_owned())));
+assert_eq!(from_str(" \"foo\" "), Ok(String("foo".to_owned())));
+assert_eq!(from_str("\"\\u12ab\""), Ok(String("\u12ab".to_owned())));
+assert_eq!(from_str("\"\\uAB12\""), Ok(String("\uAB12".to_owned())));
 }
 #[test]
@@ -2890,7 +2890,7 @@ mod tests {
 fn test_find(){
 let json_value = from_str("{\"dog\" : \"cat\"}").unwrap();
 let found_str = json_value.find(&"dog".to_owned());
-assert!(found_str.is_some() && found_str.unwrap().as_string().unwrap() == &"cat");
+assert!(found_str.is_some() && found_str.unwrap().as_string().unwrap() == "cat");
 }
 #[test]
@@ -2898,7 +2898,7 @@ mod tests {
 let json_value = from_str("{\"dog\":{\"cat\": {\"mouse\" : \"cheese\"}}}").unwrap();
 let found_str = json_value.find_path(&[&"dog".to_owned(),
 &"cat".to_owned(), &"mouse".to_owned()]);
-assert!(found_str.is_some() && found_str.unwrap().as_string().unwrap() == &"cheese");
+assert!(found_str.is_some() && found_str.unwrap().as_string().unwrap() == "cheese");
 }
 #[test]
@@ -2906,7 +2906,7 @@ mod tests {
 let json_value = from_str("{\"dog\":{\"cat\": {\"mouse\" : \"cheese\"}}}").unwrap();
 let found_str = json_value.search(&"mouse".to_owned()).and_then(|j| j.as_string());
 assert!(found_str.is_some());
-assert!(found_str.unwrap() == &"cheese");
+assert!(found_str.unwrap() == "cheese");
 }
 #[test]
@@ -2946,7 +2946,7 @@ mod tests {
 fn test_as_string(){
 let json_value = from_str("\"dog\"").unwrap();
 let json_str = json_value.as_string();
-let expected_str = &"dog";
+let expected_str = "dog";
 assert_eq!(json_str, Some(expected_str));
 }
@@ -3067,7 +3067,7 @@ mod tests {
 r#"{ "foo":"bar", "array" : [0, 1, 2,3 ,4,5], "idents":[null,true,false]}"#,
 ~[
 (ObjectStart, ~[]),
-(StringValue(~"bar"), ~[Key("foo")]),
+(StringValue("bar".to_owned()), ~[Key("foo")]),
 (ListStart, ~[Key("array")]),
 (NumberValue(0.0), ~[Key("array"), Index(0)]),
 (NumberValue(1.0), ~[Key("array"), Index(1)]),
@@ -3155,7 +3155,7 @@ mod tests {
 (NumberValue(1.0), ~[Key("a")]),
 (ListStart, ~[Key("b")]),
 (BooleanValue(true), ~[Key("b"), Index(0)]),
-(StringValue(~"foo\nbar"), ~[Key("b"), Index(1)]),
+(StringValue("foo\nbar".to_owned()), ~[Key("b"), Index(1)]),
 (ObjectStart, ~[Key("b"), Index(2)]),
 (ObjectStart, ~[Key("b"), Index(2), Key("c")]),
 (NullValue, ~[Key("b"), Index(2), Key("c"), Key("d")]),
@@ -3287,7 +3287,7 @@ mod tests {
 assert!(stack.last_is_index());
 assert!(stack.get(0) == Index(1));
-stack.push_key(~"foo");
+stack.push_key("foo".to_owned());
 assert!(stack.len() == 2);
 assert!(stack.is_equal_to([Index(1), Key("foo")]));
@@ -3299,7 +3299,7 @@ mod tests {
 assert!(stack.get(0) == Index(1));
 assert!(stack.get(1) == Key("foo"));
-stack.push_key(~"bar");
+stack.push_key("bar".to_owned());
 assert!(stack.len() == 3);
 assert!(stack.is_equal_to([Index(1), Key("foo"), Key("bar")]));
@@ -3363,7 +3363,7 @@ mod tests {
 }
 fn big_json() -> ~str {
-let mut src = ~"[\n";
+let mut src = "[\n".to_owned();
 for _ in range(0, 500) {
 src = src + r#"{ "a": true, "b": null, "c":3.1415, "d": "Hello world", "e": [1,2,3]},"#;
 }


@@ -62,7 +62,7 @@
 //! let mut flags = FlagA | FlagB;
 //! flags.clear();
 //! assert!(flags.is_empty());
-//! assert_eq!(format!("{}", flags), ~"hi!");
+//! assert_eq!(format!("{}", flags).as_slice(), "hi!");
 //! }
 //! ~~~
 //!
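Since `format!` returns an owned string, comparing its result against a literal now goes through `.as_slice()` rather than building a `~"..."` on the right-hand side; the path and macro tests further down follow the same idiom. A one-line sketch:

    // Sketch: compare a format!-produced owned string with a literal via as_slice().
    fn main() {
        let rendered = format!("{}", 42);
        assert_eq!(rendered.as_slice(), "42");
    }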


@@ -343,7 +343,7 @@ mod tests {
 assert_eq!(hasher.hash(&'a'), 97);
-assert_eq!(hasher.hash(& &"a"), 97 + 0xFF);
+assert_eq!(hasher.hash(&("a")), 97 + 0xFF);
 assert_eq!(hasher.hash(& &[1u8, 2u8, 3u8]), 9);
 unsafe {


@@ -555,7 +555,7 @@ mod tests {
 ($path:expr, $disp:ident, $exp:expr) => (
 {
 let path = Path::new($path);
-assert!(path.$disp().to_str() == ~$exp);
+assert!(path.$disp().to_str().as_slice() == $exp);
 }
 )
 )


@@ -637,7 +637,7 @@ fn test_repr() {
 exact_test(&true, "true");
 exact_test(&false, "false");
 exact_test(&1.234, "1.234f64");
-exact_test(&(&"hello"), "\"hello\"");
+exact_test(&("hello"), "\"hello\"");
 // FIXME What do I do about this one?
 exact_test(&("he\u10f3llo".to_owned()), "~\"he\\u10f3llo\"");


@@ -2025,12 +2025,12 @@ pub trait StrSlice<'a> {
 /// # Example
 ///
 /// ```rust
-/// let s = ~"Do you know the muffin man,
-/// The muffin man, the muffin man, ...";
+/// let s = "Do you know the muffin man,
+/// The muffin man, the muffin man, ...".to_owned();
 ///
 /// assert_eq!(s.replace("muffin man", "little lamb"),
-/// ~"Do you know the little lamb,
-/// The little lamb, the little lamb, ...");
+/// "Do you know the little lamb,
+/// The little lamb, the little lamb, ...".to_owned());
 ///
 /// // not found, so no change.
 /// assert_eq!(s.replace("cookie monster", "little lamb"), s);
@@ -3604,11 +3604,11 @@ mod tests {
 #[test]
 fn test_total_ord() {
-"1234".cmp(& &"123") == Greater;
-"123".cmp(& &"1234") == Less;
-"1234".cmp(& &"1234") == Equal;
-"12345555".cmp(& &"123456") == Less;
-"22".cmp(& &"1234") == Greater;
+"1234".cmp(&("123")) == Greater;
+"123".cmp(&("1234")) == Less;
+"1234".cmp(&("1234")) == Equal;
+"12345555".cmp(&("123456")) == Less;
+"22".cmp(&("1234")) == Greater;
 }
 #[test]
@@ -4005,7 +4005,7 @@ mod tests {
 #[test]
 fn test_from_str() {
-let owned: Option<~str> = from_str(&"string");
+let owned: Option<~str> = from_str("string");
 assert_eq!(owned, Some("string".to_owned()));
 }


@@ -242,7 +242,7 @@ impl<T: Clone> Vec<T> {
 ///
 /// ```rust
 /// let mut vec = vec!("hello");
-/// vec.grow(2, & &"world");
+/// vec.grow(2, &("world"));
 /// assert_eq!(vec, vec!("hello", "world", "world"));
 /// ```
 pub fn grow(&mut self, n: uint, value: &T) {
@@ -267,8 +267,8 @@ impl<T: Clone> Vec<T> {
 ///
 /// ```rust
 /// let mut vec = vec!("a", "b", "c");
-/// vec.grow_set(1, & &"fill", "d");
-/// vec.grow_set(4, & &"fill", "e");
+/// vec.grow_set(1, &("fill"), "d");
+/// vec.grow_set(4, &("fill"), "e");
 /// assert_eq!(vec, vec!("a", "d", "c", "fill", "e"));
 /// ```
 pub fn grow_set(&mut self, index: uint, initval: &T, value: T) {


@@ -270,22 +270,22 @@ pub fn syntax_expander_table() -> SyntaxEnv {
 }
 let mut syntax_expanders = SyntaxEnv::new();
-syntax_expanders.insert(intern(&"macro_rules"),
+syntax_expanders.insert(intern("macro_rules"),
 IdentTT(~BasicIdentMacroExpander {
 expander: ext::tt::macro_rules::add_new_extension,
 span: None,
 },
 None));
-syntax_expanders.insert(intern(&"fmt"),
+syntax_expanders.insert(intern("fmt"),
 builtin_normal_expander(
 ext::fmt::expand_syntax_ext));
-syntax_expanders.insert(intern(&"format_args"),
+syntax_expanders.insert(intern("format_args"),
 builtin_normal_expander(
 ext::format::expand_args));
-syntax_expanders.insert(intern(&"env"),
+syntax_expanders.insert(intern("env"),
 builtin_normal_expander(
 ext::env::expand_env));
-syntax_expanders.insert(intern(&"option_env"),
+syntax_expanders.insert(intern("option_env"),
 builtin_normal_expander(
 ext::env::expand_option_env));
 syntax_expanders.insert(intern("bytes"),
@@ -297,63 +297,63 @@ pub fn syntax_expander_table() -> SyntaxEnv {
 syntax_expanders.insert(intern("concat"),
 builtin_normal_expander(
 ext::concat::expand_syntax_ext));
-syntax_expanders.insert(intern(&"log_syntax"),
+syntax_expanders.insert(intern("log_syntax"),
 builtin_normal_expander(
 ext::log_syntax::expand_syntax_ext));
-syntax_expanders.insert(intern(&"deriving"),
+syntax_expanders.insert(intern("deriving"),
 ItemDecorator(ext::deriving::expand_meta_deriving));
 // Quasi-quoting expanders
-syntax_expanders.insert(intern(&"quote_tokens"),
+syntax_expanders.insert(intern("quote_tokens"),
 builtin_normal_expander(
 ext::quote::expand_quote_tokens));
-syntax_expanders.insert(intern(&"quote_expr"),
+syntax_expanders.insert(intern("quote_expr"),
 builtin_normal_expander(
 ext::quote::expand_quote_expr));
-syntax_expanders.insert(intern(&"quote_ty"),
+syntax_expanders.insert(intern("quote_ty"),
 builtin_normal_expander(
 ext::quote::expand_quote_ty));
-syntax_expanders.insert(intern(&"quote_item"),
+syntax_expanders.insert(intern("quote_item"),
 builtin_normal_expander(
 ext::quote::expand_quote_item));
-syntax_expanders.insert(intern(&"quote_pat"),
+syntax_expanders.insert(intern("quote_pat"),
 builtin_normal_expander(
 ext::quote::expand_quote_pat));
-syntax_expanders.insert(intern(&"quote_stmt"),
+syntax_expanders.insert(intern("quote_stmt"),
 builtin_normal_expander(
 ext::quote::expand_quote_stmt));
-syntax_expanders.insert(intern(&"line"),
+syntax_expanders.insert(intern("line"),
 builtin_normal_expander(
 ext::source_util::expand_line));
-syntax_expanders.insert(intern(&"col"),
+syntax_expanders.insert(intern("col"),
 builtin_normal_expander(
 ext::source_util::expand_col));
-syntax_expanders.insert(intern(&"file"),
+syntax_expanders.insert(intern("file"),
 builtin_normal_expander(
 ext::source_util::expand_file));
-syntax_expanders.insert(intern(&"stringify"),
+syntax_expanders.insert(intern("stringify"),
 builtin_normal_expander(
 ext::source_util::expand_stringify));
-syntax_expanders.insert(intern(&"include"),
+syntax_expanders.insert(intern("include"),
 builtin_normal_expander(
 ext::source_util::expand_include));
-syntax_expanders.insert(intern(&"include_str"),
+syntax_expanders.insert(intern("include_str"),
 builtin_normal_expander(
 ext::source_util::expand_include_str));
-syntax_expanders.insert(intern(&"include_bin"),
+syntax_expanders.insert(intern("include_bin"),
 builtin_normal_expander(
 ext::source_util::expand_include_bin));
-syntax_expanders.insert(intern(&"module_path"),
+syntax_expanders.insert(intern("module_path"),
 builtin_normal_expander(
 ext::source_util::expand_mod));
-syntax_expanders.insert(intern(&"asm"),
+syntax_expanders.insert(intern("asm"),
 builtin_normal_expander(
 ext::asm::expand_asm));
-syntax_expanders.insert(intern(&"cfg"),
+syntax_expanders.insert(intern("cfg"),
 builtin_normal_expander(
 ext::cfg::expand_cfg));
-syntax_expanders.insert(intern(&"trace_macros"),
+syntax_expanders.insert(intern("trace_macros"),
 builtin_normal_expander(
 ext::trace_macros::expand_trace_macros));
 syntax_expanders


@@ -2783,24 +2783,7 @@ impl<'a> Parser<'a> {
 let lo = self.span.lo;
 self.expect_and();
 let sub = self.parse_pat();
-hi = sub.span.hi;
-// HACK: parse &"..." as a literal of a borrowed str
-pat = match sub.node {
-PatLit(e) => {
-match e.node {
-ExprLit(lit) if lit_is_str(lit) => {
-let vst = @Expr {
-id: ast::DUMMY_NODE_ID,
-node: ExprVstore(e, ExprVstoreSlice),
-span: mk_sp(lo, hi)
-};
-PatLit(vst)
-}
-_ => PatRegion(sub),
-}
-}
-_ => PatRegion(sub),
-};
+pat = PatRegion(sub);
 hi = self.last_span.hi;
 return @ast::Pat {
 id: ast::DUMMY_NODE_ID,


@@ -534,7 +534,7 @@ impl<T: Writer> ConsoleTestState<T> {
 pub fn write_run_start(&mut self, len: uint) -> io::IoResult<()> {
 self.total = len;
-let noun = if len != 1 { &"tests" } else { &"test" };
+let noun = if len != 1 { "tests" } else { "test" };
 self.write_plain(format!("\nrunning {} {}\n", len, noun))
 }


@@ -185,7 +185,7 @@ fn encode_inner(s: &str, full_url: bool) -> ~str {
 * ```rust
 * use url::encode;
 *
-* let url = encode(&"https://example.com/Rust (programming language)");
+* let url = encode("https://example.com/Rust (programming language)");
 * println!("{}", url); // https://example.com/Rust%20(programming%20language)
 * ```
 */
@@ -260,7 +260,7 @@ fn decode_inner(s: &str, full_url: bool) -> ~str {
 * ```rust
 * use url::decode;
 *
-* let url = decode(&"https://example.com/Rust%20(programming%20language)");
+* let url = decode("https://example.com/Rust%20(programming%20language)");
 * println!("{}", url); // https://example.com/Rust (programming language)
 * ```
 */


@@ -30,7 +30,7 @@ impl Drop for S {
 }
 fn move_in_match() {
-match S {f:~"foo", g:~"bar"} {
+match S {f: "foo".to_owned(), g: "bar".to_owned()} {
 S { //~ ERROR cannot move out of type `S`, which defines the `Drop` trait
 f: _s, //~ NOTE attempting to move value to here
 g: _t //~ NOTE and here


@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
-static a: &'static str = &"foo";
+static a: &'static str = "foo";
 static b: *u8 = a as *u8; //~ ERROR non-scalar cast
 static c: *u8 = &a as *u8; //~ ERROR mismatched types


@@ -33,7 +33,7 @@ pub fn main() {
 (&[1]).test_imm();
 ("test").test_imm();
 ("test".to_owned()).test_imm();
-(&"test").test_imm();
+("test").test_imm();
 // FIXME: Other types of mutable vecs don't currently exist


@@ -10,9 +10,9 @@
 pub fn main() {
-let x = &"hello";
-let v = &"hello";
-let y : &str = &"there";
+let x = "hello";
+let v = "hello";
+let y : &str = "there";
 println!("{}", x);
 println!("{}", y);
@@ -20,15 +20,15 @@ pub fn main() {
 assert_eq!(x[0], 'h' as u8);
 assert_eq!(x[4], 'o' as u8);
-let z : &str = &"thing";
+let z : &str = "thing";
 assert_eq!(v, x);
 assert!(x != z);
-let a = &"aaaa";
-let b = &"bbbb";
-let c = &"cccc";
-let cc = &"ccccc";
+let a = "aaaa";
+let b = "bbbb";
+let c = "cccc";
+let cc = "ccccc";
 println!("{}", a);


@@ -16,7 +16,7 @@ fn perform_hax<T: 'static>(x: ~T) -> ~hax: {
 }
 fn deadcode() {
-perform_hax(~~"deadcode");
+perform_hax(~"deadcode".to_owned());
 }
 pub fn main() {


@@ -16,7 +16,7 @@ fn perform_hax<T: 'static>(x: ~T) -> ~hax: {
 }
 fn deadcode() {
-perform_hax(~~"deadcode");
+perform_hax(~"deadcode".to_owned());
 }
 pub fn main() {


@@ -44,15 +44,15 @@ fn g2(ref_1: &str, ref_2: &str) -> ~str {
 pub fn main() {
 assert_eq!(f1("b".to_owned()), "found b".to_owned());
-assert_eq!(f1(&"c"), "not found".to_owned());
+assert_eq!(f1("c"), "not found".to_owned());
 assert_eq!(f1("d"), "not found".to_owned());
 assert_eq!(f2("b".to_owned()), "found b".to_owned());
-assert_eq!(f2(&"c"), "not found (c)".to_owned());
+assert_eq!(f2("c"), "not found (c)".to_owned());
 assert_eq!(f2("d"), "not found (d)".to_owned());
 assert_eq!(g1("b".to_owned(), "c".to_owned()), "found b,c".to_owned());
-assert_eq!(g1(&"c", &"d"), "not found".to_owned());
+assert_eq!(g1("c", "d"), "not found".to_owned());
 assert_eq!(g1("d", "e"), "not found".to_owned());
 assert_eq!(g2("b".to_owned(), "c".to_owned()), "found b,c".to_owned());
-assert_eq!(g2(&"c", &"d"), "not found (c, d)".to_owned());
+assert_eq!(g2("c", "d"), "not found (c, d)".to_owned());
 assert_eq!(g2("d", "e"), "not found (d, e)".to_owned());
 }


@@ -22,8 +22,8 @@ macro_rules! check {
 static S: $t = $e;
 let v: $t = $e;
 assert_eq!(S, v);
-assert_eq!(format!("{:?}", v), ~$s);
-assert_eq!(format!("{:?}", S), ~$s);
+assert_eq!(format!("{:?}", v).as_slice(), $s);
+assert_eq!(format!("{:?}", S).as_slice(), $s);
 });*
 }}
 }