
Change some instances of .connect() to .join()

commit 93ddee6cee (parent 29c0c956bf)
Author: Wesley Wiser
Date:   2015-07-10 08:19:21 -04:00

39 changed files with 87 additions and 87 deletions
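
The change itself is mechanical: the standard library renamed SliceConcatExt::connect to join (connect survives for a while as a deprecated alias), so every call site switches to the new name with no change in behavior. As a rough illustration, here is a minimal sketch (not part of the commit, and assuming a toolchain where join has already landed) that mirrors the assertions in the updated tests below:

    fn main() {
        // join inserts the separator between elements, exactly as the
        // deprecated connect did for collections of strings...
        let words = ["a", "b", "c"];
        assert_eq!(words.join("-"), "a-b-c");

        // ...and for slices of slices, where the separator is a single element.
        let nested: &[&[i32]] = &[&[1], &[2, 3]];
        assert_eq!(nested.join(&0), vec![1, 0, 2, 3]);
    }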


@@ -344,7 +344,7 @@ fn run_debuginfo_gdb_test(config: &Config, props: &TestProps, testfile: &Path) {
 check_lines,
 breakpoint_lines
 } = parse_debugger_commands(testfile, "gdb");
-let mut cmds = commands.connect("\n");
+let mut cmds = commands.join("\n");
 // compile test file (it should have 'compile-flags:-g' in the header)
 let compiler_run_result = compile_test(config, props, testfile);
@@ -799,7 +799,7 @@ fn cleanup_debug_info_options(options: &Option<String>) -> Option<String> {
 split_maybe_args(options).into_iter()
 .filter(|x| !options_to_remove.contains(x))
 .collect::<Vec<String>>()
-.connect(" ");
+.join(" ");
 Some(new_options)
 }
@@ -1412,7 +1412,7 @@ fn make_cmdline(libpath: &str, prog: &str, args: &[String]) -> String {
 // Linux and mac don't require adjusting the library search path
 if cfg!(unix) {
-format!("{} {}", prog, args.connect(" "))
+format!("{} {}", prog, args.join(" "))
 } else {
 // Build the LD_LIBRARY_PATH variable as it would be seen on the command line
 // for diagnostic purposes
@@ -1420,7 +1420,7 @@ fn make_cmdline(libpath: &str, prog: &str, args: &[String]) -> String {
 format!("{}=\"{}\"", util::lib_path_env_var(), util::make_new_path(path))
 }
-format!("{} {} {}", lib_path_cmd_prefix(libpath), prog, args.connect(" "))
+format!("{} {} {}", lib_path_cmd_prefix(libpath), prog, args.join(" "))
 }
 }


@@ -606,22 +606,22 @@ fn test_concat() {
 assert_eq!(d, [1, 2, 3]);
 let v: &[&[_]] = &[&[1], &[2, 3]];
-assert_eq!(v.connect(&0), [1, 0, 2, 3]);
+assert_eq!(v.join(&0), [1, 0, 2, 3]);
 let v: &[&[_]] = &[&[1], &[2], &[3]];
-assert_eq!(v.connect(&0), [1, 0, 2, 0, 3]);
+assert_eq!(v.join(&0), [1, 0, 2, 0, 3]);
 }
 #[test]
-fn test_connect() {
+fn test_join() {
 let v: [Vec<i32>; 0] = [];
-assert_eq!(v.connect(&0), []);
-assert_eq!([vec![1], vec![2, 3]].connect(&0), [1, 0, 2, 3]);
-assert_eq!([vec![1], vec![2], vec![3]].connect(&0), [1, 0, 2, 0, 3]);
+assert_eq!(v.join(&0), []);
+assert_eq!([vec![1], vec![2, 3]].join(&0), [1, 0, 2, 3]);
+assert_eq!([vec![1], vec![2], vec![3]].join(&0), [1, 0, 2, 0, 3]);
 let v: [&[_]; 2] = [&[1], &[2, 3]];
-assert_eq!(v.connect(&0), [1, 0, 2, 3]);
+assert_eq!(v.join(&0), [1, 0, 2, 3]);
 let v: [&[_]; 3] = [&[1], &[2], &[3]];
-assert_eq!(v.connect(&0), [1, 0, 2, 0, 3]);
+assert_eq!(v.join(&0), [1, 0, 2, 0, 3]);
 }
 #[test]
@@ -1339,11 +1339,11 @@ mod bench {
 }
 #[bench]
-fn connect(b: &mut Bencher) {
+fn join(b: &mut Bencher) {
 let xss: Vec<Vec<i32>> =
 (0..100).map(|i| (0..i).collect()).collect();
 b.iter(|| {
-xss.connect(&0)
+xss.join(&0)
 });
 }


@@ -158,32 +158,32 @@ fn test_concat_for_different_lengths() {
 test_concat!("abc", ["", "a", "bc"]);
 }
-macro_rules! test_connect {
+macro_rules! test_join {
 ($expected: expr, $string: expr, $delim: expr) => {
 {
-let s = $string.connect($delim);
+let s = $string.join($delim);
 assert_eq!($expected, s);
 }
 }
 }
 #[test]
-fn test_connect_for_different_types() {
-test_connect!("a-b", ["a", "b"], "-");
+fn test_join_for_different_types() {
+test_join!("a-b", ["a", "b"], "-");
 let hyphen = "-".to_string();
-test_connect!("a-b", [s("a"), s("b")], &*hyphen);
-test_connect!("a-b", vec!["a", "b"], &*hyphen);
-test_connect!("a-b", &*vec!["a", "b"], "-");
-test_connect!("a-b", vec![s("a"), s("b")], "-");
+test_join!("a-b", [s("a"), s("b")], &*hyphen);
+test_join!("a-b", vec!["a", "b"], &*hyphen);
+test_join!("a-b", &*vec!["a", "b"], "-");
+test_join!("a-b", vec![s("a"), s("b")], "-");
 }
 #[test]
-fn test_connect_for_different_lengths() {
+fn test_join_for_different_lengths() {
 let empty: &[&str] = &[];
-test_connect!("", empty, "-");
-test_connect!("a", ["a"], "-");
-test_connect!("a-b", ["a", "b"], "-");
-test_connect!("-a-bc", ["", "a", "bc"], "-");
+test_join!("", empty, "-");
+test_join!("a", ["a"], "-");
+test_join!("a-b", ["a", "b"], "-");
+test_join!("-a-bc", ["", "a", "bc"], "-");
 }
 #[test]
@@ -2081,12 +2081,12 @@ mod bench {
 }
 #[bench]
-fn bench_connect(b: &mut Bencher) {
+fn bench_join(b: &mut Bencher) {
 let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb";
 let sep = "";
 let v = vec![s, s, s, s, s, s, s, s, s, s];
 b.iter(|| {
-assert_eq!(v.connect(sep).len(), s.len() * 10 + sep.len() * 9);
+assert_eq!(v.join(sep).len(), s.len() * 10 + sep.len() * 9);
 })
 }


@@ -784,13 +784,13 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String {
 // FIXME: #5516 should be graphemes not codepoints
 // wrapped description
-row.push_str(&desc_rows.connect(&desc_sep[..]));
+row.push_str(&desc_rows.join(&desc_sep[..]));
 row
 });
 format!("{}\n\nOptions:\n{}\n", brief,
-rows.collect::<Vec<String>>().connect("\n"))
+rows.collect::<Vec<String>>().join("\n"))
 }
 fn format_option(opt: &OptGroup) -> String {
@@ -836,7 +836,7 @@ pub fn short_usage(program_name: &str, opts: &[OptGroup]) -> String {
 line.push_str(&opts.iter()
 .map(format_option)
 .collect::<Vec<String>>()
-.connect(" ")[..]);
+.join(" ")[..]);
 line
 }


@@ -2028,7 +2028,7 @@ fn encode_dylib_dependency_formats(rbml_w: &mut Encoder, ecx: &EncodeContext) {
 cstore::RequireStatic => "s",
 })).to_string())
 }).collect::<Vec<String>>();
-rbml_w.wr_tagged_str(tag, &s.connect(","));
+rbml_w.wr_tagged_str(tag, &s.join(","));
 }
 None => {
 rbml_w.wr_tagged_str(tag, "");


@@ -1098,7 +1098,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
 pub fn tys_to_string(&self, ts: &[Ty<'tcx>]) -> String {
 let tstrs: Vec<String> = ts.iter().map(|t| self.ty_to_string(*t)).collect();
-format!("({})", tstrs.connect(", "))
+format!("({})", tstrs.join(", "))
 }
 pub fn trait_ref_to_string(&self, t: &ty::TraitRef<'tcx>) -> String {


@@ -843,7 +843,7 @@ fn write_out_deps(sess: &Session,
 let mut file = try!(fs::File::create(&deps_filename));
 for path in &out_filenames {
 try!(write!(&mut file,
-"{}: {}\n\n", path.display(), files.connect(" ")));
+"{}: {}\n\n", path.display(), files.join(" ")));
 }
 Ok(())
 })();


@@ -606,7 +606,7 @@ Available lint options:
 for (name, to) in lints {
 let name = name.to_lowercase().replace("_", "-");
 let desc = to.into_iter().map(|x| x.as_str().replace("_", "-"))
-.collect::<Vec<String>>().connect(", ");
+.collect::<Vec<String>>().join(", ");
 println!(" {} {}",
 padded(&name[..]), desc);
 }


@@ -378,7 +378,7 @@ impl UserIdentifiedItem {
 fn reconstructed_input(&self) -> String {
 match *self {
 ItemViaNode(node_id) => node_id.to_string(),
-ItemViaPath(ref parts) => parts.connect("::"),
+ItemViaPath(ref parts) => parts.join("::"),
 }
 }


@@ -178,7 +178,7 @@ impl<'a, 'tcx> Env<'a, 'tcx> {
 return match search_mod(self, &self.infcx.tcx.map.krate().module, 0, names) {
 Some(id) => id,
 None => {
-panic!("no item found: `{}`", names.connect("::"));
+panic!("no item found: `{}`", names.join("::"));
 }
 };


@@ -923,7 +923,7 @@ impl NonSnakeCase {
 }
 words.push(buf);
 }
-words.connect("_")
+words.join("_")
 }
 fn check_snake_case(&self, cx: &Context, sort: &str, name: &str, span: Option<Span>) {


@@ -936,7 +936,7 @@ fn compile_guard<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
 bcx.to_str(),
 guard_expr,
 m,
-vals.iter().map(|v| bcx.val_to_string(*v)).collect::<Vec<_>>().connect(", "));
+vals.iter().map(|v| bcx.val_to_string(*v)).collect::<Vec<_>>().join(", "));
 let _indenter = indenter();
 let mut bcx = insert_lllocals(bcx, &data.bindings_map, None);
@@ -981,7 +981,7 @@ fn compile_submatch<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
 debug!("compile_submatch(bcx={}, m={:?}, vals=[{}])",
 bcx.to_str(),
 m,
-vals.iter().map(|v| bcx.val_to_string(*v)).collect::<Vec<_>>().connect(", "));
+vals.iter().map(|v| bcx.val_to_string(*v)).collect::<Vec<_>>().join(", "));
 let _indenter = indenter();
 let _icx = push_ctxt("match::compile_submatch");
 let mut bcx = bcx;


@@ -92,7 +92,7 @@ pub fn trans_inline_asm<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ia: &ast::InlineAsm)
 .chain(arch_clobbers.iter()
 .map(|s| s.to_string()))
 .collect::<Vec<String>>()
-.connect(",");
+.join(",");
 debug!("Asm Constraints: {}", &all_constraints[..]);


@@ -148,7 +148,7 @@ pub fn Invoke(cx: Block,
 terminate(cx, "Invoke");
 debug!("Invoke({} with arguments ({}))",
 cx.val_to_string(fn_),
-args.iter().map(|a| cx.val_to_string(*a)).collect::<Vec<String>>().connect(", "));
+args.iter().map(|a| cx.val_to_string(*a)).collect::<Vec<String>>().join(", "));
 debug_loc.apply(cx.fcx);
 B(cx).invoke(fn_, args, then, catch, attributes)
 }


@@ -167,7 +167,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
 args.iter()
 .map(|&v| self.ccx.tn().val_to_string(v))
 .collect::<Vec<String>>()
-.connect(", "));
+.join(", "));
 unsafe {
 let v = llvm::LLVMBuildInvoke(self.llbuilder,
@@ -809,7 +809,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
 args.iter()
 .map(|&v| self.ccx.tn().val_to_string(v))
 .collect::<Vec<String>>()
-.connect(", "));
+.join(", "));
 unsafe {
 let v = llvm::LLVMBuildCall(self.llbuilder, llfn, args.as_ptr(),


@@ -1443,7 +1443,7 @@ impl<'tcx> EnumMemberDescriptionFactory<'tcx> {
 let discrfield = discrfield.iter()
 .skip(1)
 .map(|x| x.to_string())
-.collect::<Vec<_>>().connect("$");
+.collect::<Vec<_>>().join("$");
 let union_member_name = format!("RUST$ENCODED$ENUM${}${}",
 discrfield,
 null_variant_name);


@@ -19,7 +19,7 @@ pub trait LlvmRepr {
 impl<T:LlvmRepr> LlvmRepr for [T] {
 fn llrepr(&self, ccx: &CrateContext) -> String {
 let reprs: Vec<String> = self.iter().map(|t| t.llrepr(ccx)).collect();
-format!("[{}]", reprs.connect(","))
+format!("[{}]", reprs.join(","))
 }
 }


@@ -322,7 +322,7 @@ impl TypeNames {
 pub fn types_to_str(&self, tys: &[Type]) -> String {
 let strs: Vec<String> = tys.iter().map(|t| self.type_to_string(*t)).collect();
-format!("[{}]", strs.connect(","))
+format!("[{}]", strs.join(","))
 }
 pub fn val_to_string(&self, val: ValueRef) -> String {


@@ -457,7 +457,7 @@ fn llvm_type_name<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
 let tstr = if strings.is_empty() {
 base
 } else {
-format!("{}<{}>", base, strings.connect(", "))
+format!("{}<{}>", base, strings.join(", "))
 };
 if did.krate == 0 {


@@ -134,7 +134,7 @@ pub fn report_error<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
 p.self_ty(),
 p))
 .collect::<Vec<_>>()
-.connect(", ");
+.join(", ");
 cx.sess.fileline_note(
 span,
 &format!("the method `{}` exists but the \


@@ -1002,7 +1002,7 @@ fn check_impl_items_against_trait<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
 "not all trait items implemented, missing: `{}`",
 missing_items.iter()
 .map(<ast::Name>::as_str)
-.collect::<Vec<_>>().connect("`, `"))
+.collect::<Vec<_>>().join("`, `"))
 }
 if !invalidated_items.is_empty() {
@@ -1013,7 +1013,7 @@ fn check_impl_items_against_trait<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
 invalidator.ident.as_str(),
 invalidated_items.iter()
 .map(<ast::Name>::as_str)
-.collect::<Vec<_>>().connect("`, `"))
+.collect::<Vec<_>>().join("`, `"))
 }
 }
@@ -2868,7 +2868,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
 span_err!(tcx.sess, span, E0063,
 "missing field{}: {}",
 if missing_fields.len() == 1 {""} else {"s"},
-missing_fields.connect(", "));
+missing_fields.join(", "));
 }
 }


@@ -514,7 +514,7 @@ impl<'a, 'tcx> CoherenceChecker<'a, 'tcx> {
 } else {
 name.to_string()
 }, a, b)
-}).collect::<Vec<_>>().connect(", "));
+}).collect::<Vec<_>>().join(", "));
 return;
 }


@@ -1536,7 +1536,7 @@ fn convert_typed_item<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
 d => format!("{:?}", d),
 })
 .collect::<Vec<String>>()
-.connect(",");
+.join(",");
 tcx.sess.span_err(it.span, &object_lifetime_default_reprs);
 }


@@ -2536,12 +2536,12 @@ fn name_from_pat(p: &ast::Pat) -> String {
 format!("{} {{ {}{} }}", path_to_string(name),
 fields.iter().map(|&Spanned { node: ref fp, .. }|
 format!("{}: {}", fp.ident.as_str(), name_from_pat(&*fp.pat)))
-.collect::<Vec<String>>().connect(", "),
+.collect::<Vec<String>>().join(", "),
 if etc { ", ..." } else { "" }
 )
 },
 PatTup(ref elts) => format!("({})", elts.iter().map(|p| name_from_pat(&**p))
-.collect::<Vec<String>>().connect(", ")),
+.collect::<Vec<String>>().join(", ")),
 PatBox(ref p) => name_from_pat(&**p),
 PatRegion(ref p, _) => name_from_pat(&**p),
 PatLit(..) => {
@@ -2555,7 +2555,7 @@ fn name_from_pat(p: &ast::Pat) -> String {
 let begin = begin.iter().map(|p| name_from_pat(&**p));
 let mid = mid.as_ref().map(|p| format!("..{}", name_from_pat(&**p))).into_iter();
 let end = end.iter().map(|p| name_from_pat(&**p));
-format!("[{}]", begin.chain(mid).chain(end).collect::<Vec<_>>().connect(", "))
+format!("[{}]", begin.chain(mid).chain(end).collect::<Vec<_>>().join(", "))
 },
 PatMac(..) => {
 warn!("can't document the name of a function argument \


@@ -360,7 +360,7 @@ fn resolved_path(w: &mut fmt::Formatter, did: ast::DefId, path: &clean::Path,
 match href(did) {
 Some((url, shortty, fqp)) => {
 try!(write!(w, "<a class='{}' href='{}' title='{}'>{}</a>",
-shortty, url, fqp.connect("::"), last.name));
+shortty, url, fqp.join("::"), last.name));
 }
 _ => try!(write!(w, "{}", last.name)),
 }


@@ -199,7 +199,7 @@ fn stripped_filtered_line<'a>(s: &'a str) -> Option<&'a str> {
 fn collapse_whitespace(s: &str) -> String {
 s.split(|c: char| c.is_whitespace()).filter(|s| {
 !s.is_empty()
-}).collect::<Vec<_>>().connect(" ")
+}).collect::<Vec<_>>().join(" ")
 }
 thread_local!(static USED_HEADER_MAP: RefCell<HashMap<String, usize>> = {
@@ -238,14 +238,14 @@ pub fn render(w: &mut fmt::Formatter, s: &str, print_toc: bool) -> fmt::Result {
 let lines = origtext.lines().filter(|l| {
 stripped_filtered_line(*l).is_none()
 });
-let text = lines.collect::<Vec<&str>>().connect("\n");
+let text = lines.collect::<Vec<&str>>().join("\n");
 if rendered { return }
 PLAYGROUND_KRATE.with(|krate| {
 let mut s = String::new();
 krate.borrow().as_ref().map(|krate| {
 let test = origtext.lines().map(|l| {
 stripped_filtered_line(l).unwrap_or(l)
-}).collect::<Vec<&str>>().connect("\n");
+}).collect::<Vec<&str>>().join("\n");
 let krate = krate.as_ref().map(|s| &**s);
 let test = test::maketest(&test, krate, false,
 &Default::default());
@@ -275,7 +275,7 @@ pub fn render(w: &mut fmt::Formatter, s: &str, print_toc: bool) -> fmt::Result {
 // Transform the contents of the header into a hyphenated string
 let id = s.split_whitespace().map(|s| s.to_ascii_lowercase())
-.collect::<Vec<String>>().connect("-");
+.collect::<Vec<String>>().join("-");
 // This is a terrible hack working around how hoedown gives us rendered
 // html for text rather than the raw text.
@@ -387,7 +387,7 @@ pub fn find_testable_code(doc: &str, tests: &mut ::test::Collector) {
 let lines = text.lines().map(|l| {
 stripped_filtered_line(l).unwrap_or(l)
 });
-let text = lines.collect::<Vec<&str>>().connect("\n");
+let text = lines.collect::<Vec<&str>>().join("\n");
 tests.add_test(text.to_string(),
 block_info.should_panic, block_info.no_run,
 block_info.ignore, block_info.test_harness);


@@ -285,7 +285,7 @@ impl fmt::Display for IndexItemFunctionType {
 let inputs: Vec<String> = self.inputs.iter().map(|ref t| {
 format!("{}", t)
 }).collect();
-try!(write!(f, "{{\"inputs\":[{}],\"output\":", inputs.connect(",")));
+try!(write!(f, "{{\"inputs\":[{}],\"output\":", inputs.join(",")));
 match self.output {
 Some(ref t) => try!(write!(f, "{}", t)),
@@ -461,7 +461,7 @@ fn build_index(krate: &clean::Crate, cache: &mut Cache) -> io::Result<String> {
 search_index.push(IndexItem {
 ty: shortty(item),
 name: item.name.clone().unwrap(),
-path: fqp[..fqp.len() - 1].connect("::"),
+path: fqp[..fqp.len() - 1].join("::"),
 desc: shorter(item.doc_value()),
 parent: Some(did),
 search_type: get_index_search_type(&item, parent_basename),
@@ -957,7 +957,7 @@ impl DocFolder for Cache {
 self.search_index.push(IndexItem {
 ty: shortty(&item),
 name: s.to_string(),
-path: path.connect("::").to_string(),
+path: path.join("::").to_string(),
 desc: shorter(item.doc_value()),
 parent: parent,
 search_type: get_index_search_type(&item, parent_basename),
@@ -1187,7 +1187,7 @@ impl Context {
 *slot.borrow_mut() = cx.current.clone();
 });
-let mut title = cx.current.connect("::");
+let mut title = cx.current.join("::");
 if pushname {
 if !title.is_empty() {
 title.push_str("::");
@@ -1393,7 +1393,7 @@ impl<'a> Item<'a> {
 Some(format!("{root}src/{krate}/{path}.html#{href}",
 root = self.cx.root_path,
 krate = self.cx.layout.krate,
-path = path.connect("/"),
+path = path.join("/"),
 href = href))
 // If this item is not part of the local crate, then things get a little
@@ -1417,7 +1417,7 @@ impl<'a> Item<'a> {
 };
 Some(format!("{root}{path}/{file}?gotosrc={goto}",
 root = root,
-path = path[..path.len() - 1].connect("/"),
+path = path[..path.len() - 1].join("/"),
 file = item_path(self.item),
 goto = self.item.def_id.node))
 }
@@ -1523,7 +1523,7 @@ fn item_path(item: &clean::Item) -> String {
 }
 fn full_path(cx: &Context, item: &clean::Item) -> String {
-let mut s = cx.current.connect("::");
+let mut s = cx.current.join("::");
 s.push_str("::");
 s.push_str(item.name.as_ref().unwrap());
 return s
@@ -1535,7 +1535,7 @@ fn shorter<'a>(s: Option<&'a str>) -> String {
 (*line).chars().any(|chr|{
 !chr.is_whitespace()
 })
-}).collect::<Vec<_>>().connect("\n"),
+}).collect::<Vec<_>>().join("\n"),
 None => "".to_string()
 }
 }
@@ -1920,12 +1920,12 @@ fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item,
 try!(write!(w, r#"<script type="text/javascript" async
 src="{root_path}/implementors/{path}/{ty}.{name}.js">
 </script>"#,
-root_path = vec![".."; cx.current.len()].connect("/"),
+root_path = vec![".."; cx.current.len()].join("/"),
 path = if ast_util::is_local(it.def_id) {
-cx.current.connect("/")
+cx.current.join("/")
 } else {
 let path = &cache.external_paths[&it.def_id];
-path[..path.len() - 1].connect("/")
+path[..path.len() - 1].join("/")
 },
 ty = shortty(it).to_static_str(),
 name = *it.name.as_ref().unwrap()));


@@ -361,7 +361,7 @@ pub fn unindent(s: &str) -> String {
 line[min_indent..].to_string()
 }
 }).collect::<Vec<_>>());
-unindented.connect("\n")
+unindented.join("\n")
 } else {
 s.to_string()
 }


@@ -360,7 +360,7 @@ impl Collector {
 let s = self.current_header.as_ref().map(|s| &**s).unwrap_or("");
 format!("{}_{}", s, self.cnt)
 } else {
-format!("{}_{}", self.names.connect("::"), self.cnt)
+format!("{}_{}", self.names.join("::"), self.cnt)
 };
 self.cnt += 1;
 let libs = self.libs.clone();


@@ -442,7 +442,7 @@ impl fmt::Display for Ipv6Addr {
 .iter()
 .map(|&seg| format!("{:x}", seg))
 .collect::<Vec<String>>()
-.connect(":")
+.join(":")
 }
 write!(fmt, "{}::{}",


@@ -27,7 +27,7 @@ pub fn path_name_i(idents: &[Ident]) -> String {
 // FIXME: Bad copies (#2543 -- same for everything else that says "bad")
 idents.iter().map(|i| {
 token::get_ident(*i).to_string()
-}).collect::<Vec<String>>().connect("::")
+}).collect::<Vec<String>>().join("::")
 }
 pub fn local_def(id: NodeId) -> DefId {


@@ -78,7 +78,7 @@ pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
 .iter()
 .map(|x| token::get_ident(*x).to_string())
 .collect::<Vec<String>>()
-.connect("::");
+.join("::");
 base::MacEager::expr(cx.expr_str(
 sp,
 token::intern_and_get_ident(&string[..])))


@@ -465,7 +465,7 @@ pub fn parse(sess: &ParseSess,
 token::get_ident(bind))).to_string()
 }
 _ => panic!()
-} }).collect::<Vec<String>>().connect(" or ");
+} }).collect::<Vec<String>>().join(" or ");
 return Error(sp, format!(
 "local ambiguity: multiple parsing options: \
 built-in NTs {} or {} other options.",


@@ -139,7 +139,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String {
 let lines = vertical_trim(lines);
 let lines = horizontal_trim(lines);
-return lines.connect("\n");
+return lines.join("\n");
 }
 panic!("not a doc-comment: {}", comment);


@@ -5215,7 +5215,7 @@ impl<'a> Parser<'a> {
 last_span,
 &format!("illegal ABI: expected one of [{}], \
 found `{}`",
-abi::all_names().connect(", "),
+abi::all_names().join(", "),
 the_string));
 Ok(None)
 }


@@ -1080,7 +1080,7 @@ impl MetricMap {
 .map(|(k,v)| format!("{}: {} (+/- {})", *k,
 v.value, v.noise))
 .collect();
-v.connect(", ")
+v.join(", ")
 }
 }


@@ -36,7 +36,7 @@ impl TTMacroExpander for Expander {
 sp: Span,
 _: &[ast::TokenTree]) -> Box<MacResult+'cx> {
 let args = self.args.iter().map(|i| pprust::meta_item_to_string(&*i))
-.collect::<Vec<_>>().connect(", ");
+.collect::<Vec<_>>().join(", ");
 let interned = token::intern_and_get_ident(&args[..]);
 MacEager::expr(ecx.expr_str(sp, interned))
 }


@@ -108,7 +108,7 @@ impl fmt::Display for AsciiArt {
 .collect::<Vec<String>>();
 // Concatenate the lines together using a new-line.
-write!(f, "{}", lines.connect("\n"))
+write!(f, "{}", lines.join("\n"))
 }
 }


@@ -24,7 +24,7 @@ impl<T:to_str> to_str for Vec<T> {
 self.iter()
 .map(|e| e.to_string_())
 .collect::<Vec<String>>()
-.connect(", "))
+.join(", "))
 }
 }