libcore: minor code cleanup.
This change is minor and probably inconsequential to performance, but I find vec::map clearer than vec::each combined with a push.
This commit is contained in:
parent
95423d28f2
commit
a7ecde3323
8 changed files with 16 additions and 25 deletions
|
@ -119,8 +119,8 @@ impl<T: Reader> T : ReaderUtil {
|
||||||
}
|
}
|
||||||
return (i, 0);
|
return (i, 0);
|
||||||
}
|
}
|
||||||
let mut bytes: ~[u8] = ~[];
|
let mut bytes = ~[];
|
||||||
let mut chars: ~[char] = ~[];
|
let mut chars = ~[];
|
||||||
// might need more bytes, but reading n will never over-read
|
// might need more bytes, but reading n will never over-read
|
||||||
let mut nbread = n;
|
let mut nbread = n;
|
||||||
while nbread > 0 {
|
while nbread > 0 {
|
||||||
|
|
|
@ -32,7 +32,7 @@ pub fn console_off() {
|
||||||
#[cfg(notest)]
|
#[cfg(notest)]
|
||||||
#[lang="log_type"]
|
#[lang="log_type"]
|
||||||
pub fn log_type<T>(level: u32, object: &T) {
|
pub fn log_type<T>(level: u32, object: &T) {
|
||||||
let bytes = do io::with_bytes_writer() |writer| {
|
let bytes = do io::with_bytes_writer |writer| {
|
||||||
repr::write_repr(writer, object);
|
repr::write_repr(writer, object);
|
||||||
};
|
};
|
||||||
unsafe {
|
unsafe {
|
||||||
|
|
|
@ -559,7 +559,7 @@ impl ReprPrinter {
|
||||||
unsafe {
|
unsafe {
|
||||||
self.align(sys::min_align_of::<T>());
|
self.align(sys::min_align_of::<T>());
|
||||||
let value_addr: &T = transmute(copy self.ptr);
|
let value_addr: &T = transmute(copy self.ptr);
|
||||||
(*value_addr).write_repr(self.writer);
|
value_addr.write_repr(self.writer);
|
||||||
self.bump(sys::size_of::<T>());
|
self.bump(sys::size_of::<T>());
|
||||||
true
|
true
|
||||||
}
|
}
|
||||||
|
|
|
@ -90,9 +90,7 @@ fn attr_meta(attr: ast::attribute) -> @ast::meta_item { @attr.node.value }
|
||||||
|
|
||||||
// Get the meta_items from inside a vector of attributes
|
// Get the meta_items from inside a vector of attributes
|
||||||
fn attr_metas(attrs: ~[ast::attribute]) -> ~[@ast::meta_item] {
|
fn attr_metas(attrs: ~[ast::attribute]) -> ~[@ast::meta_item] {
|
||||||
let mut mitems = ~[];
|
do attrs.map |a| { attr_meta(*a) }
|
||||||
for attrs.each |a| { mitems.push(attr_meta(*a)); }
|
|
||||||
return mitems;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn desugar_doc_attr(attr: &ast::attribute) -> ast::attribute {
|
fn desugar_doc_attr(attr: &ast::attribute) -> ast::attribute {
|
||||||
|
|
|
@ -27,8 +27,7 @@ fn declare_upcalls(targ_cfg: @session::config,
|
||||||
fn decl(llmod: ModuleRef, prefix: ~str, name: ~str,
|
fn decl(llmod: ModuleRef, prefix: ~str, name: ~str,
|
||||||
tys: ~[TypeRef], rv: TypeRef) ->
|
tys: ~[TypeRef], rv: TypeRef) ->
|
||||||
ValueRef {
|
ValueRef {
|
||||||
let mut arg_tys: ~[TypeRef] = ~[];
|
let arg_tys = tys.map(|t| *t);
|
||||||
for tys.each |t| { arg_tys.push(*t); }
|
|
||||||
let fn_ty = T_fn(arg_tys, rv);
|
let fn_ty = T_fn(arg_tys, rv);
|
||||||
return base::decl_cdecl_fn(llmod, prefix + name, fn_ty);
|
return base::decl_cdecl_fn(llmod, prefix + name, fn_ty);
|
||||||
}
|
}
|
||||||
|
|
|
@ -598,13 +598,12 @@ fn get_enum_variants(intr: @ident_interner, cdata: cmd, id: ast::node_id,
|
||||||
let ctor_ty = item_type({crate: cdata.cnum, node: id}, item,
|
let ctor_ty = item_type({crate: cdata.cnum, node: id}, item,
|
||||||
tcx, cdata);
|
tcx, cdata);
|
||||||
let name = item_name(intr, item);
|
let name = item_name(intr, item);
|
||||||
let mut arg_tys: ~[ty::t] = ~[];
|
let arg_tys = match ty::get(ctor_ty).sty {
|
||||||
match ty::get(ctor_ty).sty {
|
ty::ty_fn(f) => f.sig.inputs.map(|a| a.ty),
|
||||||
ty::ty_fn(f) => {
|
|
||||||
for f.sig.inputs.each |a| { arg_tys.push(a.ty); }
|
// Nullary enum variant.
|
||||||
}
|
_ => ~[],
|
||||||
_ => { /* Nullary enum variant. */ }
|
};
|
||||||
}
|
|
||||||
match variant_disr_val(item) {
|
match variant_disr_val(item) {
|
||||||
Some(val) => { disr_val = val; }
|
Some(val) => { disr_val = val; }
|
||||||
_ => { /* empty */ }
|
_ => { /* empty */ }
|
||||||
|
|
|
@ -34,9 +34,7 @@ type flag = HashMap<~str, ()>;
|
||||||
fn field_expr(f: ast::field) -> @ast::expr { return f.node.expr; }
|
fn field_expr(f: ast::field) -> @ast::expr { return f.node.expr; }
|
||||||
|
|
||||||
fn field_exprs(fields: ~[ast::field]) -> ~[@ast::expr] {
|
fn field_exprs(fields: ~[ast::field]) -> ~[@ast::expr] {
|
||||||
let mut es = ~[];
|
fields.map(|f| f.node.expr)
|
||||||
for fields.each |f| { es.push(f.node.expr); }
|
|
||||||
return es;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Takes a predicate p, returns true iff p is true for any subexpressions
|
// Takes a predicate p, returns true iff p is true for any subexpressions
|
||||||
|
|
|
@ -282,8 +282,7 @@ fn ty_to_str(cx: ctxt, typ: t) -> ~str {
|
||||||
_ => { }
|
_ => { }
|
||||||
}
|
}
|
||||||
s += ~"(";
|
s += ~"(";
|
||||||
let mut strs = ~[];
|
let strs = inputs.map(|a| fn_input_to_str(cx, *a));
|
||||||
for inputs.each |a| { strs.push(fn_input_to_str(cx, *a)); }
|
|
||||||
s += str::connect(strs, ~", ");
|
s += str::connect(strs, ~", ");
|
||||||
s += ~")";
|
s += ~")";
|
||||||
if ty::get(output).sty != ty_nil {
|
if ty::get(output).sty != ty_nil {
|
||||||
|
@ -338,13 +337,11 @@ fn ty_to_str(cx: ctxt, typ: t) -> ~str {
|
||||||
ty_unboxed_vec(tm) => { ~"unboxed_vec<" + mt_to_str(cx, tm) + ~">" }
|
ty_unboxed_vec(tm) => { ~"unboxed_vec<" + mt_to_str(cx, tm) + ~">" }
|
||||||
ty_type => ~"type",
|
ty_type => ~"type",
|
||||||
ty_rec(elems) => {
|
ty_rec(elems) => {
|
||||||
let mut strs: ~[~str] = ~[];
|
let strs = elems.map(|fld| field_to_str(cx, *fld));
|
||||||
for elems.each |fld| { strs.push(field_to_str(cx, *fld)); }
|
|
||||||
~"{" + str::connect(strs, ~",") + ~"}"
|
~"{" + str::connect(strs, ~",") + ~"}"
|
||||||
}
|
}
|
||||||
ty_tup(elems) => {
|
ty_tup(elems) => {
|
||||||
let mut strs = ~[];
|
let strs = elems.map(|elem| ty_to_str(cx, *elem));
|
||||||
for elems.each |elem| { strs.push(ty_to_str(cx, *elem)); }
|
|
||||||
~"(" + str::connect(strs, ~",") + ~")"
|
~"(" + str::connect(strs, ~",") + ~")"
|
||||||
}
|
}
|
||||||
ty_fn(ref f) => {
|
ty_fn(ref f) => {
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue