
libsyntax: Remove all non-proc do syntax.

Patrick Walton 2013-11-20 16:23:04 -08:00
parent a61a3678eb
commit efc512362b
26 changed files with 178 additions and 192 deletions
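
For context: the `do` expression was early-Rust sugar that let the final closure argument of a call be written as a trailing block, e.g. `do f(a) |x| { ... }` for `f(a, |x| { ... })`. This commit mechanically rewrites every remaining non-`proc` use of that sugar into a plain call with an explicit closure argument. A minimal sketch of the rewrite in 2013-era Rust (illustrative only: `each_even` is a hypothetical helper, and neither `do` nor the `|uint| -> bool` closure type exists in modern Rust):

// Hypothetical callback-style iteration function in the pre-1.0 style:
// calls `f` with each even number below `limit`, stopping early if `f`
// returns false, and reports whether it ran to completion.
fn each_even(limit: uint, f: |uint| -> bool) -> bool {
    let mut i = 0u;
    while i < limit {
        if !f(i) { return false; }
        i += 2u;
    }
    true
}

fn demo() {
    // Before this commit: `do` sugar supplies the trailing closure.
    do each_even(10) |n| {
        println!("even: {}", n);
        true
    };

    // After this commit: the closure is an ordinary final argument.
    each_even(10, |n| {
        println!("even: {}", n);
        true
    });
}

Every hunk below is an instance of this rewrite, plus the matching change of a closing `}` or `};` to `})` or `});`.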

View file

@@ -104,14 +104,14 @@ pub fn lookup(name: &str) -> Option<Abi> {
     let mut res = None;
-    do each_abi |abi| {
+    each_abi(|abi| {
         if name == abi.data().name {
             res = Some(abi);
             false
         } else {
             true
         }
-    };
+    });
     res
 }
@@ -217,21 +217,21 @@ impl AbiSet {
         let mut res = None;
-        do self.each |abi| {
+        self.each(|abi| {
             let data = abi.data();
             match data.abi_arch {
                 Archs(a) if (a & arch.bit()) != 0 => { res = Some(abi); false }
                 Archs(_) => { true }
                 RustArch | AllArch => { res = Some(abi); false }
             }
-        };
+        });
         res.map(|r| r.for_target(os, arch))
     }
     pub fn check_valid(&self) -> Option<(Abi, Abi)> {
         let mut abis = ~[];
-        do self.each |abi| { abis.push(abi); true };
+        self.each(|abi| { abis.push(abi); true });
         for (i, abi) in abis.iter().enumerate() {
             let data = abi.data();
@@ -285,10 +285,10 @@ impl ToStr for Abi {
 impl ToStr for AbiSet {
     fn to_str(&self) -> ~str {
         let mut strs = ~[];
-        do self.each |abi| {
+        self.each(|abi| {
             strs.push(abi.data().name);
             true
-        };
+        });
         format!("\"{}\"", strs.connect(" "))
     }
 }

View file

@@ -52,13 +52,13 @@ pub type path = ~[path_elt];
 pub fn path_to_str_with_sep(p: &[path_elt], sep: &str, itr: @ident_interner)
                             -> ~str {
-    let strs = do p.map |e| {
+    let strs = p.map(|e| {
         match *e {
             path_mod(s) | path_name(s) | path_pretty_name(s, _) => {
                 itr.get(s.name)
             }
         }
-    };
+    });
     strs.connect(sep)
 }

View file

@@ -254,12 +254,12 @@ pub fn unguarded_pat(a: &Arm) -> Option<~[@Pat]> {
 }
 pub fn public_methods(ms: ~[@method]) -> ~[@method] {
-    do ms.move_iter().filter |m| {
+    ms.move_iter().filter(|m| {
         match m.vis {
             public => true,
             _ => false
         }
-    }.collect()
+    }).collect()
 }
 // extract a TypeMethod from a trait_method. if the trait_method is

View file

@@ -169,18 +169,18 @@ pub fn mk_sugared_doc_attr(text: @str, lo: BytePos, hi: BytePos) -> Attribute {
 pub fn contains(haystack: &[@ast::MetaItem],
                 needle: @ast::MetaItem) -> bool {
     debug!("attr::contains (name={})", needle.name());
-    do haystack.iter().any |item| {
+    haystack.iter().any(|item| {
         debug!(" testing: {}", item.name());
         item.node == needle.node
-    }
+    })
 }
 pub fn contains_name<AM: AttrMetaMethods>(metas: &[AM], name: &str) -> bool {
     debug!("attr::contains_name (name={})", name);
-    do metas.iter().any |item| {
+    metas.iter().any(|item| {
         debug!(" testing: {}", item.name());
         name == item.name()
-    }
+    })
 }
 pub fn first_attr_value_str_by_name(attrs: &[Attribute], name: &str)
@@ -204,12 +204,10 @@ pub fn sort_meta_items(items: &[@MetaItem]) -> ~[@MetaItem] {
         .map(|&mi| (mi.name(), mi))
         .collect::<~[(@str, @MetaItem)]>();
-    do extra::sort::quick_sort(v) |&(a, _), &(b, _)| {
-        a <= b
-    }
+    extra::sort::quick_sort(v, |&(a, _), &(b, _)| a <= b);
     // There doesn't seem to be a more optimal way to do this
-    do v.move_iter().map |(_, m)| {
+    v.move_iter().map(|(_, m)| {
         match m.node {
             MetaList(n, ref mis) => {
                 @Spanned {
@@ -219,7 +217,7 @@ pub fn sort_meta_items(items: &[@MetaItem]) -> ~[@MetaItem] {
             }
             _ => m
         }
-    }.collect()
+    }).collect()
 }
 /**
@@ -248,7 +246,7 @@ pub enum InlineAttr {
 /// True if something like #[inline] is found in the list of attrs.
 pub fn find_inline_attr(attrs: &[Attribute]) -> InlineAttr {
     // FIXME (#2809)---validate the usage of #[inline] and #[inline]
-    do attrs.iter().fold(InlineNone) |ia,attr| {
+    attrs.iter().fold(InlineNone, |ia,attr| {
         match attr.node.value.node {
             MetaWord(n) if "inline" == n => InlineHint,
             MetaList(n, ref items) if "inline" == n => {
@@ -262,7 +260,7 @@ pub fn find_inline_attr(attrs: &[Attribute]) -> InlineAttr {
             }
             _ => ia
         }
-    }
+    })
 }
 /// Tests if any `cfg(...)` meta items in `metas` match `cfg`. e.g.
@@ -278,7 +276,7 @@ pub fn test_cfg<AM: AttrMetaMethods, It: Iterator<AM>>
     // this would be much nicer as a chain of iterator adaptors, but
     // this doesn't work.
-    let some_cfg_matches = do metas.any |mi| {
+    let some_cfg_matches = metas.any(|mi| {
         debug!("testing name: {}", mi.name());
         if "cfg" == mi.name() { // it is a #[cfg()] attribute
             debug!("is cfg");
@@ -287,7 +285,7 @@ pub fn test_cfg<AM: AttrMetaMethods, It: Iterator<AM>>
             match mi.meta_item_list() {
                 Some(cfg_meta) => {
                     debug!("is cfg(...)");
-                    do cfg_meta.iter().all |cfg_mi| {
+                    cfg_meta.iter().all(|cfg_mi| {
                         debug!("cfg({}[...])", cfg_mi.name());
                         match cfg_mi.node {
                             ast::MetaList(s, ref not_cfgs) if "not" == s => {
@@ -301,14 +299,14 @@ pub fn test_cfg<AM: AttrMetaMethods, It: Iterator<AM>>
                             }
                             _ => contains(cfg, *cfg_mi)
                         }
-                    }
+                    })
                 }
                 None => false
             }
         } else {
             false
         }
-    };
+    });
     debug!("test_cfg (no_cfgs={}, some_cfg_matches={})", no_cfgs, some_cfg_matches);
     no_cfgs || some_cfg_matches
 }

View file

@@ -314,9 +314,7 @@ fn highlight_lines(cm: @codemap::CodeMap,
         // Skip is the number of characters we need to skip because they are
         // part of the 'filename:line ' part of the previous line.
         let skip = fm.name.len() + digits + 3u;
-        do skip.times() {
-            s.push_char(' ');
-        }
+        skip.times(|| s.push_char(' '));
         let orig = fm.get_line(lines.lines[0] as int);
         for pos in range(0u, left-skip) {
             let curChar = (orig[pos] as char);
@@ -335,9 +333,7 @@ fn highlight_lines(cm: @codemap::CodeMap,
         if hi.col != lo.col {
             // the ^ already takes up one space
             let num_squigglies = hi.col.to_uint()-lo.col.to_uint()-1u;
-            do num_squigglies.times() {
-                s.push_char('~')
-            }
+            num_squigglies.times(|| s.push_char('~'));
         }
         print_maybe_styled(s + "\n", term::attr::ForegroundColor(diagnosticcolor(lvl)));
     }

View file

@@ -370,9 +370,9 @@ impl AstBuilder for @ExtCtxt {
     }
     fn strip_bounds(&self, generics: &Generics) -> Generics {
-        let new_params = do generics.ty_params.map |ty_param| {
+        let new_params = generics.ty_params.map(|ty_param| {
             ast::TyParam { bounds: opt_vec::Empty, ..*ty_param }
-        };
+        });
         Generics {
             ty_params: new_params,
             .. (*generics).clone()
@@ -883,9 +883,9 @@ impl AstBuilder for @ExtCtxt {
     fn view_use_list(&self, sp: Span, vis: ast::visibility,
                      path: ~[ast::Ident], imports: &[ast::Ident]) -> ast::view_item {
-        let imports = do imports.map |id| {
+        let imports = imports.map(|id| {
             respan(sp, ast::path_list_ident_ { name: *id, id: ast::DUMMY_NODE_ID })
-        };
+        });
         self.view_use(sp, vis,
                       ~[@respan(sp,

View file

@@ -103,7 +103,7 @@ fn cs_clone(
         },
         _ => {
             // struct-like
-            let fields = do all_fields.map |field| {
+            let fields = all_fields.map(|field| {
                 let ident = match field.name {
                     Some(i) => i,
                     None => cx.span_bug(span,
@@ -111,7 +111,7 @@ fn cs_clone(
                                         name))
                 };
                 cx.field_imm(span, ident, subcall(field.self_))
-            };
+            });
             if fields.is_empty() {
                 // no fields, so construct like `None`
View file

@@ -70,13 +70,16 @@ fn decodable_substructure(cx: @ExtCtxt, span: Span,
         };
         let read_struct_field = cx.ident_of("read_struct_field");
-        let result = do decode_static_fields(cx, span, substr.type_ident,
-                                             summary) |span, name, field| {
+        let result = decode_static_fields(cx,
+                                          span,
+                                          substr.type_ident,
+                                          summary,
+                                          |span, name, field| {
             cx.expr_method_call(span, blkdecoder, read_struct_field,
                                 ~[cx.expr_str(span, name),
                                   cx.expr_uint(span, field),
                                   lambdadecode])
-        };
+        });
         cx.expr_method_call(span, decoder, cx.ident_of("read_struct"),
                             ~[cx.expr_str(span, cx.str_of(substr.type_ident)),
                               cx.expr_uint(span, nfields),
@@ -93,12 +96,15 @@ fn decodable_substructure(cx: @ExtCtxt, span: Span,
             let (name, parts) = match *f { (i, ref p) => (i, p) };
             variants.push(cx.expr_str(span, cx.str_of(name)));
-            let decoded = do decode_static_fields(cx, span, name,
-                                                  parts) |span, _, field| {
+            let decoded = decode_static_fields(cx,
+                                               span,
+                                               name,
+                                               parts,
+                                               |span, _, field| {
                 cx.expr_method_call(span, blkdecoder, rvariant_arg,
                                     ~[cx.expr_uint(span, field),
                                       lambdadecode])
-            };
+            });
             arms.push(cx.arm(span,
                              ~[cx.pat_lit(span, cx.expr_uint(span, i))],
@@ -135,18 +141,18 @@ fn decode_static_fields(cx: @ExtCtxt,
             if fields.is_empty() {
                 cx.expr_ident(outer_span, outer_pat_ident)
             } else {
-                let fields = do fields.iter().enumerate().map |(i, &span)| {
+                let fields = fields.iter().enumerate().map(|(i, &span)| {
                     getarg(span, format!("_field{}", i).to_managed(), i)
-                }.collect();
+                }).collect();
                 cx.expr_call_ident(outer_span, outer_pat_ident, fields)
             }
         }
         Named(ref fields) => {
             // use the field's span to get nicer error messages.
-            let fields = do fields.iter().enumerate().map |(i, &(name, span))| {
+            let fields = fields.iter().enumerate().map(|(i, &(name, span))| {
                 cx.field_imm(span, name, getarg(span, cx.str_of(name), i))
-            }.collect();
+            }).collect();
             cx.expr_struct_ident(outer_span, outer_pat_ident, fields)
         }
     }

View file

@@ -60,9 +60,9 @@ fn default_substructure(cx: @ExtCtxt, span: Span, substr: &Substructure) -> @Expr
             }
         }
         Named(ref fields) => {
-            let default_fields = do fields.map |&(ident, span)| {
+            let default_fields = fields.map(|&(ident, span)| {
                 cx.field_imm(span, ident, default_call(span))
-            };
+            });
             cx.expr_struct_ident(span, substr.type_ident, default_fields)
         }
     }

View file

@@ -24,19 +24,19 @@ would generate two implementations like:
 impl<S:extra::serialize::Encoder> Encodable<S> for Node {
     fn encode(&self, s: &S) {
-        do s.emit_struct("Node", 1) {
+        s.emit_struct("Node", 1, || {
             s.emit_field("id", 0, || s.emit_uint(self.id))
-        }
+        })
     }
 }
 impl<D:Decoder> Decodable for node_id {
     fn decode(d: &D) -> Node {
-        do d.read_struct("Node", 1) {
+        d.read_struct("Node", 1, || {
             Node {
                 id: d.read_field(~"x", 0, || decode(d))
             }
-        }
+        })
     }
 }
@@ -53,10 +53,10 @@ would yield functions like:
     T: Encodable<S>
 > spanned<T>: Encodable<S> {
     fn encode<S:Encoder>(s: &S) {
-        do s.emit_rec {
+        s.emit_rec(|| {
             s.emit_field("node", 0, || self.node.encode(s));
             s.emit_field("span", 1, || self.span.encode(s));
-        }
+        })
     }
 }
@@ -65,12 +65,12 @@ would yield functions like:
     T: Decodable<D>
 > spanned<T>: Decodable<D> {
     fn decode(d: &D) -> spanned<T> {
-        do d.read_rec {
+        d.read_rec(|| {
             {
                 node: d.read_field(~"node", 0, || decode(d)),
                 span: d.read_field(~"span", 1, || decode(d)),
             }
-        }
+        })
     }
 }
 */

View file

@@ -362,9 +362,9 @@ impl<'self> TraitDef<'self> {
         // a TyParamBound requires an ast id
         let mut bounds = opt_vec::from(
             // extra restrictions on the generics parameters to the type being derived upon
-            do self.additional_bounds.map |p| {
+            self.additional_bounds.map(|p| {
                 cx.typarambound(p.to_path(cx, trait_span, type_ident, generics))
-            });
+            }));
         // require the current trait
         bounds.push(cx.typarambound(trait_path.clone()));
@@ -375,9 +375,9 @@ impl<'self> TraitDef<'self> {
         let trait_ref = cx.trait_ref(trait_path);
         // Create the type parameters on the `self` path.
-        let self_ty_params = do generics.ty_params.map |ty_param| {
+        let self_ty_params = generics.ty_params.map(|ty_param| {
             cx.ty_ident(trait_span, ty_param.ident)
-        };
+        });
         let self_lifetimes = generics.lifetimes.clone();
@@ -405,7 +405,7 @@ impl<'self> TraitDef<'self> {
                          struct_def: &struct_def,
                          type_ident: Ident,
                          generics: &Generics) -> @ast::item {
-        let methods = do self.methods.map |method_def| {
+        let methods = self.methods.map(|method_def| {
             let (explicit_self, self_args, nonself_args, tys) =
                 method_def.split_self_nonself_args(cx, trait_span, type_ident, generics);
@@ -426,7 +426,7 @@ impl<'self> TraitDef<'self> {
                                           type_ident, generics,
                                           explicit_self, tys,
                                           body)
-        };
+        });
         self.create_derived_impl(cx, trait_span, type_ident, generics, methods)
     }
@@ -436,7 +436,7 @@ impl<'self> TraitDef<'self> {
                        enum_def: &enum_def,
                        type_ident: Ident,
                        generics: &Generics) -> @ast::item {
-        let methods = do self.methods.map |method_def| {
+        let methods = self.methods.map(|method_def| {
             let (explicit_self, self_args, nonself_args, tys) =
                 method_def.split_self_nonself_args(cx, trait_span, type_ident, generics);
@@ -457,7 +457,7 @@ impl<'self> TraitDef<'self> {
                                           type_ident, generics,
                                           explicit_self, tys,
                                           body)
-        };
+        });
         self.create_derived_impl(cx, trait_span, type_ident, generics, methods)
     }
@@ -547,9 +547,9 @@ impl<'self> MethodDef<'self> {
         // create the generics that aren't for Self
         let fn_generics = self.generics.to_generics(cx, trait_span, type_ident, generics);
-        let args = do arg_types.move_iter().map |(name, ty)| {
+        let args = arg_types.move_iter().map(|(name, ty)| {
             cx.arg(trait_span, name, ty)
-        }.collect();
+        }).collect();
         let ret_type = self.get_ret_ty(cx, trait_span, generics, type_ident);
@@ -624,19 +624,19 @@ impl<'self> MethodDef<'self> {
         // transpose raw_fields
         let fields = match raw_fields {
             [ref self_arg, .. rest] => {
-                do self_arg.iter().enumerate().map |(i, &(span, opt_id, field))| {
-                    let other_fields = do rest.map |l| {
+                self_arg.iter().enumerate().map(|(i, &(span, opt_id, field))| {
+                    let other_fields = rest.map(|l| {
                         match &l[i] {
                             &(_, _, ex) => ex
                         }
-                    };
+                    });
                     FieldInfo {
                         span: span,
                         name: opt_id,
                         self_: field,
                         other: other_fields
                     }
-                }.collect()
+                }).collect()
             }
             [] => { cx.span_bug(trait_span, "No self arguments to non-static \
                                              method in generic `deriving`") }
@@ -787,16 +787,16 @@ impl<'self> MethodDef<'self> {
                     }
                 }
                 let field_tuples =
-                    do self_vec.iter()
+                    self_vec.iter()
                        .zip(enum_matching_fields.iter())
-                       .map |(&(span, id, self_f), other)| {
+                       .map(|(&(span, id, self_f), other)| {
                         FieldInfo {
                             span: span,
                             name: id,
                             self_: self_f,
                             other: (*other).clone()
                         }
-                    }.collect();
+                    }).collect();
                 substructure = EnumMatching(variant_index, variant, field_tuples);
             }
             None => {
@@ -901,7 +901,7 @@ impl<'self> MethodDef<'self> {
                               self_args: &[@Expr],
                               nonself_args: &[@Expr])
         -> @Expr {
-        let summary = do enum_def.variants.map |v| {
+        let summary = enum_def.variants.map(|v| {
             let ident = v.node.name;
             let summary = match v.node.kind {
                 ast::tuple_variant_kind(ref args) => Unnamed(args.map(|va| va.ty.span)),
@@ -910,7 +910,7 @@ impl<'self> MethodDef<'self> {
                 }
             };
             (ident, summary)
-        };
+        });
         self.call_substructure_method(cx,
                                       trait_span, type_ident,
                                       self_args, nonself_args,
@@ -944,10 +944,10 @@ pub fn create_subpatterns(cx: @ExtCtxt,
                           field_paths: ~[ast::Path],
                           mutbl: ast::Mutability)
                           -> ~[@ast::Pat] {
-    do field_paths.map |path| {
+    field_paths.map(|path| {
         cx.pat(path.span,
                ast::PatIdent(ast::BindByRef(mutbl), (*path).clone(), None))
-    }
+    })
 }
 #[deriving(Eq)] // dogfooding!
@@ -1003,10 +1003,10 @@ fn create_struct_pattern(cx: @ExtCtxt,
     // struct_type is definitely not Unknown, since struct_def.fields
     // must be nonempty to reach here
     let pattern = if struct_type == Record {
-        let field_pats = do subpats.iter().zip(ident_expr.iter()).map |(&pat, &(_, id, _))| {
+        let field_pats = subpats.iter().zip(ident_expr.iter()).map(|(&pat, &(_, id, _))| {
            // id is guaranteed to be Some
            ast::FieldPat { ident: id.unwrap(), pat: pat }
-        }.collect();
+        }).collect();
         cx.pat_struct(trait_span, matching_path, field_pats)
     } else {
         cx.pat_enum(trait_span, matching_path, subpats)
@@ -1075,13 +1075,13 @@ pub fn cs_fold(use_foldl: bool,
     match *substructure.fields {
         EnumMatching(_, _, ref all_fields) | Struct(ref all_fields) => {
             if use_foldl {
-                do all_fields.iter().fold(base) |old, field| {
+                all_fields.iter().fold(base, |old, field| {
                     f(cx, field.span, old, field.self_, field.other)
-                }
+                })
             } else {
-                do all_fields.rev_iter().fold(base) |old, field| {
+                all_fields.rev_iter().fold(base, |old, field| {
                     f(cx, field.span, old, field.self_, field.other)
-                }
+                })
             }
         },
         EnumNonMatching(ref all_enums) => enum_nonmatch_f(cx, trait_span,
@@ -1113,12 +1113,12 @@ pub fn cs_same_method(f: |@ExtCtxt, Span, ~[@Expr]| -> @Expr,
     match *substructure.fields {
         EnumMatching(_, _, ref all_fields) | Struct(ref all_fields) => {
             // call self_n.method(other_1_n, other_2_n, ...)
-            let called = do all_fields.map |field| {
+            let called = all_fields.map(|field| {
                 cx.expr_method_call(field.span,
                                     field.self_,
                                     substructure.method_ident,
                                     field.other.clone())
-            };
+            });
             f(cx, trait_span, called)
         },
@@ -1148,13 +1148,13 @@ pub fn cs_same_method_fold(use_foldl: bool,
     cs_same_method(
         |cx, span, vals| {
             if use_foldl {
-                do vals.iter().fold(base) |old, &new| {
+                vals.iter().fold(base, |old, &new| {
                     f(cx, span, old, new)
-                }
+                })
             } else {
-                do vals.rev_iter().fold(base) |old, &new| {
+                vals.rev_iter().fold(base, |old, &new| {
                     f(cx, span, old, new)
-                }
+                })
             }
         },
         enum_nonmatch_f,

View file

@@ -90,7 +90,7 @@ fn iter_bytes_substructure(cx: @ExtCtxt, span: Span, substr: &Substructure) -> @
         cx.span_bug(span, "#[deriving(IterBytes)] needs at least one field");
     }
-    do exprs.slice(1, exprs.len()).iter().fold(exprs[0]) |prev, me| {
+    exprs.slice(1, exprs.len()).iter().fold(exprs[0], |prev, me| {
         cx.expr_binary(span, BiAnd, prev, *me)
-    }
+    })
 }

View file

@@ -74,7 +74,7 @@ pub fn expand_meta_deriving(cx: @ExtCtxt,
             in_items
         }
         MetaList(_, ref titems) => {
-            do titems.rev_iter().fold(in_items) |in_items, &titem| {
+            titems.rev_iter().fold(in_items, |in_items, &titem| {
                 match titem.node {
                     MetaNameValue(tname, _) |
                     MetaList(tname, _) |
@@ -112,7 +112,7 @@ pub fn expand_meta_deriving(cx: @ExtCtxt,
                         }
                     }
                 }
-            }
+            })
         }
     }
 }

View file

@@ -104,7 +104,7 @@ fn rand_substructure(cx: @ExtCtxt, span: Span, substr: &Substructure) -> @Expr {
                                           value_ref,
                                           variant_count);
-            let mut arms = do variants.iter().enumerate().map |(i, id_sum)| {
+            let mut arms = variants.iter().enumerate().map(|(i, id_sum)| {
                 let i_expr = cx.expr_uint(span, i);
                 let pat = cx.pat_lit(span, i_expr);
@@ -115,7 +115,7 @@ fn rand_substructure(cx: @ExtCtxt, span: Span, substr: &Substructure) -> @Expr {
                             rand_thing(cx, span, ident, summary, |sp| rand_call(sp)))
                     }
                 }
-            }.collect::<~[ast::Arm]>();
+            }).collect::<~[ast::Arm]>();
             // _ => {} at the end. Should never occur
             arms.push(cx.arm_unreachable(span));
@@ -144,9 +144,9 @@ fn rand_substructure(cx: @ExtCtxt, span: Span, substr: &Substructure) -> @Expr {
             }
         }
         Named(ref fields) => {
-            let rand_fields = do fields.map |&(ident, span)| {
+            let rand_fields = fields.map(|&(ident, span)| {
                 cx.field_imm(span, ident, rand_call(span))
-            };
+            });
             cx.expr_struct_ident(span, ctor_ident, rand_fields)
         }
     }

View file

@@ -171,9 +171,9 @@ impl<'self> Ty<'self> {
               -> ast::Path {
         match *self {
             Self => {
-                let self_params = do self_generics.ty_params.map |ty_param| {
+                let self_params = self_generics.ty_params.map(|ty_param| {
                     cx.ty_ident(span, ty_param.ident)
-                };
+                });
                 let lifetimes = self_generics.lifetimes.clone();
                 cx.path_all(span, false, ~[self_ty], lifetimes,
@@ -192,10 +192,10 @@ impl<'self> Ty<'self> {
 fn mk_ty_param(cx: @ExtCtxt, span: Span, name: &str, bounds: &[Path],
                self_ident: Ident, self_generics: &Generics) -> ast::TyParam {
     let bounds = opt_vec::from(
-        do bounds.map |b| {
+        bounds.map(|b| {
             let path = b.to_path(cx, span, self_ident, self_generics);
             cx.typarambound(path)
-        });
+        }));
     cx.typaram(cx.ident_of(name), bounds)
 }
@@ -224,16 +224,16 @@ impl<'self> LifetimeBounds<'self> {
                       self_ty: Ident,
                       self_generics: &Generics)
                       -> Generics {
-        let lifetimes = do self.lifetimes.map |lt| {
+        let lifetimes = self.lifetimes.map(|lt| {
             cx.lifetime(span, cx.ident_of(*lt))
-        };
-        let ty_params = do self.bounds.map |t| {
+        });
+        let ty_params = self.bounds.map(|t| {
             match t {
                 &(ref name, ref bounds) => {
                     mk_ty_param(cx, span, *name, *bounds, self_ty, self_generics)
                 }
             }
-        };
+        });
         mk_generics(lifetimes, ty_params)
     }
 }

View file

@@ -76,9 +76,9 @@ fn zero_substructure(cx: @ExtCtxt, span: Span, substr: &Substructure) -> @Expr {
             }
         }
         Named(ref fields) => {
-            let zero_fields = do fields.map |&(ident, span)| {
+            let zero_fields = fields.map(|&(ident, span)| {
                 cx.field_imm(span, ident, zero_call(span))
-            };
+            });
             cx.expr_struct_ident(span, substr.type_ident, zero_fields)
         }
     }

View file

@@ -261,8 +261,8 @@ pub fn expand_mod_items(extsbox: @mut SyntaxEnv,
     // For each item, look through the attributes. If any of them are
     // decorated with "item decorators", then use that function to transform
     // the item into a new set of items.
-    let new_items = do vec::flat_map(module_.items) |item| {
-        do item.attrs.rev_iter().fold(~[*item]) |items, attr| {
+    let new_items = vec::flat_map(module_.items, |item| {
+        item.attrs.rev_iter().fold(~[*item], |items, attr| {
             let mname = attr.name();
             match (*extsbox).find(&intern(mname)) {
@@ -280,8 +280,8 @@ pub fn expand_mod_items(extsbox: @mut SyntaxEnv,
                 },
                 _ => items,
             }
-        }
-    };
+        })
+    });
     ast::_mod {
         items: new_items,

View file

@@ -742,12 +742,12 @@ pub fn expand_args(ecx: @ExtCtxt, sp: Span,
                       "format argument must be a string literal.");
     let mut err = false;
-    do parse::parse_error::cond.trap(|m| {
+    parse::parse_error::cond.trap(|m| {
         if !err {
             err = true;
             ecx.span_err(efmt.span, m);
         }
-    }).inside {
+    }).inside(|| {
         for piece in parse::Parser::new(fmt) {
             if !err {
                 cx.verify_piece(&piece);
@@ -755,7 +755,7 @@ pub fn expand_args(ecx: @ExtCtxt, sp: Span,
                 cx.pieces.push(piece);
             }
         }
-    }
+    });
     if err { return MRExpr(efmt) }
     // Make sure that all arguments were used and all arguments have types.

View file

@@ -127,12 +127,12 @@ pub fn copy_up(mpu: &matcher_pos_up) -> ~MatcherPos {
 }
 pub fn count_names(ms: &[matcher]) -> uint {
-    do ms.iter().fold(0) |ct, m| {
+    ms.iter().fold(0, |ct, m| {
         ct + match m.node {
             match_tok(_) => 0u,
             match_seq(ref more_ms, _, _, _, _) => count_names((*more_ms)),
             match_nonterminal(_,_,_) => 1u
-        }}
+        }})
 }
 pub fn initial_matcher_pos(ms: ~[matcher], sep: Option<Token>, lo: BytePos)
@@ -416,9 +416,9 @@ pub fn parse(
                 }
                 cur_eis.push(ei);
-                do rust_parser.tokens_consumed.times() || {
-                    rdr.next_token();
-                }
+                rust_parser.tokens_consumed.times(|| {
+                    let _ = rdr.next_token();
+                });
             }
         }

View file

@@ -151,10 +151,10 @@ fn lockstep_iter_size(t: &token_tree, r: &mut TtReader) -> lis {
     }
     match *t {
         tt_delim(ref tts) | tt_seq(_, ref tts, _, _) => {
-            do tts.iter().fold(lis_unconstrained) |lis, tt| {
+            tts.iter().fold(lis_unconstrained, |lis, tt| {
                 let lis2 = lockstep_iter_size(tt, r);
                 lis_merge(lis, lis2)
-            }
+            })
         }
         tt_tok(*) => lis_unconstrained,
         tt_nonterminal(_, name) => match *lookup_cur_matched(r, name) {

View file

@@ -187,12 +187,12 @@ pub trait ast_fold {
             }
             PatStruct(ref pth, ref fields, etc) => {
                 let pth_ = self.fold_path(pth);
-                let fs = do fields.map |f| {
+                let fs = fields.map(|f| {
                     ast::FieldPat {
                         ident: f.ident,
                         pat: self.fold_pat(f.pat)
                     }
-                };
+                });
                 PatStruct(pth_, fs, etc)
             }
             PatTup(ref elts) => PatTup(elts.map(|x| self.fold_pat(*x))),
@@ -455,7 +455,7 @@ fn fold_arg_<T:ast_fold>(a: &arg, fld: &T) -> arg {
 // build a new vector of tts by appling the ast_fold's fold_ident to
 // all of the identifiers in the token trees.
 pub fn fold_tts<T:ast_fold>(tts: &[token_tree], fld: &T) -> ~[token_tree] {
-    do tts.map |tt| {
+    tts.map(|tt| {
         match *tt {
             tt_tok(span, ref tok) =>
                 tt_tok(span,maybe_fold_ident(tok,fld)),
@@ -468,7 +468,7 @@ pub fn fold_tts<T:ast_fold>(tts: &[token_tree], fld: &T) -> ~[token_tree] {
             tt_nonterminal(sp,ref ident) =>
                 tt_nonterminal(sp,fld.fold_ident(*ident))
         }
-    }
+    })
 }
 // apply ident folder if it's an ident, otherwise leave it alone
@@ -601,11 +601,11 @@ fn fold_field<T:ast_fold>(f: TypeField, folder: &T) -> TypeField {
 fn fold_opt_bounds<T:ast_fold>(b: &Option<OptVec<TyParamBound>>, folder: &T)
                                -> Option<OptVec<TyParamBound>> {
-    do b.as_ref().map |bounds| {
-        do bounds.map |bound| {
+    b.as_ref().map(|bounds| {
+        bounds.map(|bound| {
             fold_ty_param_bound(bound, folder)
-        }
-    }
+        })
+    })
 }
 fn fold_variant_arg_<T:ast_fold>(va: &variant_arg, folder: &T)
@@ -660,9 +660,9 @@ pub fn noop_fold_item_underscore<T:ast_fold>(i: &item_, folder: &T) -> item_ {
         item_enum(ref enum_definition, ref generics) => {
             item_enum(
                 ast::enum_def {
-                    variants: do enum_definition.variants.map |x| {
+                    variants: enum_definition.variants.map(|x| {
                         folder.fold_variant(x)
-                    },
+                    }),
                 },
                 fold_generics(generics, folder))
         }
@@ -678,12 +678,12 @@ pub fn noop_fold_item_underscore<T:ast_fold>(i: &item_, folder: &T) -> item_ {
             )
         }
         item_trait(ref generics, ref traits, ref methods) => {
-            let methods = do methods.map |method| {
+            let methods = methods.map(|method| {
                 match *method {
                     required(ref m) => required(folder.fold_type_method(m)),
                     provided(method) => provided(folder.fold_method(method))
                 }
-            };
+            });
             item_trait(fold_generics(generics, folder),
                        traits.map(|p| fold_trait_ref(p, folder)),
                        methods)

View file

@@ -106,9 +106,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> ~str {
         }
         if can_trim {
-            do lines.map |line| {
-                line.slice(i + 1, line.len()).to_owned()
-            }
+            lines.map(|line| line.slice(i + 1, line.len()).to_owned())
         } else {
             lines
         }
@@ -377,10 +375,10 @@ pub fn gather_comments_and_literals(span_diagnostic:
         //discard, and look ahead; we're working with internal state
         let TokenAndSpan {tok: tok, sp: sp} = rdr.peek();
         if token::is_lit(&tok) {
-            do with_str_from(rdr, bstart) |s| {
+            with_str_from(rdr, bstart, |s| {
                 debug!("tok lit: {}", s);
                 literals.push(lit {lit: s.to_owned(), pos: sp.lo});
-            }
+            })
         } else {
             debug!("tok: {}", token::to_str(get_ident_interner(), &tok));
         }

View file

@@ -337,7 +337,7 @@ fn consume_any_line_comment(rdr: @mut StringReader)
             while rdr.curr != '\n' && !is_eof(rdr) {
                 bump(rdr);
             }
-            let ret = do with_str_from(rdr, start_bpos) |string| {
+            let ret = with_str_from(rdr, start_bpos, |string| {
                 // but comments with only more "/"s are not
                 if !is_line_non_doc_comment(string) {
                     Some(TokenAndSpan{
@@ -347,7 +347,7 @@ fn consume_any_line_comment(rdr: @mut StringReader)
                 } else {
                     None
                 }
-            };
+            });
             if ret.is_some() {
                 return ret;
@@ -412,7 +412,7 @@ fn consume_block_comment(rdr: @mut StringReader)
     }
     let res = if is_doc_comment {
-        do with_str_from(rdr, start_bpos) |string| {
+        with_str_from(rdr, start_bpos, |string| {
             // but comments with only "*"s between two "/"s are not
             if !is_block_non_doc_comment(string) {
                 Some(TokenAndSpan{
@@ -422,7 +422,7 @@ fn consume_block_comment(rdr: @mut StringReader)
             } else {
                 None
             }
-        }
+        })
     } else {
         None
     };
@@ -652,7 +652,7 @@ fn next_token_inner(rdr: @mut StringReader) -> token::Token {
             bump(rdr);
         }
-        return do with_str_from(rdr, start) |string| {
+        return with_str_from(rdr, start, |string| {
             if string == "_" {
                 token::UNDERSCORE
             } else {
@@ -661,7 +661,7 @@ fn next_token_inner(rdr: @mut StringReader) -> token::Token {
                 // FIXME: perform NFKC normalization here. (Issue #2253)
                 token::IDENT(str_to_ident(string), is_mod_name)
             }
-        }
+        })
     }
     if is_dec_digit(c) {
         return scan_number(c, rdr);
@@ -775,9 +775,9 @@ fn next_token_inner(rdr: @mut StringReader) -> token::Token {
         while ident_continue(rdr.curr) {
            bump(rdr);
        }
-        return do with_str_from(rdr, start) |lifetime_name| {
+        return with_str_from(rdr, start, |lifetime_name| {
            token::LIFETIME(str_to_ident(lifetime_name))
-        }
+        })
     }
     // Otherwise it is a character constant:

View file

@@ -1031,11 +1031,11 @@ impl Parser {
     // parse the methods in a trait declaration
     pub fn parse_trait_methods(&self) -> ~[trait_method] {
-        do self.parse_unspanned_seq(
+        self.parse_unspanned_seq(
             &token::LBRACE,
             &token::RBRACE,
-            seq_sep_none()
-        ) |p| {
+            seq_sep_none(),
+            |p| {
             let attrs = p.parse_outer_attributes();
             let lo = p.span.lo;
@@ -1048,11 +1048,11 @@ impl Parser {
             let generics = p.parse_generics();
-            let (explicit_self, d) = do self.parse_fn_decl_with_self() |p| {
+            let (explicit_self, d) = self.parse_fn_decl_with_self(|p| {
                 // This is somewhat dubious; We don't want to allow argument
                 // names to be left off if there is a definition...
                 p.parse_arg_general(false)
-            };
+            });
             let hi = p.last_span.hi;
             debug!("parse_trait_methods(): trait method signature ends in \
@@ -1108,7 +1108,7 @@ impl Parser {
                     );
                 }
             }
-        }
+        })
     }
     // parse a possibly mutable type
@@ -3000,13 +3000,13 @@ impl Parser {
             let mutbl = self.parse_mutability();
             pat = self.parse_pat_ident(BindByRef(mutbl));
         } else {
-            let can_be_enum_or_struct = do self.look_ahead(1) |t| {
+            let can_be_enum_or_struct = self.look_ahead(1, |t| {
                 match *t {
                     token::LPAREN | token::LBRACKET | token::LT |
                     token::LBRACE | token::MOD_SEP => true,
                     _ => false,
                 }
-            };
+            });
             if self.look_ahead(1, |t| *t == token::DOTDOT) {
                 let start = self.parse_expr_res(RESTRICT_NO_BAR_OP);
@@ -3040,18 +3040,18 @@ impl Parser {
             let mut args: ~[@Pat] = ~[];
             match *self.token {
                 token::LPAREN => {
-                    let is_star = do self.look_ahead(1) |t| {
+                    let is_star = self.look_ahead(1, |t| {
                         match *t {
                             token::BINOP(token::STAR) => true,
                             _ => false,
                         }
-                    };
-                    let is_dotdot = do self.look_ahead(1) |t| {
+                    });
+                    let is_dotdot = self.look_ahead(1, |t| {
                         match *t {
                             token::DOTDOT => true,
                             _ => false,
                         }
-                    };
+                    });
                     if is_star | is_dotdot {
                         // This is a "top constructor only" pat
                         self.bump();
@@ -3884,9 +3884,9 @@ impl Parser {
         let pur = self.parse_fn_purity();
         let ident = self.parse_ident();
         let generics = self.parse_generics();
-        let (explicit_self, decl) = do self.parse_fn_decl_with_self() |p| {
+        let (explicit_self, decl) = self.parse_fn_decl_with_self(|p| {
             p.parse_arg()
-        };
+        });
         let (inner_attrs, body) = self.parse_inner_attrs_and_block();
         let hi = body.span.hi;
@@ -4027,11 +4027,11 @@ impl Parser {
         } else if *self.token == token::LPAREN {
             // It's a tuple-like struct.
             is_tuple_like = true;
-            fields = do self.parse_unspanned_seq(
+            fields = self.parse_unspanned_seq(
                 &token::LPAREN,
                 &token::RPAREN,
-                seq_sep_trailing_allowed(token::COMMA)
-            ) |p| {
+                seq_sep_trailing_allowed(token::COMMA),
+                |p| {
                 let attrs = self.parse_outer_attributes();
                 let lo = p.span.lo;
                 let struct_field_ = ast::struct_field_ {
@@ -4041,7 +4041,7 @@ impl Parser {
                     attrs: attrs,
                 };
                 @spanned(lo, p.span.hi, struct_field_)
-            };
+            });
             self.expect(&token::SEMI);
         } else if self.eat(&token::SEMI) {
             // It's a unit-like struct.
@@ -4259,20 +4259,16 @@ impl Parser {
                        path: Path,
                        outer_attrs: ~[ast::Attribute],
                        id_sp: Span) -> (ast::item_, ~[ast::Attribute]) {
-        let maybe_i = do self.sess.included_mod_stack.iter().position |p| { *p == path };
+        let maybe_i = self.sess.included_mod_stack.iter().position(|p| *p == path);
         match maybe_i {
             Some(i) => {
                 let stack = &self.sess.included_mod_stack;
                 let mut err = ~"circular modules: ";
                 for p in stack.slice(i, stack.len()).iter() {
-                    do p.display().with_str |s| {
-                        err.push_str(s);
-                    }
+                    p.display().with_str(|s| err.push_str(s));
                     err.push_str(" -> ");
                 }
-                do path.display().with_str |s| {
-                    err.push_str(s);
-                }
+                path.display().with_str(|s| err.push_str(s));
                 self.span_fatal(id_sp, err);
             }
             None => ()

View file

@@ -169,9 +169,9 @@ pub fn to_str(input: @ident_interner, t: &Token) -> ~str {
       /* Literals */
       LIT_CHAR(c) => {
           let mut res = ~"'";
-          do char::from_u32(c).unwrap().escape_default |c| {
+          char::from_u32(c).unwrap().escape_default(|c| {
               res.push_char(c);
-          }
+          });
           res.push_char('\'');
           res
       }

View file

@@ -704,7 +704,7 @@ pub fn print_struct(s: @ps,
     if ast_util::struct_def_is_tuple_like(struct_def) {
         if !struct_def.fields.is_empty() {
             popen(s);
-            do commasep(s, inconsistent, struct_def.fields) |s, field| {
+            commasep(s, inconsistent, struct_def.fields, |s, field| {
                 match field.node.kind {
                     ast::named_field(*) => fail!("unexpected named field"),
                     ast::unnamed_field => {
@@ -712,7 +712,7 @@ pub fn print_struct(s: @ps,
                         print_type(s, &field.node.ty);
                     }
                 }
-            }
+            });
             pclose(s);
         }
         word(s.s, ";");
@@ -1699,9 +1699,7 @@ pub fn print_pat(s: @ps, pat: &ast::Pat) {
         }
         ast::PatVec(ref before, slice, ref after) => {
            word(s.s, "[");
-            do commasep(s, inconsistent, *before) |s, &p| {
-                print_pat(s, p);
-            }
+            commasep(s, inconsistent, *before, |s, &p| print_pat(s, p));
            for &p in slice.iter() {
                if !before.is_empty() { word_space(s, ","); }
                match p {
@@ -1713,9 +1711,7 @@ pub fn print_pat(s: @ps, pat: &ast::Pat) {
                print_pat(s, p);
                if !after.is_empty() { word_space(s, ","); }
            }
-            do commasep(s, inconsistent, *after) |s, &p| {
-                print_pat(s, p);
-            }
+            commasep(s, inconsistent, *after, |s, &p| print_pat(s, p));
            word(s.s, "]");
        }
     }
@@ -1937,9 +1933,9 @@ pub fn print_view_path(s: @ps, vp: &ast::view_path) {
        ast::view_path_list(ref path, ref idents, _) => {
            print_path(s, path, false);
            word(s.s, "::{");
-            do commasep(s, inconsistent, (*idents)) |s, w| {
+            commasep(s, inconsistent, (*idents), |s, w| {
                print_ident(s, w.node.name);
-            }
+            });
            word(s.s, "}");
        }
     }
@@ -2053,9 +2049,7 @@ pub fn print_ty_fn(s: @ps,
     match id { Some(id) => { word(s.s, " "); print_ident(s, id); } _ => () }
     if opt_sigil != Some(ast::BorrowedSigil) {
-        do opt_bounds.as_ref().map |bounds| {
-            print_bounds(s, bounds, true);
-        };
+        opt_bounds.as_ref().map(|bounds| print_bounds(s, bounds, true));
     }
     match generics { Some(g) => print_generics(s, g), _ => () }
@@ -2157,9 +2151,7 @@ pub fn print_literal(s: @ps, lit: &ast::lit) {
        ast::lit_str(st, style) => print_string(s, st, style),
        ast::lit_char(ch) => {
            let mut res = ~"'";
-            do char::from_u32(ch).unwrap().escape_default |c| {
-                res.push_char(c);
-            }
+            char::from_u32(ch).unwrap().escape_default(|c| res.push_char(c));
            res.push_char('\'');
            word(s.s, res);
        }