libsyntax: Get rid of uses of move
and don't parse it.
parent 99b3c07b4e
commit e244f103c9
14 changed files with 123 additions and 130 deletions
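
For context, a minimal sketch of the pattern this commit cleans up (illustrative only; none of the names below appear in the diff). In the pre-1.0 Rust of this era, `move` could be written as an expression prefix to force a move instead of a copy, and until now the parser still accepted the keyword as an obsolete no-op. After this commit the keyword is simply omitted and no longer parsed; ownership of an owned value still transfers on assignment and return.

    // Hypothetical old-Rust sketch, not part of the diff below.
    fn take(v: ~[int]) -> ~[int] {
        // before this commit: let w = move v;
        let w = v;   // `v` is moved into `w` without the keyword
        w            // and moved out of the function
    }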
@@ -270,7 +270,7 @@ pub fn sort_meta_items(+items: ~[@ast::meta_item]) -> ~[@ast::meta_item] {
     // This is sort of stupid here, converting to a vec of mutables and back
     let mut v: ~[@ast::meta_item] = items;
     std::sort::quick_sort(v, lteq);
-    move v
+    v
 }
 
 pub fn remove_meta_items_by_name(items: ~[@ast::meta_item], name: ~str) ->
@@ -152,7 +152,7 @@ pub impl<D: Decoder> span: Decodable<D> {
 }
 
 pub pure fn spanned<T>(+lo: BytePos, +hi: BytePos, +t: T) -> spanned<T> {
-    respan(mk_sp(lo, hi), move t)
+    respan(mk_sp(lo, hi), t)
 }
 
 pub pure fn respan<T>(sp: span, +t: T) -> spanned<T> {
@@ -160,7 +160,7 @@ pub pure fn respan<T>(sp: span, +t: T) -> spanned<T> {
 }
 
 pub pure fn dummy_spanned<T>(+t: T) -> spanned<T> {
-    respan(dummy_sp(), move t)
+    respan(dummy_sp(), t)
 }
 
 /* assuming that we're not in macro expansion */
@@ -281,7 +281,7 @@ pub fn mk_ctxt(parse_sess: parse::parse_sess,
         mod_path: ~[],
         trace_mac: false
     };
-    move ((move imp) as @ext_ctxt)
+    ((imp) as @ext_ctxt)
 }
 
 pub fn expr_to_str(cx: ext_ctxt, expr: @ast::expr, err_msg: ~str) -> ~str {
@@ -73,7 +73,7 @@ pub fn mk_raw_path_(sp: span,
                  global: false,
                  idents: idents,
                  rp: None,
-                 types: move types }
+                 types: types }
 }
 pub fn mk_raw_path_global(sp: span, idents: ~[ast::ident]) -> @ast::path {
     @ast::path { span: sp,
@@ -156,7 +156,7 @@ pub fn mk_field(sp: span, f: &{ident: ast::ident, ex: @ast::expr})
 }
 pub fn mk_fields(sp: span, fields: ~[{ident: ast::ident, ex: @ast::expr}])
     -> ~[ast::field] {
-    move fields.map(|f| mk_field(sp, f))
+    fields.map(|f| mk_field(sp, f))
 }
 pub fn mk_rec_e(cx: ext_ctxt,
                 sp: span,
@@ -288,33 +288,33 @@ pub fn mk_pat_ident_with_binding_mode(cx: ext_ctxt,
                                       bm: ast::binding_mode) -> @ast::pat {
     let path = mk_raw_path(span, ~[ ident ]);
     let pat = ast::pat_ident(bm, path, None);
-    mk_pat(cx, span, move pat)
+    mk_pat(cx, span, pat)
 }
 pub fn mk_pat_enum(cx: ext_ctxt,
                    span: span,
                    path: @ast::path,
                    +subpats: ~[@ast::pat])
                 -> @ast::pat {
-    let pat = ast::pat_enum(path, Some(move subpats));
-    mk_pat(cx, span, move pat)
+    let pat = ast::pat_enum(path, Some(subpats));
+    mk_pat(cx, span, pat)
 }
 pub fn mk_pat_struct(cx: ext_ctxt,
                      span: span,
                      path: @ast::path,
                      +field_pats: ~[ast::field_pat])
                   -> @ast::pat {
-    let pat = ast::pat_struct(path, move field_pats, false);
-    mk_pat(cx, span, move pat)
+    let pat = ast::pat_struct(path, field_pats, false);
+    mk_pat(cx, span, pat)
 }
 pub fn mk_bool(cx: ext_ctxt, span: span, value: bool) -> @ast::expr {
     let lit_expr = ast::expr_lit(@codemap::spanned {
         node: ast::lit_bool(value),
         span: span });
-    build::mk_expr(cx, span, move lit_expr)
+    build::mk_expr(cx, span, lit_expr)
 }
 pub fn mk_stmt(cx: ext_ctxt, span: span, expr: @ast::expr) -> @ast::stmt {
     let stmt_ = ast::stmt_semi(expr, cx.next_id());
-    @codemap::spanned { node: move stmt_, span: span }
+    @codemap::spanned { node: stmt_, span: span }
 }
 pub fn mk_ty_path(cx: ext_ctxt,
                   span: span,
@@ -322,7 +322,7 @@ pub fn mk_ty_path(cx: ext_ctxt,
                -> @ast::Ty {
     let ty = build::mk_raw_path(span, idents);
     let ty = ast::ty_path(ty, cx.next_id());
-    let ty = @ast::Ty { id: cx.next_id(), node: move ty, span: span };
+    let ty = @ast::Ty { id: cx.next_id(), node: ty, span: span };
     ty
 }
 pub fn mk_ty_path_global(cx: ext_ctxt,
@@ -331,7 +331,7 @@ pub fn mk_ty_path_global(cx: ext_ctxt,
                -> @ast::Ty {
     let ty = build::mk_raw_path_global(span, idents);
     let ty = ast::ty_path(ty, cx.next_id());
-    let ty = @ast::Ty { id: cx.next_id(), node: move ty, span: span };
+    let ty = @ast::Ty { id: cx.next_id(), node: ty, span: span };
     ty
 }
 pub fn mk_simple_ty_path(cx: ext_ctxt,
@@ -95,19 +95,19 @@ fn expand_deriving(cx: ext_ctxt,
                                                       span,
                                                       struct_def,
                                                       item.ident,
-                                                      move ty_params));
+                                                      ty_params));
             }
             item_enum(ref enum_definition, copy ty_params) => {
                 result.push(expand_deriving_enum_def(cx,
                                                      span,
                                                      enum_definition,
                                                      item.ident,
-                                                     move ty_params));
+                                                     ty_params));
             }
             _ => ()
         }
     }
-    dvec::unwrap(move result)
+    dvec::unwrap(result)
 }
 
 fn create_impl_item(cx: ext_ctxt, span: span, +item: item_) -> @item {
@@ -115,7 +115,7 @@ fn create_impl_item(cx: ext_ctxt, span: span, +item: item_) -> @item {
         ident: clownshoes_extensions,
         attrs: ~[],
         id: cx.next_id(),
-        node: move item,
+        node: item,
         vis: public,
         span: span,
     }
@@ -161,7 +161,7 @@ fn create_eq_method(cx: ext_ctxt,
     };
 
     // Create the function declaration.
-    let fn_decl = build::mk_fn_decl(~[ move arg ], output_type);
+    let fn_decl = build::mk_fn_decl(~[ arg ], output_type);
 
     // Create the body block.
     let body_block = build::mk_simple_block(cx, span, body);
@@ -174,8 +174,8 @@ fn create_eq_method(cx: ext_ctxt,
         tps: ~[],
         self_ty: self_ty,
         purity: pure_fn,
-        decl: move fn_decl,
-        body: move body_block,
+        decl: fn_decl,
+        body: body_block,
         id: cx.next_id(),
         span: span,
         self_id: cx.next_id(),
@@ -194,14 +194,14 @@ fn create_self_type_with_params(cx: ext_ctxt,
         let self_ty_param = build::mk_simple_ty_path(cx,
                                                      span,
                                                      ty_param.ident);
-        self_ty_params.push(move self_ty_param);
+        self_ty_params.push(self_ty_param);
     }
-    let self_ty_params = dvec::unwrap(move self_ty_params);
+    let self_ty_params = dvec::unwrap(self_ty_params);
 
     // Create the type of `self`.
     let self_type = build::mk_raw_path_(span,
                                         ~[ type_ident ],
-                                        move self_ty_params);
+                                        self_ty_params);
     let self_type = ty_path(self_type, cx.next_id());
     @ast::Ty { id: cx.next_id(), node: self_type, span: span }
 }
@@ -221,9 +221,9 @@ fn create_derived_impl(cx: ext_ctxt,
                                                trait_path.map(|x| *x));
         let bounds = @~[ TraitTyParamBound(bound) ];
         let impl_ty_param = build::mk_ty_param(cx, ty_param.ident, bounds);
-        impl_ty_params.push(move impl_ty_param);
+        impl_ty_params.push(impl_ty_param);
     }
-    let impl_ty_params = dvec::unwrap(move impl_ty_params);
+    let impl_ty_params = dvec::unwrap(impl_ty_params);
 
     // Create the reference to the trait.
     let trait_path = ast::path {
@@ -233,12 +233,12 @@ fn create_derived_impl(cx: ext_ctxt,
         rp: None,
         types: ~[]
     };
-    let trait_path = @move trait_path;
+    let trait_path = @trait_path;
     let trait_ref = ast::trait_ref {
         path: trait_path,
         ref_id: cx.next_id()
     };
-    let trait_ref = @move trait_ref;
+    let trait_ref = @trait_ref;
 
     // Create the type of `self`.
     let self_type = create_self_type_with_params(cx,
@@ -247,11 +247,11 @@ fn create_derived_impl(cx: ext_ctxt,
                                                  ty_params);
 
     // Create the impl item.
-    let impl_item = item_impl(move impl_ty_params,
+    let impl_item = item_impl(impl_ty_params,
                               Some(trait_ref),
                               self_type,
                               methods.map(|x| *x));
-    return create_impl_item(cx, span, move impl_item);
+    return create_impl_item(cx, span, impl_item);
 }
 
 fn create_derived_eq_impl(cx: ext_ctxt,
@@ -310,11 +310,11 @@ fn create_iter_bytes_method(cx: ext_ctxt,
     let output_type = @ast::Ty { id: cx.next_id(), node: ty_nil, span: span };
 
     // Create the function declaration.
-    let inputs = ~[ move lsb0_arg, move f_arg ];
-    let fn_decl = build::mk_fn_decl(move inputs, output_type);
+    let inputs = ~[ lsb0_arg, f_arg ];
+    let fn_decl = build::mk_fn_decl(inputs, output_type);
 
     // Create the body block.
-    let body_block = build::mk_block_(cx, span, move statements);
+    let body_block = build::mk_block_(cx, span, statements);
 
     // Create the method.
     let self_ty = spanned { node: sty_region(m_imm), span: span };
@@ -325,8 +325,8 @@ fn create_iter_bytes_method(cx: ext_ctxt,
         tps: ~[],
         self_ty: self_ty,
         purity: pure_fn,
-        decl: move fn_decl,
-        body: move body_block,
+        decl: fn_decl,
+        body: body_block,
         id: cx.next_id(),
         span: span,
         self_id: cx.next_id(),
@@ -348,10 +348,10 @@ fn create_subpatterns(cx: ext_ctxt,
         // Create the subpattern.
         let subpath = build::mk_raw_path(span, ~[ ident ]);
         let subpat = pat_ident(bind_by_ref(m_imm), subpath, None);
-        let subpat = build::mk_pat(cx, span, move subpat);
+        let subpat = build::mk_pat(cx, span, subpat);
         subpats.push(subpat);
     }
-    return dvec::unwrap(move subpats);
+    return dvec::unwrap(subpats);
 }
 
 fn create_enum_variant_pattern(cx: ext_ctxt,
@@ -373,7 +373,7 @@ fn create_enum_variant_pattern(cx: ext_ctxt,
                                                prefix,
                                                variant_args.len());
 
-            return build::mk_pat_enum(cx, span, matching_path, move subpats);
+            return build::mk_pat_enum(cx, span, matching_path, subpats);
         }
         struct_variant_kind(struct_def) => {
             let matching_path = build::mk_raw_path(span, ~[ variant_ident ]);
@@ -508,7 +508,7 @@ fn expand_deriving_eq_struct_def(cx: ext_ctxt,
     return create_derived_eq_impl(cx,
                                   span,
                                   type_ident,
-                                  move ty_params,
+                                  ty_params,
                                   eq_method,
                                   ne_method);
 }
@@ -541,7 +541,7 @@ fn expand_deriving_eq_enum_def(cx: ext_ctxt,
     return create_derived_eq_impl(cx,
                                   span,
                                   type_ident,
-                                  move ty_params,
+                                  ty_params,
                                   eq_method,
                                   ne_method);
 }
@@ -561,7 +561,7 @@ fn expand_deriving_iter_bytes_struct_def(cx: ext_ctxt,
     return create_derived_iter_bytes_impl(cx,
                                           span,
                                           type_ident,
-                                          move ty_params,
+                                          ty_params,
                                           method);
 }
 
@@ -580,7 +580,7 @@ fn expand_deriving_iter_bytes_enum_def(cx: ext_ctxt,
     return create_derived_iter_bytes_impl(cx,
                                           span,
                                           type_ident,
-                                          move ty_params,
+                                          ty_params,
                                           method);
 }
 
@@ -671,8 +671,8 @@ fn expand_deriving_iter_bytes_struct_method(cx: ext_ctxt,
     }
 
     // Create the method itself.
-    let statements = dvec::unwrap(move statements);
-    return create_iter_bytes_method(cx, span, move statements);
+    let statements = dvec::unwrap(statements);
+    return create_iter_bytes_method(cx, span, statements);
 }
 
 fn expand_deriving_eq_enum_method(cx: ext_ctxt,
@@ -738,9 +738,9 @@ fn expand_deriving_eq_enum_method(cx: ext_ctxt,
             let matching_arm = ast::arm {
                 pats: ~[ matching_pat ],
                 guard: None,
-                body: move matching_body_block
+                body: matching_body_block
             };
-            other_arms.push(move matching_arm);
+            other_arms.push(matching_arm);
 
             // Maybe generate a non-matching case. If there is only one
             // variant then there will always be a match.
@@ -777,11 +777,11 @@ fn expand_deriving_eq_enum_method(cx: ext_ctxt,
             // Create the self pattern body.
             let other_expr = build::mk_path(cx, span, ~[ other_ident ]);
             let other_expr = build::mk_unary(cx, span, deref, other_expr);
-            let other_arms = dvec::unwrap(move other_arms);
-            let other_match_expr = expr_match(other_expr, move other_arms);
+            let other_arms = dvec::unwrap(other_arms);
+            let other_match_expr = expr_match(other_expr, other_arms);
             let other_match_expr = build::mk_expr(cx,
                                                   span,
-                                                  move other_match_expr);
+                                                  other_match_expr);
             let other_match_body_block = build::mk_simple_block(cx,
                                                                 span,
                                                                 other_match_expr);
@@ -792,15 +792,15 @@ fn expand_deriving_eq_enum_method(cx: ext_ctxt,
             guard: None,
             body: other_match_body_block,
         };
-        self_arms.push(move self_arm);
+        self_arms.push(self_arm);
     }
 
     // Create the method body.
     let self_expr = build::mk_path(cx, span, ~[ self_ident ]);
     let self_expr = build::mk_unary(cx, span, deref, self_expr);
-    let self_arms = dvec::unwrap(move self_arms);
-    let self_match_expr = expr_match(self_expr, move self_arms);
-    let self_match_expr = build::mk_expr(cx, span, move self_match_expr);
+    let self_arms = dvec::unwrap(self_arms);
+    let self_match_expr = expr_match(self_expr, self_arms);
+    let self_match_expr = build::mk_expr(cx, span, self_match_expr);
 
     // Create the method.
     return create_eq_method(cx,
@@ -848,8 +848,8 @@ fn expand_deriving_iter_bytes_enum_method(cx: ext_ctxt,
             }
 
             // Create the pattern body.
-            let stmts = dvec::unwrap(move stmts);
-            let match_body_block = build::mk_block_(cx, span, move stmts);
+            let stmts = dvec::unwrap(stmts);
+            let match_body_block = build::mk_block_(cx, span, stmts);
 
             // Create the arm.
             ast::arm {
@@ -53,7 +53,7 @@ pub fn analyze(proto: protocol, _cx: ext_ctxt) {
         for state.reachable |s| {
             bv.set(s.id, true);
         }
-        move bv
+        bv
     };
 
     let mut i = 0;
@@ -73,10 +73,10 @@ pub impl message: gen_send {
 
         if this.proto.is_bounded() {
             let (sp, rp) = match (this.dir, next.dir) {
-              (send, send) => (~"move c", ~"move s"),
+              (send, send) => (~"c", ~"s"),
              (send, recv) => (~"s", ~"c"),
              (recv, send) => (~"s", ~"c"),
-              (recv, recv) => (~"move c", ~"move s")
+              (recv, recv) => (~"c", ~"s")
            };
 
            body += ~"let b = pipe.reuse_buffer();\n";
@@ -89,10 +89,10 @@ pub impl message: gen_send {
        }
        else {
            let pat = match (this.dir, next.dir) {
-              (send, send) => "(move c, move s)",
+              (send, send) => "(c, s)",
              (send, recv) => "(s, c)",
              (recv, send) => "(s, c)",
-              (recv, recv) => "(move c, move s)"
+              (recv, recv) => "(c, s)"
            };
 
            body += fmt!("let %s = ::pipes::entangle();\n", pat);
@@ -100,17 +100,17 @@ pub impl message: gen_send {
            body += fmt!("let message = %s(%s);\n",
                         self.name(),
                         str::connect(vec::append_one(
-                           arg_names.map(|x| ~"move " + cx.str_of(*x)),
-                           ~"move s"), ~", "));
+                           arg_names.map(|x| cx.str_of(*x)),
+                           ~"s"), ~", "));
 
            if !try {
-                body += fmt!("::pipes::send(move pipe, move message);\n");
+                body += fmt!("::pipes::send(pipe, message);\n");
                // return the new channel
-                body += ~"move c }";
+                body += ~"c }";
            }
            else {
-                body += fmt!("if ::pipes::send(move pipe, move message) {\n \
-                                  ::pipes::rt::make_some(move c) \
+                body += fmt!("if ::pipes::send(pipe, message) {\n \
+                                  ::pipes::rt::make_some(c) \
                              } else { ::pipes::rt::make_none() } }");
            }
 
@@ -153,7 +153,7 @@ pub impl message: gen_send {
            ~""
        }
        else {
-            ~"(" + str::connect(arg_names.map(|x| ~"move " + *x),
+            ~"(" + str::connect(arg_names.map(|x| *x),
                                ~", ") + ~")"
        };
 
@@ -164,10 +164,10 @@ pub impl message: gen_send {
                     message_args);
 
        if !try {
-            body += fmt!("::pipes::send(move pipe, move message);\n");
+            body += fmt!("::pipes::send(pipe, message);\n");
            body += ~" }";
        } else {
-            body += fmt!("if ::pipes::send(move pipe, move message) \
+            body += fmt!("if ::pipes::send(pipe, message) \
                         { \
                             ::pipes::rt::make_some(()) \
                         } else { \
@@ -319,7 +319,7 @@ pub impl protocol: gen_init {
            recv => {
                quote_expr!({
                    let (s, c) = ::pipes::entangle();
-                    (move c, move s)
+                    (c, s)
                })
            }
        }
@@ -331,7 +331,7 @@ pub impl protocol: gen_init {
            recv => {
                quote_expr!({
                    let (s, c) = $body;
-                    (move c, move s)
+                    (c, s)
                })
            }
        }
@@ -375,7 +375,7 @@ pub impl protocol: gen_init {
 
        quote_expr!({
            let buffer = $buffer;
-            do ::pipes::entangle_buffer(move buffer) |buffer, data| {
+            do ::pipes::entangle_buffer(buffer) |buffer, data| {
                $entangle_body
            }
        })
@@ -207,7 +207,7 @@ pub impl protocol {
            span: self.span,
            dir: dir,
            ty_params: ty_params,
-            messages: move messages,
+            messages: messages,
            proto: self
        });
 
@@ -279,7 +279,7 @@ pub fn parse(sess: parse_sess,
                }
 
                new_pos.idx += 1;
-                cur_eis.push(move new_pos);
+                cur_eis.push(new_pos);
            }
 
            // can we go around again?
@@ -288,19 +288,19 @@ pub fn parse(sess: parse_sess,
                match copy ei.sep {
                  Some(ref t) if idx == len => { // we need a separator
                    if tok == (*t) { //pass the separator
-                      let ei_t = move ei;
+                      let ei_t = ei;
                      ei_t.idx += 1;
-                      next_eis.push(move ei_t);
+                      next_eis.push(ei_t);
                    }
                  }
                  _ => { // we don't need a separator
-                      let ei_t = move ei;
+                      let ei_t = ei;
                      ei_t.idx = 0;
-                      cur_eis.push(move ei_t);
+                      cur_eis.push(ei_t);
                  }
                }
            } else {
-                eof_eis.push(move ei);
+                eof_eis.push(ei);
            }
        } else {
            match copy ei.elts[idx].node {
@@ -315,26 +315,26 @@ pub fn parse(sess: parse_sess,
                        new_ei.matches[idx].push(@matched_seq(~[], sp));
                    }
 
-                    cur_eis.push(move new_ei);
+                    cur_eis.push(new_ei);
                }
 
                let matches = vec::map(ei.matches, // fresh, same size:
                                       |_m| DVec::<@named_match>());
-                let ei_t = move ei;
+                let ei_t = ei;
                cur_eis.push(~{
                    elts: (*matchers), sep: (*sep), mut idx: 0u,
-                    mut up: matcher_pos_up(Some(move ei_t)),
-                    matches: move matches,
+                    mut up: matcher_pos_up(Some(ei_t)),
+                    matches: matches,
                    match_lo: match_idx_lo, match_hi: match_idx_hi,
                    sp_lo: sp.lo
                });
            }
-            match_nonterminal(_,_,_) => { bb_eis.push(move ei) }
+            match_nonterminal(_,_,_) => { bb_eis.push(ei) }
            match_tok(ref t) => {
-                let ei_t = move ei;
+                let ei_t = ei;
                if (*t) == tok {
                    ei_t.idx += 1;
-                    next_eis.push(move ei_t);
+                    next_eis.push(ei_t);
                }
            }
        }
@@ -388,7 +388,7 @@ pub fn parse(sess: parse_sess,
            }
            _ => fail!()
        }
-        cur_eis.push(move ei);
+        cur_eis.push(ei);
 
        for rust_parser.tokens_consumed.times() || {
            rdr.next_token();
@@ -252,7 +252,7 @@ pub fn noop_fold_item_underscore(i: item_, fld: ast_fold) -> item_ {
        };
        item_trait(fold_ty_params(tps, fld),
                   vec::map(traits, |p| fold_trait_ref(*p, fld)),
-                   move methods)
+                   methods)
    }
    item_mac(ref m) => {
        // FIXME #2888: we might actually want to do something here.
@@ -151,7 +151,7 @@ pub fn parse_from_source_str<T>(f: fn (p: Parser) -> T,
        p.reader.fatal(~"expected end-of-string");
    }
    p.abort_if_errors();
-    move r
+    r
}
 
pub fn next_node_id(sess: parse_sess) -> node_id {
@@ -177,16 +177,16 @@ pub fn new_parser_from_file(sess: parse_sess,
                            path: &Path)
                         -> Result<Parser, ~str> {
    match io::read_whole_file_str(path) {
-      result::Ok(move src) => {
+      result::Ok(src) => {
 
-          let filemap = sess.cm.new_filemap(path.to_str(), @move src);
+          let filemap = sess.cm.new_filemap(path.to_str(), @src);
          let srdr = lexer::new_string_reader(sess.span_diagnostic,
                                              filemap,
                                              sess.interner);
          Ok(Parser(sess, cfg, srdr as reader))
 
      }
-      result::Err(move e) => Err(move e)
+      result::Err(e) => Err(e)
    }
}
 
@@ -195,8 +195,8 @@ pub fn new_parser_from_file(sess: parse_sess,
pub fn new_crate_parser_from_file(sess: parse_sess, cfg: ast::crate_cfg,
                                  path: &Path) -> Parser {
    match new_parser_from_file(sess, cfg, path) {
-      Ok(move parser) => move parser,
-      Err(move e) => {
+      Ok(parser) => parser,
+      Err(e) => {
        sess.span_diagnostic.handler().fatal(e)
      }
    }
@@ -207,8 +207,8 @@ pub fn new_crate_parser_from_file(sess: parse_sess, cfg: ast::crate_cfg,
pub fn new_sub_parser_from_file(sess: parse_sess, cfg: ast::crate_cfg,
                                path: &Path, sp: span) -> Parser {
    match new_parser_from_file(sess, cfg, path) {
-      Ok(move parser) => move parser,
-      Err(move e) => {
+      Ok(parser) => parser,
+      Err(e) => {
        sess.span_diagnostic.span_fatal(sp, e)
      }
    }
@@ -190,8 +190,8 @@ pub fn Parser(sess: parse_sess
    let interner = rdr.interner();
 
    Parser {
-        reader: move rdr,
-        interner: move interner,
+        reader: rdr,
+        interner: interner,
        sess: sess,
        cfg: cfg,
        token: tok0.tok,
@@ -1083,9 +1083,6 @@ pub impl Parser {
            let e = self.parse_expr();
            ex = expr_copy(e);
            hi = e.span.hi;
-        } else if self.eat_keyword(~"move") {
-            // XXX move keyword is no longer important, remove after snapshot
-            return self.parse_expr();
        } else if self.token == token::MOD_SEP ||
            is_ident(self.token) && !self.is_keyword(~"true") &&
            !self.is_keyword(~"false") {
@@ -1197,7 +1194,7 @@ pub impl Parser {
                hi = self.span.hi;
 
                let nd = expr_method_call(e, i, tys, es, NoSugar);
-                e = self.mk_expr(lo, hi, move nd);
+                e = self.mk_expr(lo, hi, nd);
            }
            _ => {
                e = self.mk_expr(lo, hi, expr_field(e, i, tys));
@@ -2099,10 +2096,6 @@ pub impl Parser {
        } else if self.eat_keyword(~"copy") {
            pat = self.parse_pat_ident(refutable, bind_by_copy);
        } else {
-            if self.eat_keyword(~"move") {
-                /* XXX---remove move keyword */
-            }
-
            // XXX---refutable match bindings should work same as let
            let binding_mode =
                if refutable {bind_infer} else {bind_by_copy};
@@ -2372,7 +2365,7 @@ pub impl Parser {
            self.obsolete(copy self.span, ObsoleteUnsafeBlock);
        }
        self.expect(token::LBRACE);
-        let {inner: move inner, next: move next} =
+        let {inner: inner, next: next} =
            maybe_parse_inner_attrs_and_next(self, parse_attrs);
        return (inner, self.parse_block_tail_(lo, default_blk, next));
    }
@@ -2397,8 +2390,8 @@ pub impl Parser {
        let mut stmts = ~[];
        let mut expr = None;
 
-        let {attrs_remaining: move attrs_remaining,
-             view_items: move view_items,
+        let {attrs_remaining: attrs_remaining,
+             view_items: view_items,
             items: items, _} =
            self.parse_items_and_view_items(first_item_attrs,
                                            IMPORTS_AND_ITEMS_ALLOWED, false);
@@ -2570,7 +2563,7 @@ pub impl Parser {
                }
            }
        }
-        return @move bounds;
+        return @bounds;
    }
 
    fn parse_ty_param() -> ty_param {
@@ -3083,13 +3076,13 @@ pub impl Parser {
    fn parse_mod_items(term: token::Token,
                       +first_item_attrs: ~[attribute]) -> _mod {
        // Shouldn't be any view items since we've already parsed an item attr
-        let {attrs_remaining: move attrs_remaining,
-             view_items: move view_items,
+        let {attrs_remaining: attrs_remaining,
+             view_items: view_items,
             items: starting_items, _} =
            self.parse_items_and_view_items(first_item_attrs,
                                            VIEW_ITEMS_AND_ITEMS_ALLOWED,
                                            true);
-        let mut items: ~[@item] = move starting_items;
+        let mut items: ~[@item] = starting_items;
 
        let mut first = true;
        while self.token != term {
@@ -3140,7 +3133,7 @@ pub impl Parser {
            self.bump();
            // This mod is in an external file. Let's go get it!
            let (m, attrs) = self.eval_src_mod(id, outer_attrs, id_span);
-            (id, m, Some(move attrs))
+            (id, m, Some(attrs))
        } else {
            self.push_mod_path(id, outer_attrs);
            self.expect(token::LBRACE);
@@ -3300,9 +3293,9 @@ pub impl Parser {
    fn parse_foreign_item(+attrs: ~[attribute]) -> @foreign_item {
        let vis = self.parse_visibility();
        if self.is_keyword(~"const") {
-            self.parse_item_foreign_const(vis, move attrs)
+            self.parse_item_foreign_const(vis, attrs)
        } else {
-            self.parse_item_foreign_fn( move attrs)
+            self.parse_item_foreign_fn(attrs)
        }
    }
 
@@ -3311,15 +3304,15 @@ pub impl Parser {
                              +first_item_attrs: ~[attribute])
                           -> foreign_mod {
        // Shouldn't be any view items since we've already parsed an item attr
-        let {attrs_remaining: move attrs_remaining,
-             view_items: move view_items,
+        let {attrs_remaining: attrs_remaining,
+             view_items: view_items,
             items: _,
-             foreign_items: move foreign_items} =
+             foreign_items: foreign_items} =
            self.parse_items_and_view_items(first_item_attrs,
                                            VIEW_ITEMS_AND_FOREIGN_ITEMS_ALLOWED,
                                            true);
 
-        let mut items: ~[@foreign_item] = move foreign_items;
+        let mut items: ~[@foreign_item] = foreign_items;
        let mut initial_attrs = attrs_remaining;
        while self.token != token::RBRACE {
            let attrs = vec::append(initial_attrs,
@@ -3329,7 +3322,7 @@ pub impl Parser {
        }
        ast::foreign_mod {
            sort: sort,
-            abi: move abi,
+            abi: abi,
            view_items: view_items,
            items: items
        }
@@ -3382,14 +3375,14 @@ pub impl Parser {
        // extern mod { ... }
        if items_allowed && self.eat(token::LBRACE) {
            let abi;
-            match move abi_opt {
-                Some(move found_abi) => abi = move found_abi,
+            match abi_opt {
+                Some(found_abi) => abi = found_abi,
                None => abi = special_idents::c_abi,
            }
 
            let extra_attrs = self.parse_inner_attrs_and_next();
            let m = self.parse_foreign_mod_items(sort,
-                                                 move abi,
+                                                 abi,
                                                 extra_attrs.next);
            self.expect(token::RBRACE);
 
@@ -3513,7 +3506,7 @@ pub impl Parser {
            ident = self.parse_ident();
            self.expect(token::LBRACE);
            let nested_enum_def = self.parse_enum_def(ty_params);
-            kind = enum_variant_kind(move nested_enum_def);
+            kind = enum_variant_kind(nested_enum_def);
            needs_comma = false;
        } else {
            ident = self.parse_value_ident();
@@ -147,11 +147,11 @@ pub fn mk_printer(out: @io::Writer, linewidth: uint) -> @mut Printer {
        space: linewidth as int,
        left: 0,
        right: 0,
-        token: move token,
-        size: move size,
+        token: token,
+        size: size,
        left_total: 0,
        right_total: 0,
-        scan_stack: move scan_stack,
+        scan_stack: scan_stack,
        scan_stack_empty: true,
        top: 0,
        bottom: 0,
@@ -24,7 +24,7 @@ pub fn mk<T:Eq IterBytes Hash Const Copy>() -> Interner<T> {
    let m = oldmap::HashMap::<T, uint>();
    let hi: hash_interner<T> =
        {map: m, vect: DVec()};
-    move ((move hi) as Interner::<T>)
+    ((hi) as Interner::<T>)
}
 
pub fn mk_prefill<T:Eq IterBytes Hash Const Copy>(init: &[T]) -> Interner<T> {
|
Loading…
Add table
Add a link
Reference in a new issue