Make vectors uglier ([]/~). Sorry. Should be temporary. Closes #2725.
parent c087aaf56b
commit 329eca6044
418 changed files with 4123 additions and 4034 deletions
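The edit is mechanical: every vector type [T] gains the suffix /~ and every vector literal [...] becomes [...]/~; a handful of `v += [x]` appends are also rewritten as vec::push(v, x) (hence the new import vec::{push}). A minimal before/after sketch in the pre-0.3 syntax of this tree, condensed from the parse_pats hunk below:

    // before: vector type and literals written bare
    fn parse_pats() -> [@pat] {
        let mut pats = [];
        loop {
            pats += [self.parse_pat()];
            if self.token == token::BINOP(token::OR) { self.bump(); }
            else { ret pats; }
        };
    }

    // after: the same code with the temporary /~ suffix
    fn parse_pats() -> [@pat]/~ {
        let mut pats = []/~;
        loop {
            pats += [self.parse_pat()]/~;
            if self.token == token::BINOP(token::OR) { self.bump(); }
            else { ret pats; }
        };
    }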
@@ -13,6 +13,7 @@ import common::{seq_sep_trailing_disallowed, seq_sep_trailing_allowed,
 seq_sep_none, token_to_str};
 import common::*;//{parser_common};
 import dvec::{dvec, extensions};
+import vec::{push};
 
 export file_type;
 export parser;
@@ -51,10 +52,10 @@ enum pexpr {
 */
 enum class_contents { ctor_decl(fn_decl, blk, codemap::span),
 dtor_decl(blk, codemap::span),
-members([@class_member]) }
+members([@class_member]/~) }
 
 type arg_or_capture_item = either<arg, capture_item>;
-type item_info = (ident, item_, option<[attribute]>);
+type item_info = (ident, item_, option<[attribute]/~>);
 
 class parser {
 let sess: parse_sess;
@@ -176,14 +177,14 @@ class parser {
 // functions can't have constrained types. Not sure whether
 // that would be desirable anyway. See bug for the story on
 // constrained types.
-let constrs: [@constr] = [];
+let constrs: [@constr]/~ = []/~;
 let (ret_style, ret_ty) = self.parse_ret_ty();
 ret {inputs: inputs, output: ret_ty,
 purity: purity, cf: ret_style,
 constraints: constrs};
 }
 
-fn parse_ty_methods() -> [ty_method] {
+fn parse_ty_methods() -> [ty_method]/~ {
 self.parse_unspanned_seq(token::LBRACE, token::RBRACE,
 seq_sep_none()) { |p|
 let attrs = p.parse_outer_attributes();
@@ -215,7 +216,7 @@ class parser {
 
 // if i is the jth ident in args, return j
 // otherwise, fail
-fn ident_index(args: [arg], i: ident) -> uint {
+fn ident_index(args: [arg]/~, i: ident) -> uint {
 let mut j = 0u;
 for args.each {|a| if a.ident == i { ret j; } j += 1u; }
 self.fatal("unbound variable `" + *i + "` in constraint arg");
@@ -235,7 +236,7 @@ class parser {
 ret @{node: carg, span: sp};
 }
 
-fn parse_constr_arg(args: [arg]) -> @constr_arg {
+fn parse_constr_arg(args: [arg]/~) -> @constr_arg {
 let sp = self.span;
 let mut carg = carg_base;
 if self.token == token::BINOP(token::STAR) {
@@ -247,7 +248,7 @@ class parser {
 ret @{node: carg, span: sp};
 }
 
-fn parse_ty_constr(fn_args: [arg]) -> @constr {
+fn parse_ty_constr(fn_args: [arg]/~) -> @constr {
 let lo = self.span.lo;
 let path = self.parse_path_without_tps();
 let args = self.parse_unspanned_seq(
@@ -261,7 +262,7 @@ class parser {
 fn parse_constr_in_type() -> @ty_constr {
 let lo = self.span.lo;
 let path = self.parse_path_without_tps();
-let args: [@ty_constr_arg] = self.parse_unspanned_seq(
+let args: [@ty_constr_arg]/~ = self.parse_unspanned_seq(
 token::LPAREN, token::RPAREN,
 seq_sep_trailing_disallowed(token::COMMA),
 {|p| p.parse_type_constr_arg()});
@@ -272,17 +273,17 @@ class parser {
 
 
 fn parse_constrs<T: copy>(pser: fn(parser) -> @constr_general<T>) ->
-[@constr_general<T>] {
-let mut constrs: [@constr_general<T>] = [];
+[@constr_general<T>]/~ {
+let mut constrs: [@constr_general<T>]/~ = []/~;
 loop {
 let constr = pser(self);
-constrs += [constr];
+constrs += [constr]/~;
 if self.token == token::COMMA { self.bump(); }
 else { ret constrs; }
 };
 }
 
-fn parse_type_constraints() -> [@ty_constr] {
+fn parse_type_constraints() -> [@ty_constr]/~ {
 ret self.parse_constrs({|p| p.parse_constr_in_type()});
 }
 
@@ -359,10 +360,10 @@ class parser {
 self.bump();
 ty_nil
 } else {
-let mut ts = [self.parse_ty(false)];
+let mut ts = [self.parse_ty(false)]/~;
 while self.token == token::COMMA {
 self.bump();
-ts += [self.parse_ty(false)];
+ts += [self.parse_ty(false)]/~;
 }
 let t = if vec::len(ts) == 1u { ts[0].node }
 else { ty_tup(ts) };
@@ -583,22 +584,22 @@ class parser {
 
 let lo = self.span.lo;
 let global = self.eat(token::MOD_SEP);
-let mut ids = [];
+let mut ids = []/~;
 loop {
 let is_not_last =
 self.look_ahead(2u) != token::LT
 && self.look_ahead(1u) == token::MOD_SEP;
 
 if is_not_last {
-ids += [parse_ident(self)];
+ids += [parse_ident(self)]/~;
 self.expect(token::MOD_SEP);
 } else {
-ids += [parse_last_ident(self)];
+ids += [parse_last_ident(self)]/~;
 break;
 }
 }
 @{span: mk_sp(lo, self.last_span.hi), global: global,
-idents: ids, rp: none, types: []}
+idents: ids, rp: none, types: []/~}
 }
 
 fn parse_value_path() -> @path {
@@ -639,7 +640,7 @@ class parser {
 self.parse_seq_lt_gt(some(token::COMMA),
 {|p| p.parse_ty(false)})
 } else {
-{node: [], span: path.span}
+{node: []/~, span: path.span}
 }
 };
 
@@ -715,9 +716,9 @@ class parser {
 let lit = @spanned(lo, hi, lit_nil);
 ret self.mk_pexpr(lo, hi, expr_lit(lit));
 }
-let mut es = [self.parse_expr()];
+let mut es = [self.parse_expr()]/~;
 while self.token == token::COMMA {
-self.bump(); es += [self.parse_expr()];
+self.bump(); es += [self.parse_expr()]/~;
 }
 hi = self.span.hi;
 self.expect(token::RPAREN);
@@ -733,7 +734,7 @@ class parser {
 if self.is_keyword("mut") ||
 is_plain_ident(self.token)
 && self.look_ahead(1u) == token::COLON {
-let mut fields = [self.parse_field(token::COLON)];
+let mut fields = [self.parse_field(token::COLON)]/~;
 let mut base = none;
 while self.token != token::RBRACE {
 // optional comma before "with"
@@ -750,7 +751,7 @@ class parser {
 // record ends by an optional trailing comma
 break;
 }
-fields += [self.parse_field(token::COLON)];
+fields += [self.parse_field(token::COLON)]/~;
 }
 hi = self.span.hi;
 self.expect(token::RBRACE);
@@ -997,7 +998,7 @@ class parser {
 self.expect(token::LT);
 self.parse_seq_to_gt(some(token::COMMA),
 {|p| p.parse_ty(false)})
-} else { [] };
+} else { []/~ };
 e = self.mk_pexpr(lo, hi, expr_field(self.to_expr(e),
 self.get_str(i),
 tys));
@@ -1027,13 +1028,13 @@ class parser {
 let blk = self.parse_fn_block_expr();
 alt e.node {
 expr_call(f, args, false) {
-e = pexpr(@{node: expr_call(f, args + [blk], true)
+e = pexpr(@{node: expr_call(f, args + [blk]/~, true)
 with *self.to_expr(e)});
 }
 _ {
 e = self.mk_pexpr(lo, self.last_span.hi,
 expr_call(self.to_expr(e),
-[blk], true));
+[blk]/~, true));
 }
 }
 }
@@ -1085,10 +1086,10 @@ class parser {
 ret alt self.token {
 token::LPAREN | token::LBRACE | token::LBRACKET {
 let ket = flip(self.token);
-tt_delim([parse_tt_flat(self, true)] +
+tt_delim([parse_tt_flat(self, true)]/~ +
 self.parse_seq_to_before_end(ket, seq_sep_none(),
 {|p| p.parse_token_tree()})
-+ [parse_tt_flat(self, true)])
++ [parse_tt_flat(self, true)]/~)
 }
 _ { parse_tt_flat(self, false) }
 };
@@ -1354,7 +1355,7 @@ class parser {
 let b_arg = vec::last(args);
 let last = self.mk_expr(b_arg.span.lo, b_arg.span.hi,
 ctor(b_arg));
-@{node: expr_call(f, vec::init(args) + [last], true)
+@{node: expr_call(f, vec::init(args) + [last]/~, true)
 with *call}
 }
 _ {
@@ -1385,14 +1386,14 @@ class parser {
 else { alt_exhaustive };
 let discriminant = self.parse_expr();
 self.expect(token::LBRACE);
-let mut arms: [arm] = [];
+let mut arms: [arm]/~ = []/~;
 while self.token != token::RBRACE {
 let pats = self.parse_pats();
 let mut guard = none;
 if self.eat_keyword("if") { guard = some(self.parse_expr()); }
 if self.token == token::FAT_ARROW { self.bump(); }
 let blk = self.parse_block();
-arms += [{pats: pats, guard: guard, body: blk}];
+arms += [{pats: pats, guard: guard, body: blk}]/~;
 }
 let mut hi = self.span.hi;
 self.bump();
@@ -1434,10 +1435,10 @@ class parser {
 }
 }
 
-fn parse_pats() -> [@pat] {
-let mut pats = [];
+fn parse_pats() -> [@pat]/~ {
+let mut pats = []/~;
 loop {
-pats += [self.parse_pat()];
+pats += [self.parse_pat()]/~;
 if self.token == token::BINOP(token::OR) { self.bump(); }
 else { ret pats; }
 };
@@ -1463,7 +1464,7 @@ class parser {
 }
 token::LBRACE {
 self.bump();
-let mut fields = [];
+let mut fields = []/~;
 let mut etc = false;
 let mut first = true;
 while self.token != token::RBRACE {
@@ -1498,7 +1499,7 @@ class parser {
 node: pat_ident(fieldpath, none),
 span: mk_sp(lo, hi)};
 }
-fields += [{ident: fieldname, pat: subpat}];
+fields += [{ident: fieldname, pat: subpat}]/~;
 }
 hi = self.span.hi;
 self.bump();
@@ -1513,10 +1514,10 @@ class parser {
 let expr = self.mk_expr(lo, hi, expr_lit(lit));
 pat = pat_lit(expr);
 } else {
-let mut fields = [self.parse_pat()];
+let mut fields = [self.parse_pat()]/~;
 while self.token == token::COMMA {
 self.bump();
-fields += [self.parse_pat()];
+fields += [self.parse_pat()]/~;
 }
 if vec::len(fields) == 1u { self.expect(token::COMMA); }
 hi = self.span.hi;
@@ -1548,7 +1549,7 @@ class parser {
 } else {
 let enum_path = self.parse_path_with_tps(true);
 hi = enum_path.span.hi;
-let mut args: [@pat] = [];
+let mut args: [@pat]/~ = []/~;
 let mut star_pat = false;
 alt self.token {
 token::LPAREN {
@@ -1604,9 +1605,9 @@ class parser {
 fn parse_let() -> @decl {
 let is_mutbl = self.eat_keyword("mut");
 let lo = self.span.lo;
-let mut locals = [self.parse_local(is_mutbl, true)];
+let mut locals = [self.parse_local(is_mutbl, true)]/~;
 while self.eat(token::COMMA) {
-locals += [self.parse_local(is_mutbl, true)];
+locals += [self.parse_local(is_mutbl, true)]/~;
 }
 ret @spanned(lo, self.last_span.hi, decl_local(locals));
 }
@@ -1628,8 +1629,8 @@ class parser {
 span: mk_sp(lo, self.last_span.hi)};
 }
 
-fn parse_stmt(+first_item_attrs: [attribute]) -> @stmt {
-fn check_expected_item(p: parser, current_attrs: [attribute]) {
+fn parse_stmt(+first_item_attrs: [attribute]/~) -> @stmt {
+fn check_expected_item(p: parser, current_attrs: [attribute]/~) {
 // If we have attributes then we should have an item
 if vec::is_not_empty(current_attrs) {
 p.fatal("expected item");
@@ -1645,7 +1646,7 @@ class parser {
 } else {
 let mut item_attrs;
 alt self.parse_outer_attrs_or_ext(first_item_attrs) {
-none { item_attrs = []; }
+none { item_attrs = []/~; }
 some(left(attrs)) { item_attrs = attrs; }
 some(right(ext)) {
 ret @spanned(lo, ext.span.hi, stmt_expr(ext, self.get_id()));
@@ -1685,14 +1686,15 @@ class parser {
 ret blk;
 }
 
-fn parse_inner_attrs_and_block(parse_attrs: bool) -> ([attribute], blk) {
+fn parse_inner_attrs_and_block(parse_attrs: bool)
+-> ([attribute]/~, blk) {
 
 fn maybe_parse_inner_attrs_and_next(p: parser, parse_attrs: bool) ->
-{inner: [attribute], next: [attribute]} {
+{inner: [attribute]/~, next: [attribute]/~} {
 if parse_attrs {
 p.parse_inner_attrs_and_next()
 } else {
-{inner: [], next: []}
+{inner: []/~, next: []/~}
 }
 }
 
@@ -1727,12 +1729,12 @@ class parser {
 // necessary, and this should take a qualifier.
 // some blocks start with "#{"...
 fn parse_block_tail(lo: uint, s: blk_check_mode) -> blk {
-self.parse_block_tail_(lo, s, [])
+self.parse_block_tail_(lo, s, []/~)
 }
 
 fn parse_block_tail_(lo: uint, s: blk_check_mode,
-+first_item_attrs: [attribute]) -> blk {
-let mut stmts = [];
++first_item_attrs: [attribute]/~) -> blk {
+let mut stmts = []/~;
 let mut expr = none;
 let {attrs_remaining, view_items} =
 self.parse_view(first_item_attrs, true);
@@ -1749,13 +1751,14 @@ class parser {
 }
 _ {
 let stmt = self.parse_stmt(initial_attrs);
-initial_attrs = [];
+initial_attrs = []/~;
 alt stmt.node {
 stmt_expr(e, stmt_id) { // Expression without semicolon:
 alt self.token {
 token::SEMI {
 self.bump();
-stmts += [@{node: stmt_semi(e, stmt_id) with *stmt}];
+push(stmts,
+@{node: stmt_semi(e, stmt_id) with *stmt});
 }
 token::RBRACE {
 expr = some(e);
@@ -1766,13 +1769,13 @@ class parser {
 but found '"
 + token_to_str(self.reader, t) + "'");
 }
-stmts += [stmt];
+stmts += [stmt]/~;
 }
 }
 }
 
 _ { // All other kinds of statements:
-stmts += [stmt];
+stmts += [stmt]/~;
 
 if classify::stmt_ends_with_semi(*stmt) {
 self.expect(token::SEMI);
@@ -1790,30 +1793,32 @@ class parser {
 }
 
 fn parse_ty_param() -> ty_param {
-let mut bounds = [];
+let mut bounds = []/~;
 let ident = self.parse_ident();
 if self.eat(token::COLON) {
 while self.token != token::COMMA && self.token != token::GT {
-if self.eat_keyword("send") { bounds += [bound_send]; }
-else if self.eat_keyword("copy") { bounds += [bound_copy]; }
-else if self.eat_keyword("const") { bounds += [bound_const]; }
-else { bounds += [bound_iface(self.parse_ty(false))]; }
+if self.eat_keyword("send") { push(bounds, bound_send); }
+else if self.eat_keyword("copy") { push(bounds, bound_copy) }
+else if self.eat_keyword("const") {
+push(bounds, bound_const)
+}
+else { push(bounds, bound_iface(self.parse_ty(false))); }
 }
 }
 ret {ident: ident, id: self.get_id(), bounds: @bounds};
 }
 
-fn parse_ty_params() -> [ty_param] {
+fn parse_ty_params() -> [ty_param]/~ {
 if self.eat(token::LT) {
 self.parse_seq_to_gt(some(token::COMMA), {|p| p.parse_ty_param()})
-} else { [] }
+} else { []/~ }
 }
 
 fn parse_fn_decl(purity: purity,
 parse_arg_fn: fn(parser) -> arg_or_capture_item)
 -> (fn_decl, capture_clause) {
 
-let args_or_capture_items: [arg_or_capture_item] =
+let args_or_capture_items: [arg_or_capture_item]/~ =
 self.parse_unspanned_seq(
 token::LPAREN, token::RPAREN,
 seq_sep_trailing_disallowed(token::COMMA), parse_arg_fn);
@@ -1824,7 +1829,7 @@ class parser {
 // Use the args list to translate each bound variable
 // mentioned in a constraint to an arg index.
 // Seems weird to do this in the parser, but I'm not sure how else to.
-let mut constrs = [];
+let mut constrs = []/~;
 if self.token == token::COLON {
 self.bump();
 constrs = self.parse_constrs({|p| p.parse_ty_constr(inputs) });
@@ -1840,7 +1845,7 @@ class parser {
 fn parse_fn_block_decl() -> (fn_decl, capture_clause) {
 let inputs_captures = {
 if self.eat(token::OROR) {
-[]
+[]/~
 } else {
 self.parse_unspanned_seq(
 token::BINOP(token::OR), token::BINOP(token::OR),
@@ -1857,11 +1862,11 @@ class parser {
 output: output,
 purity: impure_fn,
 cf: return_val,
-constraints: []},
+constraints: []/~},
 @either::rights(inputs_captures));
 }
 
-fn parse_fn_header() -> {ident: ident, tps: [ty_param]} {
+fn parse_fn_header() -> {ident: ident, tps: [ty_param]/~} {
 let id = self.parse_value_ident();
 let ty_params = self.parse_ty_params();
 ret {ident: id, tps: ty_params};
@@ -1869,7 +1874,7 @@ class parser {
 
 fn mk_item(lo: uint, hi: uint, +ident: ident,
 +node: item_, vis: visibility,
-+attrs: [attribute]) -> @item {
++attrs: [attribute]/~) -> @item {
 ret @{ident: ident,
 attrs: attrs,
 id: self.get_id(),
@@ -1922,9 +1927,9 @@ class parser {
 }
 
 // Parses three variants (with the region/type params always optional):
-// impl /&<T: copy> of to_str for [T] { ... }
-// impl name/&<T> of to_str for [T] { ... }
-// impl name/&<T> for [T] { ... }
+// impl /&<T: copy> of to_str for [T]/~ { ... }
+// impl name/&<T> of to_str for [T]/~ { ... }
+// impl name/&<T> for [T]/~ { ... }
 fn parse_item_impl() -> item_info {
 fn wrap_path(p: parser, pt: @path) -> @ty {
 @{id: p.get_id(), node: ty_path(pt, p.get_id()), span: pt.span}
@@ -1936,7 +1941,7 @@ class parser {
 (none, self.parse_region_param(), self.parse_ty_params())
 }
 else if self.is_keyword("of") {
-(none, rp_none, [])
+(none, rp_none, []/~)
 } else {
 let id = self.parse_ident();
 let rp = self.parse_region_param();
@@ -1956,10 +1961,10 @@ class parser {
 };
 self.expect_keyword("for");
 let ty = self.parse_ty(false);
-let mut meths = [];
+let mut meths = []/~;
 self.expect(token::LBRACE);
 while !self.eat(token::RBRACE) {
-meths += [self.parse_method(public)];
+meths += [self.parse_method(public)]/~;
 }
 (ident, item_impl(tps, rp, ifce, ty, meths), none)
 }
@@ -1969,7 +1974,7 @@ class parser {
 // the return type of the ctor function.
 fn ident_to_path_tys(i: ident,
 rp: region_param,
-typarams: [ty_param]) -> @path {
+typarams: [ty_param]/~) -> @path {
 let s = self.last_span;
 
 // Hack. But then, this whole function is in service of a hack.
@@ -1978,7 +1983,7 @@ class parser {
 rp_self { some(self.region_from_name(some(@"self"))) }
 };
 
-@{span: s, global: false, idents: [i],
+@{span: s, global: false, idents: [i]/~,
 rp: a_r,
 types: vec::map(typarams, {|tp|
 @{id: self.get_id(),
@@ -1992,7 +1997,7 @@ class parser {
 id: self.get_id()}
 }
 
-fn parse_iface_ref_list() -> [@iface_ref] {
+fn parse_iface_ref_list() -> [@iface_ref]/~ {
 self.parse_seq_to_before_end(
 token::LBRACE, seq_sep_trailing_disallowed(token::COMMA),
 {|p| p.parse_iface_ref()})
@@ -2003,11 +2008,11 @@ class parser {
 let rp = self.parse_region_param();
 let ty_params = self.parse_ty_params();
 let class_path = self.ident_to_path_tys(class_name, rp, ty_params);
-let ifaces : [@iface_ref] = if self.eat(token::COLON)
+let ifaces : [@iface_ref]/~ = if self.eat(token::COLON)
 { self.parse_iface_ref_list() }
-else { [] };
+else { []/~ };
 self.expect(token::LBRACE);
-let mut ms: [@class_member] = [];
+let mut ms: [@class_member]/~ = []/~;
 let ctor_id = self.get_id();
 let mut the_ctor : option<(fn_decl, blk, codemap::span)> = none;
 let mut the_dtor : option<(blk, codemap::span)> = none;
@@ -2092,16 +2097,16 @@ class parser {
 }
 else if self.eat_keyword("priv") {
 self.expect(token::LBRACE);
-let mut results = [];
+let mut results = []/~;
 while self.token != token::RBRACE {
-results += [self.parse_single_class_item(private)];
+results += [self.parse_single_class_item(private)]/~;
 }
 self.bump();
 ret members(results);
 }
 else {
 // Probably need to parse attrs
-ret members([self.parse_single_class_item(public)]);
+ret members([self.parse_single_class_item(public)]/~);
 }
 }
 
@@ -2112,11 +2117,11 @@ class parser {
 }
 
 fn parse_mod_items(term: token::token,
-+first_item_attrs: [attribute]) -> _mod {
++first_item_attrs: [attribute]/~) -> _mod {
 // Shouldn't be any view items since we've already parsed an item attr
 let {attrs_remaining, view_items} =
 self.parse_view(first_item_attrs, false);
-let mut items: [@item] = [];
+let mut items: [@item]/~ = []/~;
 let mut first = true;
 while self.token != term {
 let mut attrs = self.parse_outer_attributes();
@@ -2124,7 +2129,7 @@ class parser {
 #debug["parse_mod_items: parse_item(attrs=%?)", attrs];
 let vis = self.parse_visibility(private);
 alt self.parse_item(attrs, vis) {
-some(i) { items += [i]; }
+some(i) { items += [i]/~; }
 _ {
 self.fatal("expected item but found '" +
 token_to_str(self.reader, self.token) + "'");
@@ -2160,7 +2165,7 @@ class parser {
 (id, item_mod(m), some(inner_attrs.inner))
 }
 
-fn parse_item_native_fn(+attrs: [attribute],
+fn parse_item_native_fn(+attrs: [attribute]/~,
 purity: purity) -> @native_item {
 let lo = self.last_span.lo;
 let t = self.parse_fn_header();
@@ -2186,22 +2191,22 @@ class parser {
 else { self.unexpected(); }
 }
 
-fn parse_native_item(+attrs: [attribute]) ->
+fn parse_native_item(+attrs: [attribute]/~) ->
 @native_item {
 self.parse_item_native_fn(attrs, self.parse_fn_purity())
 }
 
-fn parse_native_mod_items(+first_item_attrs: [attribute]) ->
+fn parse_native_mod_items(+first_item_attrs: [attribute]/~) ->
 native_mod {
 // Shouldn't be any view items since we've already parsed an item attr
 let {attrs_remaining, view_items} =
 self.parse_view(first_item_attrs, false);
-let mut items: [@native_item] = [];
+let mut items: [@native_item]/~ = []/~;
 let mut initial_attrs = attrs_remaining;
 while self.token != token::RBRACE {
 let attrs = initial_attrs + self.parse_outer_attributes();
-initial_attrs = [];
-items += [self.parse_native_item(attrs)];
+initial_attrs = []/~;
+items += [self.parse_native_item(attrs)]/~;
 }
 ret {view_items: view_items,
 items: items};
@@ -2246,7 +2251,7 @@ class parser {
 let id = self.parse_ident();
 let rp = self.parse_region_param();
 let ty_params = self.parse_ty_params();
-let mut variants: [variant] = [];
+let mut variants: [variant]/~ = []/~;
 // Newtype syntax
 if self.token == token::EQ {
 self.check_restricted_keywords_(*id);
@@ -2256,12 +2261,12 @@ class parser {
 let variant =
 spanned(ty.span.lo, ty.span.hi,
 {name: id,
-attrs: [],
-args: [{ty: ty, id: self.get_id()}],
+attrs: []/~,
+args: [{ty: ty, id: self.get_id()}]/~,
 id: self.get_id(),
 disr_expr: none,
 vis: public});
-ret (id, item_enum([variant], ty_params, rp), none);
+ret (id, item_enum([variant]/~, ty_params, rp), none);
 }
 self.expect(token::LBRACE);
 
@@ -2272,7 +2277,7 @@ class parser {
 let vlo = self.span.lo;
 let vis = self.parse_visibility(default_vis);
 let ident = self.parse_value_ident();
-let mut args = [], disr_expr = none;
+let mut args = []/~, disr_expr = none;
 if self.token == token::LPAREN {
 all_nullary = false;
 let arg_tys = self.parse_unspanned_seq(
@@ -2280,7 +2285,7 @@ class parser {
 seq_sep_trailing_disallowed(token::COMMA),
 {|p| p.parse_ty(false)});
 for arg_tys.each {|ty|
-args += [{ty: ty, id: self.get_id()}];
+args += [{ty: ty, id: self.get_id()}]/~;
 }
 } else if self.eat(token::EQ) {
 have_disr = true;
@@ -2290,7 +2295,7 @@ class parser {
 let vr = {name: ident, attrs: variant_attrs,
 args: args, id: self.get_id(),
 disr_expr: disr_expr, vis: vis};
-variants += [spanned(vlo, self.last_span.hi, vr)];
+variants += [spanned(vlo, self.last_span.hi, vr)]/~;
 
 if !self.eat(token::COMMA) { break; }
 }
@@ -2333,7 +2338,7 @@ class parser {
 }
 }
 
-fn parse_item(+attrs: [attribute], vis: visibility)
+fn parse_item(+attrs: [attribute]/~, vis: visibility)
 -> option<@item> {
 let lo = self.span.lo;
 let (ident, item_, extra_attrs) = if self.eat_keyword("const") {
@@ -2384,20 +2389,20 @@ class parser {
 fn parse_view_path() -> @view_path {
 let lo = self.span.lo;
 let first_ident = self.parse_ident();
-let mut path = [first_ident];
+let mut path = [first_ident]/~;
 #debug("parsed view_path: %s", *first_ident);
 alt self.token {
 token::EQ {
 // x = foo::bar
 self.bump();
-path = [self.parse_ident()];
+path = [self.parse_ident()]/~;
 while self.token == token::MOD_SEP {
 self.bump();
 let id = self.parse_ident();
-path += [id];
+path += [id]/~;
 }
 let path = @{span: mk_sp(lo, self.span.hi), global: false,
-idents: path, rp: none, types: []};
+idents: path, rp: none, types: []/~};
 ret @spanned(lo, self.span.hi,
 view_path_simple(first_ident, path, self.get_id()));
 }
@@ -2411,7 +2416,7 @@ class parser {
 
 token::IDENT(i, _) {
 self.bump();
-path += [self.get_str(i)];
+path += [self.get_str(i)]/~;
 }
 
 // foo::bar::{a,b,c}
@@ -2422,7 +2427,7 @@ class parser {
 {|p| p.parse_path_list_ident()});
 let path = @{span: mk_sp(lo, self.span.hi),
 global: false, idents: path,
-rp: none, types: []};
+rp: none, types: []/~};
 ret @spanned(lo, self.span.hi,
 view_path_list(path, idents, self.get_id()));
 }
@@ -2432,7 +2437,7 @@ class parser {
 self.bump();
 let path = @{span: mk_sp(lo, self.span.hi),
 global: false, idents: path,
-rp: none, types: []};
+rp: none, types: []/~};
 ret @spanned(lo, self.span.hi,
 view_path_glob(path, self.get_id()));
 }
@@ -2445,16 +2450,16 @@ class parser {
 }
 let last = path[vec::len(path) - 1u];
 let path = @{span: mk_sp(lo, self.span.hi), global: false,
-idents: path, rp: none, types: []};
+idents: path, rp: none, types: []/~};
 ret @spanned(lo, self.span.hi,
 view_path_simple(last, path, self.get_id()));
 }
 
-fn parse_view_paths() -> [@view_path] {
-let mut vp = [self.parse_view_path()];
+fn parse_view_paths() -> [@view_path]/~ {
+let mut vp = [self.parse_view_path()]/~;
 while self.token == token::COMMA {
 self.bump();
-vp += [self.parse_view_path()];
+vp += [self.parse_view_path()]/~;
 }
 ret vp;
 }
@@ -2468,7 +2473,7 @@ class parser {
 || self.token_is_keyword("export", tok)
 }
 
-fn parse_view_item(+attrs: [attribute]) -> @view_item {
+fn parse_view_item(+attrs: [attribute]/~) -> @view_item {
 let lo = self.span.lo, vis = self.parse_visibility(private);
 let node = if self.eat_keyword("use") {
 self.parse_use()
@@ -2482,14 +2487,14 @@ class parser {
 vis: vis, span: mk_sp(lo, self.last_span.hi)}
 }
 
-fn parse_view(+first_item_attrs: [attribute],
-only_imports: bool) -> {attrs_remaining: [attribute],
-view_items: [@view_item]} {
+fn parse_view(+first_item_attrs: [attribute]/~,
+only_imports: bool) -> {attrs_remaining: [attribute]/~,
+view_items: [@view_item]/~} {
 let mut attrs = first_item_attrs + self.parse_outer_attributes();
-let mut items = [];
+let mut items = []/~;
 while if only_imports { self.is_keyword("import") }
 else { self.is_view_item() } {
-items += [self.parse_view_item(attrs)];
+items += [self.parse_view_item(attrs)]/~;
 attrs = self.parse_outer_attributes();
 }
 {attrs_remaining: attrs, view_items: items}
@@ -2502,7 +2507,7 @@ class parser {
 let first_item_outer_attrs = crate_attrs.next;
 let m = self.parse_mod_items(token::EOF, first_item_outer_attrs);
 ret @spanned(lo, self.span.lo,
-{directives: [],
+{directives: []/~,
 module: m,
 attrs: crate_attrs.inner,
 config: self.cfg});
@@ -2523,7 +2528,7 @@ class parser {
 //
 // Each directive imperatively extends its environment with 0 or more
 // items.
-fn parse_crate_directive(first_outer_attr: [attribute]) ->
+fn parse_crate_directive(first_outer_attr: [attribute]/~) ->
 crate_directive {
 
 // Collect the next attributes
@@ -2564,8 +2569,8 @@ class parser {
 }
 
 fn parse_crate_directives(term: token::token,
-first_outer_attr: [attribute]) ->
-[@crate_directive] {
+first_outer_attr: [attribute]/~) ->
+[@crate_directive]/~ {
 
 // This is pretty ugly. If we have an outer attribute then we can't
 // accept seeing the terminator next, so if we do see it then fail the
@@ -2574,12 +2579,12 @@ class parser {
 self.expect_keyword("mod");
 }
 
-let mut cdirs: [@crate_directive] = [];
+let mut cdirs: [@crate_directive]/~ = []/~;
 let mut first_outer_attr = first_outer_attr;
 while self.token != term {
 let cdir = @self.parse_crate_directive(first_outer_attr);
-cdirs += [cdir];
-first_outer_attr = [];
+cdirs += [cdir]/~;
+first_outer_attr = []/~;
 }
 ret cdirs;
 }