
libsyntax: De-mut the parser. rs=demuting

Patrick Walton 2013-02-21 18:12:13 -08:00
commit 934c938f90
10 changed files with 332 additions and 333 deletions
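
The theme of the commit is visible in every hunk below: the Parser's cursor state (token, span, last_span, quote_depth) stops being read as a plainly mutable field and is instead reached through a managed mutable box, so call sites become `*self.token`, `*self.span`, and `*p.quote_depth += 1u` while the parser itself is passed around immutably. The 2013-era `@mut` types no longer compile; as a rough modern-Rust analogue of the same interior-mutability idea, here is a minimal sketch with a hypothetical Token enum and Cell-wrapped fields (names and structure are illustrative, not the actual libsyntax API):

use std::cell::Cell;

// Hypothetical stand-in for token::Token; the real libsyntax type is richer.
#[derive(Clone, Copy, PartialEq, Debug)]
enum Token {
    Ident(u32),
    Comma,
    Eof,
}

// Sketch of a "de-mutted" parser: the cursor state lives behind interior
// mutability, so the methods below take &self and read the current token
// explicitly, much like the `self.token` -> `*self.token` edits in this diff.
struct Parser {
    tokens: Vec<Token>,
    pos: Cell<usize>,
    token: Cell<Token>,
}

impl Parser {
    fn new(tokens: Vec<Token>) -> Parser {
        let first = tokens.first().copied().unwrap_or(Token::Eof);
        Parser { tokens, pos: Cell::new(0), token: Cell::new(first) }
    }

    // Advance the cursor; note &self rather than &mut self.
    fn bump(&self) {
        let next = self.pos.get() + 1;
        self.pos.set(next);
        self.token.set(self.tokens.get(next).copied().unwrap_or(Token::Eof));
    }

    // Consume `tok` if it is the current token, mirroring Parser::eat.
    fn eat(&self, tok: Token) -> bool {
        if self.token.get() == tok { self.bump(); true } else { false }
    }
}

fn main() {
    let p = Parser::new(vec![Token::Ident(1), Token::Comma]);
    assert!(!p.eat(Token::Comma)); // current token is Ident(1)
    p.bump();
    assert!(p.eat(Token::Comma));  // consumes the comma
    assert_eq!(p.token.get(), Token::Eof);
    println!("de-mut parser sketch OK");
}

The payoff of the pattern is that helpers like eat and expect no longer require exclusive access to the whole parser, which is exactly what the mechanical `self.token` to `*self.token` rewrites in the hunks below achieve.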

View file

@@ -339,7 +339,7 @@ pub fn get_exprs_from_tts(cx: ext_ctxt, tts: ~[ast::token_tree])
                                            cx.cfg(),
                                            tts);
     let mut es = ~[];
-    while p.token != token::EOF {
+    while *p.token != token::EOF {
         if es.len() != 0 {
             p.eat(token::COMMA);
         }

View file

@@ -25,7 +25,7 @@ pub trait proto_parser {
 pub impl proto_parser for parser::Parser {
     fn parse_proto(&self, id: ~str) -> protocol {
-        let proto = protocol(id, self.span);
+        let proto = protocol(id, *self.span);
         self.parse_seq_to_before_end(token::EOF, SeqSep {
             sep: None,
@@ -40,7 +40,7 @@ pub impl proto_parser for parser::Parser {
         let name = *self.interner.get(id);
         self.expect(token::COLON);
-        let dir = match copy self.token {
+        let dir = match *self.token {
             token::IDENT(n, _) => self.interner.get(n),
             _ => fail!()
         };
@@ -51,10 +51,11 @@ pub impl proto_parser for parser::Parser {
             _ => fail!()
         };
-        let typarms = if self.token == token::LT {
+        let typarms = if *self.token == token::LT {
             self.parse_ty_params()
-        }
-        else { ~[] };
+        } else {
+            ~[]
+        };
         let state = proto.add_state_poly(name, id, dir, typarms);
@@ -69,7 +70,7 @@ pub impl proto_parser for parser::Parser {
     fn parse_message(&self, state: state) {
         let mname = *self.interner.get(self.parse_ident());
-        let args = if self.token == token::LPAREN {
+        let args = if *self.token == token::LPAREN {
             self.parse_unspanned_seq(token::LPAREN,
                                      token::RPAREN, SeqSep {
                                          sep: Some(token::COMMA),
@@ -80,10 +81,10 @@ pub impl proto_parser for parser::Parser {
         self.expect(token::RARROW);
-        let next = match copy self.token {
+        let next = match *self.token {
             token::IDENT(_, _) => {
                 let name = *self.interner.get(self.parse_ident());
-                let ntys = if self.token == token::LT {
+                let ntys = if *self.token == token::LT {
                     self.parse_unspanned_seq(token::LT,
                                              token::GT, SeqSep {
                                                  sep: Some(token::COMMA),
@@ -101,7 +102,7 @@ pub impl proto_parser for parser::Parser {
             _ => self.fatal(~"invalid next state")
         };
-        state.add_message(mname, copy self.span, args, next);
+        state.add_message(mname, *self.span, args, next);
     }
 }

View file

@@ -49,6 +49,9 @@ pub trait gen_init {
 pub impl gen_send for message {
     fn gen_send(&mut self, cx: ext_ctxt, try: bool) -> @ast::item {
         debug!("pipec: gen_send");
+        let name = self.name();
+        let params = self.get_params();
         match *self {
             message(ref _id, span, ref tys, this, Some(ref next_state)) => {
                 debug!("pipec: next state exists");
@@ -67,7 +70,7 @@ pub impl gen_send for message {
                                                  args_ast);
                 let mut body = ~"{\n";
-                body += fmt!("use super::%s;\n", self.name());
+                body += fmt!("use super::%s;\n", name);
                 if this.proto.is_bounded() {
                     let (sp, rp) = match (this.dir, next.dir) {
@@ -96,7 +99,7 @@ pub impl gen_send for message {
                     body += fmt!("let %s = ::core::pipes::entangle();\n", pat);
                 }
                 body += fmt!("let message = %s(%s);\n",
-                             self.name(),
+                             name,
                              str::connect(vec::append_one(
                                  arg_names.map(|x| cx.str_of(*x)),
                                  ~"s"), ~", "));
@@ -121,13 +124,12 @@ pub impl gen_send for message {
                     rty = cx.ty_option(rty);
                 }
-                let name = cx.ident_of(if try { ~"try_" + self.name()
-                                              } else { self.name() } );
+                let name = cx.ident_of(if try { ~"try_" + name } else { name } );
                 cx.item_fn_poly(name,
                                 args_ast,
                                 rty,
-                                self.get_params(),
+                                params,
                                 cx.expr_block(body))
             }
@@ -156,10 +158,8 @@ pub impl gen_send for message {
                 };
                 let mut body = ~"{ ";
-                body += fmt!("use super::%s;\n", self.name());
-                body += fmt!("let message = %s%s;\n",
-                             self.name(),
-                             message_args);
+                body += fmt!("use super::%s;\n", name);
+                body += fmt!("let message = %s%s;\n", name, message_args);
                 if !try {
                     body += fmt!("::core::pipes::send(pipe, message);\n");
@@ -175,10 +175,7 @@ pub impl gen_send for message {
                 let body = cx.parse_expr(body);
-                let name = if try {
-                    ~"try_" + self.name()
-                }
-                else { self.name() };
+                let name = if try { ~"try_" + name } else { name };
                 cx.item_fn_poly(cx.ident_of(name),
                                 args_ast,
@@ -187,7 +184,7 @@ pub impl gen_send for message {
                                 } else {
                                     cx.ty_nil_ast_builder()
                                 },
-                                self.get_params(),
+                                params,
                                 cx.expr_block(body))
             }
         }

View file

@@ -515,7 +515,7 @@ fn expand_tts(cx: ext_ctxt,
     // try removing it when enough of them are gone.
     let p = parse::new_parser_from_tts(cx.parse_sess(), cx.cfg(), tts);
-    p.quote_depth += 1u;
+    *p.quote_depth += 1u;
     let tts = p.parse_all_token_trees();
     p.abort_if_errors();

View file

@@ -421,16 +421,16 @@ pub fn parse_nt(p: Parser, name: ~str) -> nonterminal {
       ~"expr" => token::nt_expr(p.parse_expr()),
       ~"ty" => token::nt_ty(p.parse_ty(false /* no need to disambiguate*/)),
       // this could be handled like a token, since it is one
-      ~"ident" => match copy p.token {
+      ~"ident" => match *p.token {
         token::IDENT(sn,b) => { p.bump(); token::nt_ident(sn,b) }
         _ => p.fatal(~"expected ident, found "
-                     + token::to_str(p.reader.interner(), copy p.token))
+                     + token::to_str(p.reader.interner(), *p.token))
       },
       ~"path" => token::nt_path(p.parse_path_with_tps(false)),
       ~"tt" => {
-        p.quote_depth += 1u; //but in theory, non-quoted tts might be useful
+        *p.quote_depth += 1u; //but in theory, non-quoted tts might be useful
         let res = token::nt_tt(@p.parse_token_tree());
-        p.quote_depth -= 1u;
+        *p.quote_depth -= 1u;
         res
       }
       ~"matchers" => token::nt_matchers(p.parse_matchers()),

View file

@@ -37,7 +37,7 @@ impl parser_attr for Parser {
     fn parse_outer_attributes() -> ~[ast::attribute] {
         let mut attrs: ~[ast::attribute] = ~[];
         loop {
-            match copy self.token {
+            match *self.token {
                 token::POUND => {
                     if self.look_ahead(1u) != token::LBRACKET {
                         break;
@@ -90,14 +90,14 @@ impl parser_attr for Parser {
         let mut inner_attrs: ~[ast::attribute] = ~[];
         let mut next_outer_attrs: ~[ast::attribute] = ~[];
         loop {
-            match copy self.token {
+            match *self.token {
                 token::POUND => {
                     if self.look_ahead(1u) != token::LBRACKET {
                         // This is an extension
                         break;
                     }
                     let attr = self.parse_attribute(ast::attr_inner);
-                    if self.token == token::SEMI {
+                    if *self.token == token::SEMI {
                         self.bump();
                         inner_attrs += ~[attr];
                     } else {
@@ -131,7 +131,7 @@ impl parser_attr for Parser {
     fn parse_meta_item() -> @ast::meta_item {
         let lo = self.span.lo;
         let name = self.id_to_str(self.parse_ident());
-        match self.token {
+        match *self.token {
             token::EQ => {
                 self.bump();
                 let lit = self.parse_lit();
@@ -157,7 +157,7 @@ impl parser_attr for Parser {
     }
     fn parse_optional_meta() -> ~[@ast::meta_item] {
-        match self.token {
+        match *self.token {
             token::LPAREN => return self.parse_meta_seq(),
             _ => return ~[]
         }

View file

@@ -53,25 +53,25 @@ pub fn token_to_str(reader: reader, ++token: token::Token) -> ~str {
 pub impl Parser {
     fn unexpected_last(t: token::Token) -> ! {
         self.span_fatal(
-            copy self.last_span,
+            *self.last_span,
             ~"unexpected token: `" + token_to_str(self.reader, t) + ~"`");
     }
     fn unexpected() -> ! {
         self.fatal(~"unexpected token: `"
-                   + token_to_str(self.reader, self.token) + ~"`");
+                   + token_to_str(self.reader, *self.token) + ~"`");
     }
     // expect and consume the token t. Signal an error if
     // the next token is not t.
     fn expect(t: token::Token) {
-        if self.token == t {
+        if *self.token == t {
             self.bump();
         } else {
             let mut s: ~str = ~"expected `";
             s += token_to_str(self.reader, t);
             s += ~"` but found `";
-            s += token_to_str(self.reader, self.token);
+            s += token_to_str(self.reader, *self.token);
             self.fatal(s + ~"`");
         }
     }
@@ -79,12 +79,12 @@ pub impl Parser {
     fn parse_ident() -> ast::ident {
         self.check_strict_keywords();
         self.check_reserved_keywords();
-        match copy self.token {
+        match *self.token {
             token::IDENT(i, _) => { self.bump(); return i; }
             token::INTERPOLATED(token::nt_ident(*)) => { self.bug(
                 ~"ident interpolation not converted to real token"); }
             _ => { self.fatal(~"expected ident, found `"
-                              + token_to_str(self.reader, self.token)
+                              + token_to_str(self.reader, *self.token)
                               + ~"`"); }
         }
     }
@@ -104,7 +104,7 @@ pub impl Parser {
     // consume token 'tok' if it exists. Returns true if the given
    // token was present, false otherwise.
     fn eat(tok: token::Token) -> bool {
-        return if self.token == tok { self.bump(); true } else { false };
+        return if *self.token == tok { self.bump(); true } else { false };
     }
     // Storing keywords as interned idents instead of strings would be nifty.
@@ -129,7 +129,7 @@ pub impl Parser {
     }
     fn is_keyword(word: ~str) -> bool {
-        self.token_is_keyword(word, self.token)
+        self.token_is_keyword(word, *self.token)
     }
     fn is_any_keyword(tok: token::Token) -> bool {
@@ -143,7 +143,7 @@ pub impl Parser {
     fn eat_keyword(word: ~str) -> bool {
         self.require_keyword(word);
-        let is_kw = match self.token {
+        let is_kw = match *self.token {
             token::IDENT(sid, false) => (word == *self.id_to_str(sid)),
             _ => false
         };
@@ -155,7 +155,7 @@ pub impl Parser {
         self.require_keyword(word);
         if !self.eat_keyword(word) {
             self.fatal(~"expected `" + word + ~"`, found `" +
-                       token_to_str(self.reader, self.token) +
+                       token_to_str(self.reader, *self.token) +
                        ~"`");
         }
     }
@@ -165,9 +165,9 @@ pub impl Parser {
     }
     fn check_strict_keywords() {
-        match self.token {
+        match *self.token {
             token::IDENT(_, false) => {
-                let w = token_to_str(self.reader, self.token);
+                let w = token_to_str(self.reader, *self.token);
                 self.check_strict_keywords_(w);
             }
             _ => ()
@@ -185,9 +185,9 @@ pub impl Parser {
     }
     fn check_reserved_keywords() {
-        match self.token {
+        match *self.token {
             token::IDENT(_, false) => {
-                let w = token_to_str(self.reader, self.token);
+                let w = token_to_str(self.reader, *self.token);
                 self.check_reserved_keywords_(w);
             }
             _ => ()
@@ -203,9 +203,9 @@ pub impl Parser {
     // expect and consume a GT. if a >> is seen, replace it
     // with a single > and continue.
     fn expect_gt() {
-        if self.token == token::GT {
+        if *self.token == token::GT {
             self.bump();
-        } else if self.token == token::BINOP(token::SHR) {
+        } else if *self.token == token::BINOP(token::SHR) {
             self.replace_token(token::GT,
                                self.span.lo + BytePos(1u),
                                self.span.hi);
@@ -213,7 +213,7 @@ pub impl Parser {
             let mut s: ~str = ~"expected `";
             s += token_to_str(self.reader, token::GT);
             s += ~"`, found `";
-            s += token_to_str(self.reader, self.token);
+            s += token_to_str(self.reader, *self.token);
             s += ~"`";
             self.fatal(s);
         }
@@ -225,8 +225,8 @@ pub impl Parser {
                                f: fn(Parser) -> T) -> ~[T] {
         let mut first = true;
         let mut v = ~[];
-        while self.token != token::GT
-              && self.token != token::BINOP(token::SHR) {
+        while *self.token != token::GT
+              && *self.token != token::BINOP(token::SHR) {
             match sep {
                 Some(ref t) => {
                     if first { first = false; }
@@ -276,7 +276,7 @@ pub impl Parser {
                                      f: fn(Parser) -> T) -> ~[T] {
         let mut first: bool = true;
         let mut v: ~[T] = ~[];
-        while self.token != ket {
+        while *self.token != ket {
             match sep.sep {
                 Some(ref t) => {
                     if first { first = false; }
@@ -284,7 +284,7 @@ pub impl Parser {
                 }
                 _ => ()
             }
-            if sep.trailing_sep_allowed && self.token == ket { break; }
+            if sep.trailing_sep_allowed && *self.token == ket { break; }
             v.push(f(self));
         }
         return v;
@@ -293,8 +293,8 @@ pub impl Parser {
     // parse a sequence, including the closing delimiter. The function
     // f must consume tokens until reaching the next separator or
     // closing bracket.
-    fn parse_unspanned_seq<T:Copy>(bra: token::Token,
-                                   ket: token::Token,
+    fn parse_unspanned_seq<T:Copy>(+bra: token::Token,
+                                   +ket: token::Token,
                                    sep: SeqSep,
                                    f: fn(Parser) -> T) -> ~[T] {
         self.expect(bra);

View file

@@ -141,7 +141,7 @@ pub fn parse_tts_from_source_str(name: ~str,
                                  sess: @mut ParseSess) -> ~[ast::token_tree] {
     let p = new_parser_from_source_str(sess, cfg, name,
                                        codemap::FssNone, source);
-    p.quote_depth += 1u;
+    *p.quote_depth += 1u;
     let r = p.parse_all_token_trees();
     p.abort_if_errors();
     return r;

View file

@@ -158,7 +158,7 @@ pub impl Parser {
     }
     fn is_obsolete_ident(ident: &str) -> bool {
-        self.token_is_obsolete_ident(ident, copy self.token)
+        self.token_is_obsolete_ident(ident, *self.token)
     }
     fn eat_obsolete_ident(ident: &str) -> bool {
@@ -172,7 +172,7 @@ pub impl Parser {
     fn try_parse_obsolete_struct_ctor() -> bool {
         if self.eat_obsolete_ident("new") {
-            self.obsolete(copy self.last_span, ObsoleteStructCtor);
+            self.obsolete(*self.last_span, ObsoleteStructCtor);
             self.parse_fn_decl(|p| p.parse_arg());
             self.parse_block();
             true
@@ -182,13 +182,13 @@ pub impl Parser {
     }
     fn try_parse_obsolete_with() -> bool {
-        if self.token == token::COMMA
+        if *self.token == token::COMMA
             && self.token_is_obsolete_ident("with",
                                             self.look_ahead(1u)) {
             self.bump();
         }
         if self.eat_obsolete_ident("with") {
-            self.obsolete(copy self.last_span, ObsoleteWith);
+            self.obsolete(*self.last_span, ObsoleteWith);
             self.parse_expr();
             true
         } else {
@@ -198,10 +198,10 @@ pub impl Parser {
     fn try_parse_obsolete_priv_section() -> bool {
         if self.is_keyword(~"priv") && self.look_ahead(1) == token::LBRACE {
-            self.obsolete(copy self.span, ObsoletePrivSection);
+            self.obsolete(*self.span, ObsoletePrivSection);
             self.eat_keyword(~"priv");
             self.bump();
-            while self.token != token::RBRACE {
+            while *self.token != token::RBRACE {
                 self.parse_single_class_item(ast::private);
             }
             self.bump();

File diff suppressed because it is too large.