auto merge of #4922 : jbclements/rust/add-deriving-eq-to-asts, r=catamorphism

r?

Apply deriving_eq to the data structures in ast.rs, and get rid of the custom definitions of eq that were everywhere. The resulting ast.rs is about 400 lines shorter.

Also: add a few test cases and a bunch of comments.

Also: change ast_ty_to_ty_cache to use node ids rather than ast::ty's. I believe this was a suggestion related to my changes, and it appears to pass all tests.

Also: tiny doc fix, remove references to crate keywords.
This commit is contained in:
bors 2013-02-13 18:49:29 -08:00
commit 6efa3543a8
10 changed files with 242 additions and 556 deletions

View file

@ -182,7 +182,8 @@ pure fn maybe_append(+lhs: ~[attribute], rhs: Option<~[attribute]>)
/* ident is handled by common.rs */
pub fn Parser(sess: parse_sess,
pub fn Parser(sess: parse_sess
,
cfg: ast::crate_cfg,
+rdr: reader) -> Parser {
@ -1238,6 +1239,8 @@ pub impl Parser {
return e;
}
// parse an optional separator followed by a kleene-style
// repetition token (+ or *).
fn parse_sep_and_zerok() -> (Option<token::Token>, bool) {
if self.token == token::BINOP(token::STAR)
|| self.token == token::BINOP(token::PLUS) {
@ -1258,20 +1261,18 @@ pub impl Parser {
}
}
// parse a single token tree from the input.
fn parse_token_tree() -> token_tree {
maybe_whole!(deref self, nt_tt);
fn parse_tt_tok(p: Parser, delim_ok: bool) -> token_tree {
fn parse_non_delim_tt_tok(p: Parser) -> token_tree {
maybe_whole!(deref p, nt_tt);
match p.token {
token::RPAREN | token::RBRACE | token::RBRACKET
if !delim_ok => {
=> {
p.fatal(~"incorrect close delimiter: `"
+ token_to_str(p.reader, p.token) + ~"`");
}
token::EOF => {
p.fatal(~"file ended in the middle of a macro invocation");
}
/* we ought to allow different depths of unquotation */
token::DOLLAR if p.quote_depth > 0u => {
p.bump();
@ -1282,32 +1283,43 @@ pub impl Parser {
seq_sep_none(),
|p| p.parse_token_tree());
let (s, z) = p.parse_sep_and_zerok();
return tt_seq(mk_sp(sp.lo ,p.span.hi), seq.node, s, z);
tt_seq(mk_sp(sp.lo ,p.span.hi), seq.node, s, z)
} else {
return tt_nonterminal(sp, p.parse_ident());
tt_nonterminal(sp, p.parse_ident())
}
}
_ => { /* ok */ }
_ => {
parse_any_tt_tok(p)
}
}
let res = tt_tok(p.span, p.token);
p.bump();
return res;
}
return match self.token {
// turn the next token into a tt_tok:
fn parse_any_tt_tok(p: Parser) -> token_tree{
    // Wrap the parser's current token (and its source span) in a leaf
    // `tt_tok` token tree, then advance past it.  Unlike
    // parse_non_delim_tt_tok, this accepts any token, including close
    // delimiters — callers use it for the open/close delimiters of a
    // tt_delim sequence.
    let res = tt_tok(p.span, p.token);
    p.bump();
    // tail expression: the captured token tree is the return value
    res
}
match self.token {
token::EOF => {
self.fatal(~"file ended in the middle of a macro invocation");
}
token::LPAREN | token::LBRACE | token::LBRACKET => {
// tjc: ??????
let ket = token::flip_delimiter(copy self.token);
tt_delim(vec::append(
~[parse_tt_tok(self, true)],
// the open delimiter:
~[parse_any_tt_tok(self)],
vec::append(
self.parse_seq_to_before_end(
ket, seq_sep_none(),
|p| p.parse_token_tree()),
~[parse_tt_tok(self, true)])))
// the close delimiter:
~[parse_any_tt_tok(self)])))
}
_ => parse_tt_tok(self, false)
};
_ => parse_non_delim_tt_tok(self)
}
}
fn parse_all_token_trees() -> ~[token_tree] {
@ -3999,6 +4011,7 @@ pub impl Parser {
}
}
//
// Local Variables:
// mode: rust