auto merge of #4922 : jbclements/rust/add-deriving-eq-to-asts, r=catamorphism
r? Apply `deriving_eq` to the data structures in ast.rs and get rid of the custom definitions of `eq` that were everywhere; the resulting ast.rs is about 400 lines shorter. Also: add a few test cases and a bunch of comments. Also: change `ast_ty_to_ty_cache` to use node ids rather than `ast::Ty`s; I believe this was a suggestion related to my changes, and it appears to pass all tests. Also: a tiny doc fix removing references to crate keywords.

Commit 6efa3543a8
10 changed files with 242 additions and 556 deletions
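For readers who don't remember the pre-1.0 attribute: `#[deriving_eq]` is the ancestor of today's `#[derive(PartialEq, Eq)]`, and it is what allows roughly 400 lines of hand-written `eq`/`ne` methods to be deleted from ast.rs. A minimal sketch of the swap in modern Rust syntax, using illustrative types rather than the real AST nodes:

```rust
// Hand-written structural equality, the style ast.rs used before this change:
struct Spanned<T> {
    node: T,
    lo: u32,
    hi: u32,
}

impl<T: PartialEq> PartialEq for Spanned<T> {
    fn eq(&self, other: &Self) -> bool {
        // Field-by-field comparison, spelled out by hand.
        self.node == other.node && self.lo == other.lo && self.hi == other.hi
    }
}

// The derived form generates the same field-by-field comparison,
// so the manual impl above becomes pure boilerplate:
#[derive(PartialEq, Eq)]
struct SpannedDerived<T> {
    node: T,
    lo: u32,
    hi: u32,
}

fn main() {
    let a = SpannedDerived { node: 1, lo: 0, hi: 5 };
    let b = SpannedDerived { node: 1, lo: 0, hi: 5 };
    assert!(a == b); // derived Eq compares all fields

    let m = Spanned { node: 1, lo: 0, hi: 5 };
    let n = Spanned { node: 1, lo: 0, hi: 5 };
    assert!(m == n); // identical behavior from the manual impl
}
```

Because the derived and manual impls agree, the swap changes no behavior, which is what makes a 556-line deletion safe.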
@@ -202,13 +202,7 @@ grammar as double-quoted strings. Other tokens have exact rules given.
 
 ### Keywords
 
-The keywords in [crate files](#crate-files) are the following strings:
-
-~~~~~~~~ {.keyword}
-mod priv pub use
-~~~~~~~~
-
-The keywords in [source files](#source-files) are the following strings:
+The keywords are the following strings:
 
 ~~~~~~~~ {.keyword}
 as assert
@@ -262,7 +262,7 @@ struct ctxt_ {
     needs_drop_cache: HashMap<t, bool>,
     needs_unwind_cleanup_cache: HashMap<t, bool>,
     mut tc_cache: LinearMap<uint, TypeContents>,
-    ast_ty_to_ty_cache: HashMap<@ast::Ty, ast_ty_to_ty_cache_entry>,
+    ast_ty_to_ty_cache: HashMap<node_id, ast_ty_to_ty_cache_entry>,
     enum_var_cache: HashMap<def_id, @~[VariantInfo]>,
     trait_method_cache: HashMap<def_id, @~[method]>,
     ty_param_bounds: HashMap<ast::node_id, param_bounds>,
@@ -277,7 +277,7 @@ pub fn ast_ty_to_ty<AC: AstConv, RS: region_scope Copy Durable>(
 
     let tcx = self.tcx();
 
-    match tcx.ast_ty_to_ty_cache.find(&ast_ty) {
+    match tcx.ast_ty_to_ty_cache.find(&ast_ty.id) {
       Some(ty::atttce_resolved(ty)) => return ty,
       Some(ty::atttce_unresolved) => {
         tcx.sess.span_fatal(ast_ty.span, ~"illegal recursive type; \

@@ -287,7 +287,7 @@ pub fn ast_ty_to_ty<AC: AstConv, RS: region_scope Copy Durable>(
       None => { /* go on */ }
     }
 
-    tcx.ast_ty_to_ty_cache.insert(ast_ty, ty::atttce_unresolved);
+    tcx.ast_ty_to_ty_cache.insert(ast_ty.id, ty::atttce_unresolved);
     let typ = match /*bad*/copy ast_ty.node {
       ast::ty_nil => ty::mk_nil(tcx),
       ast::ty_bot => ty::mk_bot(tcx),

@@ -409,7 +409,7 @@ pub fn ast_ty_to_ty<AC: AstConv, RS: region_scope Copy Durable>(
       }
     };
 
-    tcx.ast_ty_to_ty_cache.insert(ast_ty, ty::atttce_resolved(typ));
+    tcx.ast_ty_to_ty_cache.insert(ast_ty.id, ty::atttce_resolved(typ));
     return typ;
 }
 
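The cache change above keeps `ast_ty_to_ty`'s two-state memoization intact while swapping the key from an `@ast::Ty` box to the node's id. The pattern: insert an "unresolved" sentinel before converting, so that re-entering the same node mid-conversion is reported as an illegal recursive type, then overwrite the sentinel with the resolved result. A self-contained sketch in modern Rust, with hypothetical names (`CacheEntry`, `convert`) standing in for `ast_ty_to_ty_cache_entry` and `ast_ty_to_ty`:

```rust
use std::collections::HashMap;

type NodeId = u32;

enum CacheEntry {
    Unresolved,       // conversion in progress: seeing this again means a cycle
    Resolved(String), // stand-in for the computed type
}

// Keys the cache by node id, as the patched code does, rather than
// hashing (and keeping alive) the AST node itself.
fn convert(id: NodeId, cache: &mut HashMap<NodeId, CacheEntry>) -> String {
    match cache.get(&id) {
        Some(CacheEntry::Resolved(t)) => return t.clone(),
        Some(CacheEntry::Unresolved) => panic!("illegal recursive type"),
        None => {} // go on
    }
    cache.insert(id, CacheEntry::Unresolved);
    let typ = format!("ty#{id}"); // real code recurses into child types here
    cache.insert(id, CacheEntry::Resolved(typ.clone()));
    typ
}

fn main() {
    let mut cache = HashMap::new();
    assert_eq!(convert(7, &mut cache), "ty#7");
    assert_eq!(convert(7, &mut cache), "ty#7"); // second call hits the cache
}
```

Keying by `node_id` also makes cache hits independent of how equality and hashing on the boxed `ast::Ty` are defined, which is presumably why it came up during review of the `deriving_eq` change.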
(File diff suppressed because it is too large.)
@@ -130,6 +130,7 @@ pub struct span {
 
 #[auto_encode]
 #[auto_decode]
+#[deriving_eq]
 pub struct spanned<T> { node: T, span: span }
 
 pub impl span : cmp::Eq {
@@ -44,8 +44,8 @@ pub struct SyntaxExpanderTT {
     span: Option<span>
 }
 
-pub type SyntaxExpanderTTFun = fn@(ext_ctxt, span, ~[ast::token_tree])
-    -> MacResult;
+pub type SyntaxExpanderTTFun
+    = fn@(ext_ctxt, span, ~[ast::token_tree]) -> MacResult;
 
 pub struct SyntaxExpanderTTItem {
     expander: SyntaxExpanderTTItemFun,
@@ -78,9 +78,11 @@ pub enum SyntaxExtension {
 // A temporary hard-coded map of methods for expanding syntax extension
 // AST nodes into full ASTs
 pub fn syntax_expander_table() -> HashMap<~str, SyntaxExtension> {
+    // utility function to simplify creating NormalTT syntax extensions
     fn builtin_normal_tt(f: SyntaxExpanderTTFun) -> SyntaxExtension {
        NormalTT(SyntaxExpanderTT{expander: f, span: None})
     }
+    // utility function to simplify creating ItemTT syntax extensions
     fn builtin_item_tt(f: SyntaxExpanderTTItemFun) -> SyntaxExtension {
        ItemTT(SyntaxExpanderTTItem{expander: f, span: None})
     }
@@ -112,8 +114,8 @@ pub fn syntax_expander_table() -> HashMap<~str, SyntaxExtension> {
                             ext::deriving::expand_deriving_iter_bytes));
 
     // Quasi-quoting expanders
-    syntax_expanders.insert(
-        ~"quote_tokens", builtin_normal_tt(ext::quote::expand_quote_tokens));
+    syntax_expanders.insert(~"quote_tokens",
+                            builtin_normal_tt(ext::quote::expand_quote_tokens));
     syntax_expanders.insert(~"quote_expr",
                             builtin_normal_tt(ext::quote::expand_quote_expr));
     syntax_expanders.insert(~"quote_ty",
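`syntax_expander_table` builds a name-to-expander registry: each macro name maps to a `SyntaxExtension` variant wrapping a function, and the two new helper functions just cut down on constructor noise. A small sketch of the same registry pattern in modern Rust, with placeholder names (`Extension`, `expand_upper`) that are not from libsyntax:

```rust
use std::collections::HashMap;

// Stand-ins for the NormalTT/ItemTT variants: each wraps a function
// from input to expanded output, here just &str -> String.
#[allow(dead_code)]
enum Extension {
    Normal(fn(&str) -> String),
    Item(fn(&str) -> String),
}

fn expand_upper(input: &str) -> String {
    input.to_uppercase()
}

fn syntax_expander_table() -> HashMap<String, Extension> {
    // utility function to simplify creating Normal extensions,
    // in the spirit of builtin_normal_tt
    fn builtin_normal(f: fn(&str) -> String) -> Extension {
        Extension::Normal(f)
    }

    let mut table = HashMap::new();
    table.insert("upper".to_string(), builtin_normal(expand_upper));
    table
}

fn main() {
    let table = syntax_expander_table();
    if let Some(Extension::Normal(f)) = table.get("upper") {
        assert_eq!(f("quote_tokens"), "QUOTE_TOKENS");
    }
}
```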
@@ -20,6 +20,8 @@ use core::option::{None, Option, Some};
 use core::option;
 use std::oldmap::HashMap;
 
+// seq_sep : a sequence separator (token)
+// and whether a trailing separator is allowed.
 pub type seq_sep = {
     sep: Option<token::Token>,
     trailing_sep_allowed: bool

@@ -51,6 +53,8 @@ pub impl Parser {
                        + token_to_str(self.reader, self.token) + ~"`");
     }
 
+    // expect and consume the token t. Signal an error if
+    // the next token is not t.
     fn expect(t: token::Token) {
         if self.token == t {
             self.bump();

@@ -88,6 +92,8 @@ pub impl Parser {
         return self.parse_ident();
     }
 
+    // consume token 'tok' if it exists. Returns true if the given
+    // token was present, false otherwise.
     fn eat(tok: token::Token) -> bool {
         return if self.token == tok { self.bump(); true } else { false };
     }

@@ -185,6 +191,8 @@ pub impl Parser {
         }
     }
 
+    // expect and consume a GT. if a >> is seen, replace it
+    // with a single > and continue.
     fn expect_gt() {
         if self.token == token::GT {
             self.bump();

@@ -202,6 +210,8 @@ pub impl Parser {
         }
     }
 
+    // parse a sequence bracketed by '<' and '>', stopping
+    // before the '>'.
     fn parse_seq_to_before_gt<T: Copy>(sep: Option<token::Token>,
                                        f: fn(Parser) -> T) -> ~[T] {
         let mut first = true;

@@ -211,7 +221,7 @@ pub impl Parser {
             match sep {
               Some(ref t) => {
                 if first { first = false; }
-                else { self.expect((*t)); }
+                else { self.expect(*t); }
               }
               _ => ()
             }

@@ -229,6 +239,7 @@ pub impl Parser {
         return v;
     }
 
+    // parse a sequence bracketed by '<' and '>'
     fn parse_seq_lt_gt<T: Copy>(sep: Option<token::Token>,
                                 f: fn(Parser) -> T) -> spanned<~[T]> {
         let lo = self.span.lo;

@@ -239,6 +250,9 @@ pub impl Parser {
         return spanned(lo, hi, result);
     }
 
+    // parse a sequence, including the closing delimiter. The function
+    // f must consume tokens until reaching the next separator or
+    // closing bracket.
     fn parse_seq_to_end<T: Copy>(ket: token::Token, sep: seq_sep,
                                  f: fn(Parser) -> T) -> ~[T] {
         let val = self.parse_seq_to_before_end(ket, sep, f);

@@ -246,7 +260,9 @@ pub impl Parser {
         return val;
     }
 
+    // parse a sequence, not including the closing delimiter. The function
+    // f must consume tokens until reaching the next separator or
+    // closing bracket.
     fn parse_seq_to_before_end<T: Copy>(ket: token::Token, sep: seq_sep,
                                         f: fn(Parser) -> T) -> ~[T] {
         let mut first: bool = true;

@@ -255,7 +271,7 @@ pub impl Parser {
             match sep.sep {
               Some(ref t) => {
                 if first { first = false; }
-                else { self.expect((*t)); }
+                else { self.expect(*t); }
               }
               _ => ()
             }

@@ -265,6 +281,9 @@ pub impl Parser {
         return v;
     }
 
+    // parse a sequence, including the closing delimiter. The function
+    // f must consume tokens until reaching the next separator or
+    // closing bracket.
     fn parse_unspanned_seq<T: Copy>(bra: token::Token,
                                     ket: token::Token,
                                     sep: seq_sep,
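All of the `parse_seq_*` helpers documented above share one loop: expect a separator before every element except the first (the `first` flag), and stop at the closing token. A minimal sketch of that loop, assuming a toy character-level input rather than the real `Parser` and `seq_sep` types:

```rust
use std::iter::Peekable;
use std::slice::Iter;

// Parse elements separated by `sep` until (but not including) `ket`,
// mirroring the `first` flag in parse_seq_to_before_end.
fn parse_seq_to_before_end<T>(
    tokens: &mut Peekable<Iter<'_, char>>,
    sep: Option<char>,
    ket: char,
    f: impl Fn(char) -> T,
) -> Vec<T> {
    let mut first = true;
    let mut v = Vec::new();
    while tokens.peek().map_or(false, |&&t| t != ket) {
        if let Some(s) = sep {
            if first {
                first = false; // no separator before the first element
            } else {
                let t = *tokens.next().expect("expected separator");
                assert_eq!(t, s, "expected separator");
            }
        }
        // tolerate a trailing separator: the close token may follow it
        if tokens.peek().map_or(true, |&&t| t == ket) {
            break;
        }
        v.push(f(*tokens.next().unwrap()));
    }
    v
}

fn main() {
    let input: Vec<char> = "a,b,c)".chars().collect();
    let mut it = input.iter().peekable();
    let parsed = parse_seq_to_before_end(&mut it, Some(','), ')', |c| c);
    assert_eq!(parsed, vec!['a', 'b', 'c']);
}
```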
@@ -183,7 +183,6 @@ pub fn new_parser_from_file(sess: parse_sess,
     let srdr = lexer::new_string_reader(sess.span_diagnostic,
                                         filemap,
                                         sess.interner);
-
     Ok(Parser(sess, cfg, srdr as reader))
 
 }
@@ -222,3 +221,58 @@ pub fn new_parser_from_tts(sess: parse_sess, cfg: ast::crate_cfg,
     return Parser(sess, cfg, trdr as reader)
 }
 
+
+#[cfg(test)]
+mod test {
+    use super::*;
+    use std::serialize::Encodable;
+    use std;
+    use core::dvec;
+    use core::str;
+    use util::testing::*;
+
+    #[test] fn to_json_str (val: Encodable<std::json::Encoder>) -> ~str {
+        let bw = @io::BytesWriter {bytes: dvec::DVec(), pos: 0};
+        val.encode(~std::json::Encoder(bw as io::Writer));
+        str::from_bytes(bw.bytes.data)
+    }
+
+    #[test] fn alltts () {
+        let tts = parse_tts_from_source_str(
+            ~"bogofile",
+            @~"fn foo (x : int) { x; }",
+            ~[],
+            new_parse_sess(None));
+        check_equal(to_json_str(tts as Encodable::<std::json::Encoder>),
+                    ~"[[\"tt_tok\",[,[\"IDENT\",[\"fn\",false]]]],\
+                    [\"tt_tok\",[,[\"IDENT\",[\"foo\",false]]]],\
+                    [\"tt_delim\",[[[\"tt_tok\",[,[\"LPAREN\",[]]]],\
+                    [\"tt_tok\",[,[\"IDENT\",[\"x\",false]]]],\
+                    [\"tt_tok\",[,[\"COLON\",[]]]],\
+                    [\"tt_tok\",[,[\"IDENT\",[\"int\",false]]]],\
+                    [\"tt_tok\",[,[\"RPAREN\",[]]]]]]],\
+                    [\"tt_delim\",[[[\"tt_tok\",[,[\"LBRACE\",[]]]],\
+                    [\"tt_tok\",[,[\"IDENT\",[\"x\",false]]]],\
+                    [\"tt_tok\",[,[\"SEMI\",[]]]],\
+                    [\"tt_tok\",[,[\"RBRACE\",[]]]]]]]]"
+                   );
+        let ast1 = new_parser_from_tts(new_parse_sess(None),~[],tts)
+            .parse_item(~[]);
+        let ast2 = parse_item_from_source_str(
+            ~"bogofile",
+            @~"fn foo (x : int) { x; }",
+            ~[],~[],
+            new_parse_sess(None));
+        check_equal(ast1,ast2);
+    }
+}
+
+//
+// Local Variables:
+// mode: rust
+// fill-column: 78;
+// indent-tabs-mode: nil
+// c-basic-offset: 4
+// buffer-file-coding-system: utf-8-unix
+// End:
+//
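The new `alltts` test is a round-trip check: the token trees are serialized to JSON and compared against a golden string, and then an item parsed from those token trees is compared with an item parsed straight from source. A toy version of the same round-trip pattern, with illustrative types in place of the libsyntax API:

```rust
// Toy "AST" with derived equality, so round-trip results can be
// compared with ==, which is the property deriving_eq gives ast.rs.
#[derive(Debug, PartialEq)]
enum Tok {
    Ident(String),
    Open,
    Close,
}

fn tokenize(src: &str) -> Vec<Tok> {
    src.split_whitespace()
        .map(|w| match w {
            "(" => Tok::Open,
            ")" => Tok::Close,
            other => Tok::Ident(other.to_string()),
        })
        .collect()
}

fn render(toks: &[Tok]) -> String {
    toks.iter()
        .map(|t| match t {
            Tok::Open => "(".to_string(),
            Tok::Close => ")".to_string(),
            Tok::Ident(s) => s.clone(),
        })
        .collect::<Vec<_>>()
        .join(" ")
}

#[cfg(test)]
mod test {
    use super::*;

    // Same shape as alltts: parse, serialize, re-parse, compare with ==.
    #[test]
    fn round_trip() {
        let src = "fn foo ( x )";
        let toks1 = tokenize(src);
        let toks2 = tokenize(&render(&toks1));
        assert_eq!(toks1, toks2);
    }
}
```

A deep comparison like the real test's `check_equal(ast1, ast2)` is only possible because every AST node now carries an `Eq` implementation.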
@@ -182,7 +182,8 @@ pure fn maybe_append(+lhs: ~[attribute], rhs: Option<~[attribute]>)
 
 /* ident is handled by common.rs */
 
-pub fn Parser(sess: parse_sess,
+pub fn Parser(sess: parse_sess
+              ,
               cfg: ast::crate_cfg,
               +rdr: reader) -> Parser {
 

@@ -1238,6 +1239,8 @@ pub impl Parser {
         return e;
     }
 
+    // parse an optional separator followed by a kleene-style
+    // repetition token (+ or *).
     fn parse_sep_and_zerok() -> (Option<token::Token>, bool) {
         if self.token == token::BINOP(token::STAR)
             || self.token == token::BINOP(token::PLUS) {

@@ -1258,20 +1261,18 @@
         }
     }
 
+    // parse a single token tree from the input.
     fn parse_token_tree() -> token_tree {
         maybe_whole!(deref self, nt_tt);
 
-        fn parse_tt_tok(p: Parser, delim_ok: bool) -> token_tree {
+        fn parse_non_delim_tt_tok(p: Parser) -> token_tree {
             maybe_whole!(deref p, nt_tt);
             match p.token {
               token::RPAREN | token::RBRACE | token::RBRACKET
-                if !delim_ok => {
+              => {
                 p.fatal(~"incorrect close delimiter: `"
                         + token_to_str(p.reader, p.token) + ~"`");
               }
-              token::EOF => {
-                p.fatal(~"file ended in the middle of a macro invocation");
-              }
               /* we ought to allow different depths of unquotation */
               token::DOLLAR if p.quote_depth > 0u => {
                 p.bump();

@@ -1282,32 +1283,43 @@
                         seq_sep_none(),
                         |p| p.parse_token_tree());
                 let (s, z) = p.parse_sep_and_zerok();
-                return tt_seq(mk_sp(sp.lo ,p.span.hi), seq.node, s, z);
+                tt_seq(mk_sp(sp.lo ,p.span.hi), seq.node, s, z)
               } else {
-                return tt_nonterminal(sp, p.parse_ident());
+                tt_nonterminal(sp, p.parse_ident())
               }
             }
-            _ => { /* ok */ }
+            _ => {
+              parse_any_tt_tok(p)
+            }
           }
-          let res = tt_tok(p.span, p.token);
-          p.bump();
-          return res;
       }
 
-      return match self.token {
+      // turn the next token into a tt_tok:
+      fn parse_any_tt_tok(p: Parser) -> token_tree{
+          let res = tt_tok(p.span, p.token);
+          p.bump();
+          res
+      }
+
+      match self.token {
+        token::EOF => {
+          self.fatal(~"file ended in the middle of a macro invocation");
+        }
        token::LPAREN | token::LBRACE | token::LBRACKET => {
          // tjc: ??????
          let ket = token::flip_delimiter(copy self.token);
          tt_delim(vec::append(
-             ~[parse_tt_tok(self, true)],
+             // the open delimiter:
+             ~[parse_any_tt_tok(self)],
             vec::append(
                 self.parse_seq_to_before_end(
                     ket, seq_sep_none(),
                     |p| p.parse_token_tree()),
-                ~[parse_tt_tok(self, true)])))
+                // the close delimiter:
+                ~[parse_any_tt_tok(self)])))
+        }
+        _ => parse_non_delim_tt_tok(self)
       }
-        _ => parse_tt_tok(self, false)
-      };
     }
 
     fn parse_all_token_trees() -> ~[token_tree] {

@@ -3999,6 +4011,7 @@ pub impl Parser {
         }
     }
 
+
 //
 // Local Variables:
 // mode: rust
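The `parse_token_tree` refactor separates two concerns: any token at all can be wrapped as a leaf (`parse_any_tt_tok`, used for the delimiters themselves), while a stray close delimiter is an error outside a delimited context (`parse_non_delim_tt_tok`), and the EOF check moves to the top level. A compact sketch of the same recursive structure over characters, using toy types rather than `token_tree`:

```rust
// Toy token trees: a leaf, or a delimited subtree that keeps its
// open and close delimiters as leaves, like tt_tok / tt_delim.
#[derive(Debug, PartialEq)]
enum TokenTree {
    Tok(char),
    Delim(Vec<TokenTree>),
}

fn parse_token_tree(input: &mut std::iter::Peekable<std::str::Chars<'_>>) -> TokenTree {
    match input.peek().copied() {
        None => panic!("file ended in the middle of a macro invocation"),
        Some('(') => {
            // keep the open delimiter as a leaf, like parse_any_tt_tok
            let mut inner = vec![TokenTree::Tok(input.next().unwrap())];
            while *input.peek().expect("unclosed delimiter") != ')' {
                inner.push(parse_token_tree(input)); // recurse into nested trees
            }
            // and the close delimiter too
            inner.push(TokenTree::Tok(input.next().unwrap()));
            TokenTree::Delim(inner)
        }
        Some(')') => panic!("incorrect close delimiter: `)`"),
        Some(_) => TokenTree::Tok(input.next().unwrap()), // any other token is a leaf
    }
}

fn main() {
    let mut it = "(ab)".chars().peekable();
    assert_eq!(
        parse_token_tree(&mut it),
        TokenTree::Delim(vec![
            TokenTree::Tok('('),
            TokenTree::Tok('a'),
            TokenTree::Tok('b'),
            TokenTree::Tok(')'),
        ])
    );
}
```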
@@ -25,6 +25,7 @@ use std::oldmap::HashMap;
 
 #[auto_encode]
 #[auto_decode]
+#[deriving_eq]
 pub enum binop {
     PLUS,
     MINUS,

@@ -86,6 +87,7 @@ pub enum Token {
     LIT_STR(ast::ident),
 
     /* Name components */
+    // an identifier contains an "is_mod_name" boolean.
     IDENT(ast::ident, bool),
     UNDERSCORE,
     LIFETIME(ast::ident),

@@ -517,12 +519,6 @@ pub fn reserved_keyword_table() -> HashMap<~str, ()> {
     words
 }
 
-impl binop : cmp::Eq {
-    pure fn eq(&self, other: &binop) -> bool {
-        ((*self) as uint) == ((*other) as uint)
-    }
-    pure fn ne(&self, other: &binop) -> bool { !(*self).eq(other) }
-}
-
 impl Token : cmp::Eq {
     pure fn eq(&self, other: &Token) -> bool {
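The deleted `binop` impl compared C-like enum values by casting both sides to `uint`; `#[deriving_eq]` generates an equivalent comparison, so the manual impl (and its easy-to-get-wrong `ne`) can go. The modern equivalent, with placeholder variant names:

```rust
// C-like enum, as binop is; the derive replaces the cast-based manual impl.
#[derive(PartialEq, Eq, Clone, Copy)]
enum Binop {
    Plus,
    Minus,
}

fn main() {
    // The derived impl compares discriminants, matching the removed
    // `((*self) as uint) == ((*other) as uint)` logic.
    assert!(Binop::Plus == Binop::Plus);
    assert!(Binop::Plus != Binop::Minus);
}
```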