
libsyntax: "import" -> "use"

Patrick Walton 2012-09-04 11:37:29 -07:00
parent 3f92cf2659
commit 8ff18acc82
44 changed files with 203 additions and 208 deletions
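
The change is mechanical: every module-level "import" declaration becomes a "use" declaration with the same path, and a few now-redundant imports ("import option;", "import ast;", "import diagnostic;") are simply dropped. As a hedged illustration of the before/after shape, written in modern Rust because the 2012 "import" keyword and the old std::map module no longer exist (std::collections::HashMap stands in for std::map::hashmap here):

    // Old libsyntax style prior to this commit (no longer valid Rust):
    //     import std::map;
    //     import std::map::hashmap;
    //
    // Style after this commit: identical path, "use" instead of "import".
    // Shown with today's standard-library path so the sketch compiles:
    use std::collections::HashMap;

    fn main() {
        // Only name resolution changes; call sites are untouched by the rename.
        let mut counts: HashMap<&str, usize> = HashMap::new();
        counts.insert("use", 203);
        counts.insert("import", 0);
        println!("{:?}", counts.get("use"));
    }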

View file

@@ -1,7 +1,7 @@
 // The Rust abstract syntax tree.
-import codemap::{span, filename};
-import std::serialization::{serializer,
+use codemap::{span, filename};
+use std::serialization::{serializer,
 deserializer,
 serialize_Option,
 deserialize_Option,
@@ -17,7 +17,7 @@ import std::serialization::{serializer,
 deserialize_str,
 serialize_bool,
 deserialize_bool};
-import parse::token;
+use parse::token;
 /* Note #1972 -- spans are serialized but not deserialized */
 fn serialize_span<S>(_s: S, _v: span) {

View file

@@ -1,10 +1,10 @@
-import std::map;
-import std::map::hashmap;
-import ast::*;
-import print::pprust;
-import ast_util::{path_to_ident, stmt_id};
-import diagnostic::span_handler;
-import parse::token::ident_interner;
+use std::map;
+use std::map::hashmap;
+use ast::*;
+use print::pprust;
+use ast_util::{path_to_ident, stmt_id};
+use diagnostic::span_handler;
+use parse::token::ident_interner;
 enum path_elt {
 path_mod(ident),

View file

@@ -1,5 +1,5 @@
-import codemap::span;
-import ast::*;
+use codemap::span;
+use ast::*;
 pure fn spanned<T>(lo: uint, hi: uint, +t: T) -> spanned<T> {
 respan(mk_sp(lo, hi), t)

View file

@@ -1,11 +1,11 @@
 // Functions dealing with attributes and meta_items
-import std::map;
-import std::map::hashmap;
-import either::Either;
-import diagnostic::span_handler;
-import ast_util::{spanned, dummy_spanned};
-import parse::comments::{doc_comment_style, strip_doc_comment_decoration};
+use std::map;
+use std::map::hashmap;
+use either::Either;
+use diagnostic::span_handler;
+use ast_util::{spanned, dummy_spanned};
+use parse::comments::{doc_comment_style, strip_doc_comment_decoration};
 // Constructors
 export mk_name_value_item_str;

View file

@@ -1,4 +1,4 @@
-import dvec::DVec;
+use dvec::DVec;
 export filename;
 export filemap;

View file

@@ -1,6 +1,6 @@
-import std::term;
-import io::WriterUtil;
-import codemap::span;
+use std::term;
+use io::WriterUtil;
+use codemap::span;
 export emitter, emit;
 export level, fatal, error, warning, note;

View file

@@ -69,10 +69,10 @@ into the tree. This is intended to prevent us from inserting the same
 node twice.
 */
-import base::*;
-import codemap::span;
-import std::map;
-import std::map::hashmap;
+use base::*;
+use codemap::span;
+use std::map;
+use std::map::hashmap;
 export expand;

View file

@@ -1,8 +1,8 @@
-import std::map::hashmap;
-import parse::parser;
-import diagnostic::span_handler;
-import codemap::{codemap, span, expn_info, expanded_from};
-import std::map::str_hash;
+use std::map::hashmap;
+use parse::parser;
+use diagnostic::span_handler;
+use codemap::{codemap, span, expn_info, expanded_from};
+use std::map::str_hash;
 // obsolete old-style #macro code:
 //

View file

@@ -1,5 +1,5 @@
-import codemap::span;
-import base::ext_ctxt;
+use codemap::span;
+use base::ext_ctxt;
 fn mk_expr(cx: ext_ctxt, sp: codemap::span, expr: ast::expr_) ->
 @ast::expr {

View file

@@ -1,4 +1,4 @@
-import base::*;
+use base::*;
 fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
 _body: ast::mac_body) -> @ast::expr {

View file

@@ -4,8 +4,8 @@
 * should all get sucked into either the compiler syntax extension plugin
 * interface.
 */
-import base::*;
-import build::mk_uniq_str;
+use base::*;
+use build::mk_uniq_str;
 export expand_syntax_ext;
 fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,

View file

@@ -1,14 +1,14 @@
-import std::map::hashmap;
-import ast::{crate, expr_, expr_mac, mac_invoc, mac_invoc_tt,
+use std::map::hashmap;
+use ast::{crate, expr_, expr_mac, mac_invoc, mac_invoc_tt,
 tt_delim, tt_tok, item_mac};
-import fold::*;
-import ext::base::*;
-import ext::qquote::{qq_helper};
-import parse::{parser, parse_expr_from_source_str, new_parser_from_tt};
-import codemap::{span, expanded_from};
+use fold::*;
+use ext::base::*;
+use ext::qquote::{qq_helper};
+use parse::{parser, parse_expr_from_source_str, new_parser_from_tt};
+use codemap::{span, expanded_from};
 fn expand_expr(exts: hashmap<~str, syntax_extension>, cx: ext_ctxt,
 e: expr_, s: span, fld: ast_fold,

View file

@@ -5,10 +5,10 @@
 * should all get sucked into either the standard library extfmt module or the
 * compiler syntax extension plugin interface.
 */
-import extfmt::ct::*;
-import base::*;
-import codemap::span;
-import ext::build::*;
+use extfmt::ct::*;
+use base::*;
+use codemap::span;
+use ext::build::*;
 export expand_syntax_ext;
 fn expand_syntax_ext(cx: ext_ctxt, sp: span, arg: ast::mac_arg,

View file

@@ -1,6 +1,5 @@
-import base::*;
-import build::mk_uniq_str;
-import option;
+use base::*;
+use build::mk_uniq_str;
 fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
 _body: ast::mac_body) -> @ast::expr {

View file

@@ -1,5 +1,5 @@
-import base::*;
-import io::WriterUtil;
+use base::*;
+use io::WriterUtil;
 fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, tt: ~[ast::token_tree])
 -> base::mac_result {

View file

@@ -33,16 +33,16 @@ FIXME (#3072) - This is still incomplete.
 */
-import codemap::span;
-import ext::base::ext_ctxt;
-import ast::tt_delim;
-import parse::lexer::{new_tt_reader, reader};
-import parse::parser::{parser, SOURCE_FILE};
-import parse::common::parser_common;
-import pipes::parse_proto::proto_parser;
-import pipes::proto::{visit, protocol};
+use codemap::span;
+use ext::base::ext_ctxt;
+use ast::tt_delim;
+use parse::lexer::{new_tt_reader, reader};
+use parse::parser::{parser, SOURCE_FILE};
+use parse::common::parser_common;
+use pipes::parse_proto::proto_parser;
+use pipes::proto::{visit, protocol};
 fn expand_proto(cx: ext_ctxt, _sp: span, id: ast::ident,
 tt: ~[ast::token_tree]) -> base::mac_result

View file

@@ -3,10 +3,10 @@
 // To start with, it will be use dummy spans, but it might someday do
 // something smarter.
-import ast::{ident, node_id};
-import ast_util::respan;
-import codemap::span;
-import ext::base::mk_ctxt;
+use ast::{ident, node_id};
+use ast_util::respan;
+use codemap::span;
+use ext::base::mk_ctxt;
 // Transitional reexports so qquote can find the paths it is looking for
 mod syntax {

View file

@@ -19,10 +19,10 @@ that.
 */
-import ext::base::ext_ctxt;
-import proto::{state, protocol, next_state};
-import ast_builder::empty_span;
+use ext::base::ext_ctxt;
+use proto::{state, protocol, next_state};
+use ast_builder::empty_span;
 impl ext_ctxt: proto::visitor<(), (), ()> {
 fn visit_proto(_proto: protocol,

View file

@@ -27,9 +27,9 @@ updating the states using rule (2) until there are no changes.
 */
-import std::bitv::{Bitv};
-import ast_builder::empty_span;
+use std::bitv::{Bitv};
+use ast_builder::empty_span;
 fn analyze(proto: protocol, _cx: ext_ctxt) {
 debug!("initializing colive analysis");

View file

@@ -1,9 +1,9 @@
 // Parsing pipes protocols from token trees.
-import parse::parser;
-import parse::token;
-import pipec::*;
+use parse::parser;
+use parse::token;
+use pipec::*;
 trait proto_parser {
 fn parse_proto(id: ~str) -> protocol;

View file

@@ -1,19 +1,18 @@
 // A protocol compiler for Rust.
-import to_str::ToStr;
-import dvec::DVec;
-import ast::ident;
-import util::interner;
-import print::pprust;
-import pprust::{item_to_str, ty_to_str};
-import ext::base::{mk_ctxt, ext_ctxt};
-import parse;
-import parse::*;
-import proto::*;
-import ast_builder::{append_types, path, empty_span};
+use to_str::ToStr;
+use dvec::DVec;
+use ast::ident;
+use util::interner;
+use print::pprust;
+use pprust::{item_to_str, ty_to_str};
+use ext::base::{mk_ctxt, ext_ctxt};
+use parse::*;
+use proto::*;
+use ast_builder::{append_types, path, empty_span};
 // Transitional reexports so qquote can find the paths it is looking for
 mod syntax {

View file

@@ -1,7 +1,7 @@
-import to_str::ToStr;
-import dvec::DVec;
-import ast_builder::{path, append_types};
+use to_str::ToStr;
+use dvec::DVec;
+use ast_builder::{path, append_types};
 enum direction { send, recv }

View file

@@ -1,18 +1,18 @@
-import ast::{crate, expr_, mac_invoc,
+use ast::{crate, expr_, mac_invoc,
 mac_aq, mac_var};
-import parse::parser;
-import parse::parser::parse_from_source_str;
-import dvec::DVec;
-import parse::token::ident_interner;
-import fold::*;
-import visit::*;
-import ext::base::*;
-import ext::build::*;
-import print::*;
-import io::*;
-import codemap::span;
+use parse::parser;
+use parse::parser::parse_from_source_str;
+use dvec::DVec;
+use parse::token::ident_interner;
+use fold::*;
+use visit::*;
+use ext::base::*;
+use ext::build::*;
+use print::*;
+use io::*;
+use codemap::span;
 struct gather_item {
 lo: uint;

View file

@@ -1,12 +1,12 @@
-import codemap::span;
-import std::map::{hashmap, str_hash, uint_hash};
-import dvec::DVec;
-import base::*;
-import fold::*;
-import ast_util::respan;
-import ast::{ident, path, ty, blk_, expr, expr_path,
+use codemap::span;
+use std::map::{hashmap, str_hash, uint_hash};
+use dvec::DVec;
+use base::*;
+use fold::*;
+use ast_util::respan;
+use ast::{ident, path, ty, blk_, expr, expr_path,
 expr_vec, expr_mac, mac_invoc, node_id, expr_index};
 export add_new_extension;

View file

@@ -1,8 +1,7 @@
-import base::*;
-import ast;
-import codemap::span;
-import print::pprust;
-import build::{mk_base_vec_e,mk_uint,mk_u8,mk_uniq_str};
+use base::*;
+use codemap::span;
+use print::pprust;
+use build::{mk_base_vec_e,mk_uint,mk_u8,mk_uniq_str};
 export expand_line;
 export expand_col;

View file

@@ -1,9 +1,9 @@
-import codemap::span;
-import ext::base::ext_ctxt;
-import ast::tt_delim;
-import parse::lexer::{new_tt_reader, reader};
-import parse::parser::{parser, SOURCE_FILE};
-import parse::common::parser_common;
+use codemap::span;
+use ext::base::ext_ctxt;
+use ast::tt_delim;
+use parse::lexer::{new_tt_reader, reader};
+use parse::parser::{parser, SOURCE_FILE};
+use parse::common::parser_common;
 fn expand_trace_macros(cx: ext_ctxt, sp: span,
 tt: ~[ast::token_tree]) -> base::mac_result

View file

@@ -1,16 +1,16 @@
 // Earley-like parser for macros.
-import parse::token;
-import parse::token::{token, EOF, to_str, nonterminal};
-import parse::lexer::*; //resolve bug?
+use parse::token;
+use parse::token::{token, EOF, to_str, nonterminal};
+use parse::lexer::*; //resolve bug?
 //import parse::lexer::{reader, tt_reader, tt_reader_as_reader};
-import parse::parser::{parser,SOURCE_FILE};
+use parse::parser::{parser,SOURCE_FILE};
 //import parse::common::parser_common;
-import parse::common::*; //resolve bug?
-import parse::parse_sess;
-import dvec::DVec;
-import ast::{matcher, match_tok, match_seq, match_nonterminal, ident};
-import ast_util::mk_sp;
-import std::map::{hashmap, uint_hash};
+use parse::common::*; //resolve bug?
+use parse::parse_sess;
+use dvec::DVec;
+use ast::{matcher, match_tok, match_seq, match_nonterminal, ident};
+use ast_util::mk_sp;
+use std::map::{hashmap, uint_hash};
 /* This is an Earley-like parser, without support for in-grammar nonterminals,
 only by calling out to the main rust parser for named nonterminals (which it

View file

@@ -1,14 +1,14 @@
-import base::{ext_ctxt, mac_result, mr_expr, mr_def, expr_tt};
-import codemap::span;
-import ast::{ident, matcher_, matcher, match_tok,
+use base::{ext_ctxt, mac_result, mr_expr, mr_def, expr_tt};
+use codemap::span;
+use ast::{ident, matcher_, matcher, match_tok,
 match_nonterminal, match_seq, tt_delim};
-import parse::lexer::{new_tt_reader, reader};
-import parse::token::{FAT_ARROW, SEMI, LBRACE, RBRACE, nt_matchers, nt_tt};
-import parse::parser::{parser, SOURCE_FILE};
-import macro_parser::{parse, parse_or_else, success, failure, named_match,
+use parse::lexer::{new_tt_reader, reader};
+use parse::token::{FAT_ARROW, SEMI, LBRACE, RBRACE, nt_matchers, nt_tt};
+use parse::parser::{parser, SOURCE_FILE};
+use macro_parser::{parse, parse_or_else, success, failure, named_match,
 matched_seq, matched_nonterminal, error};
-import std::map::hashmap;
-import parse::token::special_idents;
+use std::map::hashmap;
+use parse::token::special_idents;
 fn add_new_extension(cx: ext_ctxt, sp: span, name: ident,
 arg: ~[ast::token_tree]) -> base::mac_result {

View file

@@ -1,10 +1,10 @@
-import diagnostic::span_handler;
-import ast::{token_tree, tt_delim, tt_tok, tt_seq, tt_nonterminal,ident};
-import macro_parser::{named_match, matched_seq, matched_nonterminal};
-import codemap::span;
-import parse::token::{EOF, INTERPOLATED, IDENT, token, nt_ident,
+use diagnostic::span_handler;
+use ast::{token_tree, tt_delim, tt_tok, tt_seq, tt_nonterminal,ident};
+use macro_parser::{named_match, matched_seq, matched_nonterminal};
+use codemap::span;
+use parse::token::{EOF, INTERPOLATED, IDENT, token, nt_ident,
 ident_interner};
-import std::map::{hashmap, box_str_hash};
+use std::map::{hashmap, box_str_hash};
 export tt_reader, new_tt_reader, dup_tt_reader, tt_next_token;

View file

@@ -1,5 +1,5 @@
-import codemap::span;
-import ast::*;
+use codemap::span;
+use ast::*;
 export ast_fold_precursor;
 export ast_fold;

View file

@@ -12,14 +12,14 @@ export parse_expr_from_source_str, parse_item_from_source_str;
 export parse_stmt_from_source_str;
 export parse_from_source_str;
-import parser::parser;
-import attr::parser_attr;
-import common::parser_common;
-import ast::node_id;
-import util::interner;
-import diagnostic::{span_handler, mk_span_handler, mk_handler, emitter};
-import lexer::{reader, string_reader};
-import parse::token::{ident_interner, mk_ident_interner};
+use parser::parser;
+use attr::parser_attr;
+use common::parser_common;
+use ast::node_id;
+use util::interner;
+use diagnostic::{span_handler, mk_span_handler, mk_handler, emitter};
+use lexer::{reader, string_reader};
+use parse::token::{ident_interner, mk_ident_interner};
 type parse_sess = @{
 cm: codemap::codemap,

View file

@@ -1,6 +1,6 @@
-import either::{Either, Left, Right};
-import ast_util::spanned;
-import common::*; //resolve bug?
+use either::{Either, Left, Right};
+use ast_util::spanned;
+use common::*; //resolve bug?
 export attr_or_ext;
 export parser_attr;

View file

@@ -2,7 +2,7 @@
 Predicates on exprs and stmts that the pretty-printer and parser use
 */
-import ast_util::operator_prec;
+use ast_util::operator_prec;
 fn expr_requires_semi_to_be_stmt(e: @ast::expr) -> bool {
 match e.node {

View file

@@ -1,6 +1,6 @@
-import io::println;//XXXXXXXXxxx
-import util::interner;
-import lexer::{string_reader, bump, is_eof, nextch,
+use io::println;//XXXXXXXXxxx
+use util::interner;
+use lexer::{string_reader, bump, is_eof, nextch,
 is_whitespace, get_str_from, reader};
 export cmnt;

View file

@@ -1,7 +1,7 @@
-import std::map::{hashmap};
-import ast_util::spanned;
-import parser::parser;
-import lexer::reader;
+use std::map::{hashmap};
+use ast_util::spanned;
+use parser::parser;
+use lexer::reader;
 type seq_sep = {
 sep: Option<token::token>,

View file

@@ -1,5 +1,5 @@
-import parser::{parser, SOURCE_FILE};
-import attr::parser_attr;
+use parser::{parser, SOURCE_FILE};
+use attr::parser_attr;
 export eval_crate_directives_to_mod;

View file

@@ -1,6 +1,6 @@
-import diagnostic::span_handler;
-import codemap::span;
-import ext::tt::transcribe::{tt_reader, new_tt_reader, dup_tt_reader,
+use diagnostic::span_handler;
+use codemap::span;
+use ext::tt::transcribe::{tt_reader, new_tt_reader, dup_tt_reader,
 tt_next_token};
 export reader, string_reader, new_string_reader, is_whitespace;

View file

@@ -1,21 +1,21 @@
-import print::pprust::expr_to_str;
-import result::Result;
-import either::{Either, Left, Right};
-import std::map::{hashmap, str_hash};
-import token::{can_begin_expr, is_ident, is_ident_or_path, is_plain_ident,
+use print::pprust::expr_to_str;
+use result::Result;
+use either::{Either, Left, Right};
+use std::map::{hashmap, str_hash};
+use token::{can_begin_expr, is_ident, is_ident_or_path, is_plain_ident,
 INTERPOLATED};
-import codemap::{span,fss_none};
-import util::interner::interner;
-import ast_util::{spanned, respan, mk_sp, ident_to_path, operator_prec};
-import lexer::reader;
-import prec::{as_prec, token_to_binop};
-import attr::parser_attr;
-import common::{seq_sep_trailing_disallowed, seq_sep_trailing_allowed,
+use codemap::{span,fss_none};
+use util::interner::interner;
+use ast_util::{spanned, respan, mk_sp, ident_to_path, operator_prec};
+use lexer::reader;
+use prec::{as_prec, token_to_binop};
+use attr::parser_attr;
+use common::{seq_sep_trailing_disallowed, seq_sep_trailing_allowed,
 seq_sep_none, token_to_str};
-import dvec::DVec;
-import vec::{push};
-import ast::{_mod, add, alt_check, alt_exhaustive, arg, arm, attribute,
+use dvec::DVec;
+use vec::{push};
+use ast::{_mod, add, alt_check, alt_exhaustive, arg, arm, attribute,
 bind_by_ref, bind_by_implicit_ref, bind_by_value, bind_by_move,
 bitand, bitor, bitxor, blk, blk_check_mode, bound_const,
 bound_copy, bound_send, bound_trait, bound_owned, box, by_copy,
@@ -69,7 +69,7 @@ export SOURCE_FILE;
 // defined in `parse` Fixing this will be easier when we have export
 // decls on individual items -- then parse can export this publicly, and
 // everything else crate-visibly.
-import parse_from_source_str;
+use parse::parse_from_source_str;
 export parse_from_source_str;
 export item_or_view_item, iovi_none, iovi_view_item, iovi_item;

View file

@@ -2,9 +2,9 @@ export as_prec;
 export unop_prec;
 export token_to_binop;
-import token::*;
-import token::token;
-import ast::*;
+use token::*;
+use token::token;
+use ast::*;
 /// Unary operators have higher precedence than binary
 const unop_prec: uint = 100u;

View file

@@ -1,7 +1,7 @@
-import util::interner;
-import util::interner::interner;
-import std::map::{hashmap, str_hash};
-import std::serialization::{serializer,
+use util::interner;
+use util::interner::interner;
+use std::map::{hashmap, str_hash};
+use std::serialization::{serializer,
 deserializer,
 serialize_uint,
 deserialize_uint,

View file

@@ -1,5 +1,5 @@
-import io::WriterUtil;
-import dvec::DVec;
+use io::WriterUtil;
+use dvec::DVec;
 /*
 * This pretty-printer is a direct reimplementation of Philip Karlton's

View file

@@ -1,14 +1,12 @@
-import parse::{comments, lexer, token};
-import codemap::codemap;
-import pp::{break_offset, word, printer,
-space, zerobreak, hardbreak, breaks, consistent,
-inconsistent, eof};
-import diagnostic;
-import ast::{required, provided};
-import ast_util::{operator_prec};
-import dvec::DVec;
-import parse::classify::*;
-import parse::token::ident_interner;
+use parse::{comments, lexer, token};
+use codemap::codemap;
+use pp::{break_offset, word, printer, space, zerobreak, hardbreak, breaks};
+use pp::{consistent, inconsistent, eof};
+use ast::{required, provided};
+use ast_util::{operator_prec};
+use dvec::DVec;
+use parse::classify::*;
+use parse::token::ident_interner;
 // The ps is stored here to prevent recursive type.
 enum ann_node {

View file

@@ -1,9 +1,9 @@
 // An "interner" is a data structure that associates values with uint tags and
 // allows bidirectional lookup; i.e. given a value, one can easily find the
 // type, and vice versa.
-import std::map;
-import std::map::{hashmap, hashfn, eqfn};
-import dvec::DVec;
+use std::map;
+use std::map::{hashmap, hashfn, eqfn};
+use dvec::DVec;
 type hash_interner<T: const> =
 {map: hashmap<T, uint>,

View file

@@ -1,6 +1,6 @@
-import ast::*;
-import codemap::span;
+use ast::*;
+use codemap::span;
 // Context-passing AST walker. Each overridden visit method has full control
 // over what happens with its node, it can do its own traversal of the node's