rustc: Remove uses of oldmap::HashMap

Alex Crichton 2013-03-22 22:26:41 -04:00
parent 31f6e64324
commit 6f812fef1b
62 changed files with 877 additions and 879 deletions
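The hunks below (old code on the left of each line, new code on the right) repeat a small set of mechanical changes: std::oldmap::HashMap, a managed, internally mutable map whose find copied values out, is replaced by core::hashmap::linear::LinearMap and LinearSet, which are owned value types whose find returns a reference. That is why match arms change from Some(h) to Some(&h) and why many call sites gain an explicit dereference. A minimal sketch of the new lookup shape, written in today's Rust (std::collections::HashMap and get stand in for LinearMap and find; all names here are illustrative, not part of the commit):

    use std::collections::HashMap;

    // get (like the new find) returns Option<&V>, so the caller either matches
    // a reference pattern (`Some(&v)`) or dereferences explicitly -- the same
    // change the diff makes when `Some(h) => h` becomes `Some(&h) => h`.
    fn lookup_copy(map: &HashMap<u32, u64>, key: u32) -> Option<u64> {
        match map.get(&key) {
            Some(&v) => Some(v),
            None => None,
        }
    }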


@ -642,7 +642,7 @@ pub fn symbol_hash(tcx: ty::ctxt, symbol_hasher: &hash::State, t: ty::t,
pub fn get_symbol_hash(ccx: @CrateContext, t: ty::t) -> @str { pub fn get_symbol_hash(ccx: @CrateContext, t: ty::t) -> @str {
match ccx.type_hashcodes.find(&t) { match ccx.type_hashcodes.find(&t) {
Some(h) => h, Some(&h) => h,
None => { None => {
let hash = symbol_hash(ccx.tcx, ccx.symbol_hasher, t, ccx.link_meta); let hash = symbol_hash(ccx.tcx, ccx.symbol_hasher, t, ccx.link_meta);
ccx.type_hashcodes.insert(t, hash); ccx.type_hashcodes.insert(t, hash);


@ -246,7 +246,7 @@ pub fn compile_rest(sess: Session, cfg: ast::crate_cfg,
// These next two const passes can probably be merged // These next two const passes can probably be merged
time(time_passes, ~"const marking", || time(time_passes, ~"const marking", ||
middle::const_eval::process_crate(crate, def_map, ty_cx)); middle::const_eval::process_crate(crate, ty_cx));
time(time_passes, ~"const checking", || time(time_passes, ~"const checking", ||
middle::check_const::check_crate(sess, crate, ast_map, def_map, middle::check_const::check_crate(sess, crate, ast_map, def_map,
@ -546,11 +546,11 @@ pub fn build_session_options(+binary: ~str,
let flags = vec::append(getopts::opt_strs(matches, level_short), let flags = vec::append(getopts::opt_strs(matches, level_short),
getopts::opt_strs(matches, level_name)); getopts::opt_strs(matches, level_name));
for flags.each |lint_name| { for flags.each |lint_name| {
let lint_name = @str::replace(*lint_name, ~"-", ~"_"); let lint_name = str::replace(*lint_name, ~"-", ~"_");
match lint_dict.find(&lint_name) { match lint_dict.find(&lint_name) {
None => { None => {
early_error(demitter, fmt!("unknown %s flag: %s", early_error(demitter, fmt!("unknown %s flag: %s",
level_name, *lint_name)); level_name, lint_name));
} }
Some(lint) => { Some(lint) => {
lint_opts.push((lint.lint, *level)); lint_opts.push((lint.lint, *level));


@ -10,12 +10,12 @@
use core::prelude::*; use core::prelude::*;
use core::hashmap::linear::LinearMap;
use core::libc::{c_char, c_int, c_uint, c_longlong, c_ulonglong}; use core::libc::{c_char, c_int, c_uint, c_longlong, c_ulonglong};
use core::option; use core::option;
use core::ptr; use core::ptr;
use core::str; use core::str;
use core::vec; use core::vec;
use std::oldmap::HashMap;
pub type Opcode = u32; pub type Opcode = u32;
pub type Bool = c_uint; pub type Bool = c_uint;
@ -1467,8 +1467,8 @@ pub fn SetLinkage(Global: ValueRef, Link: Linkage) {
/* Memory-managed object interface to type handles. */ /* Memory-managed object interface to type handles. */
pub struct TypeNames { pub struct TypeNames {
type_names: HashMap<TypeRef, @str>, type_names: @mut LinearMap<TypeRef, @str>,
named_types: HashMap<@str, TypeRef> named_types: @mut LinearMap<@str, TypeRef>
} }
pub fn associate_type(tn: @TypeNames, s: @str, t: TypeRef) { pub fn associate_type(tn: @TypeNames, s: @str, t: TypeRef) {
@ -1477,17 +1477,17 @@ pub fn associate_type(tn: @TypeNames, s: @str, t: TypeRef) {
} }
pub fn type_has_name(tn: @TypeNames, t: TypeRef) -> Option<@str> { pub fn type_has_name(tn: @TypeNames, t: TypeRef) -> Option<@str> {
return tn.type_names.find(&t); return tn.type_names.find(&t).map_consume(|x| *x);
} }
pub fn name_has_type(tn: @TypeNames, s: @str) -> Option<TypeRef> { pub fn name_has_type(tn: @TypeNames, s: @str) -> Option<TypeRef> {
return tn.named_types.find(&s); return tn.named_types.find(&s).map_consume(|x| *x);
} }
pub fn mk_type_names() -> @TypeNames { pub fn mk_type_names() -> @TypeNames {
@TypeNames { @TypeNames {
type_names: HashMap(), type_names: @mut LinearMap::new(),
named_types: HashMap() named_types: @mut LinearMap::new()
} }
} }
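In the TypeNames change above, the maps become @mut LinearMap fields and the accessors copy the stored handle back out of the returned reference with map_consume(|x| *x). When the stored value is a cheap-to-copy handle, this keeps the functions returning by value. A sketch of the same idea in today's Rust (get and copied stand in for find and map_consume; the types are illustrative only):

    use std::collections::HashMap;

    // The reference returned by the lookup is copied out immediately, so the
    // function keeps its by-value return type.
    fn type_name(names: &HashMap<usize, &'static str>, t: usize) -> Option<&'static str> {
        names.get(&t).copied()
    }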


@ -19,6 +19,7 @@ use metadata::filesearch::FileSearch;
use metadata::loader; use metadata::loader;
use core::either; use core::either;
use core::hashmap::linear::LinearMap;
use core::vec; use core::vec;
use syntax::attr; use syntax::attr;
use syntax::codemap::{span, dummy_sp}; use syntax::codemap::{span, dummy_sp};
@ -26,7 +27,6 @@ use syntax::diagnostic::span_handler;
use syntax::parse::token::ident_interner; use syntax::parse::token::ident_interner;
use syntax::visit; use syntax::visit;
use syntax::{ast, ast_util}; use syntax::{ast, ast_util};
use std::oldmap::HashMap;
// Traverses an AST, reading all the information about use'd crates and extern // Traverses an AST, reading all the information about use'd crates and extern
// libraries necessary for later resolving, typechecking, linking, etc. // libraries necessary for later resolving, typechecking, linking, etc.
@ -307,7 +307,7 @@ fn resolve_crate_deps(e: @mut Env, cdata: @~[u8]) -> cstore::cnum_map {
debug!("resolving deps of external crate"); debug!("resolving deps of external crate");
// The map from crate numbers in the crate we're resolving to local crate // The map from crate numbers in the crate we're resolving to local crate
// numbers // numbers
let cnum_map = HashMap(); let mut cnum_map = LinearMap::new();
for decoder::get_crate_deps(e.intr, cdata).each |dep| { for decoder::get_crate_deps(e.intr, cdata).each |dep| {
let extrn_cnum = dep.cnum; let extrn_cnum = dep.cnum;
let cname = dep.name; let cname = dep.name;
@ -334,7 +334,7 @@ fn resolve_crate_deps(e: @mut Env, cdata: @~[u8]) -> cstore::cnum_map {
} }
} }
} }
return cnum_map; return @mut cnum_map;
} }
// Local Variables: // Local Variables:


@ -17,8 +17,8 @@ use core::prelude::*;
use metadata::cstore; use metadata::cstore;
use metadata::decoder; use metadata::decoder;
use core::hashmap::linear::LinearMap;
use core::vec; use core::vec;
use std::oldmap;
use std; use std;
use syntax::{ast, attr}; use syntax::{ast, attr};
use syntax::parse::token::ident_interner; use syntax::parse::token::ident_interner;
@ -27,7 +27,7 @@ use syntax::parse::token::ident_interner;
// local crate numbers (as generated during this session). Each external // local crate numbers (as generated during this session). Each external
// crate may refer to types in other external crates, and each has their // crate may refer to types in other external crates, and each has their
// own crate numbers. // own crate numbers.
pub type cnum_map = oldmap::HashMap<ast::crate_num, ast::crate_num>; pub type cnum_map = @mut LinearMap<ast::crate_num, ast::crate_num>;
pub struct crate_metadata { pub struct crate_metadata {
name: @~str, name: @~str,
@ -37,7 +37,7 @@ pub struct crate_metadata {
} }
pub struct CStore { pub struct CStore {
priv metas: oldmap::HashMap<ast::crate_num, @crate_metadata>, priv metas: LinearMap <ast::crate_num, @crate_metadata>,
priv extern_mod_crate_map: extern_mod_crate_map, priv extern_mod_crate_map: extern_mod_crate_map,
priv used_crate_files: ~[Path], priv used_crate_files: ~[Path],
priv used_libraries: ~[~str], priv used_libraries: ~[~str],
@ -46,14 +46,12 @@ pub struct CStore {
} }
// Map from node_id's of local extern mod statements to crate numbers // Map from node_id's of local extern mod statements to crate numbers
type extern_mod_crate_map = oldmap::HashMap<ast::node_id, ast::crate_num>; type extern_mod_crate_map = LinearMap<ast::node_id, ast::crate_num>;
pub fn mk_cstore(intr: @ident_interner) -> CStore { pub fn mk_cstore(intr: @ident_interner) -> CStore {
let meta_cache = oldmap::HashMap();
let crate_map = oldmap::HashMap();
return CStore { return CStore {
metas: meta_cache, metas: LinearMap::new(),
extern_mod_crate_map: crate_map, extern_mod_crate_map: LinearMap::new(),
used_crate_files: ~[], used_crate_files: ~[],
used_libraries: ~[], used_libraries: ~[],
used_link_args: ~[], used_link_args: ~[],
@ -61,96 +59,89 @@ pub fn mk_cstore(intr: @ident_interner) -> CStore {
}; };
} }
pub fn get_crate_data(cstore: @mut CStore, cnum: ast::crate_num) pub fn get_crate_data(cstore: &CStore, cnum: ast::crate_num)
-> @crate_metadata { -> @crate_metadata {
return cstore.metas.get(&cnum); return *cstore.metas.get(&cnum);
} }
pub fn get_crate_hash(cstore: @mut CStore, cnum: ast::crate_num) -> @~str { pub fn get_crate_hash(cstore: &CStore, cnum: ast::crate_num) -> @~str {
let cdata = get_crate_data(cstore, cnum); let cdata = get_crate_data(cstore, cnum);
decoder::get_crate_hash(cdata.data) decoder::get_crate_hash(cdata.data)
} }
pub fn get_crate_vers(cstore: @mut CStore, cnum: ast::crate_num) -> @~str { pub fn get_crate_vers(cstore: &CStore, cnum: ast::crate_num) -> @~str {
let cdata = get_crate_data(cstore, cnum); let cdata = get_crate_data(cstore, cnum);
decoder::get_crate_vers(cdata.data) decoder::get_crate_vers(cdata.data)
} }
pub fn set_crate_data(cstore: @mut CStore, pub fn set_crate_data(cstore: &mut CStore,
cnum: ast::crate_num, cnum: ast::crate_num,
data: @crate_metadata) { data: @crate_metadata) {
let metas = cstore.metas; cstore.metas.insert(cnum, data);
metas.insert(cnum, data);
} }
pub fn have_crate_data(cstore: @mut CStore, cnum: ast::crate_num) -> bool { pub fn have_crate_data(cstore: &CStore, cnum: ast::crate_num) -> bool {
cstore.metas.contains_key(&cnum) cstore.metas.contains_key(&cnum)
} }
pub fn iter_crate_data(cstore: @mut CStore, pub fn iter_crate_data(cstore: &CStore,
i: &fn(ast::crate_num, @crate_metadata)) { i: &fn(ast::crate_num, @crate_metadata)) {
let metas = cstore.metas; for cstore.metas.each |&(&k, &v)| {
for metas.each |&k, &v| {
i(k, v); i(k, v);
} }
} }
pub fn add_used_crate_file(cstore: @mut CStore, lib: &Path) { pub fn add_used_crate_file(cstore: &mut CStore, lib: &Path) {
let cstore = &mut *cstore;
if !vec::contains(cstore.used_crate_files, lib) { if !vec::contains(cstore.used_crate_files, lib) {
cstore.used_crate_files.push(copy *lib); cstore.used_crate_files.push(copy *lib);
} }
} }
pub fn get_used_crate_files(cstore: @mut CStore) -> ~[Path] { pub fn get_used_crate_files(cstore: &CStore) -> ~[Path] {
return /*bad*/copy cstore.used_crate_files; return /*bad*/copy cstore.used_crate_files;
} }
pub fn add_used_library(cstore: @mut CStore, lib: @~str) -> bool { pub fn add_used_library(cstore: &mut CStore, lib: @~str) -> bool {
fail_unless!(*lib != ~""); fail_unless!(*lib != ~"");
let cstore = &mut *cstore;
if cstore.used_libraries.contains(&*lib) { return false; } if cstore.used_libraries.contains(&*lib) { return false; }
cstore.used_libraries.push(/*bad*/ copy *lib); cstore.used_libraries.push(/*bad*/ copy *lib);
true true
} }
pub fn get_used_libraries(cstore: @mut CStore) -> ~[~str] { pub fn get_used_libraries(cstore: &CStore) -> ~[~str] {
/*bad*/copy cstore.used_libraries /*bad*/copy cstore.used_libraries
} }
pub fn add_used_link_args(cstore: @mut CStore, args: &str) { pub fn add_used_link_args(cstore: &mut CStore, args: &str) {
for args.each_split_char(' ') |s| { for args.each_split_char(' ') |s| {
cstore.used_link_args.push(s.to_owned()); cstore.used_link_args.push(s.to_owned());
} }
} }
pub fn get_used_link_args(cstore: @mut CStore) -> ~[~str] { pub fn get_used_link_args(cstore: &CStore) -> ~[~str] {
/*bad*/copy cstore.used_link_args /*bad*/copy cstore.used_link_args
} }
pub fn add_extern_mod_stmt_cnum(cstore: @mut CStore, pub fn add_extern_mod_stmt_cnum(cstore: &mut CStore,
emod_id: ast::node_id, emod_id: ast::node_id,
cnum: ast::crate_num) { cnum: ast::crate_num) {
let extern_mod_crate_map = cstore.extern_mod_crate_map; cstore.extern_mod_crate_map.insert(emod_id, cnum);
extern_mod_crate_map.insert(emod_id, cnum);
} }
pub fn find_extern_mod_stmt_cnum(cstore: @mut CStore, pub fn find_extern_mod_stmt_cnum(cstore: &CStore,
emod_id: ast::node_id) emod_id: ast::node_id)
-> Option<ast::crate_num> { -> Option<ast::crate_num> {
let extern_mod_crate_map = cstore.extern_mod_crate_map; cstore.extern_mod_crate_map.find(&emod_id).map_consume(|x| *x)
extern_mod_crate_map.find(&emod_id)
} }
// returns hashes of crates directly used by this crate. Hashes are sorted by // returns hashes of crates directly used by this crate. Hashes are sorted by
// (crate name, crate version, crate hash) in lexicographic order (not semver) // (crate name, crate version, crate hash) in lexicographic order (not semver)
pub fn get_dep_hashes(cstore: @mut CStore) -> ~[~str] { pub fn get_dep_hashes(cstore: &CStore) -> ~[~str] {
struct crate_hash { name: @~str, vers: @~str, hash: @~str } struct crate_hash { name: @~str, vers: @~str, hash: @~str }
let mut result = ~[]; let mut result = ~[];
let extern_mod_crate_map = cstore.extern_mod_crate_map; for cstore.extern_mod_crate_map.each_value |&cnum| {
for extern_mod_crate_map.each_value |&cnum| {
let cdata = cstore::get_crate_data(cstore, cnum); let cdata = cstore::get_crate_data(cstore, cnum);
let hash = decoder::get_crate_hash(cdata.data); let hash = decoder::get_crate_hash(cdata.data);
let vers = decoder::get_crate_vers(cdata.data); let vers = decoder::get_crate_vers(cdata.data);
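The cstore changes above are mostly about ownership: once metas and extern_mod_crate_map are plain owned LinearMap fields, the free functions stop taking the whole store as a shared @mut box and instead borrow it, with &mut CStore reserved for the functions that actually insert. A reduced sketch of that split in today's Rust (field and function names are illustrative, not the real cstore API):

    use std::collections::HashMap;

    struct CStore {
        metas: HashMap<u32, String>,
    }

    // Read-only queries only need a shared borrow of the store...
    fn have_crate_data(cstore: &CStore, cnum: u32) -> bool {
        cstore.metas.contains_key(&cnum)
    }

    // ...and only the mutating entry points require &mut.
    fn set_crate_data(cstore: &mut CStore, cnum: u32, data: String) {
        cstore.metas.insert(cnum, data);
    }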


@ -1132,7 +1132,7 @@ pub fn translate_def_id(cdata: cmd, did: ast::def_id) -> ast::def_id {
} }
match cdata.cnum_map.find(&did.crate) { match cdata.cnum_map.find(&did.crate) {
option::Some(n) => ast::def_id { crate: n, node: did.node }, option::Some(&n) => ast::def_id { crate: n, node: did.node },
option::None => fail!(~"didn't find a crate in the cnum_map") option::None => fail!(~"didn't find a crate in the cnum_map")
} }
} }


@ -17,6 +17,7 @@ use metadata::common::*;
use metadata::cstore; use metadata::cstore;
use metadata::decoder; use metadata::decoder;
use metadata::tyencode; use metadata::tyencode;
use middle::trans::reachable;
use middle::ty::node_id_to_type; use middle::ty::node_id_to_type;
use middle::ty; use middle::ty;
use middle; use middle;
@ -24,6 +25,7 @@ use util::ppaux::ty_to_str;
use core::flate; use core::flate;
use core::hash::{Hash, HashUtil}; use core::hash::{Hash, HashUtil};
use core::hashmap::linear::{LinearMap, LinearSet};
use core::int; use core::int;
use core::io::{Writer, WriterUtil}; use core::io::{Writer, WriterUtil};
use core::io; use core::io;
@ -31,9 +33,7 @@ use core::str;
use core::to_bytes::IterBytes; use core::to_bytes::IterBytes;
use core::uint; use core::uint;
use core::vec; use core::vec;
use std::oldmap::HashMap;
use std::serialize::Encodable; use std::serialize::Encodable;
use std::{ebml, oldmap};
use std; use std;
use syntax::ast::*; use syntax::ast::*;
use syntax::ast; use syntax::ast;
@ -49,7 +49,7 @@ use syntax;
use writer = std::ebml::writer; use writer = std::ebml::writer;
// used by astencode: // used by astencode:
type abbrev_map = oldmap::HashMap<ty::t, tyencode::ty_abbrev>; type abbrev_map = @mut LinearMap<ty::t, tyencode::ty_abbrev>;
pub type encode_inlined_item = @fn(ecx: @EncodeContext, pub type encode_inlined_item = @fn(ecx: @EncodeContext,
ebml_w: writer::Encoder, ebml_w: writer::Encoder,
@ -59,10 +59,10 @@ pub type encode_inlined_item = @fn(ecx: @EncodeContext,
pub struct EncodeParams { pub struct EncodeParams {
diag: @span_handler, diag: @span_handler,
tcx: ty::ctxt, tcx: ty::ctxt,
reachable: HashMap<ast::node_id, ()>, reachable: reachable::map,
reexports2: middle::resolve::ExportMap2, reexports2: middle::resolve::ExportMap2,
item_symbols: HashMap<ast::node_id, ~str>, item_symbols: @mut LinearMap<ast::node_id, ~str>,
discrim_symbols: HashMap<ast::node_id, ~str>, discrim_symbols: @mut LinearMap<ast::node_id, ~str>,
link_meta: LinkMeta, link_meta: LinkMeta,
cstore: @mut cstore::CStore, cstore: @mut cstore::CStore,
encode_inlined_item: encode_inlined_item encode_inlined_item: encode_inlined_item
@ -86,10 +86,10 @@ pub struct EncodeContext {
diag: @span_handler, diag: @span_handler,
tcx: ty::ctxt, tcx: ty::ctxt,
stats: @mut Stats, stats: @mut Stats,
reachable: HashMap<ast::node_id, ()>, reachable: reachable::map,
reexports2: middle::resolve::ExportMap2, reexports2: middle::resolve::ExportMap2,
item_symbols: HashMap<ast::node_id, ~str>, item_symbols: @mut LinearMap<ast::node_id, ~str>,
discrim_symbols: HashMap<ast::node_id, ~str>, discrim_symbols: @mut LinearMap<ast::node_id, ~str>,
link_meta: LinkMeta, link_meta: LinkMeta,
cstore: @mut cstore::CStore, cstore: @mut cstore::CStore,
encode_inlined_item: encode_inlined_item, encode_inlined_item: encode_inlined_item,
@ -97,7 +97,7 @@ pub struct EncodeContext {
} }
pub fn reachable(ecx: @EncodeContext, id: node_id) -> bool { pub fn reachable(ecx: @EncodeContext, id: node_id) -> bool {
ecx.reachable.contains_key(&id) ecx.reachable.contains(&id)
} }
fn encode_name(ecx: @EncodeContext, ebml_w: writer::Encoder, name: ident) { fn encode_name(ecx: @EncodeContext, ebml_w: writer::Encoder, name: ident) {
@ -188,7 +188,7 @@ fn encode_type_param_bounds(ebml_w: writer::Encoder,
ecx: @EncodeContext, ecx: @EncodeContext,
params: &OptVec<TyParam>) { params: &OptVec<TyParam>) {
let ty_param_bounds = let ty_param_bounds =
@params.map_to_vec(|param| ecx.tcx.ty_param_bounds.get(&param.id)); @params.map_to_vec(|param| *ecx.tcx.ty_param_bounds.get(&param.id));
encode_ty_type_param_bounds(ebml_w, ecx, ty_param_bounds); encode_ty_type_param_bounds(ebml_w, ecx, ty_param_bounds);
} }
@ -229,7 +229,7 @@ fn encode_type(ecx: @EncodeContext, ebml_w: writer::Encoder, typ: ty::t) {
fn encode_symbol(ecx: @EncodeContext, ebml_w: writer::Encoder, id: node_id) { fn encode_symbol(ecx: @EncodeContext, ebml_w: writer::Encoder, id: node_id) {
ebml_w.start_tag(tag_items_data_item_symbol); ebml_w.start_tag(tag_items_data_item_symbol);
match ecx.item_symbols.find(&id) { match ecx.item_symbols.find(&id) {
Some(ref x) => { Some(x) => {
debug!("encode_symbol(id=%?, str=%s)", id, *x); debug!("encode_symbol(id=%?, str=%s)", id, *x);
ebml_w.writer.write(str::to_bytes(*x)); ebml_w.writer.write(str::to_bytes(*x));
} }
@ -244,7 +244,7 @@ fn encode_symbol(ecx: @EncodeContext, ebml_w: writer::Encoder, id: node_id) {
fn encode_discriminant(ecx: @EncodeContext, ebml_w: writer::Encoder, fn encode_discriminant(ecx: @EncodeContext, ebml_w: writer::Encoder,
id: node_id) { id: node_id) {
ebml_w.start_tag(tag_items_data_item_symbol); ebml_w.start_tag(tag_items_data_item_symbol);
ebml_w.writer.write(str::to_bytes(ecx.discrim_symbols.get(&id))); ebml_w.writer.write(str::to_bytes(*ecx.discrim_symbols.get(&id)));
ebml_w.end_tag(); ebml_w.end_tag();
} }
@ -1320,7 +1320,7 @@ pub static metadata_encoding_version : &'static [u8] =
0x74, //'t' as u8, 0x74, //'t' as u8,
0, 0, 0, 1 ]; 0, 0, 0, 1 ];
pub fn encode_metadata(parms: EncodeParams, crate: &crate) -> ~[u8] { pub fn encode_metadata(+parms: EncodeParams, crate: &crate) -> ~[u8] {
let wr = @io::BytesWriter(); let wr = @io::BytesWriter();
let mut stats = Stats { let mut stats = Stats {
inline_bytes: 0, inline_bytes: 0,
@ -1334,18 +1334,21 @@ pub fn encode_metadata(parms: EncodeParams, crate: &crate) -> ~[u8] {
total_bytes: 0, total_bytes: 0,
n_inlines: 0 n_inlines: 0
}; };
let EncodeParams{item_symbols, diag, tcx, reachable, reexports2,
discrim_symbols, cstore, encode_inlined_item,
link_meta, _} = parms;
let ecx = @EncodeContext { let ecx = @EncodeContext {
diag: parms.diag, diag: diag,
tcx: parms.tcx, tcx: tcx,
stats: @mut stats, stats: @mut stats,
reachable: parms.reachable, reachable: reachable,
reexports2: parms.reexports2, reexports2: reexports2,
item_symbols: parms.item_symbols, item_symbols: item_symbols,
discrim_symbols: parms.discrim_symbols, discrim_symbols: discrim_symbols,
link_meta: /*bad*/copy parms.link_meta, link_meta: link_meta,
cstore: parms.cstore, cstore: cstore,
encode_inlined_item: parms.encode_inlined_item, encode_inlined_item: encode_inlined_item,
type_abbrevs: ty::new_ty_hash() type_abbrevs: @mut LinearMap::new()
}; };
let ebml_w = writer::Encoder(wr as @io::Writer); let ebml_w = writer::Encoder(wr as @io::Writer);
@ -1385,7 +1388,7 @@ pub fn encode_metadata(parms: EncodeParams, crate: &crate) -> ~[u8] {
ecx.stats.total_bytes = wr.pos; ecx.stats.total_bytes = wr.pos;
if (parms.tcx.sess.meta_stats()) { if (tcx.sess.meta_stats()) {
do wr.bytes.each |e| { do wr.bytes.each |e| {
if *e == 0 { if *e == 0 {
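encode_metadata now takes EncodeParams by value and destructures it (let EncodeParams{item_symbols, diag, ...} = parms;), moving each field into the new EncodeContext instead of reading parms.field and copying. A sketch of that pattern in today's Rust (the struct and field names are invented for illustration):

    // Destructuring moves the fields out of the parameter struct once, so the
    // context can be built without per-field copies or repeated `parms.x` reads.
    struct Params { name: String, flags: Vec<String> }
    struct Ctxt { name: String, flags: Vec<String>, count: usize }

    fn mk_ctxt(parms: Params) -> Ctxt {
        let Params { name, flags } = parms;
        Ctxt { name, flags, count: 0 }
    }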


@ -337,7 +337,7 @@ fn parse_ty(st: @mut PState, conv: conv_did) -> ty::t {
pos: pos, pos: pos,
len: len }; len: len };
match st.tcx.rcache.find(&key) { match st.tcx.rcache.find(&key) {
Some(tt) => return tt, Some(&tt) => return tt,
None => { None => {
let ps = @mut PState {pos: pos ,.. copy *st}; let ps = @mut PState {pos: pos ,.. copy *st};
let tt = parse_ty(ps, conv); let tt = parse_ty(ps, conv);


@ -16,11 +16,11 @@ use core::prelude::*;
use middle::ty::{Vid, param_ty}; use middle::ty::{Vid, param_ty};
use middle::ty; use middle::ty;
use core::hashmap::linear::LinearMap;
use core::io::WriterUtil; use core::io::WriterUtil;
use core::io; use core::io;
use core::uint; use core::uint;
use core::vec; use core::vec;
use std::oldmap::HashMap;
use syntax::ast::*; use syntax::ast::*;
use syntax::diagnostic::span_handler; use syntax::diagnostic::span_handler;
use syntax::print::pprust::*; use syntax::print::pprust::*;
@ -47,7 +47,7 @@ pub struct ty_abbrev {
pub enum abbrev_ctxt { pub enum abbrev_ctxt {
ac_no_abbrevs, ac_no_abbrevs,
ac_use_abbrevs(HashMap<ty::t, ty_abbrev>), ac_use_abbrevs(@mut LinearMap<ty::t, ty_abbrev>),
} }
fn cx_uses_abbrevs(cx: @ctxt) -> bool { fn cx_uses_abbrevs(cx: @ctxt) -> bool {


@ -861,7 +861,7 @@ fn encode_side_tables_for_id(ecx: @e::EncodeContext,
} }
} }
for tcx.freevars.find(&id).each |fv| { for tcx.freevars.find(&id).each |&fv| {
do ebml_w.tag(c::tag_table_freevars) { do ebml_w.tag(c::tag_table_freevars) {
ebml_w.id(id); ebml_w.id(id);
do ebml_w.tag(c::tag_table_val) { do ebml_w.tag(c::tag_table_val) {
@ -873,7 +873,7 @@ fn encode_side_tables_for_id(ecx: @e::EncodeContext,
} }
let lid = ast::def_id { crate: ast::local_crate, node: id }; let lid = ast::def_id { crate: ast::local_crate, node: id };
for tcx.tcache.find(&lid).each |tpbt| { for tcx.tcache.find(&lid).each |&tpbt| {
do ebml_w.tag(c::tag_table_tcache) { do ebml_w.tag(c::tag_table_tcache) {
ebml_w.id(id); ebml_w.id(id);
do ebml_w.tag(c::tag_table_val) { do ebml_w.tag(c::tag_table_val) {
@ -882,7 +882,7 @@ fn encode_side_tables_for_id(ecx: @e::EncodeContext,
} }
} }
for tcx.ty_param_bounds.find(&id).each |pbs| { for tcx.ty_param_bounds.find(&id).each |&pbs| {
do ebml_w.tag(c::tag_table_param_bounds) { do ebml_w.tag(c::tag_table_param_bounds) {
ebml_w.id(id); ebml_w.id(id);
do ebml_w.tag(c::tag_table_val) { do ebml_w.tag(c::tag_table_val) {
@ -905,7 +905,7 @@ fn encode_side_tables_for_id(ecx: @e::EncodeContext,
// } // }
//} //}
if maps.mutbl_map.contains_key(&id) { if maps.mutbl_map.contains(&id) {
do ebml_w.tag(c::tag_table_mutbl) { do ebml_w.tag(c::tag_table_mutbl) {
ebml_w.id(id); ebml_w.id(id);
} }
@ -915,7 +915,7 @@ fn encode_side_tables_for_id(ecx: @e::EncodeContext,
do ebml_w.tag(c::tag_table_last_use) { do ebml_w.tag(c::tag_table_last_use) {
ebml_w.id(id); ebml_w.id(id);
do ebml_w.tag(c::tag_table_val) { do ebml_w.tag(c::tag_table_val) {
do ebml_w.emit_from_vec(/*bad*/ copy *m) |id| { do ebml_w.emit_from_vec(/*bad*/ copy **m) |id| {
id.encode(&ebml_w); id.encode(&ebml_w);
} }
} }
@ -931,7 +931,7 @@ fn encode_side_tables_for_id(ecx: @e::EncodeContext,
} }
} }
for maps.vtable_map.find(&id).each |dr| { for maps.vtable_map.find(&id).each |&dr| {
do ebml_w.tag(c::tag_table_vtable_map) { do ebml_w.tag(c::tag_table_vtable_map) {
ebml_w.id(id); ebml_w.id(id);
do ebml_w.tag(c::tag_table_val) { do ebml_w.tag(c::tag_table_val) {
@ -949,13 +949,13 @@ fn encode_side_tables_for_id(ecx: @e::EncodeContext,
} }
} }
for maps.moves_map.find(&id).each |_| { if maps.moves_map.contains(&id) {
do ebml_w.tag(c::tag_table_moves_map) { do ebml_w.tag(c::tag_table_moves_map) {
ebml_w.id(id); ebml_w.id(id);
} }
} }
for maps.capture_map.find(&id).each |cap_vars| { for maps.capture_map.find(&id).each |&cap_vars| {
do ebml_w.tag(c::tag_table_capture_map) { do ebml_w.tag(c::tag_table_capture_map) {
ebml_w.id(id); ebml_w.id(id);
do ebml_w.tag(c::tag_table_val) { do ebml_w.tag(c::tag_table_val) {
@ -1097,9 +1097,9 @@ fn decode_side_tables(xcx: @ExtendedDecodeContext,
tag, id, id0); tag, id, id0);
if tag == (c::tag_table_mutbl as uint) { if tag == (c::tag_table_mutbl as uint) {
dcx.maps.mutbl_map.insert(id, ()); dcx.maps.mutbl_map.insert(id);
} else if tag == (c::tag_table_moves_map as uint) { } else if tag == (c::tag_table_moves_map as uint) {
dcx.maps.moves_map.insert(id, ()); dcx.maps.moves_map.insert(id);
} else { } else {
let val_doc = entry_doc[c::tag_table_val as uint]; let val_doc = entry_doc[c::tag_table_val as uint];
let val_dsr = &reader::Decoder(val_doc); let val_dsr = &reader::Decoder(val_doc);
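Several of the side tables touched above (mutbl_map, moves_map) were HashMaps with () values, i.e. sets in disguise; with LinearSet they become real sets, so insert(id, ()) turns into insert(id) and contains_key(&id) into contains(&id). The same simplification in today's Rust (HashSet stands in for LinearSet):

    use std::collections::HashSet;

    // insert returns true when the id was not already present.
    fn mark(ids: &mut HashSet<u32>, id: u32) -> bool {
        ids.insert(id)
    }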


@ -31,8 +31,8 @@ use middle::mem_categorization::{lp_comp, lp_deref, lp_local};
use middle::ty; use middle::ty;
use util::ppaux::ty_to_str; use util::ppaux::ty_to_str;
use core::hashmap::linear::LinearSet;
use core::uint; use core::uint;
use std::oldmap::HashMap;
use syntax::ast::{m_const, m_imm, m_mutbl}; use syntax::ast::{m_const, m_imm, m_mutbl};
use syntax::ast; use syntax::ast;
use syntax::ast_util; use syntax::ast_util;
@ -44,7 +44,7 @@ struct CheckLoanCtxt {
bccx: @BorrowckCtxt, bccx: @BorrowckCtxt,
req_maps: ReqMaps, req_maps: ReqMaps,
reported: HashMap<ast::node_id, ()>, reported: LinearSet<ast::node_id>,
declared_purity: @mut ast::purity, declared_purity: @mut ast::purity,
fn_args: @mut @~[ast::node_id] fn_args: @mut @~[ast::node_id]
@ -63,12 +63,12 @@ enum purity_cause {
} }
pub fn check_loans(bccx: @BorrowckCtxt, pub fn check_loans(bccx: @BorrowckCtxt,
req_maps: ReqMaps, +req_maps: ReqMaps,
crate: @ast::crate) { crate: @ast::crate) {
let clcx = @mut CheckLoanCtxt { let clcx = @mut CheckLoanCtxt {
bccx: bccx, bccx: bccx,
req_maps: req_maps, req_maps: req_maps,
reported: HashMap(), reported: LinearSet::new(),
declared_purity: @mut ast::impure_fn, declared_purity: @mut ast::impure_fn,
fn_args: @mut @~[] fn_args: @mut @~[]
}; };
@ -104,9 +104,9 @@ pub impl assignment_type {
} }
pub impl CheckLoanCtxt { pub impl CheckLoanCtxt {
fn tcx(@mut self) -> ty::ctxt { self.bccx.tcx } fn tcx(&self) -> ty::ctxt { self.bccx.tcx }
fn purity(@mut self, scope_id: ast::node_id) -> Option<purity_cause> { fn purity(&mut self, scope_id: ast::node_id) -> Option<purity_cause> {
let default_purity = match *self.declared_purity { let default_purity = match *self.declared_purity {
// an unsafe declaration overrides all // an unsafe declaration overrides all
ast::unsafe_fn => return None, ast::unsafe_fn => return None,
@ -122,43 +122,38 @@ pub impl CheckLoanCtxt {
// purity. if so, that overrides the declaration. // purity. if so, that overrides the declaration.
let mut scope_id = scope_id; let mut scope_id = scope_id;
let region_map = self.tcx().region_map;
let pure_map = self.req_maps.pure_map;
loop { loop {
match pure_map.find(&scope_id) { match self.req_maps.pure_map.find(&scope_id) {
None => (), None => (),
Some(ref e) => return Some(pc_cmt((*e))) Some(e) => return Some(pc_cmt(*e))
} }
match region_map.find(&scope_id) { match self.tcx().region_map.find(&scope_id) {
None => return default_purity, None => return default_purity,
Some(next_scope_id) => scope_id = next_scope_id Some(&next_scope_id) => scope_id = next_scope_id
} }
} }
} }
fn walk_loans(@mut self, fn walk_loans(&self,
scope_id: ast::node_id, mut scope_id: ast::node_id,
f: &fn(v: &Loan) -> bool) { f: &fn(v: &Loan) -> bool) {
let mut scope_id = scope_id;
let region_map = self.tcx().region_map;
let req_loan_map = self.req_maps.req_loan_map;
loop { loop {
for req_loan_map.find(&scope_id).each |loans| { for self.req_maps.req_loan_map.find(&scope_id).each |loans| {
for loans.each |loan| { for loans.each |loan| {
if !f(loan) { return; } if !f(loan) { return; }
} }
} }
match region_map.find(&scope_id) { match self.tcx().region_map.find(&scope_id) {
None => return, None => return,
Some(next_scope_id) => scope_id = next_scope_id, Some(&next_scope_id) => scope_id = next_scope_id,
} }
} }
} }
fn walk_loans_of(@mut self, fn walk_loans_of(&mut self,
scope_id: ast::node_id, scope_id: ast::node_id,
lp: @loan_path, lp: @loan_path,
f: &fn(v: &Loan) -> bool) { f: &fn(v: &Loan) -> bool) {
@ -175,7 +170,7 @@ pub impl CheckLoanCtxt {
// note: we take opt_expr and expr_id separately because for // note: we take opt_expr and expr_id separately because for
// overloaded operators the callee has an id but no expr. // overloaded operators the callee has an id but no expr.
// annoying. // annoying.
fn check_pure_callee_or_arg(@mut self, fn check_pure_callee_or_arg(&mut self,
pc: purity_cause, pc: purity_cause,
opt_expr: Option<@ast::expr>, opt_expr: Option<@ast::expr>,
callee_id: ast::node_id, callee_id: ast::node_id,
@ -202,7 +197,7 @@ pub impl CheckLoanCtxt {
Some(expr) => { Some(expr) => {
match expr.node { match expr.node {
ast::expr_path(_) if pc == pc_pure_fn => { ast::expr_path(_) if pc == pc_pure_fn => {
let def = self.tcx().def_map.get(&expr.id); let def = *self.tcx().def_map.get(&expr.id);
let did = ast_util::def_id_of_def(def); let did = ast_util::def_id_of_def(def);
let is_fn_arg = let is_fn_arg =
did.crate == ast::local_crate && did.crate == ast::local_crate &&
@ -242,7 +237,7 @@ pub impl CheckLoanCtxt {
// True if the expression with the given `id` is a stack closure. // True if the expression with the given `id` is a stack closure.
// The expression must be an expr_fn_block(*) // The expression must be an expr_fn_block(*)
fn is_stack_closure(@mut self, id: ast::node_id) -> bool { fn is_stack_closure(&mut self, id: ast::node_id) -> bool {
let fn_ty = ty::node_id_to_type(self.tcx(), id); let fn_ty = ty::node_id_to_type(self.tcx(), id);
match ty::get(fn_ty).sty { match ty::get(fn_ty).sty {
ty::ty_closure(ty::ClosureTy {sigil: ast::BorrowedSigil, ty::ty_closure(ty::ClosureTy {sigil: ast::BorrowedSigil,
@ -251,10 +246,10 @@ pub impl CheckLoanCtxt {
} }
} }
fn is_allowed_pure_arg(@mut self, expr: @ast::expr) -> bool { fn is_allowed_pure_arg(&mut self, expr: @ast::expr) -> bool {
return match expr.node { return match expr.node {
ast::expr_path(_) => { ast::expr_path(_) => {
let def = self.tcx().def_map.get(&expr.id); let def = *self.tcx().def_map.get(&expr.id);
let did = ast_util::def_id_of_def(def); let did = ast_util::def_id_of_def(def);
did.crate == ast::local_crate && did.crate == ast::local_crate &&
(*self.fn_args).contains(&(did.node)) (*self.fn_args).contains(&(did.node))
@ -264,18 +259,18 @@ pub impl CheckLoanCtxt {
}; };
} }
fn check_for_conflicting_loans(@mut self, scope_id: ast::node_id) { fn check_for_conflicting_loans(&mut self, scope_id: ast::node_id) {
debug!("check_for_conflicting_loans(scope_id=%?)", scope_id); debug!("check_for_conflicting_loans(scope_id=%?)", scope_id);
let new_loans = match self.req_maps.req_loan_map.find(&scope_id) { let new_loans = match self.req_maps.req_loan_map.find(&scope_id) {
None => return, None => return,
Some(loans) => loans Some(&loans) => loans
}; };
let new_loans: &mut ~[Loan] = new_loans; let new_loans: &mut ~[Loan] = new_loans;
debug!("new_loans has length %?", new_loans.len()); debug!("new_loans has length %?", new_loans.len());
let par_scope_id = self.tcx().region_map.get(&scope_id); let par_scope_id = *self.tcx().region_map.get(&scope_id);
for self.walk_loans(par_scope_id) |old_loan| { for self.walk_loans(par_scope_id) |old_loan| {
debug!("old_loan=%?", self.bccx.loan_to_repr(old_loan)); debug!("old_loan=%?", self.bccx.loan_to_repr(old_loan));
@ -294,7 +289,7 @@ pub impl CheckLoanCtxt {
} }
} }
fn report_error_if_loans_conflict(@mut self, fn report_error_if_loans_conflict(&self,
old_loan: &Loan, old_loan: &Loan,
new_loan: &Loan) { new_loan: &Loan) {
if old_loan.lp != new_loan.lp { if old_loan.lp != new_loan.lp {
@ -330,19 +325,19 @@ pub impl CheckLoanCtxt {
} }
} }
fn is_local_variable(@mut self, cmt: cmt) -> bool { fn is_local_variable(&self, cmt: cmt) -> bool {
match cmt.cat { match cmt.cat {
cat_local(_) => true, cat_local(_) => true,
_ => false _ => false
} }
} }
fn check_assignment(@mut self, at: assignment_type, ex: @ast::expr) { fn check_assignment(&mut self, at: assignment_type, ex: @ast::expr) {
// We don't use cat_expr() here because we don't want to treat // We don't use cat_expr() here because we don't want to treat
// auto-ref'd parameters in overloaded operators as rvalues. // auto-ref'd parameters in overloaded operators as rvalues.
let cmt = match self.bccx.tcx.adjustments.find(&ex.id) { let cmt = match self.bccx.tcx.adjustments.find(&ex.id) {
None => self.bccx.cat_expr_unadjusted(ex), None => self.bccx.cat_expr_unadjusted(ex),
Some(adj) => self.bccx.cat_expr_autoderefd(ex, adj) Some(&adj) => self.bccx.cat_expr_autoderefd(ex, adj)
}; };
debug!("check_assignment(cmt=%s)", debug!("check_assignment(cmt=%s)",
@ -369,6 +364,7 @@ pub impl CheckLoanCtxt {
match self.purity(ex.id) { match self.purity(ex.id) {
None => (), None => (),
Some(pc_cmt(_)) => { Some(pc_cmt(_)) => {
let purity = self.purity(ex.id).get();
// Subtle: Issue #3162. If we are enforcing purity // Subtle: Issue #3162. If we are enforcing purity
// because there is a reference to aliasable, mutable data // because there is a reference to aliasable, mutable data
// that we require to be immutable, we can't allow writes // that we require to be immutable, we can't allow writes
@ -376,7 +372,7 @@ pub impl CheckLoanCtxt {
// because that aliasable data might have been located on // because that aliasable data might have been located on
// the current stack frame, we don't know. // the current stack frame, we don't know.
self.report_purity_error( self.report_purity_error(
self.purity(ex.id).get(), purity,
ex.span, ex.span,
at.ing_form(self.bccx.cmt_to_str(cmt))); at.ing_form(self.bccx.cmt_to_str(cmt)));
} }
@ -404,7 +400,7 @@ pub impl CheckLoanCtxt {
self.add_write_guards_if_necessary(cmt); self.add_write_guards_if_necessary(cmt);
} }
fn add_write_guards_if_necessary(@mut self, cmt: cmt) { fn add_write_guards_if_necessary(&mut self, cmt: cmt) {
match cmt.cat { match cmt.cat {
cat_deref(base, deref_count, ptr_kind) => { cat_deref(base, deref_count, ptr_kind) => {
self.add_write_guards_if_necessary(base); self.add_write_guards_if_necessary(base);
@ -415,7 +411,7 @@ pub impl CheckLoanCtxt {
id: base.id, id: base.id,
derefs: deref_count derefs: deref_count
}; };
self.bccx.write_guard_map.insert(key, ()); self.bccx.write_guard_map.insert(key);
} }
_ => {} _ => {}
} }
@ -427,7 +423,7 @@ pub impl CheckLoanCtxt {
} }
} }
fn check_for_loan_conflicting_with_assignment(@mut self, fn check_for_loan_conflicting_with_assignment(&mut self,
at: assignment_type, at: assignment_type,
ex: @ast::expr, ex: @ast::expr,
cmt: cmt, cmt: cmt,
@ -466,7 +462,7 @@ pub impl CheckLoanCtxt {
} }
} }
fn report_purity_error(@mut self, pc: purity_cause, sp: span, msg: ~str) { fn report_purity_error(&mut self, pc: purity_cause, sp: span, msg: ~str) {
match pc { match pc {
pc_pure_fn => { pc_pure_fn => {
self.tcx().sess.span_err( self.tcx().sess.span_err(
@ -474,8 +470,7 @@ pub impl CheckLoanCtxt {
fmt!("%s prohibited in pure context", msg)); fmt!("%s prohibited in pure context", msg));
} }
pc_cmt(ref e) => { pc_cmt(ref e) => {
let reported = self.reported; if self.reported.insert((*e).cmt.id) {
if reported.insert((*e).cmt.id, ()) {
self.tcx().sess.span_err( self.tcx().sess.span_err(
(*e).cmt.span, (*e).cmt.span,
fmt!("illegal borrow unless pure: %s", fmt!("illegal borrow unless pure: %s",
@ -522,7 +517,7 @@ pub impl CheckLoanCtxt {
} }
} }
fn analyze_move_out_from_cmt(@mut self, cmt: cmt) -> MoveError { fn analyze_move_out_from_cmt(&mut self, cmt: cmt) -> MoveError {
debug!("check_move_out_from_cmt(cmt=%s)", debug!("check_move_out_from_cmt(cmt=%s)",
self.bccx.cmt_to_repr(cmt)); self.bccx.cmt_to_repr(cmt));
@ -555,7 +550,7 @@ pub impl CheckLoanCtxt {
return MoveOk; return MoveOk;
} }
fn check_call(@mut self, fn check_call(&mut self,
expr: @ast::expr, expr: @ast::expr,
callee: Option<@ast::expr>, callee: Option<@ast::expr>,
callee_id: ast::node_id, callee_id: ast::node_id,
@ -695,7 +690,7 @@ fn check_loans_in_expr(expr: @ast::expr,
self.check_for_conflicting_loans(expr.id); self.check_for_conflicting_loans(expr.id);
if self.bccx.moves_map.contains_key(&expr.id) { if self.bccx.moves_map.contains(&expr.id) {
self.check_move_out_from_expr(expr); self.check_move_out_from_expr(expr);
} }
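The CheckLoanCtxt methods above also drop their @mut self receivers in favour of ordinary borrows, with &mut self used only where the context is actually mutated (for example the reported set in report_purity_error). A minimal sketch of that split in today's Rust (names are illustrative):

    use std::collections::HashSet;

    struct Ctxt { reported: HashSet<u32> }

    impl Ctxt {
        // Read-only query: a shared borrow is enough.
        fn already_reported(&self, id: u32) -> bool {
            self.reported.contains(&id)
        }
        // Mutation: insert returns true only the first time an id is seen,
        // which lets the caller report each error once.
        fn report_once(&mut self, id: u32) -> bool {
            self.reported.insert(id)
        }
    }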


@ -32,9 +32,8 @@ use middle::ty;
use util::common::indenter; use util::common::indenter;
use util::ppaux::{expr_repr, region_to_str}; use util::ppaux::{expr_repr, region_to_str};
use core::hashmap::linear::LinearSet; use core::hashmap::linear::{LinearSet, LinearMap};
use core::vec; use core::vec;
use std::oldmap::HashMap;
use syntax::ast::{m_const, m_imm, m_mutbl}; use syntax::ast::{m_const, m_imm, m_mutbl};
use syntax::ast; use syntax::ast;
use syntax::codemap::span; use syntax::codemap::span;
@ -80,7 +79,8 @@ struct GatherLoanCtxt {
pub fn gather_loans(bccx: @BorrowckCtxt, crate: @ast::crate) -> ReqMaps { pub fn gather_loans(bccx: @BorrowckCtxt, crate: @ast::crate) -> ReqMaps {
let glcx = @mut GatherLoanCtxt { let glcx = @mut GatherLoanCtxt {
bccx: bccx, bccx: bccx,
req_maps: ReqMaps { req_loan_map: HashMap(), pure_map: HashMap() }, req_maps: ReqMaps { req_loan_map: LinearMap::new(),
pure_map: LinearMap::new() },
item_ub: 0, item_ub: 0,
root_ub: 0, root_ub: 0,
ignore_adjustments: LinearSet::new() ignore_adjustments: LinearSet::new()
@ -90,7 +90,8 @@ pub fn gather_loans(bccx: @BorrowckCtxt, crate: @ast::crate) -> ReqMaps {
visit_stmt: add_stmt_to_map, visit_stmt: add_stmt_to_map,
.. *visit::default_visitor()}); .. *visit::default_visitor()});
visit::visit_crate(*crate, glcx, v); visit::visit_crate(*crate, glcx, v);
return glcx.req_maps; let @GatherLoanCtxt{req_maps, _} = glcx;
return req_maps;
} }
fn req_loans_in_fn(fk: &visit::fn_kind, fn req_loans_in_fn(fk: &visit::fn_kind,
@ -132,7 +133,7 @@ fn req_loans_in_expr(ex: @ast::expr,
{ {
let mut this = &mut *self; let mut this = &mut *self;
if !this.ignore_adjustments.contains(&ex.id) { if !this.ignore_adjustments.contains(&ex.id) {
for tcx.adjustments.find(&ex.id).each |adjustments| { for tcx.adjustments.find(&ex.id).each |&adjustments| {
this.guarantee_adjustments(ex, *adjustments); this.guarantee_adjustments(ex, *adjustments);
} }
} }
@ -257,7 +258,7 @@ fn req_loans_in_expr(ex: @ast::expr,
// (if used like `a.b(...)`), the call where it's an argument // (if used like `a.b(...)`), the call where it's an argument
// (if used like `x(a.b)`), or the block (if used like `let x // (if used like `x(a.b)`), or the block (if used like `let x
// = a.b`). // = a.b`).
let scope_r = ty::re_scope(self.tcx().region_map.get(&ex.id)); let scope_r = ty::re_scope(*self.tcx().region_map.get(&ex.id));
let rcvr_cmt = self.bccx.cat_expr(rcvr); let rcvr_cmt = self.bccx.cat_expr(rcvr);
self.guarantee_valid(rcvr_cmt, m_imm, scope_r); self.guarantee_valid(rcvr_cmt, m_imm, scope_r);
visit::visit_expr(ex, self, vt); visit::visit_expr(ex, self, vt);
@ -429,8 +430,7 @@ pub impl GatherLoanCtxt {
// if the scope is some block/expr in the // if the scope is some block/expr in the
// fn, then just require that this scope // fn, then just require that this scope
// be pure // be pure
let pure_map = self.req_maps.pure_map; self.req_maps.pure_map.insert(pure_id, *e);
pure_map.insert(pure_id, *e);
self.bccx.stats.req_pure_paths += 1; self.bccx.stats.req_pure_paths += 1;
debug!("requiring purity for scope %?", debug!("requiring purity for scope %?",
@ -575,12 +575,11 @@ pub impl GatherLoanCtxt {
match self.req_maps.req_loan_map.find(&scope_id) { match self.req_maps.req_loan_map.find(&scope_id) {
Some(req_loans) => { Some(req_loans) => {
req_loans.push_all(loans); req_loans.push_all(loans);
return;
} }
None => { None => {}
let req_loan_map = self.req_maps.req_loan_map;
req_loan_map.insert(scope_id, @mut loans);
}
} }
self.req_maps.req_loan_map.insert(scope_id, @mut loans);
} }
fn gather_pat(@mut self, fn gather_pat(@mut self,
@ -683,7 +682,7 @@ fn add_stmt_to_map(stmt: @ast::stmt,
vt: visit::vt<@mut GatherLoanCtxt>) { vt: visit::vt<@mut GatherLoanCtxt>) {
match stmt.node { match stmt.node {
ast::stmt_expr(_, id) | ast::stmt_semi(_, id) => { ast::stmt_expr(_, id) | ast::stmt_semi(_, id) => {
self.bccx.stmt_map.insert(id, ()); self.bccx.stmt_map.insert(id);
} }
_ => () _ => ()
} }
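The req_loan_map hunk above replaces a find-then-insert dance: if a loan vector already exists for the scope it is extended, otherwise a new one is inserted after the match. In today's Rust the same insert-or-append step is usually written with the entry API (sketch only; Loan is stood in by String):

    use std::collections::HashMap;

    // Look the scope up once and append in place, inserting an empty vector
    // the first time the scope is seen.
    fn add_loans(map: &mut HashMap<u32, Vec<String>>, scope: u32, loans: Vec<String>) {
        map.entry(scope).or_insert_with(Vec::new).extend(loans);
    }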


@ -130,7 +130,7 @@ pub impl LoanContext {
} }
cat_local(local_id) | cat_arg(local_id) | cat_self(local_id) => { cat_local(local_id) | cat_arg(local_id) | cat_self(local_id) => {
// FIXME(#4903) // FIXME(#4903)
let local_scope_id = self.bccx.tcx.region_map.get(&local_id); let local_scope_id = *self.bccx.tcx.region_map.get(&local_id);
self.issue_loan(cmt, ty::re_scope(local_scope_id), loan_kind, self.issue_loan(cmt, ty::re_scope(local_scope_id), loan_kind,
owns_lent_data) owns_lent_data)
} }


@ -234,10 +234,10 @@ use middle::moves;
use util::common::{indenter, stmt_set}; use util::common::{indenter, stmt_set};
use util::ppaux::note_and_explain_region; use util::ppaux::note_and_explain_region;
use core::hashmap::linear::{LinearSet, LinearMap};
use core::io; use core::io;
use core::result::{Result, Ok, Err}; use core::result::{Result, Ok, Err};
use core::to_bytes; use core::to_bytes;
use std::oldmap::{HashMap, Set};
use syntax::ast::{mutability, m_mutbl, m_imm, m_const}; use syntax::ast::{mutability, m_mutbl, m_imm, m_const};
use syntax::ast; use syntax::ast;
use syntax::codemap::span; use syntax::codemap::span;
@ -260,9 +260,9 @@ pub fn check_crate(
moves_map: moves_map, moves_map: moves_map,
capture_map: capture_map, capture_map: capture_map,
root_map: root_map(), root_map: root_map(),
mutbl_map: HashMap(), mutbl_map: @mut LinearSet::new(),
write_guard_map: HashMap(), write_guard_map: @mut LinearSet::new(),
stmt_map: HashMap(), stmt_map: @mut LinearSet::new(),
stats: @mut BorrowStats { stats: @mut BorrowStats {
loaned_paths_same: 0, loaned_paths_same: 0,
loaned_paths_imm: 0, loaned_paths_imm: 0,
@ -333,7 +333,7 @@ pub struct RootInfo {
// a map mapping id's of expressions of gc'd type (@T, @[], etc) where // a map mapping id's of expressions of gc'd type (@T, @[], etc) where
// the box needs to be kept live to the id of the scope for which they // the box needs to be kept live to the id of the scope for which they
// must stay live. // must stay live.
pub type root_map = HashMap<root_map_key, RootInfo>; pub type root_map = @mut LinearMap<root_map_key, RootInfo>;
// the keys to the root map combine the `id` of the expression with // the keys to the root map combine the `id` of the expression with
// the number of types that it is autodereferenced. So, for example, // the number of types that it is autodereferenced. So, for example,
@ -348,11 +348,11 @@ pub struct root_map_key {
// set of ids of local vars / formal arguments that are modified / moved. // set of ids of local vars / formal arguments that are modified / moved.
// this is used in trans for optimization purposes. // this is used in trans for optimization purposes.
pub type mutbl_map = HashMap<ast::node_id, ()>; pub type mutbl_map = @mut LinearSet<ast::node_id>;
// A set containing IDs of expressions of gc'd type that need to have a write // A set containing IDs of expressions of gc'd type that need to have a write
// guard. // guard.
pub type write_guard_map = HashMap<root_map_key, ()>; pub type write_guard_map = @mut LinearSet<root_map_key>;
// Errors that can occur // Errors that can occur
#[deriving(Eq)] #[deriving(Eq)]
@ -405,8 +405,8 @@ pub struct Loan {
/// - `pure_map`: map from block/expr that must be pure to the error message /// - `pure_map`: map from block/expr that must be pure to the error message
/// that should be reported if they are not pure /// that should be reported if they are not pure
pub struct ReqMaps { pub struct ReqMaps {
req_loan_map: HashMap<ast::node_id, @mut ~[Loan]>, req_loan_map: LinearMap<ast::node_id, @mut ~[Loan]>,
pure_map: HashMap<ast::node_id, bckerr> pure_map: LinearMap<ast::node_id, bckerr>
} }
pub fn save_and_restore<T:Copy,U>(save_and_restore_t: &mut T, pub fn save_and_restore<T:Copy,U>(save_and_restore_t: &mut T,
@ -450,7 +450,7 @@ impl to_bytes::IterBytes for root_map_key {
} }
pub fn root_map() -> root_map { pub fn root_map() -> root_map {
return HashMap(); return @mut LinearMap::new();
} }
// ___________________________________________________________________________ // ___________________________________________________________________________
@ -541,7 +541,7 @@ pub impl BorrowckCtxt {
fn add_to_mutbl_map(&self, cmt: cmt) { fn add_to_mutbl_map(&self, cmt: cmt) {
match cmt.cat { match cmt.cat {
cat_local(id) | cat_arg(id) => { cat_local(id) | cat_arg(id) => {
self.mutbl_map.insert(id, ()); self.mutbl_map.insert(id);
} }
cat_stack_upvar(cmt) => { cat_stack_upvar(cmt) => {
self.add_to_mutbl_map(cmt); self.add_to_mutbl_map(cmt);


@ -108,7 +108,7 @@ pub impl<'self> PreserveCtxt<'self> {
// Maybe if we pass in the parent instead here, // Maybe if we pass in the parent instead here,
// we can prevent the "scope not found" error // we can prevent the "scope not found" error
debug!("scope_region thing: %? ", cmt.id); debug!("scope_region thing: %? ", cmt.id);
ty::re_scope(self.tcx().region_map.get(&cmt.id)) ty::re_scope(*self.tcx().region_map.get(&cmt.id))
}; };
self.compare_scope(cmt, scope_region) self.compare_scope(cmt, scope_region)
@ -128,14 +128,14 @@ pub impl<'self> PreserveCtxt<'self> {
cmt.span, cmt.span,
~"preserve() called with local and !root_managed_data"); ~"preserve() called with local and !root_managed_data");
} }
let local_scope_id = self.tcx().region_map.get(&local_id); let local_scope_id = *self.tcx().region_map.get(&local_id);
self.compare_scope(cmt, ty::re_scope(local_scope_id)) self.compare_scope(cmt, ty::re_scope(local_scope_id))
} }
cat_binding(local_id) => { cat_binding(local_id) => {
// Bindings are these kind of weird implicit pointers (cc // Bindings are these kind of weird implicit pointers (cc
// #2329). We require (in gather_loans) that they be // #2329). We require (in gather_loans) that they be
// rooted in an immutable location. // rooted in an immutable location.
let local_scope_id = self.tcx().region_map.get(&local_id); let local_scope_id = *self.tcx().region_map.get(&local_id);
self.compare_scope(cmt, ty::re_scope(local_scope_id)) self.compare_scope(cmt, ty::re_scope(local_scope_id))
} }
cat_arg(local_id) => { cat_arg(local_id) => {
@ -143,11 +143,11 @@ pub impl<'self> PreserveCtxt<'self> {
// modes). In that case, the caller guarantees stability // modes). In that case, the caller guarantees stability
// for at least the scope of the fn. This is basically a // for at least the scope of the fn. This is basically a
// deref of a region ptr. // deref of a region ptr.
let local_scope_id = self.tcx().region_map.get(&local_id); let local_scope_id = *self.tcx().region_map.get(&local_id);
self.compare_scope(cmt, ty::re_scope(local_scope_id)) self.compare_scope(cmt, ty::re_scope(local_scope_id))
} }
cat_self(local_id) => { cat_self(local_id) => {
let local_scope_id = self.tcx().region_map.get(&local_id); let local_scope_id = *self.tcx().region_map.get(&local_id);
self.compare_scope(cmt, ty::re_scope(local_scope_id)) self.compare_scope(cmt, ty::re_scope(local_scope_id))
} }
cat_comp(cmt_base, comp_field(*)) | cat_comp(cmt_base, comp_field(*)) |
@ -371,7 +371,7 @@ pub impl<'self> PreserveCtxt<'self> {
// scope_id;`. Though that would potentially re-introduce // scope_id;`. Though that would potentially re-introduce
// the ICE. See #3511 for more details. // the ICE. See #3511 for more details.
let scope_to_use = if let scope_to_use = if
self.bccx.stmt_map.contains_key(&scope_id) { self.bccx.stmt_map.contains(&scope_id) {
// Root it in its parent scope, b/c // Root it in its parent scope, b/c
// trans won't introduce a new scope for the // trans won't introduce a new scope for the
// stmt // stmt


@ -124,12 +124,12 @@ pub fn check_expr(sess: Session,
items without type parameters"); items without type parameters");
} }
match def_map.find(&e.id) { match def_map.find(&e.id) {
Some(def_const(_)) | Some(&def_const(_)) |
Some(def_fn(_, _)) | Some(&def_fn(_, _)) |
Some(def_variant(_, _)) | Some(&def_variant(_, _)) |
Some(def_struct(_)) => { } Some(&def_struct(_)) => { }
Some(def) => { Some(&def) => {
debug!("(checking const) found bad def: %?", def); debug!("(checking const) found bad def: %?", def);
sess.span_err( sess.span_err(
e.span, e.span,
@ -143,8 +143,8 @@ pub fn check_expr(sess: Session,
} }
expr_call(callee, _, NoSugar) => { expr_call(callee, _, NoSugar) => {
match def_map.find(&callee.id) { match def_map.find(&callee.id) {
Some(def_struct(*)) => {} // OK. Some(&def_struct(*)) => {} // OK.
Some(def_variant(*)) => {} // OK. Some(&def_variant(*)) => {} // OK.
_ => { _ => {
sess.span_err( sess.span_err(
e.span, e.span,
@ -238,7 +238,7 @@ pub fn check_item_recursion(sess: Session,
match e.node { match e.node {
expr_path(*) => { expr_path(*) => {
match env.def_map.find(&e.id) { match env.def_map.find(&e.id) {
Some(def_const(def_id)) => { Some(&def_const(def_id)) => {
if ast_util::is_local(def_id) { if ast_util::is_local(def_id) {
match env.ast_map.get(&def_id.node) { match env.ast_map.get(&def_id.node) {
ast_map::node_item(it, _) => { ast_map::node_item(it, _) => {


@ -55,7 +55,7 @@ pub fn expr_is_non_moving_lvalue(cx: @MatchCheckCtxt, expr: @expr) -> bool {
return false; return false;
} }
!cx.moves_map.contains_key(&expr.id) !cx.moves_map.contains(&expr.id)
} }
pub fn check_expr(cx: @MatchCheckCtxt, ex: @expr, &&s: (), v: visit::vt<()>) { pub fn check_expr(cx: @MatchCheckCtxt, ex: @expr, &&s: (), v: visit::vt<()>) {
@ -303,8 +303,8 @@ pub fn pat_ctor_id(cx: @MatchCheckCtxt, p: @pat) -> Option<ctor> {
pat_wild => { None } pat_wild => { None }
pat_ident(_, _, _) | pat_enum(_, _) => { pat_ident(_, _, _) | pat_enum(_, _) => {
match cx.tcx.def_map.find(&pat.id) { match cx.tcx.def_map.find(&pat.id) {
Some(def_variant(_, id)) => Some(variant(id)), Some(&def_variant(_, id)) => Some(variant(id)),
Some(def_const(did)) => { Some(&def_const(did)) => {
let const_expr = lookup_const_by_id(cx.tcx, did).get(); let const_expr = lookup_const_by_id(cx.tcx, did).get();
Some(val(eval_const_expr(cx.tcx, const_expr))) Some(val(eval_const_expr(cx.tcx, const_expr)))
} }
@ -317,7 +317,7 @@ pub fn pat_ctor_id(cx: @MatchCheckCtxt, p: @pat) -> Option<ctor> {
} }
pat_struct(*) => { pat_struct(*) => {
match cx.tcx.def_map.find(&pat.id) { match cx.tcx.def_map.find(&pat.id) {
Some(def_variant(_, id)) => Some(variant(id)), Some(&def_variant(_, id)) => Some(variant(id)),
_ => Some(single) _ => Some(single)
} }
} }
@ -339,7 +339,7 @@ pub fn is_wild(cx: @MatchCheckCtxt, p: @pat) -> bool {
pat_wild => { true } pat_wild => { true }
pat_ident(_, _, _) => { pat_ident(_, _, _) => {
match cx.tcx.def_map.find(&pat.id) { match cx.tcx.def_map.find(&pat.id) {
Some(def_variant(_, _)) | Some(def_const(*)) => { false } Some(&def_variant(_, _)) | Some(&def_const(*)) => { false }
_ => { true } _ => { true }
} }
} }
@ -490,14 +490,14 @@ pub fn specialize(cx: @MatchCheckCtxt,
} }
pat_ident(_, _, _) => { pat_ident(_, _, _) => {
match cx.tcx.def_map.find(&pat_id) { match cx.tcx.def_map.find(&pat_id) {
Some(def_variant(_, id)) => { Some(&def_variant(_, id)) => {
if variant(id) == ctor_id { if variant(id) == ctor_id {
Some(vec::from_slice(r.tail())) Some(vec::from_slice(r.tail()))
} else { } else {
None None
} }
} }
Some(def_const(did)) => { Some(&def_const(did)) => {
let const_expr = let const_expr =
lookup_const_by_id(cx.tcx, did).get(); lookup_const_by_id(cx.tcx, did).get();
let e_v = eval_const_expr(cx.tcx, const_expr); let e_v = eval_const_expr(cx.tcx, const_expr);
@ -527,7 +527,7 @@ pub fn specialize(cx: @MatchCheckCtxt,
} }
} }
pat_enum(_, args) => { pat_enum(_, args) => {
match cx.tcx.def_map.get(&pat_id) { match *cx.tcx.def_map.get(&pat_id) {
def_const(did) => { def_const(did) => {
let const_expr = let const_expr =
lookup_const_by_id(cx.tcx, did).get(); lookup_const_by_id(cx.tcx, did).get();
@ -569,7 +569,7 @@ pub fn specialize(cx: @MatchCheckCtxt,
} }
pat_struct(_, ref flds, _) => { pat_struct(_, ref flds, _) => {
// Is this a struct or an enum variant? // Is this a struct or an enum variant?
match cx.tcx.def_map.get(&pat_id) { match *cx.tcx.def_map.get(&pat_id) {
def_variant(_, variant_id) => { def_variant(_, variant_id) => {
if variant(variant_id) == ctor_id { if variant(variant_id) == ctor_id {
// FIXME #4731: Is this right? --pcw // FIXME #4731: Is this right? --pcw
@ -714,12 +714,12 @@ pub fn check_fn(cx: @MatchCheckCtxt,
pub fn is_refutable(cx: @MatchCheckCtxt, pat: &pat) -> bool { pub fn is_refutable(cx: @MatchCheckCtxt, pat: &pat) -> bool {
match cx.tcx.def_map.find(&pat.id) { match cx.tcx.def_map.find(&pat.id) {
Some(def_variant(enum_id, _)) => { Some(&def_variant(enum_id, _)) => {
if vec::len(*ty::enum_variants(cx.tcx, enum_id)) != 1u { if vec::len(*ty::enum_variants(cx.tcx, enum_id)) != 1u {
return true; return true;
} }
} }
Some(def_const(*)) => return true, Some(&def_const(*)) => return true,
_ => () _ => ()
} }
@ -766,7 +766,7 @@ pub fn check_legality_of_move_bindings(cx: @MatchCheckCtxt,
by_ref_span = Some(span); by_ref_span = Some(span);
} }
bind_infer => { bind_infer => {
if cx.moves_map.contains_key(&id) { if cx.moves_map.contains(&id) {
any_by_move = true; any_by_move = true;
} }
} }
@ -806,7 +806,7 @@ pub fn check_legality_of_move_bindings(cx: @MatchCheckCtxt,
if pat_is_binding(def_map, p) { if pat_is_binding(def_map, p) {
match p.node { match p.node {
pat_ident(_, _, sub) => { pat_ident(_, _, sub) => {
if cx.moves_map.contains_key(&p.id) { if cx.moves_map.contains(&p.id) {
check_move(p, sub); check_move(p, sub);
} }
} }
@ -832,7 +832,7 @@ pub fn check_legality_of_move_bindings(cx: @MatchCheckCtxt,
behind_bad_pointer); behind_bad_pointer);
if behind_bad_pointer && if behind_bad_pointer &&
cx.moves_map.contains_key(&pat.id) cx.moves_map.contains(&pat.id)
{ {
cx.tcx.sess.span_err( cx.tcx.sess.span_err(
pat.span, pat.span,


@ -12,16 +12,16 @@ use core::prelude::*;
use metadata::csearch; use metadata::csearch;
use middle::astencode; use middle::astencode;
use middle::resolve;
use middle::ty; use middle::ty;
use middle; use middle;
use core::float; use core::float;
use core::vec; use core::vec;
use std::oldmap::HashMap;
use syntax::{ast, ast_map, ast_util, visit}; use syntax::{ast, ast_map, ast_util, visit};
use syntax::ast::*; use syntax::ast::*;
use std::oldmap::HashMap; use core::hashmap::linear::{LinearMap, LinearSet};
// //
// This pass classifies expressions by their constant-ness. // This pass classifies expressions by their constant-ness.
@ -77,12 +77,11 @@ pub fn join_all(cs: &[constness]) -> constness {
} }
pub fn classify(e: @expr, pub fn classify(e: @expr,
def_map: resolve::DefMap,
tcx: ty::ctxt) tcx: ty::ctxt)
-> constness { -> constness {
let did = ast_util::local_def(e.id); let did = ast_util::local_def(e.id);
match tcx.ccache.find(&did) { match tcx.ccache.find(&did) {
Some(x) => x, Some(&x) => x,
None => { None => {
let cn = let cn =
match e.node { match e.node {
@ -97,23 +96,23 @@ pub fn classify(e: @expr,
ast::expr_copy(inner) | ast::expr_copy(inner) |
ast::expr_unary(_, inner) | ast::expr_unary(_, inner) |
ast::expr_paren(inner) => { ast::expr_paren(inner) => {
classify(inner, def_map, tcx) classify(inner, tcx)
} }
ast::expr_binary(_, a, b) => { ast::expr_binary(_, a, b) => {
join(classify(a, def_map, tcx), join(classify(a, tcx),
classify(b, def_map, tcx)) classify(b, tcx))
} }
ast::expr_tup(ref es) | ast::expr_tup(ref es) |
ast::expr_vec(ref es, ast::m_imm) => { ast::expr_vec(ref es, ast::m_imm) => {
join_all(vec::map(*es, |e| classify(*e, def_map, tcx))) join_all(vec::map(*es, |e| classify(*e, tcx)))
} }
ast::expr_vstore(e, vstore) => { ast::expr_vstore(e, vstore) => {
match vstore { match vstore {
ast::expr_vstore_fixed(_) | ast::expr_vstore_fixed(_) |
ast::expr_vstore_slice => classify(e, def_map, tcx), ast::expr_vstore_slice => classify(e, tcx),
ast::expr_vstore_uniq | ast::expr_vstore_uniq |
ast::expr_vstore_box | ast::expr_vstore_box |
ast::expr_vstore_mut_box | ast::expr_vstore_mut_box |
@ -124,7 +123,7 @@ pub fn classify(e: @expr,
ast::expr_struct(_, ref fs, None) => { ast::expr_struct(_, ref fs, None) => {
let cs = do vec::map((*fs)) |f| { let cs = do vec::map((*fs)) |f| {
if f.node.mutbl == ast::m_imm { if f.node.mutbl == ast::m_imm {
classify(f.node.expr, def_map, tcx) classify(f.node.expr, tcx)
} else { } else {
non_const non_const
} }
@ -134,7 +133,7 @@ pub fn classify(e: @expr,
ast::expr_cast(base, _) => { ast::expr_cast(base, _) => {
let ty = ty::expr_ty(tcx, e); let ty = ty::expr_ty(tcx, e);
let base = classify(base, def_map, tcx); let base = classify(base, tcx);
if ty::type_is_integral(ty) { if ty::type_is_integral(ty) {
join(integral_const, base) join(integral_const, base)
} else if ty::type_is_fp(ty) { } else if ty::type_is_fp(ty) {
@ -145,16 +144,16 @@ pub fn classify(e: @expr,
} }
ast::expr_field(base, _, _) => { ast::expr_field(base, _, _) => {
classify(base, def_map, tcx) classify(base, tcx)
} }
ast::expr_index(base, idx) => { ast::expr_index(base, idx) => {
join(classify(base, def_map, tcx), join(classify(base, tcx),
classify(idx, def_map, tcx)) classify(idx, tcx))
} }
ast::expr_addr_of(ast::m_imm, base) => { ast::expr_addr_of(ast::m_imm, base) => {
classify(base, def_map, tcx) classify(base, tcx)
} }
// FIXME: (#3728) we can probably do something CCI-ish // FIXME: (#3728) we can probably do something CCI-ish
@ -173,7 +172,7 @@ pub fn classify(e: @expr,
pub fn lookup_const(tcx: ty::ctxt, e: @expr) -> Option<@expr> { pub fn lookup_const(tcx: ty::ctxt, e: @expr) -> Option<@expr> {
match tcx.def_map.find(&e.id) { match tcx.def_map.find(&e.id) {
Some(ast::def_const(def_id)) => lookup_const_by_id(tcx, def_id), Some(&ast::def_const(def_id)) => lookup_const_by_id(tcx, def_id),
_ => None _ => None
} }
} }
@ -192,14 +191,14 @@ pub fn lookup_const_by_id(tcx: ty::ctxt,
} }
} else { } else {
let maps = astencode::Maps { let maps = astencode::Maps {
mutbl_map: HashMap(), mutbl_map: @mut LinearSet::new(),
root_map: HashMap(), root_map: @mut LinearMap::new(),
last_use_map: HashMap(), last_use_map: @mut LinearMap::new(),
method_map: HashMap(), method_map: HashMap(),
vtable_map: HashMap(), vtable_map: @mut LinearMap::new(),
write_guard_map: HashMap(), write_guard_map: @mut LinearSet::new(),
moves_map: HashMap(), moves_map: @mut LinearSet::new(),
capture_map: HashMap() capture_map: @mut LinearMap::new()
}; };
match csearch::maybe_get_item_ast(tcx, def_id, match csearch::maybe_get_item_ast(tcx, def_id,
|a, b, c, d| astencode::decode_inlined_item(a, b, maps, /*bar*/ copy c, d)) { |a, b, c, d| astencode::decode_inlined_item(a, b, maps, /*bar*/ copy c, d)) {
@ -227,10 +226,9 @@ pub fn lookup_constness(tcx: ty::ctxt, e: @expr) -> constness {
} }
pub fn process_crate(crate: @ast::crate, pub fn process_crate(crate: @ast::crate,
def_map: resolve::DefMap,
tcx: ty::ctxt) { tcx: ty::ctxt) {
let v = visit::mk_simple_visitor(@visit::SimpleVisitor { let v = visit::mk_simple_visitor(@visit::SimpleVisitor {
visit_expr_post: |e| { classify(e, def_map, tcx); }, visit_expr_post: |e| { classify(e, tcx); },
.. *visit::default_simple_visitor() .. *visit::default_simple_visitor()
}); });
visit::visit_crate(*crate, (), v); visit::visit_crate(*crate, (), v);
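The bulk of the mechanical churn in this file comes from one API difference: find on core::hashmap::linear::LinearMap returns Option<&V> where the old oldmap::HashMap returned Option<V>, which is why arms like Some(x) become Some(&x) above. A minimal sketch of the pattern in the pre-1.0 syntax this patch uses (the cache map and values here are illustrative, not taken from the compiler):

    use core::hashmap::linear::LinearMap;

    fn demo_find() {
        let mut cache = LinearMap::new();
        cache.insert(3, 9);

        // find() yields Option<&V>; destructuring with Some(&v) copies the
        // value out, mirroring the Some(&x) arms in the hunks above.
        match cache.find(&3) {
            Some(&v) => debug!("cached: %d", v),
            None => debug!("not cached")
        }
    }

The Maps record in lookup_const_by_id shows the same migration for the side tables: the unit-valued ones become @mut LinearSet boxes, the rest become @mut LinearMap boxes, and method_map is left on the old type for now.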

View file

@ -12,13 +12,12 @@
// A pass that annotates for each loops and functions with the free // A pass that annotates for each loops and functions with the free
// variables that they contain. // variables that they contain.
use core::prelude::*;
use middle::resolve; use middle::resolve;
use middle::ty; use middle::ty;
use core::int; use core::hashmap::linear::LinearMap;
use core::option::*;
use core::vec;
use std::oldmap::*;
use syntax::codemap::span; use syntax::codemap::span;
use syntax::{ast, ast_util, visit}; use syntax::{ast, ast_util, visit};
@ -31,7 +30,7 @@ pub struct freevar_entry {
span: span //< First span where it is accessed (there can be multiple) span: span //< First span where it is accessed (there can be multiple)
} }
pub type freevar_info = @~[@freevar_entry]; pub type freevar_info = @~[@freevar_entry];
pub type freevar_map = HashMap<ast::node_id, freevar_info>; pub type freevar_map = @mut LinearMap<ast::node_id, freevar_info>;
// Searches through part of the AST for all references to locals or // Searches through part of the AST for all references to locals or
// upvars in this frame and returns the list of definition IDs thus found. // upvars in this frame and returns the list of definition IDs thus found.
@ -40,7 +39,7 @@ pub type freevar_map = HashMap<ast::node_id, freevar_info>;
// in order to start the search. // in order to start the search.
fn collect_freevars(def_map: resolve::DefMap, blk: &ast::blk) fn collect_freevars(def_map: resolve::DefMap, blk: &ast::blk)
-> freevar_info { -> freevar_info {
let seen = HashMap(); let seen = @mut LinearMap::new();
let refs = @mut ~[]; let refs = @mut ~[];
fn ignore_item(_i: @ast::item, &&_depth: int, _v: visit::vt<int>) { } fn ignore_item(_i: @ast::item, &&_depth: int, _v: visit::vt<int>) { }
@ -53,7 +52,7 @@ fn collect_freevars(def_map: resolve::DefMap, blk: &ast::blk)
let mut i = 0; let mut i = 0;
match def_map.find(&expr.id) { match def_map.find(&expr.id) {
None => fail!(~"path not found"), None => fail!(~"path not found"),
Some(df) => { Some(&df) => {
let mut def = df; let mut def = df;
while i < depth { while i < depth {
match def { match def {
@ -93,7 +92,7 @@ fn collect_freevars(def_map: resolve::DefMap, blk: &ast::blk)
// one pass. This could be improved upon if it turns out to matter. // one pass. This could be improved upon if it turns out to matter.
pub fn annotate_freevars(def_map: resolve::DefMap, crate: @ast::crate) -> pub fn annotate_freevars(def_map: resolve::DefMap, crate: @ast::crate) ->
freevar_map { freevar_map {
let freevars = HashMap(); let freevars = @mut LinearMap::new();
let walk_fn: @fn(&visit::fn_kind, let walk_fn: @fn(&visit::fn_kind,
&ast::fn_decl, &ast::fn_decl,
@ -116,7 +115,7 @@ pub fn annotate_freevars(def_map: resolve::DefMap, crate: @ast::crate) ->
pub fn get_freevars(tcx: ty::ctxt, fid: ast::node_id) -> freevar_info { pub fn get_freevars(tcx: ty::ctxt, fid: ast::node_id) -> freevar_info {
match tcx.freevars.find(&fid) { match tcx.freevars.find(&fid) {
None => fail!(~"get_freevars: "+int::to_str(fid)+~" has no freevars"), None => fail!(~"get_freevars: "+int::to_str(fid)+~" has no freevars"),
Some(d) => return d Some(&d) => return d
} }
} }

View file

@ -18,9 +18,6 @@ use middle::ty;
use middle::typeck; use middle::typeck;
use util::ppaux::{ty_to_str, tys_to_str}; use util::ppaux::{ty_to_str, tys_to_str};
use core::str;
use core::vec;
use std::oldmap::HashMap;
use syntax::ast::*; use syntax::ast::*;
use syntax::attr::attrs_contains_name; use syntax::attr::attrs_contains_name;
use syntax::codemap::{span, spanned}; use syntax::codemap::{span, spanned};
@ -58,8 +55,6 @@ use syntax::{visit, ast_util};
pub static try_adding: &'static str = "Try adding a move"; pub static try_adding: &'static str = "Try adding a move";
pub type rval_map = HashMap<node_id, ()>;
pub struct Context { pub struct Context {
tcx: ty::ctxt, tcx: ty::ctxt,
method_map: typeck::method_map, method_map: typeck::method_map,
@ -133,13 +128,13 @@ fn check_item(item: @item, cx: Context, visitor: visit::vt<Context>) {
item_impl(_, Some(trait_ref), self_type, _) => { item_impl(_, Some(trait_ref), self_type, _) => {
match cx.tcx.def_map.find(&trait_ref.ref_id) { match cx.tcx.def_map.find(&trait_ref.ref_id) {
None => cx.tcx.sess.bug(~"trait ref not in def map!"), None => cx.tcx.sess.bug(~"trait ref not in def map!"),
Some(trait_def) => { Some(&trait_def) => {
let trait_def_id = ast_util::def_id_of_def(trait_def); let trait_def_id = ast_util::def_id_of_def(trait_def);
if cx.tcx.lang_items.drop_trait() == trait_def_id { if cx.tcx.lang_items.drop_trait() == trait_def_id {
// Yes, it's a destructor. // Yes, it's a destructor.
match self_type.node { match self_type.node {
ty_path(_, path_node_id) => { ty_path(_, path_node_id) => {
let struct_def = cx.tcx.def_map.get( let struct_def = *cx.tcx.def_map.get(
&path_node_id); &path_node_id);
let struct_did = let struct_did =
ast_util::def_id_of_def(struct_def); ast_util::def_id_of_def(struct_def);
@ -281,7 +276,7 @@ pub fn check_expr(e: @expr, cx: Context, v: visit::vt<Context>) {
for cx.tcx.node_type_substs.find(&type_parameter_id).each |ts| { for cx.tcx.node_type_substs.find(&type_parameter_id).each |ts| {
let bounds = match e.node { let bounds = match e.node {
expr_path(_) => { expr_path(_) => {
let did = ast_util::def_id_of_def(cx.tcx.def_map.get(&e.id)); let did = ast_util::def_id_of_def(*cx.tcx.def_map.get(&e.id));
ty::lookup_item_type(cx.tcx, did).bounds ty::lookup_item_type(cx.tcx, did).bounds
} }
_ => { _ => {
@ -340,7 +335,7 @@ fn check_ty(aty: @Ty, cx: Context, v: visit::vt<Context>) {
match aty.node { match aty.node {
ty_path(_, id) => { ty_path(_, id) => {
for cx.tcx.node_type_substs.find(&id).each |ts| { for cx.tcx.node_type_substs.find(&id).each |ts| {
let did = ast_util::def_id_of_def(cx.tcx.def_map.get(&id)); let did = ast_util::def_id_of_def(*cx.tcx.def_map.get(&id));
let bounds = ty::lookup_item_type(cx.tcx, did).bounds; let bounds = ty::lookup_item_type(cx.tcx, did).bounds;
for vec::each2(*ts, *bounds) |ty, bound| { for vec::each2(*ts, *bounds) |ty, bound| {
check_bounds(cx, aty.id, aty.span, *ty, *bound) check_bounds(cx, aty.id, aty.span, *ty, *bound)
@ -405,7 +400,7 @@ pub fn check_bounds(cx: Context,
fn is_nullary_variant(cx: Context, ex: @expr) -> bool { fn is_nullary_variant(cx: Context, ex: @expr) -> bool {
match ex.node { match ex.node {
expr_path(_) => { expr_path(_) => {
match cx.tcx.def_map.get(&ex.id) { match *cx.tcx.def_map.get(&ex.id) {
def_variant(edid, vdid) => { def_variant(edid, vdid) => {
vec::len(ty::enum_variant_with_id(cx.tcx, edid, vdid).args) == 0u vec::len(ty::enum_variant_with_id(cx.tcx, edid, vdid).args) == 0u
} }
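The * that now prefixes cx.tcx.def_map.get(&...) is the other half of the same change: get on the new map hands back a reference to the value (and still fails if the key is absent), so callers copy the def out with an explicit deref. A small sketch, assuming a stand-alone map keyed by node id:

    use core::hashmap::linear::LinearMap;

    fn def_of(def_map: &LinearMap<int, int>, id: int) -> int {
        // get() returns &V rather than V, hence the leading *;
        // a missing key is still a failure, as before.
        *def_map.get(&id)
    }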

View file

@ -31,8 +31,8 @@ use syntax::ast_util::{local_def};
use syntax::visit::{default_simple_visitor, mk_simple_visitor, SimpleVisitor}; use syntax::visit::{default_simple_visitor, mk_simple_visitor, SimpleVisitor};
use syntax::visit::{visit_crate, visit_item}; use syntax::visit::{visit_crate, visit_item};
use core::hashmap::linear::LinearMap;
use core::ptr; use core::ptr;
use std::oldmap::HashMap;
pub enum LangItem { pub enum LangItem {
ConstTraitLangItem, // 0 ConstTraitLangItem, // 0
@ -259,7 +259,7 @@ fn LanguageItemCollector<'r>(crate: @crate,
session: Session, session: Session,
items: &'r mut LanguageItems) items: &'r mut LanguageItems)
-> LanguageItemCollector<'r> { -> LanguageItemCollector<'r> {
let item_refs = HashMap(); let mut item_refs = LinearMap::new();
item_refs.insert(@~"const", ConstTraitLangItem as uint); item_refs.insert(@~"const", ConstTraitLangItem as uint);
item_refs.insert(@~"copy", CopyTraitLangItem as uint); item_refs.insert(@~"copy", CopyTraitLangItem as uint);
@ -317,7 +317,7 @@ struct LanguageItemCollector {
crate: @crate, crate: @crate,
session: Session, session: Session,
item_refs: HashMap<@~str, uint>, item_refs: LinearMap<@~str, uint>,
} }
pub impl<'self> LanguageItemCollector<'self> { pub impl<'self> LanguageItemCollector<'self> {
@ -362,7 +362,7 @@ pub impl<'self> LanguageItemCollector<'self> {
None => { None => {
// Didn't match. // Didn't match.
} }
Some(item_index) => { Some(&item_index) => {
self.collect_item(item_index, item_def_id) self.collect_item(item_index, item_def_id)
} }
} }
@ -397,7 +397,7 @@ pub impl<'self> LanguageItemCollector<'self> {
} }
fn check_completeness(&self) { fn check_completeness(&self) {
for self.item_refs.each |&key, &item_ref| { for self.item_refs.each |&(&key, &item_ref)| {
match self.items.items[item_ref] { match self.items.items[item_ref] {
None => { None => {
self.session.err(fmt!("no item found for `%s`", *key)); self.session.err(fmt!("no item found for `%s`", *key));
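The closure header in check_completeness changes because each on a LinearMap passes one &(&K, &V) tuple per entry instead of the two separate reference arguments the old HashMap supplied. A sketch of the new iteration shape, over an illustrative map:

    use core::hashmap::linear::LinearMap;

    fn dump_items(item_refs: &LinearMap<int, uint>) {
        // One tuple of references per entry, destructured in the closure
        // header as |&(&key, &value)| rather than the old |&key, &value|.
        for item_refs.each |&(&key, &value)| {
            debug!("item %d -> index %u", key, value);
        }
    }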

View file

@ -15,6 +15,7 @@ use driver::session;
use middle::ty; use middle::ty;
use util::ppaux::{ty_to_str}; use util::ppaux::{ty_to_str};
use core::hashmap::linear::LinearMap;
use core::char; use core::char;
use core::cmp; use core::cmp;
use core::either; use core::either;
@ -30,8 +31,6 @@ use core::u32;
use core::u64; use core::u64;
use core::uint; use core::uint;
use core::vec; use core::vec;
use std::oldmap::{Map, HashMap};
use std::oldmap;
use std::smallintmap::SmallIntMap; use std::smallintmap::SmallIntMap;
use syntax::attr; use syntax::attr;
use syntax::codemap::span; use syntax::codemap::span;
@ -110,7 +109,7 @@ struct LintSpec {
default: level default: level
} }
pub type LintDict = HashMap<@~str, @LintSpec>; pub type LintDict = @LinearMap<~str, LintSpec>;
/* /*
Pass names should not contain a '-', as the compiler normalizes Pass names should not contain a '-', as the compiler normalizes
@ -118,142 +117,142 @@ pub type LintDict = HashMap<@~str, @LintSpec>;
*/ */
pub fn get_lint_dict() -> LintDict { pub fn get_lint_dict() -> LintDict {
let v = ~[ let v = ~[
(@~"ctypes", (~"ctypes",
@LintSpec { LintSpec {
lint: ctypes, lint: ctypes,
desc: "proper use of core::libc types in foreign modules", desc: "proper use of core::libc types in foreign modules",
default: warn default: warn
}), }),
(@~"unused_imports", (~"unused_imports",
@LintSpec { LintSpec {
lint: unused_imports, lint: unused_imports,
desc: "imports that are never used", desc: "imports that are never used",
default: warn default: warn
}), }),
(@~"while_true", (~"while_true",
@LintSpec { LintSpec {
lint: while_true, lint: while_true,
desc: "suggest using loop { } instead of while(true) { }", desc: "suggest using loop { } instead of while(true) { }",
default: warn default: warn
}), }),
(@~"path_statement", (~"path_statement",
@LintSpec { LintSpec {
lint: path_statement, lint: path_statement,
desc: "path statements with no effect", desc: "path statements with no effect",
default: warn default: warn
}), }),
(@~"unrecognized_lint", (~"unrecognized_lint",
@LintSpec { LintSpec {
lint: unrecognized_lint, lint: unrecognized_lint,
desc: "unrecognized lint attribute", desc: "unrecognized lint attribute",
default: warn default: warn
}), }),
(@~"non_implicitly_copyable_typarams", (~"non_implicitly_copyable_typarams",
@LintSpec { LintSpec {
lint: non_implicitly_copyable_typarams, lint: non_implicitly_copyable_typarams,
desc: "passing non implicitly copyable types as copy type params", desc: "passing non implicitly copyable types as copy type params",
default: warn default: warn
}), }),
(@~"vecs_implicitly_copyable", (~"vecs_implicitly_copyable",
@LintSpec { LintSpec {
lint: vecs_implicitly_copyable, lint: vecs_implicitly_copyable,
desc: "make vecs and strs not implicitly copyable \ desc: "make vecs and strs not implicitly copyable \
(only checked at top level)", (only checked at top level)",
default: warn default: warn
}), }),
(@~"implicit_copies", (~"implicit_copies",
@LintSpec { LintSpec {
lint: implicit_copies, lint: implicit_copies,
desc: "implicit copies of non implicitly copyable data", desc: "implicit copies of non implicitly copyable data",
default: warn default: warn
}), }),
(@~"deprecated_mode", (~"deprecated_mode",
@LintSpec { LintSpec {
lint: deprecated_mode, lint: deprecated_mode,
desc: "warn about deprecated uses of modes", desc: "warn about deprecated uses of modes",
default: warn default: warn
}), }),
(@~"foreign_mode", (~"foreign_mode",
@LintSpec { LintSpec {
lint: foreign_mode, lint: foreign_mode,
desc: "warn about deprecated uses of modes in foreign fns", desc: "warn about deprecated uses of modes in foreign fns",
default: warn default: warn
}), }),
(@~"deprecated_pattern", (~"deprecated_pattern",
@LintSpec { LintSpec {
lint: deprecated_pattern, lint: deprecated_pattern,
desc: "warn about deprecated uses of pattern bindings", desc: "warn about deprecated uses of pattern bindings",
default: allow default: allow
}), }),
(@~"non_camel_case_types", (~"non_camel_case_types",
@LintSpec { LintSpec {
lint: non_camel_case_types, lint: non_camel_case_types,
desc: "types, variants and traits should have camel case names", desc: "types, variants and traits should have camel case names",
default: allow default: allow
}), }),
(@~"managed_heap_memory", (~"managed_heap_memory",
@LintSpec { LintSpec {
lint: managed_heap_memory, lint: managed_heap_memory,
desc: "use of managed (@ type) heap memory", desc: "use of managed (@ type) heap memory",
default: allow default: allow
}), }),
(@~"owned_heap_memory", (~"owned_heap_memory",
@LintSpec { LintSpec {
lint: owned_heap_memory, lint: owned_heap_memory,
desc: "use of owned (~ type) heap memory", desc: "use of owned (~ type) heap memory",
default: allow default: allow
}), }),
(@~"heap_memory", (~"heap_memory",
@LintSpec { LintSpec {
lint: heap_memory, lint: heap_memory,
desc: "use of any (~ type or @ type) heap memory", desc: "use of any (~ type or @ type) heap memory",
default: allow default: allow
}), }),
(@~"legacy modes", (~"legacy modes",
@LintSpec { LintSpec {
lint: legacy_modes, lint: legacy_modes,
desc: "allow legacy modes", desc: "allow legacy modes",
default: forbid default: forbid
}), }),
(@~"type_limits", (~"type_limits",
@LintSpec { LintSpec {
lint: type_limits, lint: type_limits,
desc: "comparisons made useless by limits of the types involved", desc: "comparisons made useless by limits of the types involved",
default: warn default: warn
}), }),
(@~"default_methods", (~"default_methods",
@LintSpec { LintSpec {
lint: default_methods, lint: default_methods,
desc: "allow default methods", desc: "allow default methods",
default: deny default: deny
}), }),
(@~"deprecated_mutable_fields", (~"deprecated_mutable_fields",
@LintSpec { LintSpec {
lint: deprecated_mutable_fields, lint: deprecated_mutable_fields,
desc: "deprecated mutable fields in structures", desc: "deprecated mutable fields in structures",
default: deny default: deny
}), }),
(@~"deprecated_drop", (~"deprecated_drop",
@LintSpec { LintSpec {
lint: deprecated_drop, lint: deprecated_drop,
desc: "deprecated \"drop\" notation for the destructor", desc: "deprecated \"drop\" notation for the destructor",
default: deny default: deny
@ -275,12 +274,16 @@ pub fn get_lint_dict() -> LintDict {
}), }),
*/ */
]; ];
oldmap::hash_from_vec(v) let mut map = LinearMap::new();
do vec::consume(v) |_, (k, v)| {
map.insert(k, v);
}
return @map;
} }
// This is a highly not-optimal set of data structure decisions. // This is a highly not-optimal set of data structure decisions.
type LintModes = @mut SmallIntMap<level>; type LintModes = @mut SmallIntMap<level>;
type LintModeMap = HashMap<ast::node_id, LintModes>; type LintModeMap = @mut LinearMap<ast::node_id, LintModes>;
// settings_map maps node ids of items with non-default lint settings // settings_map maps node ids of items with non-default lint settings
// to their settings; default_settings contains the settings for everything // to their settings; default_settings contains the settings for everything
@ -293,7 +296,7 @@ pub struct LintSettings {
pub fn mk_lint_settings() -> LintSettings { pub fn mk_lint_settings() -> LintSettings {
LintSettings { LintSettings {
default_settings: @mut SmallIntMap::new(), default_settings: @mut SmallIntMap::new(),
settings_map: HashMap() settings_map: @mut LinearMap::new()
} }
} }
@ -310,7 +313,7 @@ pub fn get_lint_settings_level(settings: LintSettings,
item_id: ast::node_id) item_id: ast::node_id)
-> level { -> level {
match settings.settings_map.find(&item_id) { match settings.settings_map.find(&item_id) {
Some(modes) => get_lint_level(modes, lint_mode), Some(&modes) => get_lint_level(modes, lint_mode),
None => get_lint_level(settings.default_settings, lint_mode) None => get_lint_level(settings.default_settings, lint_mode)
} }
} }
@ -392,7 +395,7 @@ pub impl Context {
(ref meta, level, lintname) => (meta, level, lintname) (ref meta, level, lintname) => (meta, level, lintname)
}; };
match self.dict.find(&lintname) { match self.dict.find(lintname) {
None => { None => {
self.span_lint( self.span_lint(
new_ctxt.get_level(unrecognized_lint), new_ctxt.get_level(unrecognized_lint),
@ -735,7 +738,7 @@ fn check_item_ctypes(cx: ty::ctxt, it: @ast::item) {
for vec::each(vec::append_one(tys, decl.output)) |ty| { for vec::each(vec::append_one(tys, decl.output)) |ty| {
match ty.node { match ty.node {
ast::ty_path(_, id) => { ast::ty_path(_, id) => {
match cx.def_map.get(&id) { match *cx.def_map.get(&id) {
ast::def_prim_ty(ast::ty_int(ast::ty_i)) => { ast::def_prim_ty(ast::ty_int(ast::ty_i)) => {
cx.sess.span_lint( cx.sess.span_lint(
ctypes, id, fn_id, ctypes, id, fn_id,
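oldmap::hash_from_vec has no LinearMap counterpart, so get_lint_dict above now consumes the vector of (name, spec) pairs into a locally built map and boxes the result with @. A reduced sketch of that idiom (the pair type here is illustrative):

    use core::hashmap::linear::LinearMap;
    use core::vec;

    fn dict_from_pairs(v: ~[(~str, uint)]) -> @LinearMap<~str, uint> {
        let mut map = LinearMap::new();
        // vec::consume moves each pair out of the vector, so the owned
        // ~str keys can be inserted without an extra copy.
        do vec::consume(v) |_, (k, val)| {
            map.insert(k, val);
        }
        @map
    }

Dropping the per-entry @-boxes (@~str keys and @LintSpec values becoming ~str and LintSpec) is what allows the vector to be consumed by value here.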

View file

@ -112,13 +112,13 @@ use middle::moves;
use util::ppaux::ty_to_str; use util::ppaux::ty_to_str;
use core::cmp; use core::cmp;
use core::hashmap::linear::LinearMap;
use core::io::WriterUtil; use core::io::WriterUtil;
use core::io; use core::io;
use core::ptr; use core::ptr;
use core::to_str; use core::to_str;
use core::uint; use core::uint;
use core::vec; use core::vec;
use std::oldmap::HashMap;
use syntax::ast::*; use syntax::ast::*;
use syntax::codemap::span; use syntax::codemap::span;
use syntax::parse::token::special_idents; use syntax::parse::token::special_idents;
@ -135,7 +135,7 @@ use syntax::{visit, ast_util};
// //
// Very subtle (#2633): borrowck will remove entries from this table // Very subtle (#2633): borrowck will remove entries from this table
// if it detects an outstanding loan (that is, the addr is taken). // if it detects an outstanding loan (that is, the addr is taken).
pub type last_use_map = HashMap<node_id, @mut ~[node_id]>; pub type last_use_map = @mut LinearMap<node_id, @mut ~[node_id]>;
struct Variable(uint); struct Variable(uint);
struct LiveNode(uint); struct LiveNode(uint);
@ -212,7 +212,7 @@ pub fn check_crate(tcx: ty::ctxt,
.. *visit::default_visitor() .. *visit::default_visitor()
}); });
let last_use_map = HashMap(); let last_use_map = @mut LinearMap::new();
let initial_maps = @mut IrMaps(tcx, let initial_maps = @mut IrMaps(tcx,
method_map, method_map,
variable_moves_map, variable_moves_map,
@ -304,9 +304,9 @@ struct IrMaps {
num_live_nodes: uint, num_live_nodes: uint,
num_vars: uint, num_vars: uint,
live_node_map: HashMap<node_id, LiveNode>, live_node_map: LinearMap<node_id, LiveNode>,
variable_map: HashMap<node_id, Variable>, variable_map: LinearMap<node_id, Variable>,
capture_info_map: HashMap<node_id, @~[CaptureInfo]>, capture_info_map: LinearMap<node_id, @~[CaptureInfo]>,
var_kinds: ~[VarKind], var_kinds: ~[VarKind],
lnks: ~[LiveNodeKind], lnks: ~[LiveNodeKind],
} }
@ -325,9 +325,9 @@ fn IrMaps(tcx: ty::ctxt,
last_use_map: last_use_map, last_use_map: last_use_map,
num_live_nodes: 0, num_live_nodes: 0,
num_vars: 0, num_vars: 0,
live_node_map: HashMap(), live_node_map: LinearMap::new(),
variable_map: HashMap(), variable_map: LinearMap::new(),
capture_info_map: HashMap(), capture_info_map: LinearMap::new(),
var_kinds: ~[], var_kinds: ~[],
lnks: ~[] lnks: ~[]
} }
@ -374,7 +374,7 @@ pub impl IrMaps {
fn variable(&mut self, node_id: node_id, span: span) -> Variable { fn variable(&mut self, node_id: node_id, span: span) -> Variable {
match self.variable_map.find(&node_id) { match self.variable_map.find(&node_id) {
Some(var) => var, Some(&var) => var,
None => { None => {
self.tcx.sess.span_bug( self.tcx.sess.span_bug(
span, fmt!("No variable registered for id %d", node_id)); span, fmt!("No variable registered for id %d", node_id));
@ -396,7 +396,7 @@ pub impl IrMaps {
fn captures(&mut self, expr: @expr) -> @~[CaptureInfo] { fn captures(&mut self, expr: @expr) -> @~[CaptureInfo] {
match self.capture_info_map.find(&expr.id) { match self.capture_info_map.find(&expr.id) {
Some(caps) => caps, Some(&caps) => caps,
None => { None => {
self.tcx.sess.span_bug(expr.span, ~"no registered caps"); self.tcx.sess.span_bug(expr.span, ~"no registered caps");
} }
@ -416,7 +416,7 @@ pub impl IrMaps {
Local(LocalInfo {id: id, kind: FromLetWithInitializer, _}) | Local(LocalInfo {id: id, kind: FromLetWithInitializer, _}) |
Local(LocalInfo {id: id, kind: FromMatch(_), _}) => { Local(LocalInfo {id: id, kind: FromMatch(_), _}) => {
let v = match self.last_use_map.find(&expr_id) { let v = match self.last_use_map.find(&expr_id) {
Some(v) => v, Some(&v) => v,
None => { None => {
let v = @mut ~[]; let v = @mut ~[];
self.last_use_map.insert(expr_id, v); self.last_use_map.insert(expr_id, v);
@ -562,7 +562,7 @@ fn visit_expr(expr: @expr, &&self: @mut IrMaps, vt: vt<@mut IrMaps>) {
match expr.node { match expr.node {
// live nodes required for uses or definitions of variables: // live nodes required for uses or definitions of variables:
expr_path(_) => { expr_path(_) => {
let def = self.tcx.def_map.get(&expr.id); let def = *self.tcx.def_map.get(&expr.id);
debug!("expr %d: path that leads to %?", expr.id, def); debug!("expr %d: path that leads to %?", expr.id, def);
if relevant_def(def).is_some() { if relevant_def(def).is_some() {
self.add_live_node_for_node(expr.id, ExprNode(expr.span)); self.add_live_node_for_node(expr.id, ExprNode(expr.span));
@ -657,7 +657,7 @@ static ACC_READ: uint = 1u;
static ACC_WRITE: uint = 2u; static ACC_WRITE: uint = 2u;
static ACC_USE: uint = 4u; static ACC_USE: uint = 4u;
type LiveNodeMap = HashMap<node_id, LiveNode>; type LiveNodeMap = @mut LinearMap<node_id, LiveNode>;
struct Liveness { struct Liveness {
tcx: ty::ctxt, tcx: ty::ctxt,
@ -684,15 +684,16 @@ fn Liveness(ir: @mut IrMaps, specials: Specials) -> Liveness {
users: @mut vec::from_elem(ir.num_live_nodes * ir.num_vars, users: @mut vec::from_elem(ir.num_live_nodes * ir.num_vars,
invalid_users()), invalid_users()),
loop_scope: @mut ~[], loop_scope: @mut ~[],
break_ln: HashMap(), break_ln: @mut LinearMap::new(),
cont_ln: HashMap() cont_ln: @mut LinearMap::new()
} }
} }
pub impl Liveness { pub impl Liveness {
fn live_node(&self, node_id: node_id, span: span) -> LiveNode { fn live_node(&self, node_id: node_id, span: span) -> LiveNode {
match self.ir.live_node_map.find(&node_id) { let ir: &mut IrMaps = self.ir;
Some(ln) => ln, match ir.live_node_map.find(&node_id) {
Some(&ln) => ln,
None => { None => {
// This must be a mismatch between the ir_map construction // This must be a mismatch between the ir_map construction
// above and the propagation code below; the two sets of // above and the propagation code below; the two sets of
@ -708,7 +709,7 @@ pub impl Liveness {
fn variable_from_path(&self, expr: @expr) -> Option<Variable> { fn variable_from_path(&self, expr: @expr) -> Option<Variable> {
match expr.node { match expr.node {
expr_path(_) => { expr_path(_) => {
let def = self.tcx.def_map.get(&expr.id); let def = *self.tcx.def_map.get(&expr.id);
relevant_def(def).map( relevant_def(def).map(
|rdef| self.variable(*rdef, expr.span) |rdef| self.variable(*rdef, expr.span)
) )
@ -724,7 +725,7 @@ pub impl Liveness {
fn variable_from_def_map(&self, node_id: node_id, fn variable_from_def_map(&self, node_id: node_id,
span: span) -> Option<Variable> { span: span) -> Option<Variable> {
match self.tcx.def_map.find(&node_id) { match self.tcx.def_map.find(&node_id) {
Some(def) => { Some(&def) => {
relevant_def(def).map( relevant_def(def).map(
|rdef| self.variable(*rdef, span) |rdef| self.variable(*rdef, span)
) )
@ -845,7 +846,7 @@ pub impl Liveness {
Some(_) => // Refers to a labeled loop. Use the results of resolve Some(_) => // Refers to a labeled loop. Use the results of resolve
// to find which one // to find which one
match self.tcx.def_map.find(&id) { match self.tcx.def_map.find(&id) {
Some(def_label(loop_id)) => loop_id, Some(&def_label(loop_id)) => loop_id,
_ => self.tcx.sess.span_bug(sp, ~"Label on break/loop \ _ => self.tcx.sess.span_bug(sp, ~"Label on break/loop \
doesn't refer to a loop") doesn't refer to a loop")
}, },
@ -1226,7 +1227,7 @@ pub impl Liveness {
// look it up in the break loop nodes table // look it up in the break loop nodes table
match self.break_ln.find(&sc) { match self.break_ln.find(&sc) {
Some(b) => b, Some(&b) => b,
None => self.tcx.sess.span_bug(expr.span, None => self.tcx.sess.span_bug(expr.span,
~"Break to unknown label") ~"Break to unknown label")
} }
@ -1240,7 +1241,7 @@ pub impl Liveness {
// look it up in the continue loop nodes table // look it up in the continue loop nodes table
match self.cont_ln.find(&sc) { match self.cont_ln.find(&sc) {
Some(b) => b, Some(&b) => b,
None => self.tcx.sess.span_bug(expr.span, None => self.tcx.sess.span_bug(expr.span,
~"Loop to unknown label") ~"Loop to unknown label")
} }
@ -1448,7 +1449,7 @@ pub impl Liveness {
fn access_path(&self, expr: @expr, succ: LiveNode, acc: uint) fn access_path(&self, expr: @expr, succ: LiveNode, acc: uint)
-> LiveNode { -> LiveNode {
let def = self.tcx.def_map.get(&expr.id); let def = *self.tcx.def_map.get(&expr.id);
match relevant_def(def) { match relevant_def(def) {
Some(nid) => { Some(nid) => {
let ln = self.live_node(expr.id, expr.span); let ln = self.live_node(expr.id, expr.span);
@ -1587,7 +1588,7 @@ fn check_expr(expr: @expr, &&self: @Liveness, vt: vt<@Liveness>) {
match self.ir.variable_moves_map.find(&expr.id) { match self.ir.variable_moves_map.find(&expr.id) {
None => {} None => {}
Some(entire_expr) => { Some(&entire_expr) => {
debug!("(checking expr) is a move: `%s`", debug!("(checking expr) is a move: `%s`",
expr_to_str(expr, self.tcx.sess.intr())); expr_to_str(expr, self.tcx.sess.intr()));
self.check_move_from_var(ln, *var, entire_expr); self.check_move_from_var(ln, *var, entire_expr);
@ -1723,7 +1724,7 @@ pub impl Liveness {
fn check_lvalue(@self, expr: @expr, vt: vt<@Liveness>) { fn check_lvalue(@self, expr: @expr, vt: vt<@Liveness>) {
match expr.node { match expr.node {
expr_path(_) => { expr_path(_) => {
match self.tcx.def_map.get(&expr.id) { match *self.tcx.def_map.get(&expr.id) {
def_local(nid, false) => { def_local(nid, false) => {
// Assignment to an immutable variable or argument: // Assignment to an immutable variable or argument:
// only legal if there is no later assignment. // only legal if there is no later assignment.

View file

@ -359,7 +359,7 @@ pub impl mem_categorization_ctxt {
self.cat_expr_unadjusted(expr) self.cat_expr_unadjusted(expr)
} }
Some(@ty::AutoAddEnv(*)) => { Some(&@ty::AutoAddEnv(*)) => {
// Convert a bare fn to a closure by adding NULL env. // Convert a bare fn to a closure by adding NULL env.
// Result is an rvalue. // Result is an rvalue.
let expr_ty = ty::expr_ty_adjusted(self.tcx, expr); let expr_ty = ty::expr_ty_adjusted(self.tcx, expr);
@ -367,7 +367,7 @@ pub impl mem_categorization_ctxt {
} }
Some( Some(
@ty::AutoDerefRef( &@ty::AutoDerefRef(
ty::AutoDerefRef { ty::AutoDerefRef {
autoref: Some(_), _})) => { autoref: Some(_), _})) => {
// Equivalent to &*expr or something similar. // Equivalent to &*expr or something similar.
@ -377,7 +377,7 @@ pub impl mem_categorization_ctxt {
} }
Some( Some(
@ty::AutoDerefRef( &@ty::AutoDerefRef(
ty::AutoDerefRef { ty::AutoDerefRef {
autoref: None, autoderefs: autoderefs})) => { autoref: None, autoderefs: autoderefs})) => {
// Equivalent to *expr or something similar. // Equivalent to *expr or something similar.
@ -431,7 +431,7 @@ pub impl mem_categorization_ctxt {
} }
ast::expr_path(_) => { ast::expr_path(_) => {
let def = self.tcx.def_map.get(&expr.id); let def = *self.tcx.def_map.get(&expr.id);
self.cat_def(expr.id, expr.span, expr_ty, def) self.cat_def(expr.id, expr.span, expr_ty, def)
} }
@ -902,21 +902,21 @@ pub impl mem_categorization_ctxt {
} }
ast::pat_enum(_, Some(ref subpats)) => { ast::pat_enum(_, Some(ref subpats)) => {
match self.tcx.def_map.find(&pat.id) { match self.tcx.def_map.find(&pat.id) {
Some(ast::def_variant(enum_did, _)) => { Some(&ast::def_variant(enum_did, _)) => {
// variant(x, y, z) // variant(x, y, z)
for subpats.each |subpat| { for subpats.each |subpat| {
let subcmt = self.cat_variant(*subpat, enum_did, cmt); let subcmt = self.cat_variant(*subpat, enum_did, cmt);
self.cat_pattern(subcmt, *subpat, op); self.cat_pattern(subcmt, *subpat, op);
} }
} }
Some(ast::def_struct(*)) => { Some(&ast::def_struct(*)) => {
for subpats.each |subpat| { for subpats.each |subpat| {
let cmt_field = self.cat_anon_struct_field(*subpat, let cmt_field = self.cat_anon_struct_field(*subpat,
cmt); cmt);
self.cat_pattern(cmt_field, *subpat, op); self.cat_pattern(cmt_field, *subpat, op);
} }
} }
Some(ast::def_const(*)) => { Some(&ast::def_const(*)) => {
for subpats.each |subpat| { for subpats.each |subpat| {
self.cat_pattern(cmt, *subpat, op); self.cat_pattern(cmt, *subpat, op);
} }
@ -1124,7 +1124,7 @@ pub fn field_mutbl(tcx: ty::ctxt,
} }
} }
ty::ty_enum(*) => { ty::ty_enum(*) => {
match tcx.def_map.get(&node_id) { match *tcx.def_map.get(&node_id) {
ast::def_variant(_, variant_id) => { ast::def_variant(_, variant_id) => {
for ty::lookup_struct_fields(tcx, variant_id).each |fld| { for ty::lookup_struct_fields(tcx, variant_id).each |fld| {
if fld.ident == f_name { if fld.ident == f_name {

View file

@ -216,8 +216,8 @@ use middle::typeck::check::{DerefArgs, DoDerefArgs, DontDerefArgs};
use util::ppaux; use util::ppaux;
use util::common::indenter; use util::common::indenter;
use core::hashmap::linear::{LinearSet, LinearMap};
use core::vec; use core::vec;
use std::oldmap::HashMap;
use syntax::ast::*; use syntax::ast::*;
use syntax::ast_util; use syntax::ast_util;
use syntax::visit; use syntax::visit;
@ -242,14 +242,14 @@ pub struct CaptureVar {
mode: CaptureMode // How variable is being accessed mode: CaptureMode // How variable is being accessed
} }
pub type CaptureMap = HashMap<node_id, @[CaptureVar]>; pub type CaptureMap = @mut LinearMap<node_id, @[CaptureVar]>;
pub type MovesMap = HashMap<node_id, ()>; pub type MovesMap = @mut LinearSet<node_id>;
/** /**
* For each variable which will be moved, links to the * For each variable which will be moved, links to the
* expression */ * expression */
pub type VariableMovesMap = HashMap<node_id, @expr>; pub type VariableMovesMap = @mut LinearMap<node_id, @expr>;
/** See the section Output on the module comment for explanation. */ /** See the section Output on the module comment for explanation. */
pub struct MoveMaps { pub struct MoveMaps {
@ -260,7 +260,7 @@ pub struct MoveMaps {
struct VisitContext { struct VisitContext {
tcx: ty::ctxt, tcx: ty::ctxt,
method_map: HashMap<node_id,method_map_entry>, method_map: method_map,
move_maps: MoveMaps move_maps: MoveMaps
} }
@ -282,9 +282,9 @@ pub fn compute_moves(tcx: ty::ctxt,
tcx: tcx, tcx: tcx,
method_map: method_map, method_map: method_map,
move_maps: MoveMaps { move_maps: MoveMaps {
moves_map: HashMap(), moves_map: @mut LinearSet::new(),
variable_moves_map: HashMap(), variable_moves_map: @mut LinearMap::new(),
capture_map: HashMap() capture_map: @mut LinearMap::new()
} }
}; };
visit::visit_crate(*crate, visit_cx, visitor); visit::visit_crate(*crate, visit_cx, visitor);
@ -402,7 +402,7 @@ pub impl VisitContext {
expr_mode); expr_mode);
match expr_mode { match expr_mode {
MoveInWhole => { self.move_maps.moves_map.insert(expr.id, ()); } MoveInWhole => { self.move_maps.moves_map.insert(expr.id); }
MoveInPart(_) | Read => {} MoveInPart(_) | Read => {}
} }
@ -410,7 +410,7 @@ pub impl VisitContext {
// those adjustments is to take a reference, then it's only // those adjustments is to take a reference, then it's only
// reading the underlying expression, not moving it. // reading the underlying expression, not moving it.
let comp_mode = match self.tcx.adjustments.find(&expr.id) { let comp_mode = match self.tcx.adjustments.find(&expr.id) {
Some(@ty::AutoDerefRef( Some(&@ty::AutoDerefRef(
ty::AutoDerefRef { ty::AutoDerefRef {
autoref: Some(_), _})) => Read, autoref: Some(_), _})) => Read,
_ => expr_mode.component_mode(expr) _ => expr_mode.component_mode(expr)
@ -725,7 +725,7 @@ pub impl VisitContext {
}; };
match mode { match mode {
MoveInWhole => { self.move_maps.moves_map.insert(id, ()); } MoveInWhole => { self.move_maps.moves_map.insert(id); }
MoveInPart(_) | Read => {} MoveInPart(_) | Read => {}
} }
} }
@ -795,7 +795,7 @@ pub impl VisitContext {
for arm.pats.each |pat| { for arm.pats.each |pat| {
let mut found = false; let mut found = false;
do pat_bindings(self.tcx.def_map, *pat) |_, node_id, _, _| { do pat_bindings(self.tcx.def_map, *pat) |_, node_id, _, _| {
if moves_map.contains_key(&node_id) { if moves_map.contains(&node_id) {
found = true; found = true;
} }
} }
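Tables that only ever stored () as their value, such as moves_map, become @mut LinearSet boxes: insert loses its dummy second argument and contains_key becomes contains, while the @mut wrapper preserves the shared-ownership behaviour the old @-based HashMap gave for free. A sketch under those assumptions:

    use core::hashmap::linear::LinearSet;

    fn demo_moves_set() {
        let moves_map: @mut LinearSet<int> = @mut LinearSet::new();

        // was: moves_map.insert(expr_id, ()) on the old HashMap
        moves_map.insert(42);

        // was: moves_map.contains_key(&expr_id)
        if moves_map.contains(&42) {
            debug!("node %d will be moved", 42);
        }
    }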

View file

@ -12,17 +12,17 @@ use core::prelude::*;
use middle::resolve; use middle::resolve;
use core::hashmap::linear::LinearMap;
use syntax::ast::*; use syntax::ast::*;
use syntax::ast_util::{path_to_ident, walk_pat}; use syntax::ast_util::{path_to_ident, walk_pat};
use syntax::codemap::{span, respan}; use syntax::codemap::{span, respan};
use std::oldmap::HashMap;
pub type PatIdMap = HashMap<ident, node_id>; pub type PatIdMap = LinearMap<ident, node_id>;
// This is used because same-named variables in alternative patterns need to // This is used because same-named variables in alternative patterns need to
// use the node_id of their namesake in the first pattern. // use the node_id of their namesake in the first pattern.
pub fn pat_id_map(dm: resolve::DefMap, pat: @pat) -> PatIdMap { pub fn pat_id_map(dm: resolve::DefMap, pat: @pat) -> PatIdMap {
let map = HashMap(); let mut map = LinearMap::new();
do pat_bindings(dm, pat) |_bm, p_id, _s, n| { do pat_bindings(dm, pat) |_bm, p_id, _s, n| {
map.insert(path_to_ident(n), p_id); map.insert(path_to_ident(n), p_id);
}; };
@ -33,7 +33,7 @@ pub fn pat_is_variant_or_struct(dm: resolve::DefMap, pat: @pat) -> bool {
match pat.node { match pat.node {
pat_enum(_, _) | pat_ident(_, _, None) | pat_struct(*) => { pat_enum(_, _) | pat_ident(_, _, None) | pat_struct(*) => {
match dm.find(&pat.id) { match dm.find(&pat.id) {
Some(def_variant(*)) | Some(def_struct(*)) => true, Some(&def_variant(*)) | Some(&def_struct(*)) => true,
_ => false _ => false
} }
} }
@ -45,7 +45,7 @@ pub fn pat_is_const(dm: resolve::DefMap, pat: &pat) -> bool {
match pat.node { match pat.node {
pat_ident(_, _, None) | pat_enum(*) => { pat_ident(_, _, None) | pat_enum(*) => {
match dm.find(&pat.id) { match dm.find(&pat.id) {
Some(def_const(*)) => true, Some(&def_const(*)) => true,
_ => false _ => false
} }
} }

View file

@ -484,7 +484,7 @@ pub fn check_crate(tcx: ty::ctxt,
} }
} }
expr_path(path) => { expr_path(path) => {
check_path(expr.span, tcx.def_map.get(&expr.id), path); check_path(expr.span, *tcx.def_map.get(&expr.id), path);
} }
expr_struct(_, ref fields, _) => { expr_struct(_, ref fields, _) => {
match ty::get(ty::expr_ty(tcx, expr)).sty { match ty::get(ty::expr_ty(tcx, expr)).sty {
@ -502,7 +502,7 @@ pub fn check_crate(tcx: ty::ctxt,
ty_enum(id, _) => { ty_enum(id, _) => {
if id.crate != local_crate || if id.crate != local_crate ||
!privileged_items.contains(&(id.node)) { !privileged_items.contains(&(id.node)) {
match tcx.def_map.get(&expr.id) { match *tcx.def_map.get(&expr.id) {
def_variant(_, variant_id) => { def_variant(_, variant_id) => {
for (*fields).each |field| { for (*fields).each |field| {
debug!("(privacy checking) \ debug!("(privacy checking) \
@ -570,7 +570,7 @@ pub fn check_crate(tcx: ty::ctxt,
!privileged_items.contains( !privileged_items.contains(
&enum_id.node) { &enum_id.node) {
match tcx.def_map.find(&pattern.id) { match tcx.def_map.find(&pattern.id) {
Some(def_variant(_, variant_id)) => { Some(&def_variant(_, variant_id)) => {
for fields.each |field| { for fields.each |field| {
debug!("(privacy checking) \ debug!("(privacy checking) \
checking field in \ checking field in \

View file

@ -26,8 +26,8 @@ use middle::ty::{region_variance, rv_covariant, rv_invariant};
use middle::ty::{rv_contravariant}; use middle::ty::{rv_contravariant};
use middle::ty; use middle::ty;
use core::hashmap::linear::{LinearMap, LinearSet};
use core::vec; use core::vec;
use std::oldmap::HashMap;
use syntax::ast_map; use syntax::ast_map;
use syntax::codemap::span; use syntax::codemap::span;
use syntax::print::pprust; use syntax::print::pprust;
@ -46,7 +46,7 @@ Encodes the bounding lifetime for a given AST node:
- Variables and bindings are mapped to the block in which they are declared. - Variables and bindings are mapped to the block in which they are declared.
*/ */
pub type region_map = HashMap<ast::node_id, ast::node_id>; pub type region_map = @mut LinearMap<ast::node_id, ast::node_id>;
pub struct ctxt { pub struct ctxt {
sess: Session, sess: Session,
@ -62,7 +62,7 @@ pub struct ctxt {
// the condition in a while loop is always a parent. In those // the condition in a while loop is always a parent. In those
// cases, we add the node id of such an expression to this set so // cases, we add the node id of such an expression to this set so
// that when we visit it we can view it as a parent. // that when we visit it we can view it as a parent.
root_exprs: HashMap<ast::node_id, ()>, root_exprs: @mut LinearSet<ast::node_id>,
// The parent scope is the innermost block, statement, call, or match // The parent scope is the innermost block, statement, call, or match
// expression during the execution of which the current expression // expression during the execution of which the current expression
@ -106,7 +106,7 @@ pub fn scope_contains(region_map: region_map, superscope: ast::node_id,
while superscope != subscope { while superscope != subscope {
match region_map.find(&subscope) { match region_map.find(&subscope) {
None => return false, None => return false,
Some(scope) => subscope = scope Some(&scope) => subscope = scope
} }
} }
return true; return true;
@ -150,7 +150,7 @@ pub fn nearest_common_ancestor(region_map: region_map,
loop { loop {
match region_map.find(&scope) { match region_map.find(&scope) {
None => return result, None => return result,
Some(superscope) => { Some(&superscope) => {
result.push(superscope); result.push(superscope);
scope = superscope; scope = superscope;
} }
@ -228,7 +228,7 @@ pub fn resolve_pat(pat: @ast::pat, cx: ctxt, visitor: visit::vt<ctxt>) {
ast::pat_ident(*) => { ast::pat_ident(*) => {
let defn_opt = cx.def_map.find(&pat.id); let defn_opt = cx.def_map.find(&pat.id);
match defn_opt { match defn_opt {
Some(ast::def_variant(_,_)) => { Some(&ast::def_variant(_,_)) => {
/* Nothing to do; this names a variant. */ /* Nothing to do; this names a variant. */
} }
_ => { _ => {
@ -280,12 +280,12 @@ pub fn resolve_expr(expr: @ast::expr, cx: ctxt, visitor: visit::vt<ctxt>) {
new_cx.parent = Some(expr.id); new_cx.parent = Some(expr.id);
} }
ast::expr_while(cond, _) => { ast::expr_while(cond, _) => {
new_cx.root_exprs.insert(cond.id, ()); new_cx.root_exprs.insert(cond.id);
} }
_ => {} _ => {}
}; };
if new_cx.root_exprs.contains_key(&expr.id) { if new_cx.root_exprs.contains(&expr.id) {
new_cx.parent = Some(expr.id); new_cx.parent = Some(expr.id);
} }
@ -350,8 +350,8 @@ pub fn resolve_crate(sess: Session,
-> region_map { -> region_map {
let cx: ctxt = ctxt {sess: sess, let cx: ctxt = ctxt {sess: sess,
def_map: def_map, def_map: def_map,
region_map: HashMap(), region_map: @mut LinearMap::new(),
root_exprs: HashMap(), root_exprs: @mut LinearSet::new(),
parent: None}; parent: None};
let visitor = visit::mk_vt(@visit::Visitor { let visitor = visit::mk_vt(@visit::Visitor {
visit_block: resolve_block, visit_block: resolve_block,
@ -387,7 +387,7 @@ pub fn resolve_crate(sess: Session,
// a worklist. We can then process the worklist, propagating indirect // a worklist. We can then process the worklist, propagating indirect
// dependencies until a fixed point is reached. // dependencies until a fixed point is reached.
pub type region_paramd_items = HashMap<ast::node_id, region_variance>; pub type region_paramd_items = @mut LinearMap<ast::node_id, region_variance>;
#[deriving(Eq)] #[deriving(Eq)]
pub struct region_dep { pub struct region_dep {
@ -395,7 +395,7 @@ pub struct region_dep {
id: ast::node_id id: ast::node_id
} }
pub type dep_map = HashMap<ast::node_id, @mut ~[region_dep]>; pub type dep_map = @mut LinearMap<ast::node_id, @mut ~[region_dep]>;
pub struct DetermineRpCtxt { pub struct DetermineRpCtxt {
sess: Session, sess: Session,
@ -464,7 +464,8 @@ pub impl DetermineRpCtxt {
/// the new variance is joined with the old variance. /// the new variance is joined with the old variance.
fn add_rp(&mut self, id: ast::node_id, variance: region_variance) { fn add_rp(&mut self, id: ast::node_id, variance: region_variance) {
fail_unless!(id != 0); fail_unless!(id != 0);
let old_variance = self.region_paramd_items.find(&id); let old_variance = self.region_paramd_items.find(&id).
map_consume(|x| *x);
let joined_variance = match old_variance { let joined_variance = match old_variance {
None => variance, None => variance,
Some(v) => join_variance(v, variance) Some(v) => join_variance(v, variance)
@ -496,7 +497,7 @@ pub impl DetermineRpCtxt {
self.sess.parse_sess.interner), self.sess.parse_sess.interner),
copy self.ambient_variance); copy self.ambient_variance);
let vec = match self.dep_map.find(&from) { let vec = match self.dep_map.find(&from) {
Some(vec) => vec, Some(&vec) => vec,
None => { None => {
let vec = @mut ~[]; let vec = @mut ~[];
let dep_map = self.dep_map; let dep_map = self.dep_map;
@ -689,7 +690,7 @@ pub fn determine_rp_in_ty(ty: @ast::Ty,
match ty.node { match ty.node {
ast::ty_path(path, id) => { ast::ty_path(path, id) => {
match cx.def_map.find(&id) { match cx.def_map.find(&id) {
Some(ast::def_ty(did)) | Some(ast::def_struct(did)) => { Some(&ast::def_ty(did)) | Some(&ast::def_struct(did)) => {
if did.crate == ast::local_crate { if did.crate == ast::local_crate {
if cx.region_is_relevant(path.rp) { if cx.region_is_relevant(path.rp) {
cx.add_dep(did.node); cx.add_dep(did.node);
@ -782,15 +783,15 @@ pub fn determine_rp_in_struct_field(
pub fn determine_rp_in_crate(sess: Session, pub fn determine_rp_in_crate(sess: Session,
ast_map: ast_map::map, ast_map: ast_map::map,
def_map: resolve::DefMap, +def_map: resolve::DefMap,
crate: @ast::crate) crate: @ast::crate)
-> region_paramd_items { -> region_paramd_items {
let cx = @mut DetermineRpCtxt { let cx = @mut DetermineRpCtxt {
sess: sess, sess: sess,
ast_map: ast_map, ast_map: ast_map,
def_map: def_map, def_map: def_map,
region_paramd_items: HashMap(), region_paramd_items: @mut LinearMap::new(),
dep_map: HashMap(), dep_map: @mut LinearMap::new(),
worklist: ~[], worklist: ~[],
item_id: 0, item_id: 0,
anon_implies_rp: false, anon_implies_rp: false,
@ -822,7 +823,7 @@ pub fn determine_rp_in_crate(sess: Session,
let cx = &mut *cx; let cx = &mut *cx;
while cx.worklist.len() != 0 { while cx.worklist.len() != 0 {
let c_id = cx.worklist.pop(); let c_id = cx.worklist.pop();
let c_variance = cx.region_paramd_items.get(&c_id); let c_variance = *cx.region_paramd_items.get(&c_id);
debug!("popped %d from worklist", c_id); debug!("popped %d from worklist", c_id);
match cx.dep_map.find(&c_id) { match cx.dep_map.find(&c_id) {
None => {} None => {}
@ -839,7 +840,7 @@ pub fn determine_rp_in_crate(sess: Session,
debug!("%s", { debug!("%s", {
debug!("Region variance results:"); debug!("Region variance results:");
let region_paramd_items = cx.region_paramd_items; let region_paramd_items = cx.region_paramd_items;
for region_paramd_items.each |&key, &value| { for region_paramd_items.each |&(&key, &value)| {
debug!("item %? (%s) is parameterized with variance %?", debug!("item %? (%s) is parameterized with variance %?",
key, key,
ast_map::node_id_to_str(ast_map, key, ast_map::node_id_to_str(ast_map, key,
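Where a call site wants an owned Option<V> back, as add_rp does with old_variance above, the patch chains map_consume over the Option<&V> that find returns, copying the value out of the reference. A sketch of that conversion for a copyable value type:

    use core::hashmap::linear::LinearMap;

    fn find_copy(m: &LinearMap<int, uint>, k: int) -> Option<uint> {
        // find() gives Option<&uint>; map_consume applies |x| *x to the
        // borrowed reference, yielding Option<uint> as the old API did.
        m.find(&k).map_consume(|x| *x)
    }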

View file

@ -77,11 +77,10 @@ use syntax::opt_vec::OptVec;
use core::option::{Some, get, is_some, is_none}; use core::option::{Some, get, is_some, is_none};
use core::str::{connect, each_split_str}; use core::str::{connect, each_split_str};
use core::hashmap::linear::LinearMap; use core::hashmap::linear::{LinearMap, LinearSet};
use std::oldmap::HashMap;
// Definition mapping // Definition mapping
pub type DefMap = HashMap<node_id,def>; pub type DefMap = @mut LinearMap<node_id,def>;
pub struct binding_info { pub struct binding_info {
span: span, span: span,
@ -89,7 +88,7 @@ pub struct binding_info {
} }
// Map from the name in a pattern to its binding mode. // Map from the name in a pattern to its binding mode.
pub type BindingMap = HashMap<ident,binding_info>; pub type BindingMap = LinearMap<ident,binding_info>;
// Implementation resolution // Implementation resolution
// //
@ -110,11 +109,11 @@ pub struct Impl {
} }
// Trait method resolution // Trait method resolution
pub type TraitMap = @HashMap<node_id,@mut ~[def_id]>; pub type TraitMap = LinearMap<node_id,@mut ~[def_id]>;
// This is the replacement export map. It maps a module to all of the exports // This is the replacement export map. It maps a module to all of the exports
// within. // within.
pub type ExportMap2 = HashMap<node_id, ~[Export2]>; pub type ExportMap2 = @mut LinearMap<node_id, ~[Export2]>;
pub struct Export2 { pub struct Export2 {
name: @~str, // The name of the target. name: @~str, // The name of the target.
@ -335,13 +334,13 @@ pub fn namespace_for_duplicate_checking_mode(mode: DuplicateCheckingMode)
/// One local scope. /// One local scope.
pub struct Rib { pub struct Rib {
bindings: HashMap<ident,def_like>, bindings: @mut LinearMap<ident,def_like>,
kind: RibKind, kind: RibKind,
} }
pub fn Rib(kind: RibKind) -> Rib { pub fn Rib(kind: RibKind) -> Rib {
Rib { Rib {
bindings: HashMap(), bindings: @mut LinearMap::new(),
kind: kind kind: kind
} }
} }
@ -475,7 +474,7 @@ pub struct Module {
// There will be an anonymous module created around `g` with the ID of the // There will be an anonymous module created around `g` with the ID of the
// entry block for `f`. // entry block for `f`.
anonymous_children: @HashMap<node_id,@mut Module>, anonymous_children: @mut LinearMap<node_id,@mut Module>,
// The status of resolving each import in this module. // The status of resolving each import in this module.
import_resolutions: @mut LinearMap<ident, @mut ImportResolution>, import_resolutions: @mut LinearMap<ident, @mut ImportResolution>,
@ -497,7 +496,7 @@ pub fn Module(parent_link: ParentLink,
kind: kind, kind: kind,
children: @mut LinearMap::new(), children: @mut LinearMap::new(),
imports: @mut ~[], imports: @mut ~[],
anonymous_children: @HashMap(), anonymous_children: @mut LinearMap::new(),
import_resolutions: @mut LinearMap::new(), import_resolutions: @mut LinearMap::new(),
glob_count: 0, glob_count: 0,
resolved_import_count: 0 resolved_import_count: 0
@ -709,11 +708,11 @@ pub fn NameBindings() -> NameBindings {
/// Interns the names of the primitive types. /// Interns the names of the primitive types.
pub struct PrimitiveTypeTable { pub struct PrimitiveTypeTable {
primitive_types: HashMap<ident,prim_ty>, primitive_types: LinearMap<ident,prim_ty>,
} }
pub impl PrimitiveTypeTable { pub impl PrimitiveTypeTable {
fn intern(&self, intr: @ident_interner, string: @~str, fn intern(&mut self, intr: @ident_interner, string: @~str,
primitive_type: prim_ty) { primitive_type: prim_ty) {
let ident = intr.intern(string); let ident = intr.intern(string);
self.primitive_types.insert(ident, primitive_type); self.primitive_types.insert(ident, primitive_type);
@ -721,8 +720,8 @@ pub impl PrimitiveTypeTable {
} }
pub fn PrimitiveTypeTable(intr: @ident_interner) -> PrimitiveTypeTable { pub fn PrimitiveTypeTable(intr: @ident_interner) -> PrimitiveTypeTable {
let table = PrimitiveTypeTable { let mut table = PrimitiveTypeTable {
primitive_types: HashMap() primitive_types: LinearMap::new()
}; };
table.intern(intr, @~"bool", ty_bool); table.intern(intr, @~"bool", ty_bool);
@ -777,8 +776,8 @@ pub fn Resolver(session: Session,
graph_root: graph_root, graph_root: graph_root,
trait_info: @HashMap(), trait_info: LinearMap::new(),
structs: @HashMap(), structs: LinearSet::new(),
unresolved_imports: 0, unresolved_imports: 0,
@ -801,9 +800,9 @@ pub fn Resolver(session: Session,
attr_main_fn: None, attr_main_fn: None,
main_fns: ~[], main_fns: ~[],
def_map: @HashMap(), def_map: @mut LinearMap::new(),
export_map2: @HashMap(), export_map2: @mut LinearMap::new(),
trait_map: @HashMap(), trait_map: LinearMap::new(),
intr: session.intr() intr: session.intr()
}; };
@ -821,8 +820,8 @@ pub struct Resolver {
graph_root: @mut NameBindings, graph_root: @mut NameBindings,
trait_info: @HashMap<def_id,@HashMap<ident,()>>, trait_info: LinearMap<def_id, LinearSet<ident>>,
structs: @HashMap<def_id,()>, structs: LinearSet<def_id>,
// The number of imports that are currently unresolved. // The number of imports that are currently unresolved.
unresolved_imports: uint, unresolved_imports: uint,
@ -863,8 +862,8 @@ pub struct Resolver {
// The functions named 'main' // The functions named 'main'
main_fns: ~[Option<(node_id, span)>], main_fns: ~[Option<(node_id, span)>],
def_map: @DefMap, def_map: DefMap,
export_map2: @ExportMap2, export_map2: ExportMap2,
trait_map: TraitMap, trait_map: TraitMap,
} }
@ -1192,7 +1191,7 @@ pub impl Resolver {
} }
// Record the def ID of this struct. // Record the def ID of this struct.
self.structs.insert(local_def(item.id), ()); self.structs.insert(local_def(item.id));
visit_item(item, new_parent, visitor); visit_item(item, new_parent, visitor);
} }
@ -1305,8 +1304,8 @@ pub impl Resolver {
} }
// Add the names of all the methods to the trait info. // Add the names of all the methods to the trait info.
let method_names = @HashMap(); let mut method_names = LinearSet::new();
for (*methods).each |method| { for methods.each |method| {
let ty_m = trait_method_to_ty_method(method); let ty_m = trait_method_to_ty_method(method);
let ident = ty_m.ident; let ident = ty_m.ident;
@ -1329,7 +1328,7 @@ pub impl Resolver {
ty_m.span); ty_m.span);
} }
_ => { _ => {
method_names.insert(ident, ()); method_names.insert(ident);
} }
} }
} }
@ -1378,7 +1377,7 @@ pub impl Resolver {
def_variant(item_id, def_variant(item_id,
local_def(variant.node.id)), local_def(variant.node.id)),
variant.span); variant.span);
self.structs.insert(local_def(variant.node.id), ()); self.structs.insert(local_def(variant.node.id));
} }
enum_variant_kind(ref enum_definition) => { enum_variant_kind(ref enum_definition) => {
child.define_type(privacy, child.define_type(privacy,
@ -1565,7 +1564,7 @@ pub impl Resolver {
fn handle_external_def(@mut self, fn handle_external_def(@mut self,
def: def, def: def,
modules: HashMap<def_id, @mut Module>, modules: &mut LinearMap<def_id, @mut Module>,
child_name_bindings: @mut NameBindings, child_name_bindings: @mut NameBindings,
final_ident: &str, final_ident: &str,
ident: ident, ident: ident,
@ -1585,8 +1584,8 @@ pub impl Resolver {
%s", final_ident); %s", final_ident);
let parent_link = self.get_parent_link(new_parent, ident); let parent_link = self.get_parent_link(new_parent, ident);
match modules.find(&def_id) { // FIXME (#5074): this should be a match on find
None => { if !modules.contains_key(&def_id) {
child_name_bindings.define_module(Public, child_name_bindings.define_module(Public,
parent_link, parent_link,
Some(def_id), Some(def_id),
@ -1594,8 +1593,8 @@ pub impl Resolver {
dummy_sp()); dummy_sp());
modules.insert(def_id, modules.insert(def_id,
child_name_bindings.get_module()); child_name_bindings.get_module());
} } else {
Some(existing_module) => { let existing_module = *modules.get(&def_id);
// Create an import resolution to // Create an import resolution to
// avoid creating cycles in the // avoid creating cycles in the
// module graph. // module graph.
@ -1627,7 +1626,6 @@ pub impl Resolver {
} }
} }
} }
}
def_fn(*) | def_static_method(*) | def_const(*) | def_fn(*) | def_static_method(*) | def_const(*) |
def_variant(*) => { def_variant(*) => {
debug!("(building reduced graph for external \ debug!("(building reduced graph for external \
@ -1646,7 +1644,7 @@ pub impl Resolver {
// Nothing to do. // Nothing to do.
} }
Some(method_names) => { Some(method_names) => {
let interned_method_names = @HashMap(); let mut interned_method_names = LinearSet::new();
for method_names.each |method_data| { for method_names.each |method_data| {
let (method_name, self_ty) = *method_data; let (method_name, self_ty) = *method_data;
debug!("(building reduced graph for \ debug!("(building reduced graph for \
@ -1656,7 +1654,7 @@ pub impl Resolver {
// Add it to the trait info if not static. // Add it to the trait info if not static.
if self_ty != sty_static { if self_ty != sty_static {
interned_method_names.insert(method_name, ()); interned_method_names.insert(method_name);
} }
} }
self.trait_info.insert(def_id, interned_method_names); self.trait_info.insert(def_id, interned_method_names);
@ -1670,7 +1668,7 @@ pub impl Resolver {
crate) building type %s", crate) building type %s",
final_ident); final_ident);
child_name_bindings.define_type(Public, def, dummy_sp()); child_name_bindings.define_type(Public, def, dummy_sp());
self.structs.insert(def_id, ()); self.structs.insert(def_id);
} }
def_self(*) | def_arg(*) | def_local(*) | def_self(*) | def_arg(*) | def_local(*) |
def_prim_ty(*) | def_ty_param(*) | def_binding(*) | def_prim_ty(*) | def_ty_param(*) | def_binding(*) |
@ -1686,7 +1684,7 @@ pub impl Resolver {
* crate. * crate.
*/ */
fn build_reduced_graph_for_external_crate(@mut self, root: @mut Module) { fn build_reduced_graph_for_external_crate(@mut self, root: @mut Module) {
let modules = HashMap(); let mut modules = LinearMap::new();
// Create all the items reachable by paths. // Create all the items reachable by paths.
for each_path(self.session.cstore, root.def_id.get().crate) for each_path(self.session.cstore, root.def_id.get().crate)
@ -1758,7 +1756,7 @@ pub impl Resolver {
dummy_sp()); dummy_sp());
self.handle_external_def(def, self.handle_external_def(def,
modules, &mut modules,
child_name_bindings, child_name_bindings,
*self.session.str_of( *self.session.str_of(
final_ident), final_ident),
@ -3338,7 +3336,8 @@ pub impl Resolver {
// If the def is a ty param, and came from the parent // If the def is a ty param, and came from the parent
// item, it's ok // item, it's ok
match def { match def {
def_ty_param(did, _) if self.def_map.find(&did.node) def_ty_param(did, _)
if self.def_map.find(&did.node).map_consume(|x| *x)
== Some(def_typaram_binder(item_id)) => { == Some(def_typaram_binder(item_id)) => {
// ok // ok
} }
@ -3412,7 +3411,7 @@ pub impl Resolver {
while i != 0 { while i != 0 {
i -= 1; i -= 1;
match ribs[i].bindings.find(&name) { match ribs[i].bindings.find(&name) {
Some(def_like) => { Some(&def_like) => {
return self.upvarify(ribs, i, def_like, span, return self.upvarify(ribs, i, def_like, span,
allow_capturing_self); allow_capturing_self);
} }
@ -3995,8 +3994,8 @@ pub impl Resolver {
} }
fn binding_mode_map(@mut self, pat: @pat) -> BindingMap { fn binding_mode_map(@mut self, pat: @pat) -> BindingMap {
let result = HashMap(); let mut result = LinearMap::new();
do pat_bindings(*self.def_map, pat) |binding_mode, _id, sp, path| { do pat_bindings(self.def_map, pat) |binding_mode, _id, sp, path| {
let ident = path_to_ident(path); let ident = path_to_ident(path);
result.insert(ident, result.insert(ident,
binding_info {span: sp, binding_info {span: sp,
@ -4011,7 +4010,7 @@ pub impl Resolver {
for arm.pats.eachi() |i, p| { for arm.pats.eachi() |i, p| {
let map_i = self.binding_mode_map(*p); let map_i = self.binding_mode_map(*p);
for map_0.each |&key, &binding_0| { for map_0.each |&(&key, &binding_0)| {
match map_i.find(&key) { match map_i.find(&key) {
None => { None => {
self.session.span_err( self.session.span_err(
@ -4032,7 +4031,7 @@ pub impl Resolver {
} }
} }
for map_i.each |&key, &binding| { for map_i.each |&(&key, &binding)| {
if !map_0.contains_key(&key) { if !map_0.contains_key(&key) {
self.session.span_err( self.session.span_err(
binding.span, binding.span,
@ -4047,7 +4046,7 @@ pub impl Resolver {
fn resolve_arm(@mut self, arm: &arm, visitor: ResolveVisitor) { fn resolve_arm(@mut self, arm: &arm, visitor: ResolveVisitor) {
self.value_ribs.push(@Rib(NormalRibKind)); self.value_ribs.push(@Rib(NormalRibKind));
let bindings_list = HashMap(); let bindings_list = @mut LinearMap::new();
for arm.pats.each |pattern| { for arm.pats.each |pattern| {
self.resolve_pattern(*pattern, RefutableMode, Immutable, self.resolve_pattern(*pattern, RefutableMode, Immutable,
Some(bindings_list), visitor); Some(bindings_list), visitor);
@ -4071,7 +4070,7 @@ pub impl Resolver {
let orig_module = self.current_module; let orig_module = self.current_module;
match self.current_module.anonymous_children.find(&block.node.id) { match self.current_module.anonymous_children.find(&block.node.id) {
None => { /* Nothing to do. */ } None => { /* Nothing to do. */ }
Some(anonymous_module) => { Some(&anonymous_module) => {
debug!("(resolving block) found anonymous module, moving \ debug!("(resolving block) found anonymous module, moving \
down"); down");
self.current_module = anonymous_module; self.current_module = anonymous_module;
@ -4106,7 +4105,7 @@ pub impl Resolver {
.primitive_types .primitive_types
.find(&name) { .find(&name) {
Some(primitive_type) => { Some(&primitive_type) => {
result_def = result_def =
Some(def_prim_ty(primitive_type)); Some(def_prim_ty(primitive_type));
} }
@ -4167,7 +4166,7 @@ pub impl Resolver {
mutability: Mutability, mutability: Mutability,
// Maps idents to the node ID for the (outermost) // Maps idents to the node ID for the (outermost)
// pattern that binds them // pattern that binds them
bindings_list: Option<HashMap<ident,node_id>>, bindings_list: Option<@mut LinearMap<ident,node_id>>,
visitor: ResolveVisitor) { visitor: ResolveVisitor) {
let pat_id = pattern.id; let pat_id = pattern.id;
do walk_pat(pattern) |pattern| { do walk_pat(pattern) |pattern| {
@ -4271,7 +4270,7 @@ pub impl Resolver {
bindings_list.insert(ident, pat_id); bindings_list.insert(ident, pat_id);
} }
Some(b) => { Some(b) => {
if b.find(&ident) == Some(pat_id) { if b.find(&ident) == Some(&pat_id) {
// Then this is a duplicate variable // Then this is a duplicate variable
// in the same disjunct, which is an // in the same disjunct, which is an
// error // error
@ -4371,21 +4370,19 @@ pub impl Resolver {
} }
pat_struct(path, _, _) => { pat_struct(path, _, _) => {
let structs: &mut LinearSet<def_id> = &mut self.structs;
match self.resolve_path(path, TypeNS, false, visitor) { match self.resolve_path(path, TypeNS, false, visitor) {
Some(def_ty(class_id)) Some(def_ty(class_id))
if self.structs.contains_key(&class_id) if structs.contains(&class_id) => {
=> {
let class_def = def_struct(class_id); let class_def = def_struct(class_id);
self.record_def(pattern.id, class_def); self.record_def(pattern.id, class_def);
} }
Some(definition @ def_struct(class_id)) Some(definition @ def_struct(class_id))
if self.structs.contains_key(&class_id) if structs.contains(&class_id) => {
=> {
self.record_def(pattern.id, definition); self.record_def(pattern.id, definition);
} }
Some(definition @ def_variant(_, variant_id)) Some(definition @ def_variant(_, variant_id))
if self.structs.contains_key(&variant_id) if structs.contains(&variant_id) => {
=> {
self.record_def(pattern.id, definition); self.record_def(pattern.id, definition);
} }
result => { result => {
@ -4869,14 +4866,15 @@ pub impl Resolver {
expr_struct(path, _, _) => { expr_struct(path, _, _) => {
// Resolve the path to the structure it goes to. // Resolve the path to the structure it goes to.
let structs: &mut LinearSet<def_id> = &mut self.structs;
match self.resolve_path(path, TypeNS, false, visitor) { match self.resolve_path(path, TypeNS, false, visitor) {
Some(def_ty(class_id)) | Some(def_struct(class_id)) Some(def_ty(class_id)) | Some(def_struct(class_id))
if self.structs.contains_key(&class_id) => { if structs.contains(&class_id) => {
let class_def = def_struct(class_id); let class_def = def_struct(class_id);
self.record_def(expr.id, class_def); self.record_def(expr.id, class_def);
} }
Some(definition @ def_variant(_, class_id)) Some(definition @ def_variant(_, class_id))
if self.structs.contains_key(&class_id) => { if structs.contains(&class_id) => {
self.record_def(expr.id, definition); self.record_def(expr.id, definition);
} }
_ => { _ => {
@ -5095,7 +5093,7 @@ pub impl Resolver {
return found_traits; return found_traits;
} }
fn add_trait_info_if_containing_method(@mut self, fn add_trait_info_if_containing_method(&self,
found_traits: &mut ~[def_id], found_traits: &mut ~[def_id],
trait_def_id: def_id, trait_def_id: def_id,
name: ident) name: ident)
@ -5107,7 +5105,7 @@ pub impl Resolver {
*self.session.str_of(name)); *self.session.str_of(name));
match self.trait_info.find(&trait_def_id) { match self.trait_info.find(&trait_def_id) {
Some(trait_info) if trait_info.contains_key(&name) => { Some(trait_info) if trait_info.contains(&name) => {
debug!("(adding trait info if containing method) found trait \ debug!("(adding trait info if containing method) found trait \
%d:%d for method '%s'", %d:%d for method '%s'",
trait_def_id.crate, trait_def_id.crate,
@ -5351,10 +5349,11 @@ pub fn resolve_crate(session: Session,
-> CrateMap { -> CrateMap {
let resolver = @mut Resolver(session, lang_items, crate); let resolver = @mut Resolver(session, lang_items, crate);
resolver.resolve(); resolver.resolve();
let @Resolver{def_map, export_map2, trait_map, _} = resolver;
CrateMap { CrateMap {
def_map: *resolver.def_map, def_map: def_map,
exp_map2: *resolver.export_map2, exp_map2: export_map2,
trait_map: resolver.trait_map trait_map: trait_map
} }
} }
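
A note on the resolver changes above: they repeatedly replace oldmap::HashMap's by-value find with LinearMap/LinearSet lookups that return a reference, which is why match arms become Some(&x) and why the FIXME asks for a match on find rather than a contains_key/get pair. A minimal sketch of the same idioms against today's std::collections::HashMap; the DefId and Module stand-ins are illustrative, not code from this commit:

use std::collections::HashMap;

// Hypothetical stand-ins for the def_id and Module types used in resolve.rs.
type DefId = u32;
type Module = &'static str;

fn main() {
    let mut modules: HashMap<DefId, Module> = HashMap::new();
    let def_id: DefId = 5;

    // Mirrors the new handle_external_def shape: a contains_key test followed
    // by a dereferencing lookup, because the map now hands back a reference.
    if !modules.contains_key(&def_id) {
        modules.insert(def_id, "new module");
    } else {
        // std's get returns Option<&V>, so the value is copied out with *.
        let existing_module = *modules.get(&def_id).unwrap();
        println!("reusing {existing_module}");
    }

    // Elsewhere the same change shows up as match arms that bind through a
    // reference pattern: Some(&v) where the old map returned Some(v) by value.
    match modules.get(&def_id) {
        Some(&m) => println!("found {m}"),
        None => println!("not found"),
    }
}
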
View file
@ -167,7 +167,7 @@ use middle::trans::type_of;
use middle::ty; use middle::ty;
use util::common::indenter; use util::common::indenter;
use std::oldmap::HashMap; use core::hashmap::linear::LinearMap;
use syntax::ast; use syntax::ast;
use syntax::ast::ident; use syntax::ast::ident;
use syntax::ast_util::path_to_ident; use syntax::ast_util::path_to_ident;
@ -282,7 +282,7 @@ pub fn trans_opt(bcx: block, o: &Opt) -> opt_result {
pub fn variant_opt(bcx: block, pat_id: ast::node_id) pub fn variant_opt(bcx: block, pat_id: ast::node_id)
-> Opt { -> Opt {
let ccx = bcx.ccx(); let ccx = bcx.ccx();
match ccx.tcx.def_map.get(&pat_id) { match *ccx.tcx.def_map.get(&pat_id) {
ast::def_variant(enum_id, var_id) => { ast::def_variant(enum_id, var_id) => {
let variants = ty::enum_variants(ccx.tcx, enum_id); let variants = ty::enum_variants(ccx.tcx, enum_id);
for vec::each(*variants) |v| { for vec::each(*variants) |v| {
@ -323,7 +323,7 @@ pub struct BindingInfo {
ty: ty::t, ty: ty::t,
} }
pub type BindingsMap = HashMap<ident, BindingInfo>; pub type BindingsMap = LinearMap<ident, BindingInfo>;
pub struct ArmData { pub struct ArmData {
bodycx: block, bodycx: block,
@ -517,7 +517,7 @@ pub fn enter_opt<'r>(bcx: block,
match p.node { match p.node {
ast::pat_enum(*) | ast::pat_enum(*) |
ast::pat_ident(_, _, None) if pat_is_const(tcx.def_map, p) => { ast::pat_ident(_, _, None) if pat_is_const(tcx.def_map, p) => {
let const_def = tcx.def_map.get(&p.id); let const_def = *tcx.def_map.get(&p.id);
let const_def_id = ast_util::def_id_of_def(const_def); let const_def_id = ast_util::def_id_of_def(const_def);
if opt_eq(tcx, &lit(ConstLit(const_def_id)), opt) { if opt_eq(tcx, &lit(ConstLit(const_def_id)), opt) {
Some(~[]) Some(~[])
@ -553,7 +553,7 @@ pub fn enter_opt<'r>(bcx: block,
if opt_eq(tcx, &variant_opt(bcx, p.id), opt) { if opt_eq(tcx, &variant_opt(bcx, p.id), opt) {
// Look up the struct variant ID. // Look up the struct variant ID.
let struct_id; let struct_id;
match tcx.def_map.get(&p.id) { match *tcx.def_map.get(&p.id) {
ast::def_variant(_, found_struct_id) => { ast::def_variant(_, found_struct_id) => {
struct_id = found_struct_id; struct_id = found_struct_id;
} }
@ -801,15 +801,15 @@ pub fn get_options(bcx: block, m: &[@Match], col: uint) -> ~[Opt] {
// This is one of: an enum variant, a unit-like struct, or a // This is one of: an enum variant, a unit-like struct, or a
// variable binding. // variable binding.
match ccx.tcx.def_map.find(&cur.id) { match ccx.tcx.def_map.find(&cur.id) {
Some(ast::def_variant(*)) => { Some(&ast::def_variant(*)) => {
add_to_set(ccx.tcx, &mut found, add_to_set(ccx.tcx, &mut found,
variant_opt(bcx, cur.id)); variant_opt(bcx, cur.id));
} }
Some(ast::def_struct(*)) => { Some(&ast::def_struct(*)) => {
add_to_set(ccx.tcx, &mut found, add_to_set(ccx.tcx, &mut found,
lit(UnitLikeStructLit(cur.id))); lit(UnitLikeStructLit(cur.id)));
} }
Some(ast::def_const(const_did)) => { Some(&ast::def_const(const_did)) => {
add_to_set(ccx.tcx, &mut found, add_to_set(ccx.tcx, &mut found,
lit(ConstLit(const_did))); lit(ConstLit(const_did)));
} }
@ -820,11 +820,11 @@ pub fn get_options(bcx: block, m: &[@Match], col: uint) -> ~[Opt] {
// This could be one of: a tuple-like enum variant, a // This could be one of: a tuple-like enum variant, a
// struct-like enum variant, or a struct. // struct-like enum variant, or a struct.
match ccx.tcx.def_map.find(&cur.id) { match ccx.tcx.def_map.find(&cur.id) {
Some(ast::def_variant(*)) => { Some(&ast::def_variant(*)) => {
add_to_set(ccx.tcx, &mut found, add_to_set(ccx.tcx, &mut found,
variant_opt(bcx, cur.id)); variant_opt(bcx, cur.id));
} }
Some(ast::def_const(const_did)) => { Some(&ast::def_const(const_did)) => {
add_to_set(ccx.tcx, &mut found, add_to_set(ccx.tcx, &mut found,
lit(ConstLit(const_did))); lit(ConstLit(const_did)));
} }
@ -959,7 +959,7 @@ pub fn root_pats_as_necessary(bcx: block,
let key = root_map_key {id: pat_id, derefs: 0u }; let key = root_map_key {id: pat_id, derefs: 0u };
match bcx.ccx().maps.root_map.find(&key) { match bcx.ccx().maps.root_map.find(&key) {
None => (), None => (),
Some(root_info) => { Some(&root_info) => {
// Note: the scope_id will always be the id of the match. See // Note: the scope_id will always be the id of the match. See
// the extended comment in rustc::middle::borrowck::preserve() // the extended comment in rustc::middle::borrowck::preserve()
// for details (look for the case covering cat_discr). // for details (look for the case covering cat_discr).
@ -1013,7 +1013,7 @@ pub fn any_tuple_struct_pat(bcx: block, m: &[@Match], col: uint) -> bool {
match pat.node { match pat.node {
ast::pat_enum(_, Some(_)) => { ast::pat_enum(_, Some(_)) => {
match bcx.tcx().def_map.find(&pat.id) { match bcx.tcx().def_map.find(&pat.id) {
Some(ast::def_struct(*)) => true, Some(&ast::def_struct(*)) => true,
_ => false _ => false
} }
} }
@ -1620,7 +1620,7 @@ pub fn trans_match_inner(scope_cx: block,
// to an alloca() that will be the value for that local variable. // to an alloca() that will be the value for that local variable.
// Note that we use the names because each binding will have many ids // Note that we use the names because each binding will have many ids
// from the various alternatives. // from the various alternatives.
let bindings_map = HashMap(); let mut bindings_map = LinearMap::new();
do pat_bindings(tcx.def_map, arm.pats[0]) |bm, p_id, _s, path| { do pat_bindings(tcx.def_map, arm.pats[0]) |bm, p_id, _s, path| {
let ident = path_to_ident(path); let ident = path_to_ident(path);
let variable_ty = node_id_type(bcx, p_id); let variable_ty = node_id_type(bcx, p_id);
@ -1633,7 +1633,7 @@ pub fn trans_match_inner(scope_cx: block,
// but during matching we need to store a *T as explained // but during matching we need to store a *T as explained
// above // above
let is_move = let is_move =
scope_cx.ccx().maps.moves_map.contains_key(&p_id); scope_cx.ccx().maps.moves_map.contains(&p_id);
llmatch = alloca(bcx, T_ptr(llvariable_ty)); llmatch = alloca(bcx, T_ptr(llvariable_ty));
trmode = TrByValue(is_move, alloca(bcx, llvariable_ty)); trmode = TrByValue(is_move, alloca(bcx, llvariable_ty));
} }
@ -1765,7 +1765,7 @@ pub fn bind_irrefutable_pat(bcx: block,
} }
ast::pat_enum(_, ref sub_pats) => { ast::pat_enum(_, ref sub_pats) => {
match bcx.tcx().def_map.find(&pat.id) { match bcx.tcx().def_map.find(&pat.id) {
Some(ast::def_variant(enum_id, var_id)) => { Some(&ast::def_variant(enum_id, var_id)) => {
let repr = adt::represent_node(bcx, pat.id); let repr = adt::represent_node(bcx, pat.id);
let vinfo = ty::enum_variant_with_id(ccx.tcx, let vinfo = ty::enum_variant_with_id(ccx.tcx,
enum_id, enum_id,
@ -1784,7 +1784,7 @@ pub fn bind_irrefutable_pat(bcx: block,
} }
} }
} }
Some(ast::def_struct(*)) => { Some(&ast::def_struct(*)) => {
match *sub_pats { match *sub_pats {
None => { None => {
// This is a unit-like struct. Nothing to do here. // This is a unit-like struct. Nothing to do here.
@ -1804,7 +1804,7 @@ pub fn bind_irrefutable_pat(bcx: block,
} }
} }
} }
Some(ast::def_const(*)) => { Some(&ast::def_const(*)) => {
bcx = bind_irrefutable_pat(bcx, pat, val, make_copy, binding_mode); bcx = bind_irrefutable_pat(bcx, pat, val, make_copy, binding_mode);
} }
_ => { _ => {
View file
@ -67,7 +67,7 @@ use util::ppaux::{ty_to_str, ty_to_short_str};
use util::ppaux; use util::ppaux;
use core::hash; use core::hash;
use core::hashmap::linear::LinearMap; use core::hashmap::linear::{LinearMap, LinearSet};
use core::int; use core::int;
use core::io; use core::io;
use core::libc::{c_uint, c_ulonglong}; use core::libc::{c_uint, c_ulonglong};
@ -383,7 +383,7 @@ pub fn get_tydesc_simple(ccx: @CrateContext, t: ty::t) -> ValueRef {
pub fn get_tydesc(ccx: @CrateContext, t: ty::t) -> @mut tydesc_info { pub fn get_tydesc(ccx: @CrateContext, t: ty::t) -> @mut tydesc_info {
match ccx.tydescs.find(&t) { match ccx.tydescs.find(&t) {
Some(inf) => inf, Some(&inf) => inf,
_ => { _ => {
ccx.stats.n_static_tydescs += 1u; ccx.stats.n_static_tydescs += 1u;
let inf = glue::declare_tydesc(ccx, t); let inf = glue::declare_tydesc(ccx, t);
@ -467,10 +467,12 @@ pub fn set_glue_inlining(f: ValueRef, t: ty::t) {
// Double-check that we never ask LLVM to declare the same symbol twice. It // Double-check that we never ask LLVM to declare the same symbol twice. It
// silently mangles such symbols, breaking our linkage model. // silently mangles such symbols, breaking our linkage model.
pub fn note_unique_llvm_symbol(ccx: @CrateContext, +sym: ~str) { pub fn note_unique_llvm_symbol(ccx: @CrateContext, +sym: ~str) {
if ccx.all_llvm_symbols.contains_key(&sym) { // XXX: this should not be necessary
use core::container::Set;
if ccx.all_llvm_symbols.contains(&sym) {
ccx.sess.bug(~"duplicate LLVM symbol: " + sym); ccx.sess.bug(~"duplicate LLVM symbol: " + sym);
} }
ccx.all_llvm_symbols.insert(sym, ()); ccx.all_llvm_symbols.insert(sym);
} }
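
In the hunk above, all_llvm_symbols moves from a unit-valued map to a LinearSet, so membership checks change from contains_key to contains and inserts drop the dummy () value. A rough modern equivalent using std::collections::HashSet, for illustration only (the symbol string is made up):

use std::collections::HashSet;

fn main() {
    // The old style would have been a HashMap<String, ()> with insert(sym, ()).
    let mut all_llvm_symbols: HashSet<String> = HashSet::new();

    let sym = String::from("_ZN4main20hdeadbeef");
    if all_llvm_symbols.contains(&sym) {
        panic!("duplicate LLVM symbol: {sym}");
    }
    all_llvm_symbols.insert(sym);
}
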
@ -1100,7 +1102,7 @@ pub fn init_local(bcx: block, local: @ast::local) -> block {
} }
let llptr = match bcx.fcx.lllocals.find(&local.node.id) { let llptr = match bcx.fcx.lllocals.find(&local.node.id) {
Some(local_mem(v)) => v, Some(&local_mem(v)) => v,
_ => { bcx.tcx().sess.span_bug(local.span, _ => { bcx.tcx().sess.span_bug(local.span,
~"init_local: Someone forgot to document why it's\ ~"init_local: Someone forgot to document why it's\
safe to assume local.node.init must be local_mem!"); safe to assume local.node.init must be local_mem!");
@ -1453,7 +1455,7 @@ pub fn call_memcpy(cx: block, dst: ValueRef, src: ValueRef,
| session::arch_mips => ~"llvm.memcpy.p0i8.p0i8.i32", | session::arch_mips => ~"llvm.memcpy.p0i8.p0i8.i32",
session::arch_x86_64 => ~"llvm.memcpy.p0i8.p0i8.i64" session::arch_x86_64 => ~"llvm.memcpy.p0i8.p0i8.i64"
}; };
let memcpy = ccx.intrinsics.get(&key); let memcpy = *ccx.intrinsics.get(&key);
let src_ptr = PointerCast(cx, src, T_ptr(T_i8())); let src_ptr = PointerCast(cx, src, T_ptr(T_i8()));
let dst_ptr = PointerCast(cx, dst, T_ptr(T_i8())); let dst_ptr = PointerCast(cx, dst, T_ptr(T_i8()));
let size = IntCast(cx, n_bytes, ccx.int_type); let size = IntCast(cx, n_bytes, ccx.int_type);
@ -1502,7 +1504,7 @@ pub fn memzero(cx: block, llptr: ValueRef, llty: TypeRef) {
} }
} }
let llintrinsicfn = ccx.intrinsics.get(&intrinsic_key); let llintrinsicfn = *ccx.intrinsics.get(&intrinsic_key);
let llptr = PointerCast(cx, llptr, T_ptr(T_i8())); let llptr = PointerCast(cx, llptr, T_ptr(T_i8()));
let llzeroval = C_u8(0); let llzeroval = C_u8(0);
let size = IntCast(cx, machine::llsize_of(ccx, llty), ccx.int_type); let size = IntCast(cx, machine::llsize_of(ccx, llty), ccx.int_type);
@ -1601,9 +1603,9 @@ pub fn new_fn_ctxt_w_id(ccx: @CrateContext,
llself: None, llself: None,
personality: None, personality: None,
loop_ret: None, loop_ret: None,
llargs: @HashMap(), llargs: @mut LinearMap::new(),
lllocals: @HashMap(), lllocals: @mut LinearMap::new(),
llupvars: @HashMap(), llupvars: @mut LinearMap::new(),
id: id, id: id,
impl_id: impl_id, impl_id: impl_id,
param_substs: param_substs, param_substs: param_substs,
@ -1905,7 +1907,7 @@ pub fn trans_enum_variant(ccx: @CrateContext,
// this function as an opaque blob due to the way that type_of() // this function as an opaque blob due to the way that type_of()
// works. So we have to cast to the destination's view of the type. // works. So we have to cast to the destination's view of the type.
let llarg = match fcx.llargs.find(&va.id) { let llarg = match fcx.llargs.find(&va.id) {
Some(local_mem(x)) => x, Some(&local_mem(x)) => x,
_ => fail!(~"trans_enum_variant: how do we know this works?"), _ => fail!(~"trans_enum_variant: how do we know this works?"),
}; };
let arg_ty = arg_tys[i].ty; let arg_ty = arg_tys[i].ty;
@ -1969,7 +1971,7 @@ pub fn trans_tuple_struct(ccx: @CrateContext,
for fields.eachi |i, field| { for fields.eachi |i, field| {
let lldestptr = adt::trans_field_ptr(bcx, repr, fcx.llretptr, 0, i); let lldestptr = adt::trans_field_ptr(bcx, repr, fcx.llretptr, 0, i);
let llarg = match fcx.llargs.get(&field.node.id) { let llarg = match *fcx.llargs.get(&field.node.id) {
local_mem(x) => x, local_mem(x) => x,
_ => { _ => {
ccx.tcx.sess.bug(~"trans_tuple_struct: llarg wasn't \ ccx.tcx.sess.bug(~"trans_tuple_struct: llarg wasn't \
@ -2347,7 +2349,7 @@ pub fn get_dtor_symbol(ccx: @CrateContext,
-> ~str { -> ~str {
let t = ty::node_id_to_type(ccx.tcx, id); let t = ty::node_id_to_type(ccx.tcx, id);
match ccx.item_symbols.find(&id) { match ccx.item_symbols.find(&id) {
Some(ref s) => (/*bad*/copy *s), Some(s) => (/*bad*/copy *s),
None if substs.is_none() => { None if substs.is_none() => {
let s = mangle_exported_name( let s = mangle_exported_name(
ccx, ccx,
@ -2382,7 +2384,7 @@ pub fn get_item_val(ccx: @CrateContext, id: ast::node_id) -> ValueRef {
debug!("get_item_val(id=`%?`)", id); debug!("get_item_val(id=`%?`)", id);
let tcx = ccx.tcx; let tcx = ccx.tcx;
match ccx.item_vals.find(&id) { match ccx.item_vals.find(&id) {
Some(v) => v, Some(&v) => v,
None => { None => {
let mut exprt = false; let mut exprt = false;
@ -2538,7 +2540,7 @@ pub fn get_item_val(ccx: @CrateContext, id: ast::node_id) -> ValueRef {
ccx.sess.bug(~"get_item_val(): unexpected variant") ccx.sess.bug(~"get_item_val(): unexpected variant")
} }
}; };
if !(exprt || ccx.reachable.contains_key(&id)) { if !(exprt || ccx.reachable.contains(&id)) {
lib::llvm::SetLinkage(val, lib::llvm::InternalLinkage); lib::llvm::SetLinkage(val, lib::llvm::InternalLinkage);
} }
ccx.item_vals.insert(id, val); ccx.item_vals.insert(id, val);
@ -2617,7 +2619,7 @@ pub fn p2i(ccx: @CrateContext, v: ValueRef) -> ValueRef {
} }
} }
pub fn declare_intrinsics(llmod: ModuleRef) -> HashMap<~str, ValueRef> { pub fn declare_intrinsics(llmod: ModuleRef) -> LinearMap<~str, ValueRef> {
let T_memcpy32_args: ~[TypeRef] = let T_memcpy32_args: ~[TypeRef] =
~[T_ptr(T_i8()), T_ptr(T_i8()), T_i32(), T_i32(), T_i1()]; ~[T_ptr(T_i8()), T_ptr(T_i8()), T_i32(), T_i32(), T_i1()];
let T_memcpy64_args: ~[TypeRef] = let T_memcpy64_args: ~[TypeRef] =
@ -2750,7 +2752,7 @@ pub fn declare_intrinsics(llmod: ModuleRef) -> HashMap<~str, ValueRef> {
let bswap64 = decl_cdecl_fn(llmod, ~"llvm.bswap.i64", let bswap64 = decl_cdecl_fn(llmod, ~"llvm.bswap.i64",
T_fn(~[T_i64()], T_i64())); T_fn(~[T_i64()], T_i64()));
let intrinsics = HashMap(); let mut intrinsics = LinearMap::new();
intrinsics.insert(~"llvm.gcroot", gcroot); intrinsics.insert(~"llvm.gcroot", gcroot);
intrinsics.insert(~"llvm.gcread", gcread); intrinsics.insert(~"llvm.gcread", gcread);
intrinsics.insert(~"llvm.memcpy.p0i8.p0i8.i32", memcpy32); intrinsics.insert(~"llvm.memcpy.p0i8.p0i8.i32", memcpy32);
@ -2811,7 +2813,7 @@ pub fn declare_intrinsics(llmod: ModuleRef) -> HashMap<~str, ValueRef> {
} }
pub fn declare_dbg_intrinsics(llmod: ModuleRef, pub fn declare_dbg_intrinsics(llmod: ModuleRef,
intrinsics: HashMap<~str, ValueRef>) { intrinsics: &mut LinearMap<~str, ValueRef>) {
let declare = let declare =
decl_cdecl_fn(llmod, ~"llvm.dbg.declare", decl_cdecl_fn(llmod, ~"llvm.dbg.declare",
T_fn(~[T_metadata(), T_metadata()], T_void())); T_fn(~[T_metadata(), T_metadata()], T_void()));
@ -2826,7 +2828,7 @@ pub fn declare_dbg_intrinsics(llmod: ModuleRef,
pub fn trap(bcx: block) { pub fn trap(bcx: block) {
let v: ~[ValueRef] = ~[]; let v: ~[ValueRef] = ~[];
match bcx.ccx().intrinsics.find(&~"llvm.trap") { match bcx.ccx().intrinsics.find(&~"llvm.trap") {
Some(x) => { Call(bcx, x, v); }, Some(&x) => { Call(bcx, x, v); },
_ => bcx.sess().bug(~"unbound llvm.trap in trap") _ => bcx.sess().bug(~"unbound llvm.trap in trap")
} }
} }
@ -2861,8 +2863,8 @@ pub fn create_module_map(ccx: @CrateContext) -> ValueRef {
lib::llvm::SetLinkage(map, lib::llvm::InternalLinkage); lib::llvm::SetLinkage(map, lib::llvm::InternalLinkage);
} }
let mut elts: ~[ValueRef] = ~[]; let mut elts: ~[ValueRef] = ~[];
for ccx.module_data.each |&key, &val| { for ccx.module_data.each |&(key, &val)| {
let elt = C_struct(~[p2i(ccx, C_cstr(ccx, @/*bad*/ copy key)), let elt = C_struct(~[p2i(ccx, C_cstr(ccx, @/*bad*/ copy *key)),
p2i(ccx, val)]); p2i(ccx, val)]);
elts.push(elt); elts.push(elt);
} }
@ -3036,9 +3038,9 @@ pub fn trans_crate(sess: session::Session,
let targ_cfg = sess.targ_cfg; let targ_cfg = sess.targ_cfg;
let td = mk_target_data(sess.targ_cfg.target_strs.data_layout); let td = mk_target_data(sess.targ_cfg.target_strs.data_layout);
let tn = mk_type_names(); let tn = mk_type_names();
let intrinsics = declare_intrinsics(llmod); let mut intrinsics = declare_intrinsics(llmod);
if sess.opts.extra_debuginfo { if sess.opts.extra_debuginfo {
declare_dbg_intrinsics(llmod, intrinsics); declare_dbg_intrinsics(llmod, &mut intrinsics);
} }
let int_type = T_int(targ_cfg); let int_type = T_int(targ_cfg);
let float_type = T_float(targ_cfg); let float_type = T_float(targ_cfg);
@ -3061,34 +3063,34 @@ pub fn trans_crate(sess: session::Session,
tn: tn, tn: tn,
externs: HashMap(), externs: HashMap(),
intrinsics: intrinsics, intrinsics: intrinsics,
item_vals: HashMap(), item_vals: @mut LinearMap::new(),
exp_map2: emap2, exp_map2: emap2,
reachable: reachable, reachable: reachable,
item_symbols: HashMap(), item_symbols: @mut LinearMap::new(),
link_meta: link_meta, link_meta: link_meta,
enum_sizes: ty::new_ty_hash(), enum_sizes: @mut LinearMap::new(),
discrims: HashMap(), discrims: @mut LinearMap::new(),
discrim_symbols: HashMap(), discrim_symbols: @mut LinearMap::new(),
tydescs: ty::new_ty_hash(), tydescs: @mut LinearMap::new(),
finished_tydescs: @mut false, finished_tydescs: @mut false,
external: HashMap(), external: @mut LinearMap::new(),
monomorphized: HashMap(), monomorphized: @mut LinearMap::new(),
monomorphizing: HashMap(), monomorphizing: @mut LinearMap::new(),
type_use_cache: HashMap(), type_use_cache: @mut LinearMap::new(),
vtables: oldmap::HashMap(), vtables: @mut LinearMap::new(),
const_cstr_cache: HashMap(), const_cstr_cache: @mut LinearMap::new(),
const_globals: HashMap(), const_globals: @mut LinearMap::new(),
const_values: HashMap(), const_values: @mut LinearMap::new(),
module_data: HashMap(), module_data: @mut LinearMap::new(),
lltypes: ty::new_ty_hash(), lltypes: ty::new_ty_hash(),
llsizingtypes: ty::new_ty_hash(), llsizingtypes: ty::new_ty_hash(),
adt_reprs: @mut LinearMap::new(), adt_reprs: @mut LinearMap::new(),
names: new_namegen(sess.parse_sess.interner), names: new_namegen(sess.parse_sess.interner),
next_addrspace: new_addrspace_gen(), next_addrspace: new_addrspace_gen(),
symbol_hasher: symbol_hasher, symbol_hasher: symbol_hasher,
type_hashcodes: ty::new_ty_hash(), type_hashcodes: @mut LinearMap::new(),
type_short_names: ty::new_ty_hash(), type_short_names: @mut LinearMap::new(),
all_llvm_symbols: HashMap(), all_llvm_symbols: @mut LinearSet::new(),
tcx: tcx, tcx: tcx,
maps: maps, maps: maps,
stats: @mut Stats { stats: @mut Stats {
@ -3101,7 +3103,7 @@ pub fn trans_crate(sess: session::Session,
n_inlines: 0u, n_inlines: 0u,
n_closures: 0u, n_closures: 0u,
llvm_insn_ctxt: @mut ~[], llvm_insn_ctxt: @mut ~[],
llvm_insns: HashMap(), llvm_insns: @mut LinearMap::new(),
fn_times: @mut ~[] fn_times: @mut ~[]
}, },
upcalls: upcall::declare_upcalls(targ_cfg, llmod), upcalls: upcall::declare_upcalls(targ_cfg, llmod),
@ -3151,7 +3153,7 @@ pub fn trans_crate(sess: session::Session,
} }
if ccx.sess.count_llvm_insns() { if ccx.sess.count_llvm_insns() {
for ccx.stats.llvm_insns.each |&k, &v| { for ccx.stats.llvm_insns.each |&(&k, &v)| {
io::println(fmt!("%-7u %s", v, k)); io::println(fmt!("%-7u %s", v, k));
} }
} }
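
The module_data and llvm_insns loops above also change shape, from the old two-argument closure each |&key, &val| to a single tuple pattern each |&(key, &val)|, because LinearMap iteration yields key/value reference pairs. Today's std HashMap behaves the same way; a small sketch under that assumption, with made-up instruction names:

use std::collections::HashMap;

fn main() {
    let mut llvm_insns: HashMap<String, usize> = HashMap::new();
    llvm_insns.insert("Load".to_string(), 7);
    llvm_insns.insert("Store".to_string(), 3);

    // Iteration hands out references to both key and value; the count is
    // copied out of &usize while the key stays borrowed.
    for (k, &v) in &llvm_insns {
        println!("{v:<7} {k}");
    }
}
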
View file
@ -18,13 +18,13 @@ use syntax::codemap::span;
use core::prelude::*; use core::prelude::*;
use core::cast; use core::cast;
use core::hashmap::linear::LinearMap;
use core::libc::{c_uint, c_int, c_ulonglong, c_char}; use core::libc::{c_uint, c_int, c_ulonglong, c_char};
use core::libc; use core::libc;
use core::option::Some; use core::option::Some;
use core::ptr; use core::ptr;
use core::str; use core::str;
use core::vec; use core::vec;
use std::oldmap::HashMap;
pub fn terminate(cx: block, _: &str) { pub fn terminate(cx: block, _: &str) {
unsafe { unsafe {
@ -55,7 +55,7 @@ pub fn count_insn(cx: block, category: &str) {
// Build version of path with cycles removed. // Build version of path with cycles removed.
// Pass 1: scan table mapping str -> rightmost pos. // Pass 1: scan table mapping str -> rightmost pos.
let mm = HashMap(); let mut mm = LinearMap::new();
let len = vec::len(*v); let len = vec::len(*v);
let mut i = 0u; let mut i = 0u;
while i < len { while i < len {
@ -70,7 +70,7 @@ pub fn count_insn(cx: block, category: &str) {
let mut s = ~"."; let mut s = ~".";
i = 0u; i = 0u;
while i < len { while i < len {
i = mm.get(&v[i]); i = *mm.get(&v[i]);
s += ~"/"; s += ~"/";
s += v[i]; s += v[i];
i += 1u; i += 1u;
@ -80,7 +80,7 @@ pub fn count_insn(cx: block, category: &str) {
s += category; s += category;
let n = match h.find(&s) { let n = match h.find(&s) {
Some(n) => n, Some(&n) => n,
_ => 0u _ => 0u
}; };
h.insert(s, n+1u); h.insert(s, n+1u);
View file
@ -700,7 +700,7 @@ pub fn trans_arg_expr(bcx: block,
match autoref_arg { match autoref_arg {
DoAutorefArg => { DoAutorefArg => {
fail_unless!(! fail_unless!(!
bcx.ccx().maps.moves_map.contains_key(&arg_expr.id)); bcx.ccx().maps.moves_map.contains(&arg_expr.id));
val = arg_datum.to_ref_llval(bcx); val = arg_datum.to_ref_llval(bcx);
} }
DontAutorefArg => { DontAutorefArg => {
View file
@ -411,7 +411,7 @@ pub fn trans_expr_fn(bcx: block,
let Result {bcx: bcx, val: closure} = match sigil { let Result {bcx: bcx, val: closure} = match sigil {
ast::BorrowedSigil | ast::ManagedSigil | ast::OwnedSigil => { ast::BorrowedSigil | ast::ManagedSigil | ast::OwnedSigil => {
let cap_vars = ccx.maps.capture_map.get(&user_id); let cap_vars = *ccx.maps.capture_map.get(&user_id);
let ret_handle = match is_loop_body {Some(x) => x, let ret_handle = match is_loop_body {Some(x) => x,
None => None}; None => None};
let ClosureResult {llbox, cdata_ty, bcx} let ClosureResult {llbox, cdata_ty, bcx}
View file
@ -45,7 +45,7 @@ use util::ppaux::{expr_repr, ty_to_str};
use core::cast; use core::cast;
use core::hash; use core::hash;
use core::hashmap::linear::LinearMap; use core::hashmap::linear::{LinearMap, LinearSet};
use core::libc::{c_uint, c_longlong, c_ulonglong}; use core::libc::{c_uint, c_longlong, c_ulonglong};
use core::ptr; use core::ptr;
use core::str; use core::str;
@ -134,7 +134,7 @@ pub struct Stats {
n_inlines: uint, n_inlines: uint,
n_closures: uint, n_closures: uint,
llvm_insn_ctxt: @mut ~[~str], llvm_insn_ctxt: @mut ~[~str],
llvm_insns: HashMap<~str, uint>, llvm_insns: @mut LinearMap<~str, uint>,
fn_times: @mut ~[(~str, int)] // (ident, time) fn_times: @mut ~[(~str, int)] // (ident, time)
} }
@ -165,30 +165,30 @@ pub struct CrateContext {
td: TargetData, td: TargetData,
tn: @TypeNames, tn: @TypeNames,
externs: ExternMap, externs: ExternMap,
intrinsics: HashMap<~str, ValueRef>, intrinsics: LinearMap<~str, ValueRef>,
item_vals: HashMap<ast::node_id, ValueRef>, item_vals: @mut LinearMap<ast::node_id, ValueRef>,
exp_map2: resolve::ExportMap2, exp_map2: resolve::ExportMap2,
reachable: reachable::map, reachable: reachable::map,
item_symbols: HashMap<ast::node_id, ~str>, item_symbols: @mut LinearMap<ast::node_id, ~str>,
link_meta: LinkMeta, link_meta: LinkMeta,
enum_sizes: HashMap<ty::t, uint>, enum_sizes: @mut LinearMap<ty::t, uint>,
discrims: HashMap<ast::def_id, ValueRef>, discrims: @mut LinearMap<ast::def_id, ValueRef>,
discrim_symbols: HashMap<ast::node_id, ~str>, discrim_symbols: @mut LinearMap<ast::node_id, ~str>,
tydescs: HashMap<ty::t, @mut tydesc_info>, tydescs: @mut LinearMap<ty::t, @mut tydesc_info>,
// Set when running emit_tydescs to enforce that no more tydescs are // Set when running emit_tydescs to enforce that no more tydescs are
// created. // created.
finished_tydescs: @mut bool, finished_tydescs: @mut bool,
// Track mapping of external ids to local items imported for inlining // Track mapping of external ids to local items imported for inlining
external: HashMap<ast::def_id, Option<ast::node_id>>, external: @mut LinearMap<ast::def_id, Option<ast::node_id>>,
// Cache instances of monomorphized functions // Cache instances of monomorphized functions
monomorphized: HashMap<mono_id, ValueRef>, monomorphized: @mut LinearMap<mono_id, ValueRef>,
monomorphizing: HashMap<ast::def_id, uint>, monomorphizing: @mut LinearMap<ast::def_id, uint>,
// Cache computed type parameter uses (see type_use.rs) // Cache computed type parameter uses (see type_use.rs)
type_use_cache: HashMap<ast::def_id, ~[type_use::type_uses]>, type_use_cache: @mut LinearMap<ast::def_id, ~[type_use::type_uses]>,
// Cache generated vtables // Cache generated vtables
vtables: HashMap<mono_id, ValueRef>, vtables: @mut LinearMap<mono_id, ValueRef>,
// Cache of constant strings, // Cache of constant strings,
const_cstr_cache: HashMap<@~str, ValueRef>, const_cstr_cache: @mut LinearMap<@~str, ValueRef>,
// Reverse-direction for const ptrs cast from globals. // Reverse-direction for const ptrs cast from globals.
// Key is an int, cast from a ValueRef holding a *T, // Key is an int, cast from a ValueRef holding a *T,
@ -198,20 +198,20 @@ pub struct CrateContext {
// when we ptrcast, and we have to ptrcast during translation // when we ptrcast, and we have to ptrcast during translation
// of a [T] const because we form a slice, a [*T,int] pair, not // of a [T] const because we form a slice, a [*T,int] pair, not
// a pointer to an LLVM array type. // a pointer to an LLVM array type.
const_globals: HashMap<int, ValueRef>, const_globals: @mut LinearMap<int, ValueRef>,
// Cache of emitted const values // Cache of emitted const values
const_values: HashMap<ast::node_id, ValueRef>, const_values: @mut LinearMap<ast::node_id, ValueRef>,
module_data: HashMap<~str, ValueRef>, module_data: @mut LinearMap<~str, ValueRef>,
lltypes: HashMap<ty::t, TypeRef>, lltypes: HashMap<ty::t, TypeRef>,
llsizingtypes: HashMap<ty::t, TypeRef>, llsizingtypes: HashMap<ty::t, TypeRef>,
adt_reprs: @mut LinearMap<ty::t, @adt::Repr>, adt_reprs: @mut LinearMap<ty::t, @adt::Repr>,
names: namegen, names: namegen,
next_addrspace: addrspace_gen, next_addrspace: addrspace_gen,
symbol_hasher: @hash::State, symbol_hasher: @hash::State,
type_hashcodes: HashMap<ty::t, @str>, type_hashcodes: @mut LinearMap<ty::t, @str>,
type_short_names: HashMap<ty::t, ~str>, type_short_names: @mut LinearMap<ty::t, ~str>,
all_llvm_symbols: Set<~str>, all_llvm_symbols: @mut LinearSet<~str>,
tcx: ty::ctxt, tcx: ty::ctxt,
maps: astencode::Maps, maps: astencode::Maps,
stats: @mut Stats, stats: @mut Stats,
@ -310,12 +310,12 @@ pub struct fn_ctxt_ {
loop_ret: Option<(ValueRef, ValueRef)>, loop_ret: Option<(ValueRef, ValueRef)>,
// Maps arguments to allocas created for them in llallocas. // Maps arguments to allocas created for them in llallocas.
llargs: @HashMap<ast::node_id, local_val>, llargs: @mut LinearMap<ast::node_id, local_val>,
// Maps the def_ids for local variables to the allocas created for // Maps the def_ids for local variables to the allocas created for
// them in llallocas. // them in llallocas.
lllocals: @HashMap<ast::node_id, local_val>, lllocals: @mut LinearMap<ast::node_id, local_val>,
// Same as above, but for closure upvars // Same as above, but for closure upvars
llupvars: @HashMap<ast::node_id, ValueRef>, llupvars: @mut LinearMap<ast::node_id, ValueRef>,
// The node_id of the function, or -1 if it doesn't correspond to // The node_id of the function, or -1 if it doesn't correspond to
// a user-defined function. // a user-defined function.
@ -712,7 +712,7 @@ pub impl block_ {
fn def(@mut self, nid: ast::node_id) -> ast::def { fn def(@mut self, nid: ast::node_id) -> ast::def {
match self.tcx().def_map.find(&nid) { match self.tcx().def_map.find(&nid) {
Some(v) => v, Some(&v) => v,
None => { None => {
self.tcx().sess.bug(fmt!( self.tcx().sess.bug(fmt!(
"No def associated with node id %?", nid)); "No def associated with node id %?", nid));
@ -1132,7 +1132,7 @@ pub fn C_u8(i: uint) -> ValueRef {
pub fn C_cstr(cx: @CrateContext, s: @~str) -> ValueRef { pub fn C_cstr(cx: @CrateContext, s: @~str) -> ValueRef {
unsafe { unsafe {
match cx.const_cstr_cache.find(&s) { match cx.const_cstr_cache.find(&s) {
Some(llval) => return llval, Some(&llval) => return llval,
None => () None => ()
} }
@ -1400,7 +1400,7 @@ pub fn node_vtables(bcx: block, id: ast::node_id)
-> Option<typeck::vtable_res> { -> Option<typeck::vtable_res> {
let raw_vtables = bcx.ccx().maps.vtable_map.find(&id); let raw_vtables = bcx.ccx().maps.vtable_map.find(&id);
raw_vtables.map( raw_vtables.map(
|vts| resolve_vtables_in_fn_ctxt(bcx.fcx, *vts)) |&vts| resolve_vtables_in_fn_ctxt(bcx.fcx, *vts))
} }
pub fn resolve_vtables_in_fn_ctxt(fcx: fn_ctxt, vts: typeck::vtable_res) pub fn resolve_vtables_in_fn_ctxt(fcx: fn_ctxt, vts: typeck::vtable_res)
View file
@ -110,7 +110,7 @@ fn const_addr_of(cx: @CrateContext, cv: ValueRef) -> ValueRef {
fn const_deref_ptr(cx: @CrateContext, v: ValueRef) -> ValueRef { fn const_deref_ptr(cx: @CrateContext, v: ValueRef) -> ValueRef {
let v = match cx.const_globals.find(&(v as int)) { let v = match cx.const_globals.find(&(v as int)) {
Some(v) => v, Some(&v) => v,
None => v None => v
}; };
unsafe { unsafe {
@ -167,7 +167,7 @@ pub fn get_const_val(cx: @CrateContext, def_id: ast::def_id) -> ValueRef {
_ => cx.tcx.sess.bug(~"expected a const to be an item") _ => cx.tcx.sess.bug(~"expected a const to be an item")
} }
} }
cx.const_values.get(&def_id.node) *cx.const_values.get(&def_id.node)
} }
pub fn const_expr(cx: @CrateContext, e: @ast::expr) -> ValueRef { pub fn const_expr(cx: @CrateContext, e: @ast::expr) -> ValueRef {
@ -175,14 +175,14 @@ pub fn const_expr(cx: @CrateContext, e: @ast::expr) -> ValueRef {
let ety = ty::expr_ty(cx.tcx, e); let ety = ty::expr_ty(cx.tcx, e);
match cx.tcx.adjustments.find(&e.id) { match cx.tcx.adjustments.find(&e.id) {
None => { } None => { }
Some(@ty::AutoAddEnv(ty::re_static, ast::BorrowedSigil)) => { Some(&@ty::AutoAddEnv(ty::re_static, ast::BorrowedSigil)) => {
llconst = C_struct(~[llconst, C_null(T_opaque_box_ptr(cx))]) llconst = C_struct(~[llconst, C_null(T_opaque_box_ptr(cx))])
} }
Some(@ty::AutoAddEnv(ref r, ref s)) => { Some(&@ty::AutoAddEnv(ref r, ref s)) => {
cx.sess.span_bug(e.span, fmt!("unexpected static function: \ cx.sess.span_bug(e.span, fmt!("unexpected static function: \
region %? sigil %?", *r, *s)) region %? sigil %?", *r, *s))
} }
Some(@ty::AutoDerefRef(ref adj)) => { Some(&@ty::AutoDerefRef(ref adj)) => {
let mut ty = ety; let mut ty = ety;
let mut maybe_ptr = None; let mut maybe_ptr = None;
for adj.autoderefs.times { for adj.autoderefs.times {
@ -496,7 +496,7 @@ fn const_expr_unadjusted(cx: @CrateContext, e: @ast::expr) -> ValueRef {
ast::expr_path(pth) => { ast::expr_path(pth) => {
fail_unless!(pth.types.len() == 0); fail_unless!(pth.types.len() == 0);
match cx.tcx.def_map.find(&e.id) { match cx.tcx.def_map.find(&e.id) {
Some(ast::def_fn(def_id, _purity)) => { Some(&ast::def_fn(def_id, _purity)) => {
if !ast_util::is_local(def_id) { if !ast_util::is_local(def_id) {
let ty = csearch::get_type(cx.tcx, def_id).ty; let ty = csearch::get_type(cx.tcx, def_id).ty;
base::trans_external_path(cx, def_id, ty) base::trans_external_path(cx, def_id, ty)
@ -505,10 +505,10 @@ fn const_expr_unadjusted(cx: @CrateContext, e: @ast::expr) -> ValueRef {
base::get_item_val(cx, def_id.node) base::get_item_val(cx, def_id.node)
} }
} }
Some(ast::def_const(def_id)) => { Some(&ast::def_const(def_id)) => {
get_const_val(cx, def_id) get_const_val(cx, def_id)
} }
Some(ast::def_variant(enum_did, variant_did)) => { Some(&ast::def_variant(enum_did, variant_did)) => {
let ety = ty::expr_ty(cx.tcx, e); let ety = ty::expr_ty(cx.tcx, e);
let repr = adt::represent_type(cx, ety); let repr = adt::represent_type(cx, ety);
let vinfo = ty::enum_variant_with_id(cx.tcx, let vinfo = ty::enum_variant_with_id(cx.tcx,
@ -516,7 +516,7 @@ fn const_expr_unadjusted(cx: @CrateContext, e: @ast::expr) -> ValueRef {
variant_did); variant_did);
adt::trans_const(cx, repr, vinfo.disr_val, []) adt::trans_const(cx, repr, vinfo.disr_val, [])
} }
Some(ast::def_struct(_)) => { Some(&ast::def_struct(_)) => {
let ety = ty::expr_ty(cx.tcx, e); let ety = ty::expr_ty(cx.tcx, e);
let llty = type_of::type_of(cx, ety); let llty = type_of::type_of(cx, ety);
C_null(llty) C_null(llty)
@ -529,13 +529,13 @@ fn const_expr_unadjusted(cx: @CrateContext, e: @ast::expr) -> ValueRef {
} }
ast::expr_call(callee, ref args, _) => { ast::expr_call(callee, ref args, _) => {
match cx.tcx.def_map.find(&callee.id) { match cx.tcx.def_map.find(&callee.id) {
Some(ast::def_struct(_)) => { Some(&ast::def_struct(_)) => {
let ety = ty::expr_ty(cx.tcx, e); let ety = ty::expr_ty(cx.tcx, e);
let repr = adt::represent_type(cx, ety); let repr = adt::represent_type(cx, ety);
adt::trans_const(cx, repr, 0, adt::trans_const(cx, repr, 0,
args.map(|a| const_expr(cx, *a))) args.map(|a| const_expr(cx, *a)))
} }
Some(ast::def_variant(enum_did, variant_did)) => { Some(&ast::def_variant(enum_did, variant_did)) => {
let ety = ty::expr_ty(cx.tcx, e); let ety = ty::expr_ty(cx.tcx, e);
let repr = adt::represent_type(cx, ety); let repr = adt::represent_type(cx, ety);
let vinfo = ty::enum_variant_with_id(cx.tcx, let vinfo = ty::enum_variant_with_id(cx.tcx,
@ -561,7 +561,7 @@ pub fn trans_const(ccx: @CrateContext, _e: @ast::expr, id: ast::node_id) {
let g = base::get_item_val(ccx, id); let g = base::get_item_val(ccx, id);
// At this point, get_item_val has already translated the // At this point, get_item_val has already translated the
// constant's initializer to determine its LLVM type. // constant's initializer to determine its LLVM type.
let v = ccx.const_values.get(&id); let v = *ccx.const_values.get(&id);
llvm::LLVMSetInitializer(g, v); llvm::LLVMSetInitializer(g, v);
llvm::LLVMSetGlobalConstant(g, True); llvm::LLVMSetGlobalConstant(g, True);
} }
View file
@ -196,7 +196,7 @@ pub fn trans_log(log_ex: @ast::expr,
}; };
let global = if ccx.module_data.contains_key(&modname) { let global = if ccx.module_data.contains_key(&modname) {
ccx.module_data.get(&modname) *ccx.module_data.get(&modname)
} else { } else {
let s = link::mangle_internal_name_by_path_and_seq( let s = link::mangle_internal_name_by_path_and_seq(
ccx, modpath, ~"loglevel"); ccx, modpath, ~"loglevel");
View file
@ -105,6 +105,7 @@ use util::common::indenter;
use util::ppaux::ty_to_str; use util::ppaux::ty_to_str;
use core::cmp; use core::cmp;
use core::container::Set; // XXX: this should not be necessary
use core::to_bytes; use core::to_bytes;
use core::uint; use core::uint;
use syntax::ast; use syntax::ast;
@ -230,7 +231,7 @@ pub impl Datum {
* `id` is located in the move table, but copies otherwise. * `id` is located in the move table, but copies otherwise.
*/ */
if bcx.ccx().maps.moves_map.contains_key(&id) { if bcx.ccx().maps.moves_map.contains(&id) {
self.move_to(bcx, action, dst) self.move_to(bcx, action, dst)
} else { } else {
self.copy_to(bcx, action, dst) self.copy_to(bcx, action, dst)
@ -646,16 +647,15 @@ pub impl Datum {
let key = root_map_key { id: expr_id, derefs: derefs }; let key = root_map_key { id: expr_id, derefs: derefs };
let bcx = match ccx.maps.root_map.find(&key) { let bcx = match ccx.maps.root_map.find(&key) {
None => bcx, None => bcx,
Some(root_info) => self.root(bcx, root_info) Some(&root_info) => self.root(bcx, root_info)
}; };
// Perform the write guard, if necessary. // Perform the write guard, if necessary.
// //
// (Note: write-guarded values are always boxes) // (Note: write-guarded values are always boxes)
let bcx = match ccx.maps.write_guard_map.find(&key) { let bcx = if ccx.maps.write_guard_map.contains(&key) {
None => bcx, self.perform_write_guard(bcx)
Some(_) => self.perform_write_guard(bcx) } else { bcx };
};
match ty::get(self.ty).sty { match ty::get(self.ty).sty {
ty::ty_box(_) | ty::ty_uniq(_) => { ty::ty_box(_) | ty::ty_uniq(_) => {
View file
@ -20,11 +20,10 @@ use middle::trans;
use middle::ty; use middle::ty;
use util::ppaux::ty_to_str; use util::ppaux::ty_to_str;
use core::hashmap::linear::LinearMap;
use core::libc; use core::libc;
use core::option; use core::option;
use core::sys; use core::sys;
use std::oldmap::HashMap;
use std::oldmap;
use syntax::codemap::{span, CharPos}; use syntax::codemap::{span, CharPos};
use syntax::parse::token::ident_interner; use syntax::parse::token::ident_interner;
use syntax::{ast, codemap, ast_util, ast_map}; use syntax::{ast, codemap, ast_util, ast_map};
@ -107,19 +106,18 @@ pub struct DebugContext {
pub fn mk_ctxt(+crate: ~str, intr: @ident_interner) -> DebugContext { pub fn mk_ctxt(+crate: ~str, intr: @ident_interner) -> DebugContext {
DebugContext { DebugContext {
llmetadata: oldmap::HashMap(), llmetadata: @mut LinearMap::new(),
names: new_namegen(intr), names: new_namegen(intr),
crate_file: crate crate_file: crate
} }
} }
fn update_cache(cache: metadata_cache, mdtag: int, val: debug_metadata) { fn update_cache(cache: metadata_cache, mdtag: int, val: debug_metadata) {
let existing = if cache.contains_key(&mdtag) { let mut existing = match cache.pop(&mdtag) {
cache.get(&mdtag) Some(arr) => arr, None => ~[]
} else {
~[]
}; };
cache.insert(mdtag, vec::append_one(existing, val)); existing.push(val);
cache.insert(mdtag, existing);
} }
struct Metadata<T> { struct Metadata<T> {
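
update_cache above now pops any existing vector for mdtag, pushes the new entry, and reinserts it, instead of copying with vec::append_one. With today's standard maps the same update is usually written with the entry API; a hedged sketch in which the debug_metadata payload is simplified to a plain string:

use std::collections::HashMap;

// Simplified stand-in for the metadata_cache / debug_metadata pair.
type MetadataCache = HashMap<i32, Vec<String>>;

fn update_cache(cache: &mut MetadataCache, mdtag: i32, val: String) {
    // Equivalent to pop-then-push-then-insert, but without ever taking the
    // vector out of the map: create an empty Vec on first use, then append.
    cache.entry(mdtag).or_insert_with(Vec::new).push(val);
}

fn main() {
    let mut cache = MetadataCache::new();
    update_cache(&mut cache, 256, "file metadata".to_string());
    update_cache(&mut cache, 256, "block metadata".to_string());
    assert_eq!(cache[&256].len(), 2);
}
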
@ -153,7 +151,7 @@ struct RetvalMetadata {
id: ast::node_id id: ast::node_id
} }
type metadata_cache = HashMap<int, ~[debug_metadata]>; type metadata_cache = @mut LinearMap<int, ~[debug_metadata]>;
enum debug_metadata { enum debug_metadata {
file_metadata(@Metadata<FileMetadata>), file_metadata(@Metadata<FileMetadata>),
@ -318,7 +316,7 @@ fn create_block(cx: block) -> @Metadata<BlockMetadata> {
}; };
let file_node = create_file(cx.ccx(), fname); let file_node = create_file(cx.ccx(), fname);
let unique_id = match cache.find(&LexicalBlockTag) { let unique_id = match cache.find(&LexicalBlockTag) {
option::Some(v) => vec::len(v) as int, option::Some(v) => v.len() as int,
option::None => 0 option::None => 0
}; };
let lldata = ~[lltag(tg), let lldata = ~[lltag(tg),
@ -746,13 +744,13 @@ pub fn create_local_var(bcx: block, local: @ast::local)
update_cache(cache, AutoVariableTag, local_var_metadata(mdval)); update_cache(cache, AutoVariableTag, local_var_metadata(mdval));
let llptr = match bcx.fcx.lllocals.find(&local.node.id) { let llptr = match bcx.fcx.lllocals.find(&local.node.id) {
option::Some(local_mem(v)) => v, option::Some(&local_mem(v)) => v,
option::Some(_) => { option::Some(_) => {
bcx.tcx().sess.span_bug(local.span, ~"local is bound to \ bcx.tcx().sess.span_bug(local.span, ~"local is bound to \
something weird"); something weird");
} }
option::None => { option::None => {
match bcx.fcx.lllocals.get(&local.node.pat.id) { match *bcx.fcx.lllocals.get(&local.node.pat.id) {
local_imm(v) => v, local_imm(v) => v,
_ => bcx.tcx().sess.span_bug(local.span, ~"local is bound to \ _ => bcx.tcx().sess.span_bug(local.span, ~"local is bound to \
something weird") something weird")
@ -760,7 +758,7 @@ pub fn create_local_var(bcx: block, local: @ast::local)
} }
}; };
let declargs = ~[llmdnode(~[llptr]), mdnode]; let declargs = ~[llmdnode(~[llptr]), mdnode];
trans::build::Call(bcx, cx.intrinsics.get(&~"llvm.dbg.declare"), trans::build::Call(bcx, *cx.intrinsics.get(&~"llvm.dbg.declare"),
declargs); declargs);
return mdval; return mdval;
} }
@ -807,12 +805,12 @@ pub fn create_arg(bcx: block, arg: ast::arg, sp: span)
}; };
update_cache(cache, tg, argument_metadata(mdval)); update_cache(cache, tg, argument_metadata(mdval));
let llptr = match fcx.llargs.get(&arg.id) { let llptr = match *fcx.llargs.get(&arg.id) {
local_mem(v) | local_imm(v) => v, local_mem(v) | local_imm(v) => v,
}; };
let declargs = ~[llmdnode(~[llptr]), mdnode]; let declargs = ~[llmdnode(~[llptr]), mdnode];
trans::build::Call(bcx, trans::build::Call(bcx,
cx.intrinsics.get(&~"llvm.dbg.declare"), *cx.intrinsics.get(&~"llvm.dbg.declare"),
declargs); declargs);
return Some(mdval); return Some(mdval);
} }
View file
@ -150,7 +150,7 @@ use middle::ty::{AutoPtr, AutoBorrowVec, AutoBorrowVecRef, AutoBorrowFn,
use util::common::indenter; use util::common::indenter;
use util::ppaux::ty_to_str; use util::ppaux::ty_to_str;
use std::oldmap::HashMap; use core::hashmap::linear::LinearMap;
use syntax::print::pprust::{expr_to_str}; use syntax::print::pprust::{expr_to_str};
use syntax::ast; use syntax::ast;
use syntax::codemap; use syntax::codemap;
@ -198,14 +198,14 @@ pub fn trans_to_datum(bcx: block, expr: @ast::expr) -> DatumBlock {
None => { None => {
trans_to_datum_unadjusted(bcx, expr) trans_to_datum_unadjusted(bcx, expr)
} }
Some(@AutoAddEnv(*)) => { Some(&@AutoAddEnv(*)) => {
let mut bcx = bcx; let mut bcx = bcx;
let mut datum = unpack_datum!(bcx, { let mut datum = unpack_datum!(bcx, {
trans_to_datum_unadjusted(bcx, expr) trans_to_datum_unadjusted(bcx, expr)
}); });
add_env(bcx, expr, datum) add_env(bcx, expr, datum)
} }
Some(@AutoDerefRef(ref adj)) => { Some(&@AutoDerefRef(ref adj)) => {
let mut bcx = bcx; let mut bcx = bcx;
let mut datum = unpack_datum!(bcx, { let mut datum = unpack_datum!(bcx, {
trans_to_datum_unadjusted(bcx, expr) trans_to_datum_unadjusted(bcx, expr)
@ -925,7 +925,7 @@ fn trans_lvalue_unadjusted(bcx: block, expr: @ast::expr) -> DatumBlock {
// at the end of the scope with id `scope_id`: // at the end of the scope with id `scope_id`:
let root_key = root_map_key { id: expr.id, derefs: 0u }; let root_key = root_map_key { id: expr.id, derefs: 0u };
for bcx.ccx().maps.root_map.find(&root_key).each |&root_info| { for bcx.ccx().maps.root_map.find(&root_key).each |&root_info| {
bcx = unrooted_datum.root(bcx, root_info); bcx = unrooted_datum.root(bcx, *root_info);
} }
return DatumBlock {bcx: bcx, datum: unrooted_datum}; return DatumBlock {bcx: bcx, datum: unrooted_datum};
@ -1131,7 +1131,7 @@ pub fn trans_local_var(bcx: block, def: ast::def) -> Datum {
// Can't move upvars, so this is never a ZeroMemLastUse. // Can't move upvars, so this is never a ZeroMemLastUse.
let local_ty = node_id_type(bcx, nid); let local_ty = node_id_type(bcx, nid);
match bcx.fcx.llupvars.find(&nid) { match bcx.fcx.llupvars.find(&nid) {
Some(val) => { Some(&val) => {
Datum { Datum {
val: val, val: val,
ty: local_ty, ty: local_ty,
@ -1146,10 +1146,10 @@ pub fn trans_local_var(bcx: block, def: ast::def) -> Datum {
} }
} }
ast::def_arg(nid, _, _) => { ast::def_arg(nid, _, _) => {
take_local(bcx, *bcx.fcx.llargs, nid) take_local(bcx, bcx.fcx.llargs, nid)
} }
ast::def_local(nid, _) | ast::def_binding(nid, _) => { ast::def_local(nid, _) | ast::def_binding(nid, _) => {
take_local(bcx, *bcx.fcx.lllocals, nid) take_local(bcx, bcx.fcx.lllocals, nid)
} }
ast::def_self(nid, _) => { ast::def_self(nid, _) => {
let self_info: ValSelfData = match bcx.fcx.llself { let self_info: ValSelfData = match bcx.fcx.llself {
@ -1181,11 +1181,11 @@ pub fn trans_local_var(bcx: block, def: ast::def) -> Datum {
}; };
fn take_local(bcx: block, fn take_local(bcx: block,
table: HashMap<ast::node_id, local_val>, table: &LinearMap<ast::node_id, local_val>,
nid: ast::node_id) -> Datum { nid: ast::node_id) -> Datum {
let (v, mode) = match table.find(&nid) { let (v, mode) = match table.find(&nid) {
Some(local_mem(v)) => (v, ByRef), Some(&local_mem(v)) => (v, ByRef),
Some(local_imm(v)) => (v, ByValue), Some(&local_imm(v)) => (v, ByValue),
None => { None => {
bcx.sess().bug(fmt!( bcx.sess().bug(fmt!(
"trans_local_var: no llval for local/arg %? found", nid)); "trans_local_var: no llval for local/arg %? found", nid));
@ -1227,7 +1227,7 @@ pub fn with_field_tys<R>(tcx: ty::ctxt,
ty_to_str(tcx, ty))); ty_to_str(tcx, ty)));
} }
Some(node_id) => { Some(node_id) => {
match tcx.def_map.get(&node_id) { match *tcx.def_map.get(&node_id) {
ast::def_variant(enum_id, variant_id) => { ast::def_variant(enum_id, variant_id) => {
let variant_info = ty::enum_variant_with_id( let variant_info = ty::enum_variant_with_id(
tcx, enum_id, variant_id); tcx, enum_id, variant_id);
View file
@ -605,7 +605,7 @@ pub fn trans_intrinsic(ccx: @CrateContext,
abi::tydesc_field_visit_glue, None); abi::tydesc_field_visit_glue, None);
} }
~"frame_address" => { ~"frame_address" => {
let frameaddress = ccx.intrinsics.get(&~"llvm.frameaddress"); let frameaddress = *ccx.intrinsics.get(&~"llvm.frameaddress");
let frameaddress_val = Call(bcx, frameaddress, ~[C_i32(0i32)]); let frameaddress_val = Call(bcx, frameaddress, ~[C_i32(0i32)]);
let star_u8 = ty::mk_imm_ptr( let star_u8 = ty::mk_imm_ptr(
bcx.tcx(), bcx.tcx(),
@ -644,7 +644,7 @@ pub fn trans_intrinsic(ccx: @CrateContext,
let size = get_param(decl, first_real_arg + 2); let size = get_param(decl, first_real_arg + 2);
let align = C_i32(1); let align = C_i32(1);
let volatile = C_i1(false); let volatile = C_i1(false);
let llfn = bcx.ccx().intrinsics.get( let llfn = *bcx.ccx().intrinsics.get(
&~"llvm.memmove.p0i8.p0i8.i32"); &~"llvm.memmove.p0i8.p0i8.i32");
Call(bcx, llfn, ~[dst_ptr, src_ptr, size, align, volatile]); Call(bcx, llfn, ~[dst_ptr, src_ptr, size, align, volatile]);
} }
@ -654,249 +654,249 @@ pub fn trans_intrinsic(ccx: @CrateContext,
let size = get_param(decl, first_real_arg + 2); let size = get_param(decl, first_real_arg + 2);
let align = C_i32(1); let align = C_i32(1);
let volatile = C_i1(false); let volatile = C_i1(false);
let llfn = bcx.ccx().intrinsics.get( let llfn = *bcx.ccx().intrinsics.get(
&~"llvm.memmove.p0i8.p0i8.i64"); &~"llvm.memmove.p0i8.p0i8.i64");
Call(bcx, llfn, ~[dst_ptr, src_ptr, size, align, volatile]); Call(bcx, llfn, ~[dst_ptr, src_ptr, size, align, volatile]);
} }
~"sqrtf32" => { ~"sqrtf32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let sqrtf = ccx.intrinsics.get(&~"llvm.sqrt.f32"); let sqrtf = *ccx.intrinsics.get(&~"llvm.sqrt.f32");
Store(bcx, Call(bcx, sqrtf, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, sqrtf, ~[x]), fcx.llretptr);
} }
~"sqrtf64" => { ~"sqrtf64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let sqrtf = ccx.intrinsics.get(&~"llvm.sqrt.f64"); let sqrtf = *ccx.intrinsics.get(&~"llvm.sqrt.f64");
Store(bcx, Call(bcx, sqrtf, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, sqrtf, ~[x]), fcx.llretptr);
} }
~"powif32" => { ~"powif32" => {
let a = get_param(decl, first_real_arg); let a = get_param(decl, first_real_arg);
let x = get_param(decl, first_real_arg + 1u); let x = get_param(decl, first_real_arg + 1u);
let powif = ccx.intrinsics.get(&~"llvm.powi.f32"); let powif = *ccx.intrinsics.get(&~"llvm.powi.f32");
Store(bcx, Call(bcx, powif, ~[a, x]), fcx.llretptr); Store(bcx, Call(bcx, powif, ~[a, x]), fcx.llretptr);
} }
~"powif64" => { ~"powif64" => {
let a = get_param(decl, first_real_arg); let a = get_param(decl, first_real_arg);
let x = get_param(decl, first_real_arg + 1u); let x = get_param(decl, first_real_arg + 1u);
let powif = ccx.intrinsics.get(&~"llvm.powi.f64"); let powif = *ccx.intrinsics.get(&~"llvm.powi.f64");
Store(bcx, Call(bcx, powif, ~[a, x]), fcx.llretptr); Store(bcx, Call(bcx, powif, ~[a, x]), fcx.llretptr);
} }
~"sinf32" => { ~"sinf32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let sinf = ccx.intrinsics.get(&~"llvm.sin.f32"); let sinf = *ccx.intrinsics.get(&~"llvm.sin.f32");
Store(bcx, Call(bcx, sinf, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, sinf, ~[x]), fcx.llretptr);
} }
~"sinf64" => { ~"sinf64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let sinf = ccx.intrinsics.get(&~"llvm.sin.f64"); let sinf = *ccx.intrinsics.get(&~"llvm.sin.f64");
Store(bcx, Call(bcx, sinf, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, sinf, ~[x]), fcx.llretptr);
} }
~"cosf32" => { ~"cosf32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let cosf = ccx.intrinsics.get(&~"llvm.cos.f32"); let cosf = *ccx.intrinsics.get(&~"llvm.cos.f32");
Store(bcx, Call(bcx, cosf, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, cosf, ~[x]), fcx.llretptr);
} }
~"cosf64" => { ~"cosf64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let cosf = ccx.intrinsics.get(&~"llvm.cos.f64"); let cosf = *ccx.intrinsics.get(&~"llvm.cos.f64");
Store(bcx, Call(bcx, cosf, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, cosf, ~[x]), fcx.llretptr);
} }
~"powf32" => { ~"powf32" => {
let a = get_param(decl, first_real_arg); let a = get_param(decl, first_real_arg);
let x = get_param(decl, first_real_arg + 1u); let x = get_param(decl, first_real_arg + 1u);
let powf = ccx.intrinsics.get(&~"llvm.pow.f32"); let powf = *ccx.intrinsics.get(&~"llvm.pow.f32");
Store(bcx, Call(bcx, powf, ~[a, x]), fcx.llretptr); Store(bcx, Call(bcx, powf, ~[a, x]), fcx.llretptr);
} }
~"powf64" => { ~"powf64" => {
let a = get_param(decl, first_real_arg); let a = get_param(decl, first_real_arg);
let x = get_param(decl, first_real_arg + 1u); let x = get_param(decl, first_real_arg + 1u);
let powf = ccx.intrinsics.get(&~"llvm.pow.f64"); let powf = *ccx.intrinsics.get(&~"llvm.pow.f64");
Store(bcx, Call(bcx, powf, ~[a, x]), fcx.llretptr); Store(bcx, Call(bcx, powf, ~[a, x]), fcx.llretptr);
} }
~"expf32" => { ~"expf32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let expf = ccx.intrinsics.get(&~"llvm.exp.f32"); let expf = *ccx.intrinsics.get(&~"llvm.exp.f32");
Store(bcx, Call(bcx, expf, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, expf, ~[x]), fcx.llretptr);
} }
~"expf64" => { ~"expf64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let expf = ccx.intrinsics.get(&~"llvm.exp.f64"); let expf = *ccx.intrinsics.get(&~"llvm.exp.f64");
Store(bcx, Call(bcx, expf, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, expf, ~[x]), fcx.llretptr);
} }
~"exp2f32" => { ~"exp2f32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let exp2f = ccx.intrinsics.get(&~"llvm.exp2.f32"); let exp2f = *ccx.intrinsics.get(&~"llvm.exp2.f32");
Store(bcx, Call(bcx, exp2f, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, exp2f, ~[x]), fcx.llretptr);
} }
~"exp2f64" => { ~"exp2f64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let exp2f = ccx.intrinsics.get(&~"llvm.exp2.f64"); let exp2f = *ccx.intrinsics.get(&~"llvm.exp2.f64");
Store(bcx, Call(bcx, exp2f, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, exp2f, ~[x]), fcx.llretptr);
} }
~"logf32" => { ~"logf32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let logf = ccx.intrinsics.get(&~"llvm.log.f32"); let logf = *ccx.intrinsics.get(&~"llvm.log.f32");
Store(bcx, Call(bcx, logf, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, logf, ~[x]), fcx.llretptr);
} }
~"logf64" => { ~"logf64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let logf = ccx.intrinsics.get(&~"llvm.log.f64"); let logf = *ccx.intrinsics.get(&~"llvm.log.f64");
Store(bcx, Call(bcx, logf, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, logf, ~[x]), fcx.llretptr);
} }
~"log10f32" => { ~"log10f32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let log10f = ccx.intrinsics.get(&~"llvm.log10.f32"); let log10f = *ccx.intrinsics.get(&~"llvm.log10.f32");
Store(bcx, Call(bcx, log10f, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, log10f, ~[x]), fcx.llretptr);
} }
~"log10f64" => { ~"log10f64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let log10f = ccx.intrinsics.get(&~"llvm.log10.f64"); let log10f = *ccx.intrinsics.get(&~"llvm.log10.f64");
Store(bcx, Call(bcx, log10f, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, log10f, ~[x]), fcx.llretptr);
} }
~"log2f32" => { ~"log2f32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let log2f = ccx.intrinsics.get(&~"llvm.log2.f32"); let log2f = *ccx.intrinsics.get(&~"llvm.log2.f32");
Store(bcx, Call(bcx, log2f, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, log2f, ~[x]), fcx.llretptr);
} }
~"log2f64" => { ~"log2f64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let log2f = ccx.intrinsics.get(&~"llvm.log2.f64"); let log2f = *ccx.intrinsics.get(&~"llvm.log2.f64");
Store(bcx, Call(bcx, log2f, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, log2f, ~[x]), fcx.llretptr);
} }
~"fmaf32" => { ~"fmaf32" => {
let a = get_param(decl, first_real_arg); let a = get_param(decl, first_real_arg);
let b = get_param(decl, first_real_arg + 1u); let b = get_param(decl, first_real_arg + 1u);
let c = get_param(decl, first_real_arg + 2u); let c = get_param(decl, first_real_arg + 2u);
let fmaf = ccx.intrinsics.get(&~"llvm.fma.f32"); let fmaf = *ccx.intrinsics.get(&~"llvm.fma.f32");
Store(bcx, Call(bcx, fmaf, ~[a, b, c]), fcx.llretptr); Store(bcx, Call(bcx, fmaf, ~[a, b, c]), fcx.llretptr);
} }
~"fmaf64" => { ~"fmaf64" => {
let a = get_param(decl, first_real_arg); let a = get_param(decl, first_real_arg);
let b = get_param(decl, first_real_arg + 1u); let b = get_param(decl, first_real_arg + 1u);
let c = get_param(decl, first_real_arg + 2u); let c = get_param(decl, first_real_arg + 2u);
let fmaf = ccx.intrinsics.get(&~"llvm.fma.f64"); let fmaf = *ccx.intrinsics.get(&~"llvm.fma.f64");
Store(bcx, Call(bcx, fmaf, ~[a, b, c]), fcx.llretptr); Store(bcx, Call(bcx, fmaf, ~[a, b, c]), fcx.llretptr);
} }
~"fabsf32" => { ~"fabsf32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let fabsf = ccx.intrinsics.get(&~"llvm.fabs.f32"); let fabsf = *ccx.intrinsics.get(&~"llvm.fabs.f32");
Store(bcx, Call(bcx, fabsf, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, fabsf, ~[x]), fcx.llretptr);
} }
~"fabsf64" => { ~"fabsf64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let fabsf = ccx.intrinsics.get(&~"llvm.fabs.f64"); let fabsf = *ccx.intrinsics.get(&~"llvm.fabs.f64");
Store(bcx, Call(bcx, fabsf, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, fabsf, ~[x]), fcx.llretptr);
} }
~"floorf32" => { ~"floorf32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let floorf = ccx.intrinsics.get(&~"llvm.floor.f32"); let floorf = *ccx.intrinsics.get(&~"llvm.floor.f32");
Store(bcx, Call(bcx, floorf, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, floorf, ~[x]), fcx.llretptr);
} }
~"floorf64" => { ~"floorf64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let floorf = ccx.intrinsics.get(&~"llvm.floor.f64"); let floorf = *ccx.intrinsics.get(&~"llvm.floor.f64");
Store(bcx, Call(bcx, floorf, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, floorf, ~[x]), fcx.llretptr);
} }
~"ceilf32" => { ~"ceilf32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let ceilf = ccx.intrinsics.get(&~"llvm.ceil.f32"); let ceilf = *ccx.intrinsics.get(&~"llvm.ceil.f32");
Store(bcx, Call(bcx, ceilf, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, ceilf, ~[x]), fcx.llretptr);
} }
~"ceilf64" => { ~"ceilf64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let ceilf = ccx.intrinsics.get(&~"llvm.ceil.f64"); let ceilf = *ccx.intrinsics.get(&~"llvm.ceil.f64");
Store(bcx, Call(bcx, ceilf, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, ceilf, ~[x]), fcx.llretptr);
} }
~"truncf32" => { ~"truncf32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let truncf = ccx.intrinsics.get(&~"llvm.trunc.f32"); let truncf = *ccx.intrinsics.get(&~"llvm.trunc.f32");
Store(bcx, Call(bcx, truncf, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, truncf, ~[x]), fcx.llretptr);
} }
~"truncf64" => { ~"truncf64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let truncf = ccx.intrinsics.get(&~"llvm.trunc.f64"); let truncf = *ccx.intrinsics.get(&~"llvm.trunc.f64");
Store(bcx, Call(bcx, truncf, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, truncf, ~[x]), fcx.llretptr);
} }
~"ctpop8" => { ~"ctpop8" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let ctpop = ccx.intrinsics.get(&~"llvm.ctpop.i8"); let ctpop = *ccx.intrinsics.get(&~"llvm.ctpop.i8");
Store(bcx, Call(bcx, ctpop, ~[x]), fcx.llretptr) Store(bcx, Call(bcx, ctpop, ~[x]), fcx.llretptr)
} }
~"ctpop16" => { ~"ctpop16" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let ctpop = ccx.intrinsics.get(&~"llvm.ctpop.i16"); let ctpop = *ccx.intrinsics.get(&~"llvm.ctpop.i16");
Store(bcx, Call(bcx, ctpop, ~[x]), fcx.llretptr) Store(bcx, Call(bcx, ctpop, ~[x]), fcx.llretptr)
} }
~"ctpop32" => { ~"ctpop32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let ctpop = ccx.intrinsics.get(&~"llvm.ctpop.i32"); let ctpop = *ccx.intrinsics.get(&~"llvm.ctpop.i32");
Store(bcx, Call(bcx, ctpop, ~[x]), fcx.llretptr) Store(bcx, Call(bcx, ctpop, ~[x]), fcx.llretptr)
} }
~"ctpop64" => { ~"ctpop64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let ctpop = ccx.intrinsics.get(&~"llvm.ctpop.i64"); let ctpop = *ccx.intrinsics.get(&~"llvm.ctpop.i64");
Store(bcx, Call(bcx, ctpop, ~[x]), fcx.llretptr) Store(bcx, Call(bcx, ctpop, ~[x]), fcx.llretptr)
} }
~"ctlz8" => { ~"ctlz8" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let y = C_i1(false); let y = C_i1(false);
let ctlz = ccx.intrinsics.get(&~"llvm.ctlz.i8"); let ctlz = *ccx.intrinsics.get(&~"llvm.ctlz.i8");
Store(bcx, Call(bcx, ctlz, ~[x, y]), fcx.llretptr) Store(bcx, Call(bcx, ctlz, ~[x, y]), fcx.llretptr)
} }
~"ctlz16" => { ~"ctlz16" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let y = C_i1(false); let y = C_i1(false);
let ctlz = ccx.intrinsics.get(&~"llvm.ctlz.i16"); let ctlz = *ccx.intrinsics.get(&~"llvm.ctlz.i16");
Store(bcx, Call(bcx, ctlz, ~[x, y]), fcx.llretptr) Store(bcx, Call(bcx, ctlz, ~[x, y]), fcx.llretptr)
} }
~"ctlz32" => { ~"ctlz32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let y = C_i1(false); let y = C_i1(false);
let ctlz = ccx.intrinsics.get(&~"llvm.ctlz.i32"); let ctlz = *ccx.intrinsics.get(&~"llvm.ctlz.i32");
Store(bcx, Call(bcx, ctlz, ~[x, y]), fcx.llretptr) Store(bcx, Call(bcx, ctlz, ~[x, y]), fcx.llretptr)
} }
~"ctlz64" => { ~"ctlz64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let y = C_i1(false); let y = C_i1(false);
let ctlz = ccx.intrinsics.get(&~"llvm.ctlz.i64"); let ctlz = *ccx.intrinsics.get(&~"llvm.ctlz.i64");
Store(bcx, Call(bcx, ctlz, ~[x, y]), fcx.llretptr) Store(bcx, Call(bcx, ctlz, ~[x, y]), fcx.llretptr)
} }
~"cttz8" => { ~"cttz8" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let y = C_i1(false); let y = C_i1(false);
let cttz = ccx.intrinsics.get(&~"llvm.cttz.i8"); let cttz = *ccx.intrinsics.get(&~"llvm.cttz.i8");
Store(bcx, Call(bcx, cttz, ~[x, y]), fcx.llretptr) Store(bcx, Call(bcx, cttz, ~[x, y]), fcx.llretptr)
} }
~"cttz16" => { ~"cttz16" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let y = C_i1(false); let y = C_i1(false);
let cttz = ccx.intrinsics.get(&~"llvm.cttz.i16"); let cttz = *ccx.intrinsics.get(&~"llvm.cttz.i16");
Store(bcx, Call(bcx, cttz, ~[x, y]), fcx.llretptr) Store(bcx, Call(bcx, cttz, ~[x, y]), fcx.llretptr)
} }
~"cttz32" => { ~"cttz32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let y = C_i1(false); let y = C_i1(false);
let cttz = ccx.intrinsics.get(&~"llvm.cttz.i32"); let cttz = *ccx.intrinsics.get(&~"llvm.cttz.i32");
Store(bcx, Call(bcx, cttz, ~[x, y]), fcx.llretptr) Store(bcx, Call(bcx, cttz, ~[x, y]), fcx.llretptr)
} }
~"cttz64" => { ~"cttz64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let y = C_i1(false); let y = C_i1(false);
let cttz = ccx.intrinsics.get(&~"llvm.cttz.i64"); let cttz = *ccx.intrinsics.get(&~"llvm.cttz.i64");
Store(bcx, Call(bcx, cttz, ~[x, y]), fcx.llretptr) Store(bcx, Call(bcx, cttz, ~[x, y]), fcx.llretptr)
} }
~"bswap16" => { ~"bswap16" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let cttz = ccx.intrinsics.get(&~"llvm.bswap.i16"); let cttz = *ccx.intrinsics.get(&~"llvm.bswap.i16");
Store(bcx, Call(bcx, cttz, ~[x]), fcx.llretptr) Store(bcx, Call(bcx, cttz, ~[x]), fcx.llretptr)
} }
~"bswap32" => { ~"bswap32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let cttz = ccx.intrinsics.get(&~"llvm.bswap.i32"); let cttz = *ccx.intrinsics.get(&~"llvm.bswap.i32");
Store(bcx, Call(bcx, cttz, ~[x]), fcx.llretptr) Store(bcx, Call(bcx, cttz, ~[x]), fcx.llretptr)
} }
~"bswap64" => { ~"bswap64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let cttz = ccx.intrinsics.get(&~"llvm.bswap.i64"); let cttz = *ccx.intrinsics.get(&~"llvm.bswap.i64");
Store(bcx, Call(bcx, cttz, ~[x]), fcx.llretptr) Store(bcx, Call(bcx, cttz, ~[x]), fcx.llretptr)
} }
_ => { _ => {

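Editorial note: every arm of the intrinsics match above changes `ccx.intrinsics.get(&~"llvm...")` to `*ccx.intrinsics.get(&~"llvm...")`. The new table hands back a borrow of the stored ValueRef rather than a copy, and because the handle is a plain Copy value a single dereference recovers it. A minimal sketch of the same shape in present-day Rust, with `std::collections::HashMap` standing in for the old map and a dummy handle type (both hypothetical):

```rust
use std::collections::HashMap;

// Stand-in for an LLVM ValueRef: a plain Copy handle.
#[derive(Clone, Copy, Debug)]
struct ValueRef(usize);

fn main() {
    // Hypothetical name -> declared-intrinsic table, as on the crate context.
    let mut intrinsics: HashMap<&'static str, ValueRef> = HashMap::new();
    intrinsics.insert("llvm.sqrt.f64", ValueRef(1));

    // `get` returns Option<&ValueRef>; dereferencing copies the handle out,
    // which is exactly what the added `*` in `*ccx.intrinsics.get(..)` does.
    let sqrtf: ValueRef = *intrinsics.get("llvm.sqrt.f64").expect("intrinsic declared");
    println!("{:?}", sqrtf);
}
```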
View file

@ -32,13 +32,13 @@ pub fn maybe_instantiate_inline(ccx: @CrateContext, fn_id: ast::def_id,
-> ast::def_id { -> ast::def_id {
let _icx = ccx.insn_ctxt("maybe_instantiate_inline"); let _icx = ccx.insn_ctxt("maybe_instantiate_inline");
match ccx.external.find(&fn_id) { match ccx.external.find(&fn_id) {
Some(Some(node_id)) => { Some(&Some(node_id)) => {
// Already inline // Already inline
debug!("maybe_instantiate_inline(%s): already inline as node id %d", debug!("maybe_instantiate_inline(%s): already inline as node id %d",
ty::item_path_str(ccx.tcx, fn_id), node_id); ty::item_path_str(ccx.tcx, fn_id), node_id);
local_def(node_id) local_def(node_id)
} }
Some(None) => fn_id, // Not inlinable Some(&None) => fn_id, // Not inlinable
None => { // Not seen yet None => { // Not seen yet
match csearch::maybe_get_item_ast( match csearch::maybe_get_item_ast(
ccx.tcx, fn_id, ccx.tcx, fn_id,

View file

@ -10,6 +10,7 @@
// Information concerning the machine representation of various types. // Information concerning the machine representation of various types.
use core::prelude::*;
use lib::llvm::{ModuleRef, ValueRef, TypeRef, BasicBlockRef, BuilderRef}; use lib::llvm::{ModuleRef, ValueRef, TypeRef, BasicBlockRef, BuilderRef};
use lib::llvm::{True, False, Bool}; use lib::llvm::{True, False, Bool};
@ -119,7 +120,7 @@ pub fn llalign_of(cx: @CrateContext, t: TypeRef) -> ValueRef {
// Computes the size of the data part of an enum. // Computes the size of the data part of an enum.
pub fn static_size_of_enum(cx: @CrateContext, t: ty::t) -> uint { pub fn static_size_of_enum(cx: @CrateContext, t: ty::t) -> uint {
if cx.enum_sizes.contains_key(&t) { if cx.enum_sizes.contains_key(&t) {
return cx.enum_sizes.get(&t); return *cx.enum_sizes.get(&t);
} }
debug!("static_size_of_enum %s", ty_to_str(cx.tcx, t)); debug!("static_size_of_enum %s", ty_to_str(cx.tcx, t));

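Editorial note: `static_size_of_enum` keeps its `contains_key` followed by `get` (now `*cx.enum_sizes.get(&t)`), which probes the cache twice. A single lookup would do the same job; this is only an aside, not what the commit does. A sketch under that assumption, with invented names:

```rust
use std::collections::HashMap;

type TypeId = u32; // hypothetical key standing in for ty::t

// Size cache probed once per call instead of contains_key followed by get.
fn static_size_of_enum(sizes: &mut HashMap<TypeId, usize>, t: TypeId) -> usize {
    if let Some(&size) = sizes.get(&t) {
        return size; // cache hit
    }
    let size = compute_size(t); // placeholder for the real layout computation
    sizes.insert(t, size);
    size
}

fn compute_size(t: TypeId) -> usize {
    t as usize % 8 + 1 // dummy result, only here to keep the sketch runnable
}

fn main() {
    let mut sizes = HashMap::new();
    assert_eq!(static_size_of_enum(&mut sizes, 3), 4);
    assert_eq!(static_size_of_enum(&mut sizes, 3), 4); // second call hits the cache
}
```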
View file

@ -323,7 +323,7 @@ pub fn trans_static_method_callee(bcx: block,
name=%s", method_id, callee_id, *ccx.sess.str_of(mname)); name=%s", method_id, callee_id, *ccx.sess.str_of(mname));
let vtbls = resolve_vtables_in_fn_ctxt( let vtbls = resolve_vtables_in_fn_ctxt(
bcx.fcx, ccx.maps.vtable_map.get(&callee_id)); bcx.fcx, *ccx.maps.vtable_map.get(&callee_id));
match vtbls[bound_index] { match vtbls[bound_index] {
typeck::vtable_static(impl_did, ref rcvr_substs, rcvr_origins) => { typeck::vtable_static(impl_did, ref rcvr_substs, rcvr_origins) => {
@ -764,7 +764,7 @@ pub fn get_vtable(ccx: @CrateContext,
// XXX: Bad copy. // XXX: Bad copy.
let hash_id = vtable_id(ccx, copy origin); let hash_id = vtable_id(ccx, copy origin);
match ccx.vtables.find(&hash_id) { match ccx.vtables.find(&hash_id) {
Some(val) => val, Some(&val) => val,
None => match origin { None => match origin {
typeck::vtable_static(id, substs, sub_vtables) => { typeck::vtable_static(id, substs, sub_vtables) => {
make_impl_vtable(ccx, id, substs, sub_vtables) make_impl_vtable(ccx, id, substs, sub_vtables)

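Editorial note: the vtable cache above switches from `Some(val) => val` to `Some(&val) => val`, because `find` on the new map yields `Option<&V>` and binding through `&` copies the Copy value out of the borrow. The same memoization shape in today's Rust, with hypothetical names:

```rust
use std::collections::HashMap;

fn main() {
    // Hypothetical vtable cache; &'static str stands in for the vtable value.
    let mut vtables: HashMap<u32, &'static str> = HashMap::new();
    vtables.insert(7, "cached vtable");

    // `get` yields Option<&V>; matching `Some(&val)` copies the Copy value out,
    // the same shape as the `Some(&val) => val` arms introduced above.
    let v = match vtables.get(&7) {
        Some(&val) => val,
        None => "freshly built vtable", // stand-in for make_impl_vtable(..)
    };
    assert_eq!(v, "cached vtable");
}
```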
View file

@ -77,7 +77,7 @@ pub fn monomorphic_fn(ccx: @CrateContext,
substs.map(|s| ty_to_str(ccx.tcx, *s)), hash_id); substs.map(|s| ty_to_str(ccx.tcx, *s)), hash_id);
match ccx.monomorphized.find(&hash_id) { match ccx.monomorphized.find(&hash_id) {
Some(val) => { Some(&val) => {
debug!("leaving monomorphic fn %s", debug!("leaving monomorphic fn %s",
ty::item_path_str(ccx.tcx, fn_id)); ty::item_path_str(ccx.tcx, fn_id));
return (val, must_cast); return (val, must_cast);
@ -142,7 +142,9 @@ pub fn monomorphic_fn(ccx: @CrateContext,
ccx.stats.n_monos += 1; ccx.stats.n_monos += 1;
let depth = option::get_or_default(ccx.monomorphizing.find(&fn_id), 0u); let depth = match ccx.monomorphizing.find(&fn_id) {
Some(&d) => d, None => 0
};
// Random cut-off -- code that needs to instantiate the same function // Random cut-off -- code that needs to instantiate the same function
// recursively more than thirty times can probably safely be assumed to be // recursively more than thirty times can probably safely be assumed to be
// causing an infinite expansion. // causing an infinite expansion.

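Editorial note: the recursion-depth lookup in `monomorphic_fn` drops `option::get_or_default` for an explicit match, since the value now comes back behind a reference. In current Rust the same default-on-miss lookup collapses back to a one-liner; a sketch with made-up names:

```rust
use std::collections::HashMap;

fn main() {
    let monomorphizing: HashMap<u32, usize> = HashMap::new(); // hypothetical fn_id -> depth table
    let fn_id = 42u32;

    // Explicit match, mirroring the rewritten `let depth = match ... { .. }`:
    let depth = match monomorphizing.get(&fn_id) {
        Some(&d) => d,
        None => 0,
    };

    // The same lookup with today's Option adapters:
    let depth2 = monomorphizing.get(&fn_id).copied().unwrap_or(0);

    assert_eq!(depth, depth2);
}
```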
View file

@ -21,7 +21,7 @@ use middle::ty;
use middle::typeck; use middle::typeck;
use core::prelude::*; use core::prelude::*;
use std::oldmap::HashMap; use core::hashmap::linear::LinearSet;
use syntax::ast; use syntax::ast;
use syntax::ast::*; use syntax::ast::*;
use syntax::ast_util::def_id_of_def; use syntax::ast_util::def_id_of_def;
@ -30,27 +30,29 @@ use syntax::codemap;
use syntax::print::pprust::expr_to_str; use syntax::print::pprust::expr_to_str;
use syntax::{visit, ast_util, ast_map}; use syntax::{visit, ast_util, ast_map};
pub type map = HashMap<node_id, ()>; pub type map = @LinearSet<node_id>;
struct ctx { struct ctx {
exp_map2: resolve::ExportMap2, exp_map2: resolve::ExportMap2,
tcx: ty::ctxt, tcx: ty::ctxt,
method_map: typeck::method_map, method_map: typeck::method_map,
rmap: map rmap: &'self mut LinearSet<node_id>,
} }
pub fn find_reachable(crate_mod: &_mod, exp_map2: resolve::ExportMap2, pub fn find_reachable(crate_mod: &_mod, exp_map2: resolve::ExportMap2,
tcx: ty::ctxt, method_map: typeck::method_map) -> map { tcx: ty::ctxt, method_map: typeck::method_map) -> map {
let rmap = HashMap(); let mut rmap = LinearSet::new();
{
let cx = ctx { let cx = ctx {
exp_map2: exp_map2, exp_map2: exp_map2,
tcx: tcx, tcx: tcx,
method_map: method_map, method_map: method_map,
rmap: rmap rmap: &mut rmap
}; };
traverse_public_mod(cx, ast::crate_node_id, crate_mod); traverse_public_mod(cx, ast::crate_node_id, crate_mod);
traverse_all_resources_and_impls(cx, crate_mod); traverse_all_resources_and_impls(cx, crate_mod);
rmap }
return @rmap;
} }
fn traverse_exports(cx: ctx, mod_id: node_id) -> bool { fn traverse_exports(cx: ctx, mod_id: node_id) -> bool {
@ -77,10 +79,10 @@ fn traverse_def_id(cx: ctx, did: def_id) {
ast_map::node_item(item, _) => traverse_public_item(cx, item), ast_map::node_item(item, _) => traverse_public_item(cx, item),
ast_map::node_method(_, impl_id, _) => traverse_def_id(cx, impl_id), ast_map::node_method(_, impl_id, _) => traverse_def_id(cx, impl_id),
ast_map::node_foreign_item(item, _, _, _) => { ast_map::node_foreign_item(item, _, _, _) => {
cx.rmap.insert(item.id, ()); cx.rmap.insert(item.id);
} }
ast_map::node_variant(ref v, _, _) => { ast_map::node_variant(ref v, _, _) => {
cx.rmap.insert((*v).node.id, ()); cx.rmap.insert(v.node.id);
} }
_ => () _ => ()
} }
@ -96,14 +98,16 @@ fn traverse_public_mod(cx: ctx, mod_id: node_id, m: &_mod) {
} }
fn traverse_public_item(cx: ctx, item: @item) { fn traverse_public_item(cx: ctx, item: @item) {
if cx.rmap.contains_key(&item.id) { return; } // XXX: it shouldn't be necessary to do this
cx.rmap.insert(item.id, ()); let rmap: &mut LinearSet<node_id> = cx.rmap;
if rmap.contains(&item.id) { return; }
rmap.insert(item.id);
match item.node { match item.node {
item_mod(ref m) => traverse_public_mod(cx, item.id, m), item_mod(ref m) => traverse_public_mod(cx, item.id, m),
item_foreign_mod(ref nm) => { item_foreign_mod(ref nm) => {
if !traverse_exports(cx, item.id) { if !traverse_exports(cx, item.id) {
for nm.items.each |item| { for nm.items.each |item| {
cx.rmap.insert(item.id, ()); cx.rmap.insert(item.id);
} }
} }
} }
@ -119,17 +123,17 @@ fn traverse_public_item(cx: ctx, item: @item) {
m.generics.ty_params.len() > 0u || m.generics.ty_params.len() > 0u ||
attr::find_inline_attr(m.attrs) != attr::ia_none attr::find_inline_attr(m.attrs) != attr::ia_none
{ {
cx.rmap.insert(m.id, ()); cx.rmap.insert(m.id);
traverse_inline_body(cx, &m.body); traverse_inline_body(cx, &m.body);
} }
} }
} }
item_struct(ref struct_def, ref generics) => { item_struct(ref struct_def, ref generics) => {
for struct_def.ctor_id.each |&ctor_id| { for struct_def.ctor_id.each |&ctor_id| {
cx.rmap.insert(ctor_id, ()); cx.rmap.insert(ctor_id);
} }
for struct_def.dtor.each |dtor| { for struct_def.dtor.each |dtor| {
cx.rmap.insert(dtor.node.id, ()); cx.rmap.insert(dtor.node.id);
if generics.ty_params.len() > 0u || if generics.ty_params.len() > 0u ||
attr::find_inline_attr(dtor.node.attrs) != attr::ia_none attr::find_inline_attr(dtor.node.attrs) != attr::ia_none
{ {
@ -151,17 +155,19 @@ fn mk_ty_visitor() -> visit::vt<ctx> {
..*visit::default_visitor()}) ..*visit::default_visitor()})
} }
fn traverse_ty(ty: @Ty, cx: ctx, v: visit::vt<ctx>) { fn traverse_ty(ty: @Ty, cx: ctx<'a>, v: visit::vt<ctx<'a>>) {
if cx.rmap.contains_key(&ty.id) { return; } // XXX: it shouldn't be necessary to do this
cx.rmap.insert(ty.id, ()); let rmap: &mut LinearSet<node_id> = cx.rmap;
if rmap.contains(&ty.id) { return; }
rmap.insert(ty.id);
match ty.node { match ty.node {
ty_path(p, p_id) => { ty_path(p, p_id) => {
match cx.tcx.def_map.find(&p_id) { match cx.tcx.def_map.find(&p_id) {
// Kind of a hack to check this here, but I'm not sure what else // Kind of a hack to check this here, but I'm not sure what else
// to do // to do
Some(def_prim_ty(_)) => { /* do nothing */ } Some(&def_prim_ty(_)) => { /* do nothing */ }
Some(d) => traverse_def_id(cx, def_id_of_def(d)), Some(&d) => traverse_def_id(cx, def_id_of_def(d)),
None => { /* do nothing -- but should we fail here? */ } None => { /* do nothing -- but should we fail here? */ }
} }
for p.types.each |t| { for p.types.each |t| {
@ -173,11 +179,11 @@ fn traverse_ty(ty: @Ty, cx: ctx, v: visit::vt<ctx>) {
} }
fn traverse_inline_body(cx: ctx, body: &blk) { fn traverse_inline_body(cx: ctx, body: &blk) {
fn traverse_expr(e: @expr, cx: ctx, v: visit::vt<ctx>) { fn traverse_expr(e: @expr, cx: ctx<'a>, v: visit::vt<ctx<'a>>) {
match e.node { match e.node {
expr_path(_) => { expr_path(_) => {
match cx.tcx.def_map.find(&e.id) { match cx.tcx.def_map.find(&e.id) {
Some(d) => { Some(&d) => {
traverse_def_id(cx, def_id_of_def(d)); traverse_def_id(cx, def_id_of_def(d));
} }
None => cx.tcx.sess.span_bug(e.span, fmt!("Unbound node \ None => cx.tcx.sess.span_bug(e.span, fmt!("Unbound node \

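Editorial note: reachable.rs stops simulating a set with a unit-valued `HashMap<node_id, ()>` and uses a real set, owned by `find_reachable` and lent mutably to the traversal context. A compact sketch of that ownership shape in modern Rust, with `HashSet` standing in for `LinearSet` and all names invented; as an aside, `insert` returning `false` for an already-present id could also fold the separate contains-check into one call (the commit keeps the explicit check):

```rust
use std::collections::HashSet;

type NodeId = i64;

// The reachable set lives in the caller; the traversal context only borrows it.
struct Ctx<'a> {
    rmap: &'a mut HashSet<NodeId>,
}

fn traverse_public_item(cx: &mut Ctx, item_id: NodeId) {
    // `insert` returns false when the id was already present.
    if !cx.rmap.insert(item_id) {
        return; // already visited
    }
    // ... recurse into the item's contents here ...
}

fn find_reachable() -> HashSet<NodeId> {
    let mut rmap = HashSet::new();
    {
        let mut cx = Ctx { rmap: &mut rmap };
        traverse_public_item(&mut cx, 1);
        traverse_public_item(&mut cx, 1); // second visit is a no-op
    }
    rmap
}

fn main() {
    assert_eq!(find_reachable().len(), 1);
}
```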
View file

@ -8,6 +8,7 @@
// option. This file may not be copied, modified, or distributed // option. This file may not be copied, modified, or distributed
// except according to those terms. // except according to those terms.
use core::prelude::*;
use lib::llvm::llvm; use lib::llvm::llvm;
use lib::llvm::{TypeRef}; use lib::llvm::{TypeRef};

View file

@ -27,6 +27,7 @@
// much information, but have the disadvantage of being very // much information, but have the disadvantage of being very
// invasive.) // invasive.)
use core::prelude::*;
use middle::freevars; use middle::freevars;
use middle::trans::common::*; use middle::trans::common::*;
@ -59,7 +60,7 @@ pub struct Context {
pub fn type_uses_for(ccx: @CrateContext, fn_id: def_id, n_tps: uint) pub fn type_uses_for(ccx: @CrateContext, fn_id: def_id, n_tps: uint)
-> ~[type_uses] { -> ~[type_uses] {
match ccx.type_use_cache.find(&fn_id) { match ccx.type_use_cache.find(&fn_id) {
Some(uses) => return uses, Some(uses) => return /*bad*/ copy *uses,
None => () None => ()
} }
@ -295,7 +296,7 @@ pub fn mark_for_expr(cx: Context, e: @expr) {
} }
expr_path(_) => { expr_path(_) => {
for cx.ccx.tcx.node_type_substs.find(&e.id).each |ts| { for cx.ccx.tcx.node_type_substs.find(&e.id).each |ts| {
let id = ast_util::def_id_of_def(cx.ccx.tcx.def_map.get(&e.id)); let id = ast_util::def_id_of_def(*cx.ccx.tcx.def_map.get(&e.id));
let uses_for_ts = type_uses_for(cx.ccx, id, ts.len()); let uses_for_ts = type_uses_for(cx.ccx, id, ts.len());
for vec::each2(uses_for_ts, *ts) |uses, subst| { for vec::each2(uses_for_ts, *ts) |uses, subst| {
type_needs(cx, *uses, *subst) type_needs(cx, *uses, *subst)

View file

@ -37,7 +37,7 @@ use core::result;
use core::to_bytes; use core::to_bytes;
use core::uint; use core::uint;
use core::vec; use core::vec;
use core::hashmap::linear::LinearMap; use core::hashmap::linear::{LinearMap, LinearSet};
use std::oldmap::HashMap; use std::oldmap::HashMap;
use std::oldmap; use std::oldmap;
use std::smallintmap::SmallIntMap; use std::smallintmap::SmallIntMap;
@ -118,7 +118,7 @@ pub struct creader_cache_key {
len: uint len: uint
} }
type creader_cache = HashMap<creader_cache_key, t>; type creader_cache = @mut LinearMap<creader_cache_key, t>;
impl to_bytes::IterBytes for creader_cache_key { impl to_bytes::IterBytes for creader_cache_key {
fn iter_bytes(&self, +lsb0: bool, f: to_bytes::Cb) { fn iter_bytes(&self, +lsb0: bool, f: to_bytes::Cb) {
@ -222,7 +222,7 @@ pub enum AutoRefKind {
// This is a map from ID of each implementation to the method info and trait // This is a map from ID of each implementation to the method info and trait
// method ID of each of the default methods belonging to the trait that that // method ID of each of the default methods belonging to the trait that that
// implementation implements. // implementation implements.
pub type ProvidedMethodsMap = HashMap<def_id,@mut ~[@ProvidedMethodInfo]>; pub type ProvidedMethodsMap = @mut LinearMap<def_id,@mut ~[@ProvidedMethodInfo]>;
// Stores the method info and definition ID of the associated trait method for // Stores the method info and definition ID of the associated trait method for
// each instantiation of each provided method. // each instantiation of each provided method.
@ -245,7 +245,7 @@ pub type ctxt = @ctxt_;
struct ctxt_ { struct ctxt_ {
diag: @syntax::diagnostic::span_handler, diag: @syntax::diagnostic::span_handler,
interner: HashMap<intern_key, t_box>, interner: @mut LinearMap<intern_key, t_box>,
next_id: @mut uint, next_id: @mut uint,
vecs_implicitly_copyable: bool, vecs_implicitly_copyable: bool,
legacy_modes: bool, legacy_modes: bool,
@ -276,32 +276,32 @@ struct ctxt_ {
short_names_cache: HashMap<t, @~str>, short_names_cache: HashMap<t, @~str>,
needs_unwind_cleanup_cache: HashMap<t, bool>, needs_unwind_cleanup_cache: HashMap<t, bool>,
tc_cache: @mut LinearMap<uint, TypeContents>, tc_cache: @mut LinearMap<uint, TypeContents>,
ast_ty_to_ty_cache: HashMap<node_id, ast_ty_to_ty_cache_entry>, ast_ty_to_ty_cache: @mut LinearMap<node_id, ast_ty_to_ty_cache_entry>,
enum_var_cache: HashMap<def_id, @~[VariantInfo]>, enum_var_cache: @mut LinearMap<def_id, @~[VariantInfo]>,
trait_method_cache: HashMap<def_id, @~[method]>, trait_method_cache: @mut LinearMap<def_id, @~[method]>,
ty_param_bounds: HashMap<ast::node_id, param_bounds>, ty_param_bounds: @mut LinearMap<ast::node_id, param_bounds>,
inferred_modes: HashMap<ast::node_id, ast::mode>, inferred_modes: @mut LinearMap<ast::node_id, ast::mode>,
adjustments: HashMap<ast::node_id, @AutoAdjustment>, adjustments: @mut LinearMap<ast::node_id, @AutoAdjustment>,
normalized_cache: HashMap<t, t>, normalized_cache: HashMap<t, t>,
lang_items: middle::lang_items::LanguageItems, lang_items: middle::lang_items::LanguageItems,
// A mapping from an implementation ID to the method info and trait // A mapping from an implementation ID to the method info and trait
// method ID of the provided (a.k.a. default) methods in the traits that // method ID of the provided (a.k.a. default) methods in the traits that
// that implementation implements. // that implementation implements.
provided_methods: ProvidedMethodsMap, provided_methods: ProvidedMethodsMap,
provided_method_sources: HashMap<ast::def_id, ProvidedMethodSource>, provided_method_sources: @mut LinearMap<ast::def_id, ProvidedMethodSource>,
supertraits: HashMap<ast::def_id, @~[InstantiatedTraitRef]>, supertraits: @mut LinearMap<ast::def_id, @~[InstantiatedTraitRef]>,
// A mapping from the def ID of an enum or struct type to the def ID // A mapping from the def ID of an enum or struct type to the def ID
// of the method that implements its destructor. If the type is not // of the method that implements its destructor. If the type is not
// present in this map, it does not have a destructor. This map is // present in this map, it does not have a destructor. This map is
// populated during the coherence phase of typechecking. // populated during the coherence phase of typechecking.
destructor_for_type: HashMap<ast::def_id, ast::def_id>, destructor_for_type: @mut LinearMap<ast::def_id, ast::def_id>,
// A method will be in this list if and only if it is a destructor. // A method will be in this list if and only if it is a destructor.
destructors: HashMap<ast::def_id, ()>, destructors: @mut LinearSet<ast::def_id>,
// Maps a trait onto a mapping from self-ty to impl // Maps a trait onto a mapping from self-ty to impl
trait_impls: HashMap<ast::def_id, HashMap<t, @Impl>> trait_impls: @mut LinearMap<ast::def_id, @mut LinearMap<t, @Impl>>
} }
enum tbox_flag { enum tbox_flag {
@ -778,14 +778,14 @@ pub struct ty_param_substs_and_ty {
ty: ty::t ty: ty::t
} }
type type_cache = HashMap<ast::def_id, ty_param_bounds_and_ty>; type type_cache = @mut LinearMap<ast::def_id, ty_param_bounds_and_ty>;
type constness_cache = HashMap<ast::def_id, const_eval::constness>; type constness_cache = @mut LinearMap<ast::def_id, const_eval::constness>;
pub type node_type_table = @mut SmallIntMap<t>; pub type node_type_table = @mut SmallIntMap<t>;
fn mk_rcache() -> creader_cache { fn mk_rcache() -> creader_cache {
return oldmap::HashMap(); return @mut LinearMap::new();
} }
pub fn new_ty_hash<V:Copy>() -> oldmap::HashMap<t, V> { pub fn new_ty_hash<V:Copy>() -> oldmap::HashMap<t, V> {
@ -811,13 +811,12 @@ pub fn mk_ctxt(s: session::Session,
} }
} }
let interner = oldmap::HashMap();
let vecs_implicitly_copyable = let vecs_implicitly_copyable =
get_lint_level(s.lint_settings.default_settings, get_lint_level(s.lint_settings.default_settings,
lint::vecs_implicitly_copyable) == allow; lint::vecs_implicitly_copyable) == allow;
@ctxt_ { @ctxt_ {
diag: s.diagnostic(), diag: s.diagnostic(),
interner: interner, interner: @mut LinearMap::new(),
next_id: @mut 0, next_id: @mut 0,
vecs_implicitly_copyable: vecs_implicitly_copyable, vecs_implicitly_copyable: vecs_implicitly_copyable,
legacy_modes: legacy_modes, legacy_modes: legacy_modes,
@ -831,26 +830,26 @@ pub fn mk_ctxt(s: session::Session,
items: amap, items: amap,
intrinsic_defs: oldmap::HashMap(), intrinsic_defs: oldmap::HashMap(),
freevars: freevars, freevars: freevars,
tcache: HashMap(), tcache: @mut LinearMap::new(),
rcache: mk_rcache(), rcache: mk_rcache(),
ccache: HashMap(), ccache: @mut LinearMap::new(),
short_names_cache: new_ty_hash(), short_names_cache: new_ty_hash(),
needs_unwind_cleanup_cache: new_ty_hash(), needs_unwind_cleanup_cache: new_ty_hash(),
tc_cache: @mut LinearMap::new(), tc_cache: @mut LinearMap::new(),
ast_ty_to_ty_cache: HashMap(), ast_ty_to_ty_cache: @mut LinearMap::new(),
enum_var_cache: HashMap(), enum_var_cache: @mut LinearMap::new(),
trait_method_cache: HashMap(), trait_method_cache: @mut LinearMap::new(),
ty_param_bounds: HashMap(), ty_param_bounds: @mut LinearMap::new(),
inferred_modes: HashMap(), inferred_modes: @mut LinearMap::new(),
adjustments: HashMap(), adjustments: @mut LinearMap::new(),
normalized_cache: new_ty_hash(), normalized_cache: new_ty_hash(),
lang_items: lang_items, lang_items: lang_items,
provided_methods: HashMap(), provided_methods: @mut LinearMap::new(),
provided_method_sources: HashMap(), provided_method_sources: @mut LinearMap::new(),
supertraits: HashMap(), supertraits: @mut LinearMap::new(),
destructor_for_type: HashMap(), destructor_for_type: @mut LinearMap::new(),
destructors: HashMap(), destructors: @mut LinearSet::new(),
trait_impls: HashMap() trait_impls: @mut LinearMap::new()
} }
} }
@ -863,7 +862,7 @@ fn mk_t(cx: ctxt, +st: sty) -> t { mk_t_with_id(cx, st, None) }
fn mk_t_with_id(cx: ctxt, +st: sty, o_def_id: Option<ast::def_id>) -> t { fn mk_t_with_id(cx: ctxt, +st: sty, o_def_id: Option<ast::def_id>) -> t {
let key = intern_key { sty: to_unsafe_ptr(&st), o_def_id: o_def_id }; let key = intern_key { sty: to_unsafe_ptr(&st), o_def_id: o_def_id };
match cx.interner.find(&key) { match cx.interner.find(&key) {
Some(t) => unsafe { return cast::reinterpret_cast(&t); }, Some(&t) => unsafe { return cast::reinterpret_cast(&t); },
_ => () _ => ()
} }
@ -1161,7 +1160,7 @@ pub fn default_arg_mode_for_ty(tcx: ctxt, ty: ty::t) -> ast::rmode {
// with id `id`. // with id `id`.
pub fn encl_region(cx: ctxt, id: ast::node_id) -> ty::Region { pub fn encl_region(cx: ctxt, id: ast::node_id) -> ty::Region {
match cx.region_map.find(&id) { match cx.region_map.find(&id) {
Some(encl_scope) => ty::re_scope(encl_scope), Some(&encl_scope) => ty::re_scope(encl_scope),
None => ty::re_static None => ty::re_static
} }
} }
@ -2016,7 +2015,7 @@ pub fn type_contents(cx: ctxt, ty: t) -> TypeContents {
fail_unless!(p.def_id.crate == ast::local_crate); fail_unless!(p.def_id.crate == ast::local_crate);
param_bounds_to_contents( param_bounds_to_contents(
cx, cx.ty_param_bounds.get(&p.def_id.node)) cx, *cx.ty_param_bounds.get(&p.def_id.node))
} }
ty_self(_) => { ty_self(_) => {
@ -2711,10 +2710,6 @@ impl to_bytes::IterBytes for sty {
} }
} }
pub fn br_hashmap<V:Copy>() -> HashMap<bound_region, V> {
oldmap::HashMap()
}
pub fn node_id_to_type(cx: ctxt, id: ast::node_id) -> t { pub fn node_id_to_type(cx: ctxt, id: ast::node_id) -> t {
//io::println(fmt!("%?/%?", id, cx.node_types.len())); //io::println(fmt!("%?/%?", id, cx.node_types.len()));
match cx.node_types.find(&(id as uint)) { match cx.node_types.find(&(id as uint)) {
@ -2882,7 +2877,7 @@ pub fn expr_ty_adjusted(cx: ctxt, expr: @ast::expr) -> t {
return match cx.adjustments.find(&expr.id) { return match cx.adjustments.find(&expr.id) {
None => unadjusted_ty, None => unadjusted_ty,
Some(@AutoAddEnv(r, s)) => { Some(&@AutoAddEnv(r, s)) => {
match ty::get(unadjusted_ty).sty { match ty::get(unadjusted_ty).sty {
ty::ty_bare_fn(ref b) => { ty::ty_bare_fn(ref b) => {
ty::mk_closure( ty::mk_closure(
@ -2900,7 +2895,7 @@ pub fn expr_ty_adjusted(cx: ctxt, expr: @ast::expr) -> t {
} }
} }
Some(@AutoDerefRef(ref adj)) => { Some(&@AutoDerefRef(ref adj)) => {
let mut adjusted_ty = unadjusted_ty; let mut adjusted_ty = unadjusted_ty;
for uint::range(0, adj.autoderefs) |i| { for uint::range(0, adj.autoderefs) |i| {
@ -3037,7 +3032,7 @@ pub fn method_call_bounds(tcx: ctxt, method_map: typeck::method_map,
pub fn resolve_expr(tcx: ctxt, expr: @ast::expr) -> ast::def { pub fn resolve_expr(tcx: ctxt, expr: @ast::expr) -> ast::def {
match tcx.def_map.find(&expr.id) { match tcx.def_map.find(&expr.id) {
Some(def) => def, Some(&def) => def,
None => { None => {
tcx.sess.span_bug(expr.span, fmt!( tcx.sess.span_bug(expr.span, fmt!(
"No def-map entry for expr %?", expr.id)); "No def-map entry for expr %?", expr.id));
@ -3270,17 +3265,18 @@ pub fn occurs_check(tcx: ctxt, sp: span, vid: TyVid, rt: t) {
// Maintains a little union-set tree for inferred modes. `canon()` returns // Maintains a little union-set tree for inferred modes. `canon()` returns
// the current head value for `m0`. // the current head value for `m0`.
fn canon<T:Copy + cmp::Eq>(tbl: HashMap<ast::node_id, ast::inferable<T>>, fn canon<T:Copy + cmp::Eq>(tbl: &mut LinearMap<ast::node_id, ast::inferable<T>>,
+m0: ast::inferable<T>) -> ast::inferable<T> { +m0: ast::inferable<T>) -> ast::inferable<T> {
match m0 { match m0 {
ast::infer(id) => match tbl.find(&id) { ast::infer(id) => {
None => m0, let m1 = match tbl.find(&id) {
Some(ref m1) => { None => return m0,
let cm1 = canon(tbl, (*m1)); Some(&m1) => m1
};
let cm1 = canon(tbl, m1);
// path compression: // path compression:
if cm1 != (*m1) { tbl.insert(id, cm1); } if cm1 != m1 { tbl.insert(id, cm1); }
cm1 cm1
}
}, },
_ => m0 _ => m0
} }
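Editorial note: as its comment says, `canon()` is a little union-find over inferred modes with path compression. The rewrite copies the parent entry out of the table before recursing, so the borrow taken by `find` ends before the recursive call that may `insert`. A self-contained sketch of the same algorithm in present-day Rust (the `Inferable` type and node ids are invented for illustration):

```rust
use std::collections::HashMap;

type NodeId = i64;

// Minimal stand-in for the inferable chain: a node either has a concrete
// value or points at another node whose value should be used.
#[derive(Clone, Copy, PartialEq, Debug)]
enum Inferable {
    Infer(NodeId),
    Mode(char), // stand-in for a concrete ast::mode
}

// Union-find style canonicalisation with path compression, mirroring canon():
// copy the parent out of the table first so the borrow ends before the
// recursive call, then overwrite the entry with the canonical result.
fn canon(tbl: &mut HashMap<NodeId, Inferable>, m0: Inferable) -> Inferable {
    match m0 {
        Inferable::Infer(id) => {
            let m1 = match tbl.get(&id) {
                None => return m0,
                Some(&m1) => m1,
            };
            let cm1 = canon(tbl, m1);
            if cm1 != m1 {
                tbl.insert(id, cm1); // path compression
            }
            cm1
        }
        _ => m0,
    }
}

fn main() {
    let mut tbl = HashMap::new();
    tbl.insert(1, Inferable::Infer(2));
    tbl.insert(2, Inferable::Mode('v'));
    assert_eq!(canon(&mut tbl, Inferable::Infer(1)), Inferable::Mode('v'));
    // After the call, node 1 points directly at the canonical mode.
    assert_eq!(tbl[&1], Inferable::Mode('v'));
}
```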
@ -3565,7 +3561,7 @@ pub fn trait_supertraits(cx: ctxt,
-> @~[InstantiatedTraitRef] { -> @~[InstantiatedTraitRef] {
// Check the cache. // Check the cache.
match cx.supertraits.find(&id) { match cx.supertraits.find(&id) {
Some(instantiated_trait_info) => { return instantiated_trait_info; } Some(&instantiated_trait_info) => { return instantiated_trait_info; }
None => {} // Continue. None => {} // Continue.
} }
@ -3598,7 +3594,7 @@ pub fn trait_supertraits(cx: ctxt,
pub fn trait_methods(cx: ctxt, id: ast::def_id) -> @~[method] { pub fn trait_methods(cx: ctxt, id: ast::def_id) -> @~[method] {
match cx.trait_method_cache.find(&id) { match cx.trait_method_cache.find(&id) {
// Local traits are supposed to have been added explicitly. // Local traits are supposed to have been added explicitly.
Some(ms) => ms, Some(&ms) => ms,
_ => { _ => {
// If the lookup in trait_method_cache fails, assume that the trait // If the lookup in trait_method_cache fails, assume that the trait
// method we're trying to look up is in a different crate, and look // method we're trying to look up is in a different crate, and look
@ -3736,7 +3732,7 @@ pub impl DtorKind {
Otherwise return none. */ Otherwise return none. */
pub fn ty_dtor(cx: ctxt, struct_id: def_id) -> DtorKind { pub fn ty_dtor(cx: ctxt, struct_id: def_id) -> DtorKind {
match cx.destructor_for_type.find(&struct_id) { match cx.destructor_for_type.find(&struct_id) {
Some(method_def_id) => return TraitDtor(method_def_id), Some(&method_def_id) => return TraitDtor(method_def_id),
None => {} // Continue. None => {} // Continue.
} }
@ -3834,7 +3830,7 @@ pub fn type_is_empty(cx: ctxt, t: t) -> bool {
pub fn enum_variants(cx: ctxt, id: ast::def_id) -> @~[VariantInfo] { pub fn enum_variants(cx: ctxt, id: ast::def_id) -> @~[VariantInfo] {
match cx.enum_var_cache.find(&id) { match cx.enum_var_cache.find(&id) {
Some(variants) => return variants, Some(&variants) => return variants,
_ => { /* fallthrough */ } _ => { /* fallthrough */ }
} }
@ -3920,7 +3916,7 @@ pub fn lookup_item_type(cx: ctxt,
did: ast::def_id) did: ast::def_id)
-> ty_param_bounds_and_ty { -> ty_param_bounds_and_ty {
match cx.tcache.find(&did) { match cx.tcache.find(&did) {
Some(tpt) => { Some(&tpt) => {
// The item is in this crate. The caller should have added it to the // The item is in this crate. The caller should have added it to the
// type cache already // type cache already
return tpt; return tpt;
@ -4299,7 +4295,7 @@ pub fn iter_bound_traits_and_supertraits(tcx: ctxt,
} }
}; };
let mut supertrait_map = HashMap(); let mut supertrait_map = LinearMap::new();
let mut seen_def_ids = ~[]; let mut seen_def_ids = ~[];
let mut i = 0; let mut i = 0;
let trait_ty_id = ty_to_def_id(bound_trait_ty).expect( let trait_ty_id = ty_to_def_id(bound_trait_ty).expect(

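Editorial note: throughout ty.rs the managed `oldmap::HashMap` caches on the type context become `@mut LinearMap` / `@mut LinearSet`: one shared, mutably-borrowable table per cache instead of a map with internal mutability. There is no exact modern equivalent of `@mut`, but a rough present-day analogue of the shape is `Rc<RefCell<HashMap<..>>>`; the sketch below is purely illustrative and all names are invented:

```rust
use std::cell::RefCell;
use std::collections::HashMap;
use std::rc::Rc;

type DefId = (u32, i64);

// Rough analogue of `@mut LinearMap<K, V>`: one shared, mutably-borrowable
// table that several parts of the type context can hold a handle to.
type SharedMap<K, V> = Rc<RefCell<HashMap<K, V>>>;

struct Ctxt {
    destructor_for_type: SharedMap<DefId, DefId>,
}

fn main() {
    let ctxt = Ctxt { destructor_for_type: Rc::new(RefCell::new(HashMap::new())) };

    // Another part of the compiler can hold its own handle to the same table.
    let handle = Rc::clone(&ctxt.destructor_for_type);
    handle.borrow_mut().insert((0, 1), (0, 99));

    // Readers copy small Copy values out of the borrow, as the `Some(&x) => x`
    // patterns do throughout this commit.
    let dtor = match ctxt.destructor_for_type.borrow().get(&(0, 1)) {
        Some(&did) => Some(did),
        None => None,
    };
    assert_eq!(dtor, Some((0, 99)));
}
```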
View file

@ -235,11 +235,11 @@ pub fn ast_ty_to_ty<AC:AstConv, RS:region_scope + Copy + Durable>(
} }
ast::ty_path(path, id) if a_seq_ty.mutbl == ast::m_imm => { ast::ty_path(path, id) if a_seq_ty.mutbl == ast::m_imm => {
match tcx.def_map.find(&id) { match tcx.def_map.find(&id) {
Some(ast::def_prim_ty(ast::ty_str)) => { Some(&ast::def_prim_ty(ast::ty_str)) => {
check_path_args(tcx, path, NO_TPS | NO_REGIONS); check_path_args(tcx, path, NO_TPS | NO_REGIONS);
return ty::mk_estr(tcx, vst); return ty::mk_estr(tcx, vst);
} }
Some(ast::def_ty(type_def_id)) => { Some(&ast::def_ty(type_def_id)) => {
let result = ast_path_to_substs_and_ty( let result = ast_path_to_substs_and_ty(
self, rscope, self, rscope,
type_def_id, path); type_def_id, path);
@ -303,8 +303,8 @@ pub fn ast_ty_to_ty<AC:AstConv, RS:region_scope + Copy + Durable>(
let tcx = self.tcx(); let tcx = self.tcx();
match tcx.ast_ty_to_ty_cache.find(&ast_ty.id) { match tcx.ast_ty_to_ty_cache.find(&ast_ty.id) {
Some(ty::atttce_resolved(ty)) => return ty, Some(&ty::atttce_resolved(ty)) => return ty,
Some(ty::atttce_unresolved) => { Some(&ty::atttce_unresolved) => {
tcx.sess.span_fatal(ast_ty.span, ~"illegal recursive type; \ tcx.sess.span_fatal(ast_ty.span, ~"illegal recursive type; \
insert an enum in the cycle, \ insert an enum in the cycle, \
if this is desired"); if this is desired");
@ -359,7 +359,7 @@ pub fn ast_ty_to_ty<AC:AstConv, RS:region_scope + Copy + Durable>(
None => tcx.sess.span_fatal( None => tcx.sess.span_fatal(
ast_ty.span, fmt!("unbound path %s", ast_ty.span, fmt!("unbound path %s",
path_to_str(path, tcx.sess.intr()))), path_to_str(path, tcx.sess.intr()))),
Some(d) => d Some(&d) => d
}; };
match a_def { match a_def {
ast::def_ty(did) | ast::def_struct(did) => { ast::def_ty(did) | ast::def_struct(did) => {

View file

@ -18,8 +18,8 @@ use middle::typeck::check::{instantiate_path, lookup_def};
use middle::typeck::check::{structure_of, valid_range_bounds}; use middle::typeck::check::{structure_of, valid_range_bounds};
use middle::typeck::require_same_types; use middle::typeck::require_same_types;
use core::hashmap::linear::{LinearMap, LinearSet};
use core::vec; use core::vec;
use std::oldmap::HashMap;
use syntax::ast; use syntax::ast;
use syntax::ast_util; use syntax::ast_util;
use syntax::codemap::span; use syntax::codemap::span;
@ -228,7 +228,7 @@ pub fn check_pat_variant(pcx: pat_ctxt, pat: @ast::pat, path: @ast::path,
/// `class_fields` describes the type of each field of the struct. /// `class_fields` describes the type of each field of the struct.
/// `class_id` is the ID of the struct. /// `class_id` is the ID of the struct.
/// `substitutions` are the type substitutions applied to this struct type /// `substitutions` are the type substitutions applied to this struct type
/// (e.g. K,V in HashMap<K,V>). /// (e.g. K,V in LinearMap<K,V>).
/// `etc` is true if the pattern said '...' and false otherwise. /// `etc` is true if the pattern said '...' and false otherwise.
pub fn check_struct_pat_fields(pcx: pat_ctxt, pub fn check_struct_pat_fields(pcx: pat_ctxt,
span: span, span: span,
@ -241,23 +241,23 @@ pub fn check_struct_pat_fields(pcx: pat_ctxt,
let tcx = pcx.fcx.ccx.tcx; let tcx = pcx.fcx.ccx.tcx;
// Index the class fields. // Index the class fields.
let field_map = HashMap(); let mut field_map = LinearMap::new();
for class_fields.eachi |i, class_field| { for class_fields.eachi |i, class_field| {
field_map.insert(class_field.ident, i); field_map.insert(class_field.ident, i);
} }
// Typecheck each field. // Typecheck each field.
let found_fields = HashMap(); let mut found_fields = LinearSet::new();
for fields.each |field| { for fields.each |field| {
match field_map.find(&field.ident) { match field_map.find(&field.ident) {
Some(index) => { Some(&index) => {
let class_field = class_fields[index]; let class_field = class_fields[index];
let field_type = ty::lookup_field_type(tcx, let field_type = ty::lookup_field_type(tcx,
class_id, class_id,
class_field.id, class_field.id,
substitutions); substitutions);
check_pat(pcx, field.pat, field_type); check_pat(pcx, field.pat, field_type);
found_fields.insert(index, ()); found_fields.insert(index);
} }
None => { None => {
let name = pprust::path_to_str(path, tcx.sess.intr()); let name = pprust::path_to_str(path, tcx.sess.intr());
@ -272,7 +272,7 @@ pub fn check_struct_pat_fields(pcx: pat_ctxt,
// Report an error if not all the fields were specified. // Report an error if not all the fields were specified.
if !etc { if !etc {
for class_fields.eachi |i, field| { for class_fields.eachi |i, field| {
if found_fields.contains_key(&i) { if found_fields.contains(&i) {
loop; loop;
} }
tcx.sess.span_err(span, tcx.sess.span_err(span,
@ -293,11 +293,11 @@ pub fn check_struct_pat(pcx: pat_ctxt, pat_id: ast::node_id, span: span,
// Check to ensure that the struct is the one specified. // Check to ensure that the struct is the one specified.
match tcx.def_map.find(&pat_id) { match tcx.def_map.find(&pat_id) {
Some(ast::def_struct(supplied_def_id)) Some(&ast::def_struct(supplied_def_id))
if supplied_def_id == class_id => { if supplied_def_id == class_id => {
// OK. // OK.
} }
Some(ast::def_struct(*)) | Some(ast::def_variant(*)) => { Some(&ast::def_struct(*)) | Some(&ast::def_variant(*)) => {
let name = pprust::path_to_str(path, tcx.sess.intr()); let name = pprust::path_to_str(path, tcx.sess.intr());
tcx.sess.span_err(span, tcx.sess.span_err(span,
fmt!("mismatched types: expected `%s` but \ fmt!("mismatched types: expected `%s` but \
@ -334,7 +334,7 @@ pub fn check_struct_like_enum_variant_pat(pcx: pat_ctxt,
// Find the variant that was specified. // Find the variant that was specified.
match tcx.def_map.find(&pat_id) { match tcx.def_map.find(&pat_id) {
Some(ast::def_variant(found_enum_id, variant_id)) Some(&ast::def_variant(found_enum_id, variant_id))
if found_enum_id == enum_id => { if found_enum_id == enum_id => {
// Get the struct fields from this struct-like enum variant. // Get the struct fields from this struct-like enum variant.
let class_fields = ty::lookup_struct_fields(tcx, variant_id); let class_fields = ty::lookup_struct_fields(tcx, variant_id);
@ -342,7 +342,7 @@ pub fn check_struct_like_enum_variant_pat(pcx: pat_ctxt,
check_struct_pat_fields(pcx, span, path, fields, class_fields, check_struct_pat_fields(pcx, span, path, fields, class_fields,
variant_id, substitutions, etc); variant_id, substitutions, etc);
} }
Some(ast::def_struct(*)) | Some(ast::def_variant(*)) => { Some(&ast::def_struct(*)) | Some(&ast::def_variant(*)) => {
let name = pprust::path_to_str(path, tcx.sess.intr()); let name = pprust::path_to_str(path, tcx.sess.intr());
tcx.sess.span_err(span, tcx.sess.span_err(span,
fmt!("mismatched types: expected `%s` but \ fmt!("mismatched types: expected `%s` but \
@ -394,7 +394,7 @@ pub fn check_pat(pcx: pat_ctxt, pat: @ast::pat, expected: ty::t) {
} }
ast::pat_enum(*) | ast::pat_enum(*) |
ast::pat_ident(*) if pat_is_const(tcx.def_map, pat) => { ast::pat_ident(*) if pat_is_const(tcx.def_map, pat) => {
let const_did = ast_util::def_id_of_def(tcx.def_map.get(&pat.id)); let const_did = ast_util::def_id_of_def(*tcx.def_map.get(&pat.id));
let const_tpt = ty::lookup_item_type(tcx, const_did); let const_tpt = ty::lookup_item_type(tcx, const_did);
demand::suptype(fcx, pat.span, expected, const_tpt.ty); demand::suptype(fcx, pat.span, expected, const_tpt.ty);
fcx.write_ty(pat.id, const_tpt.ty); fcx.write_ty(pat.id, const_tpt.ty);
@ -421,7 +421,7 @@ pub fn check_pat(pcx: pat_ctxt, pat: @ast::pat, expected: ty::t) {
} }
} }
let canon_id = pcx.map.get(&ast_util::path_to_ident(name)); let canon_id = *pcx.map.get(&ast_util::path_to_ident(name));
if canon_id != pat.id { if canon_id != pat.id {
let ct = fcx.local_ty(pat.span, canon_id); let ct = fcx.local_ty(pat.span, canon_id);
demand::eqtype(fcx, pat.span, ct, typ); demand::eqtype(fcx, pat.span, ct, typ);

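Editorial note: `check_struct_pat_fields` now builds its field index in a plain map from field name to position and tracks matched fields in a `LinearSet<uint>` instead of a unit-valued map. The same logic, sketched with today's collections and made-up field names:

```rust
use std::collections::{HashMap, HashSet};

fn main() {
    // Hypothetical struct fields and the fields mentioned by a pattern.
    let class_fields = ["x", "y", "z"];
    let pattern_fields = ["y", "x"];

    // Index the class fields by name, as check_struct_pat_fields now does.
    let mut field_map: HashMap<&str, usize> = HashMap::new();
    for (i, name) in class_fields.iter().copied().enumerate() {
        field_map.insert(name, i);
    }

    // Check each pattern field, remembering which indices were seen.
    let mut found_fields: HashSet<usize> = HashSet::new();
    for name in pattern_fields.iter().copied() {
        match field_map.get(name) {
            Some(&index) => {
                found_fields.insert(index);
            }
            None => println!("struct has no field named `{}`", name),
        }
    }

    // Without `..` in the pattern, every field must have been mentioned.
    for (i, name) in class_fields.iter().copied().enumerate() {
        if !found_fields.contains(&i) {
            println!("pattern does not mention field `{}`", name);
        }
    }
}
```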
View file

@ -95,10 +95,10 @@ use middle::typeck::{method_self, method_static, method_trait, method_super};
use util::common::indenter; use util::common::indenter;
use util::ppaux::expr_repr; use util::ppaux::expr_repr;
use core::hashmap::linear::LinearSet;
use core::result; use core::result;
use core::uint; use core::uint;
use core::vec; use core::vec;
use std::oldmap::HashMap;
use syntax::ast::{def_id, sty_by_ref, sty_value, sty_region, sty_box}; use syntax::ast::{def_id, sty_by_ref, sty_value, sty_region, sty_box};
use syntax::ast::{sty_uniq, sty_static, node_id, by_copy, by_ref}; use syntax::ast::{sty_uniq, sty_static, node_id, by_copy, by_ref};
use syntax::ast::{m_const, m_mutbl, m_imm}; use syntax::ast::{m_const, m_mutbl, m_imm};
@ -131,6 +131,7 @@ pub fn lookup(
check_traits: CheckTraitsFlag, // Whether we check traits only. check_traits: CheckTraitsFlag, // Whether we check traits only.
autoderef_receiver: AutoderefReceiverFlag) autoderef_receiver: AutoderefReceiverFlag)
-> Option<method_map_entry> { -> Option<method_map_entry> {
let mut impl_dups = LinearSet::new();
let lcx = LookupContext { let lcx = LookupContext {
fcx: fcx, fcx: fcx,
expr: expr, expr: expr,
@ -138,7 +139,7 @@ pub fn lookup(
callee_id: callee_id, callee_id: callee_id,
m_name: m_name, m_name: m_name,
supplied_tps: supplied_tps, supplied_tps: supplied_tps,
impl_dups: HashMap(), impl_dups: &mut impl_dups,
inherent_candidates: @mut ~[], inherent_candidates: @mut ~[],
extension_candidates: @mut ~[], extension_candidates: @mut ~[],
deref_args: deref_args, deref_args: deref_args,
@ -158,7 +159,7 @@ pub struct LookupContext {
callee_id: node_id, callee_id: node_id,
m_name: ast::ident, m_name: ast::ident,
supplied_tps: &'self [ty::t], supplied_tps: &'self [ty::t],
impl_dups: HashMap<def_id, ()>, impl_dups: &'self mut LinearSet<def_id>,
inherent_candidates: @mut ~[Candidate], inherent_candidates: @mut ~[Candidate],
extension_candidates: @mut ~[Candidate], extension_candidates: @mut ~[Candidate],
deref_args: check::DerefArgs, deref_args: check::DerefArgs,
@ -344,8 +345,8 @@ pub impl<'self> LookupContext<'self> {
// If the method being called is associated with a trait, then // If the method being called is associated with a trait, then
// find all the impls of that trait. Each of those are // find all the impls of that trait. Each of those are
// candidates. // candidates.
let opt_applicable_traits = self.fcx.ccx.trait_map.find( let trait_map: &mut resolve::TraitMap = &mut self.fcx.ccx.trait_map;
&self.expr.id); let opt_applicable_traits = trait_map.find(&self.expr.id);
for opt_applicable_traits.each |applicable_traits| { for opt_applicable_traits.each |applicable_traits| {
for applicable_traits.each |trait_did| { for applicable_traits.each |trait_did| {
let coherence_info = self.fcx.ccx.coherence_info; let coherence_info = self.fcx.ccx.coherence_info;
@ -362,7 +363,7 @@ pub impl<'self> LookupContext<'self> {
// Look for default methods. // Look for default methods.
match self.tcx().provided_methods.find(trait_did) { match self.tcx().provided_methods.find(trait_did) {
Some(methods) => { Some(&methods) => {
self.push_candidates_from_provided_methods( self.push_candidates_from_provided_methods(
self.extension_candidates, self_ty, *trait_did, self.extension_candidates, self_ty, *trait_did,
methods); methods);
@ -384,7 +385,7 @@ pub impl<'self> LookupContext<'self> {
let mut next_bound_idx = 0; // count only trait bounds let mut next_bound_idx = 0; // count only trait bounds
let bounds = tcx.ty_param_bounds.get(&param_ty.def_id.node); let bounds = tcx.ty_param_bounds.get(&param_ty.def_id.node);
for vec::each(*bounds) |bound| { for bounds.each |bound| {
let bound_trait_ty = match *bound { let bound_trait_ty = match *bound {
ty::bound_trait(bound_t) => bound_t, ty::bound_trait(bound_t) => bound_t,
@ -639,7 +640,7 @@ pub impl<'self> LookupContext<'self> {
fn push_candidates_from_impl(&self, candidates: &mut ~[Candidate], fn push_candidates_from_impl(&self, candidates: &mut ~[Candidate],
impl_info: &resolve::Impl) { impl_info: &resolve::Impl) {
if !self.impl_dups.insert(impl_info.did, ()) { if !self.impl_dups.insert(impl_info.did) {
return; // already visited return; // already visited
} }
@ -1195,7 +1196,7 @@ pub impl<'self> LookupContext<'self> {
match candidate.origin { match candidate.origin {
method_static(method_id) | method_self(method_id, _) method_static(method_id) | method_self(method_id, _)
| method_super(method_id, _) => { | method_super(method_id, _) => {
bad = self.tcx().destructors.contains_key(&method_id); bad = self.tcx().destructors.contains(&method_id);
} }
method_param(method_param { trait_id: trait_id, _ }) | method_param(method_param { trait_id: trait_id, _ }) |
method_trait(trait_id, _, _) => { method_trait(trait_id, _, _) => {

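Editorial note: method lookup now threads `impl_dups` through `LookupContext` as a mutable borrow of a `LinearSet<def_id>`, and relies on `insert` returning `false` for duplicates to skip impls it has already collected (`if !self.impl_dups.insert(impl_info.did) { return; }`). A small modern sketch of that deduplication pattern, all names hypothetical:

```rust
use std::collections::HashSet;

type DefId = u32;

// Candidate collection that skips impls it has already seen, using the fact
// that HashSet::insert returns false when the value was already present.
struct LookupContext<'a> {
    impl_dups: &'a mut HashSet<DefId>,
    candidates: Vec<DefId>,
}

impl<'a> LookupContext<'a> {
    fn push_candidates_from_impl(&mut self, impl_did: DefId) {
        if !self.impl_dups.insert(impl_did) {
            return; // already visited
        }
        self.candidates.push(impl_did);
    }
}

fn main() {
    let mut dups = HashSet::new();
    let mut lcx = LookupContext { impl_dups: &mut dups, candidates: Vec::new() };
    lcx.push_candidates_from_impl(10);
    lcx.push_candidates_from_impl(10); // deduplicated
    lcx.push_candidates_from_impl(11);
    assert_eq!(lcx.candidates, vec![10, 11]);
}
```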
View file

@ -112,6 +112,7 @@ use util::ppaux::{bound_region_to_str, expr_repr, pat_repr};
use util::ppaux; use util::ppaux;
use core::either; use core::either;
use core::hashmap::linear::LinearMap;
use core::option; use core::option;
use core::ptr; use core::ptr;
use core::result::{Result, Ok, Err}; use core::result::{Result, Ok, Err};
@ -119,8 +120,6 @@ use core::result;
use core::str; use core::str;
use core::vec; use core::vec;
use std::list::Nil; use std::list::Nil;
use std::oldmap::HashMap;
use std::oldmap;
use syntax::ast::{provided, required, ty_i}; use syntax::ast::{provided, required, ty_i};
use syntax::ast; use syntax::ast;
use syntax::ast_map; use syntax::ast_map;
@ -160,12 +159,12 @@ pub struct SelfInfo {
/// share the inherited fields. /// share the inherited fields.
pub struct inherited { pub struct inherited {
infcx: @mut infer::InferCtxt, infcx: @mut infer::InferCtxt,
locals: HashMap<ast::node_id, ty::t>, locals: @mut LinearMap<ast::node_id, ty::t>,
// Temporary tables: // Temporary tables:
node_types: HashMap<ast::node_id, ty::t>, node_types: @mut LinearMap<ast::node_id, ty::t>,
node_type_substs: HashMap<ast::node_id, ty::substs>, node_type_substs: @mut LinearMap<ast::node_id, ty::substs>,
adjustments: HashMap<ast::node_id, @ty::AutoAdjustment>, adjustments: @mut LinearMap<ast::node_id, @ty::AutoAdjustment>,
method_map: method_map, method_map: method_map,
vtable_map: vtable_map, vtable_map: vtable_map,
} }
@ -222,12 +221,12 @@ pub struct FnCtxt {
pub fn blank_inherited(ccx: @mut CrateCtxt) -> @inherited { pub fn blank_inherited(ccx: @mut CrateCtxt) -> @inherited {
@inherited { @inherited {
infcx: infer::new_infer_ctxt(ccx.tcx), infcx: infer::new_infer_ctxt(ccx.tcx),
locals: HashMap(), locals: @mut LinearMap::new(),
node_types: oldmap::HashMap(), node_types: @mut LinearMap::new(),
node_type_substs: oldmap::HashMap(), node_type_substs: @mut LinearMap::new(),
adjustments: oldmap::HashMap(), adjustments: @mut LinearMap::new(),
method_map: oldmap::HashMap(), method_map: @mut LinearMap::new(),
vtable_map: oldmap::HashMap(), vtable_map: @mut LinearMap::new(),
} }
} }
@ -399,7 +398,7 @@ pub fn check_fn(ccx: @mut CrateCtxt,
assign(self_info.self_id, Some(self_info.self_ty)); assign(self_info.self_id, Some(self_info.self_ty));
debug!("self is assigned to %s", debug!("self is assigned to %s",
fcx.infcx().ty_to_str( fcx.infcx().ty_to_str(
fcx.inh.locals.get(&self_info.self_id))); *fcx.inh.locals.get(&self_info.self_id)));
} }
// Add formal parameters. // Add formal parameters.
@ -432,7 +431,7 @@ pub fn check_fn(ccx: @mut CrateCtxt,
debug!("Local variable %s is assigned type %s", debug!("Local variable %s is assigned type %s",
fcx.pat_to_str(local.node.pat), fcx.pat_to_str(local.node.pat),
fcx.infcx().ty_to_str( fcx.infcx().ty_to_str(
fcx.inh.locals.get(&local.node.id))); *fcx.inh.locals.get(&local.node.id)));
visit::visit_local(local, e, v); visit::visit_local(local, e, v);
}; };
@ -445,7 +444,7 @@ pub fn check_fn(ccx: @mut CrateCtxt,
debug!("Pattern binding %s is assigned to %s", debug!("Pattern binding %s is assigned to %s",
*tcx.sess.str_of(path.idents[0]), *tcx.sess.str_of(path.idents[0]),
fcx.infcx().ty_to_str( fcx.infcx().ty_to_str(
fcx.inh.locals.get(&p.id))); *fcx.inh.locals.get(&p.id)));
} }
_ => {} _ => {}
} }
@ -506,11 +505,12 @@ pub fn check_method(ccx: @mut CrateCtxt,
pub fn check_no_duplicate_fields(tcx: ty::ctxt, pub fn check_no_duplicate_fields(tcx: ty::ctxt,
fields: ~[(ast::ident, span)]) { fields: ~[(ast::ident, span)]) {
let field_names = HashMap(); let mut field_names = LinearMap::new();
for fields.each |p| { for fields.each |p| {
let (id, sp) = *p; let (id, sp) = *p;
match field_names.find(&id) { let orig_sp = field_names.find(&id).map_consume(|x| *x);
match orig_sp {
Some(orig_sp) => { Some(orig_sp) => {
tcx.sess.span_err(sp, fmt!("Duplicate field \ tcx.sess.span_err(sp, fmt!("Duplicate field \
name %s in record type declaration", name %s in record type declaration",
@ -572,7 +572,7 @@ pub fn check_item(ccx: @mut CrateCtxt, it: @ast::item) {
check_bare_fn(ccx, decl, body, it.id, None); check_bare_fn(ccx, decl, body, it.id, None);
} }
ast::item_impl(_, _, ty, ref ms) => { ast::item_impl(_, _, ty, ref ms) => {
let rp = ccx.tcx.region_paramd_items.find(&it.id); let rp = ccx.tcx.region_paramd_items.find(&it.id).map_consume(|x| *x);
debug!("item_impl %s with id %d rp %?", debug!("item_impl %s with id %d rp %?",
*ccx.tcx.sess.str_of(it.ident), it.id, rp); *ccx.tcx.sess.str_of(it.ident), it.id, rp);
let self_ty = ccx.to_ty(&rscope::type_rscope(rp), ty); let self_ty = ccx.to_ty(&rscope::type_rscope(rp), ty);
@ -679,7 +679,7 @@ pub impl FnCtxt {
fn local_ty(&self, span: span, nid: ast::node_id) -> ty::t { fn local_ty(&self, span: span, nid: ast::node_id) -> ty::t {
match self.inh.locals.find(&nid) { match self.inh.locals.find(&nid) {
Some(t) => t, Some(&t) => t,
None => { None => {
self.tcx().sess.span_bug( self.tcx().sess.span_bug(
span, span,
@ -766,7 +766,7 @@ pub impl FnCtxt {
fn expr_ty(&self, ex: @ast::expr) -> ty::t { fn expr_ty(&self, ex: @ast::expr) -> ty::t {
match self.inh.node_types.find(&ex.id) { match self.inh.node_types.find(&ex.id) {
Some(t) => t, Some(&t) => t,
None => { None => {
self.tcx().sess.bug( self.tcx().sess.bug(
fmt!("no type for %s in fcx %s", fmt!("no type for %s in fcx %s",
@ -776,7 +776,7 @@ pub impl FnCtxt {
} }
fn node_ty(&self, id: ast::node_id) -> ty::t { fn node_ty(&self, id: ast::node_id) -> ty::t {
match self.inh.node_types.find(&id) { match self.inh.node_types.find(&id) {
Some(t) => t, Some(&t) => t,
None => { None => {
self.tcx().sess.bug( self.tcx().sess.bug(
fmt!("no type for node %d: %s in fcx %s", fmt!("no type for node %d: %s in fcx %s",
@ -789,7 +789,7 @@ pub impl FnCtxt {
} }
fn node_ty_substs(&self, id: ast::node_id) -> ty::substs { fn node_ty_substs(&self, id: ast::node_id) -> ty::substs {
match self.inh.node_type_substs.find(&id) { match self.inh.node_type_substs.find(&id) {
Some(ref ts) => (/*bad*/copy *ts), Some(ts) => (/*bad*/copy *ts),
None => { None => {
self.tcx().sess.bug( self.tcx().sess.bug(
fmt!("no type substs for node %d: %s in fcx %s", fmt!("no type substs for node %d: %s in fcx %s",
@ -800,10 +800,14 @@ pub impl FnCtxt {
} }
} }
} }
fn opt_node_ty_substs(&self, id: ast::node_id) -> Option<ty::substs> {
self.inh.node_type_substs.find(&id)
}
fn opt_node_ty_substs(&self, id: ast::node_id,
f: &fn(&ty::substs) -> bool) {
match self.inh.node_type_substs.find(&id) {
Some(s) => { f(s); }
None => ()
}
}
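Editorial note: `opt_node_ty_substs` no longer returns `Option<ty::substs>` by value; the table now yields borrows, and copying substs out is evidently what the `/*bad*/copy` marker above wants to avoid, so the accessor hands a borrowed view to a closure instead (returning `bool` under the old for-loop protocol). Roughly the same idea in current Rust, with invented types:

```rust
use std::collections::HashMap;

type NodeId = i64;

// Stand-in for ty::substs: a payload we would rather borrow than copy.
#[derive(Debug)]
struct Substs(Vec<u32>);

struct Inherited {
    node_type_substs: HashMap<NodeId, Substs>,
}

impl Inherited {
    // Callback style, like the rewritten opt_node_ty_substs: the caller is
    // handed a borrow of the entry instead of a by-value copy out of the table.
    fn opt_node_ty_substs(&self, id: NodeId, f: impl FnOnce(&Substs)) {
        if let Some(s) = self.node_type_substs.get(&id) {
            f(s);
        }
    }
}

fn main() {
    let mut inh = Inherited { node_type_substs: HashMap::new() };
    inh.node_type_substs.insert(3, Substs(vec![1, 2, 3]));
    inh.opt_node_ty_substs(3, |s| println!("substs for node 3: {:?}", s));
}
```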
fn mk_subty(&self, fn mk_subty(&self,
a_is_expected: bool, a_is_expected: bool,
@ -1059,7 +1063,8 @@ pub fn impl_self_ty(vcx: &VtableContext,
let tcx = vcx.tcx(); let tcx = vcx.tcx();
let (n_tps, region_param, raw_ty) = if did.crate == ast::local_crate { let (n_tps, region_param, raw_ty) = if did.crate == ast::local_crate {
let region_param = tcx.region_paramd_items.find(&did.node); let region_param = tcx.region_paramd_items.find(&did.node).
map_consume(|x| *x);
match tcx.items.find(&did.node) { match tcx.items.find(&did.node) {
Some(ast_map::node_item(@ast::item { Some(ast_map::node_item(@ast::item {
node: ast::item_impl(ref ts, _, st, _), node: ast::item_impl(ref ts, _, st, _),
@ -1769,7 +1774,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
check_completeness: bool) { check_completeness: bool) {
let tcx = fcx.ccx.tcx; let tcx = fcx.ccx.tcx;
let class_field_map = HashMap(); let mut class_field_map = LinearMap::new();
let mut fields_found = 0; let mut fields_found = 0;
for field_types.each |field| { for field_types.each |field| {
// XXX: Check visibility here. // XXX: Check visibility here.
@ -1782,7 +1787,9 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
for ast_fields.each |field| { for ast_fields.each |field| {
let mut expected_field_type = ty::mk_err(tcx); let mut expected_field_type = ty::mk_err(tcx);
match class_field_map.find(&field.node.ident) { let pair = class_field_map.find(&field.node.ident).
map_consume(|x| *x);
match pair {
None => { None => {
tcx.sess.span_err( tcx.sess.span_err(
field.span, field.span,
@ -1825,7 +1832,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
let mut missing_fields = ~[]; let mut missing_fields = ~[];
for field_types.each |class_field| { for field_types.each |class_field| {
let name = class_field.ident; let name = class_field.ident;
let (_, seen) = class_field_map.get(&name); let (_, seen) = *class_field_map.get(&name);
if !seen { if !seen {
missing_fields.push( missing_fields.push(
~"`" + *tcx.sess.str_of(name) + ~"`"); ~"`" + *tcx.sess.str_of(name) + ~"`");
@ -1862,7 +1869,8 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
let type_parameter_count, region_parameterized, raw_type; let type_parameter_count, region_parameterized, raw_type;
if class_id.crate == ast::local_crate { if class_id.crate == ast::local_crate {
region_parameterized = region_parameterized =
tcx.region_paramd_items.find(&class_id.node); tcx.region_paramd_items.find(&class_id.node).
map_consume(|x| *x);
match tcx.items.find(&class_id.node) { match tcx.items.find(&class_id.node) {
Some(ast_map::node_item(@ast::item { Some(ast_map::node_item(@ast::item {
node: ast::item_struct(_, ref generics), node: ast::item_struct(_, ref generics),
@ -1950,7 +1958,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
let type_parameter_count, region_parameterized, raw_type; let type_parameter_count, region_parameterized, raw_type;
if enum_id.crate == ast::local_crate { if enum_id.crate == ast::local_crate {
region_parameterized = region_parameterized =
tcx.region_paramd_items.find(&enum_id.node); tcx.region_paramd_items.find(&enum_id.node).map_consume(|x| *x);
match tcx.items.find(&enum_id.node) { match tcx.items.find(&enum_id.node) {
Some(ast_map::node_item(@ast::item { Some(ast_map::node_item(@ast::item {
node: ast::item_enum(_, ref generics), node: ast::item_enum(_, ref generics),
@ -2730,11 +2738,11 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
ast::expr_struct(path, ref fields, base_expr) => { ast::expr_struct(path, ref fields, base_expr) => {
// Resolve the path. // Resolve the path.
match tcx.def_map.find(&id) { match tcx.def_map.find(&id) {
Some(ast::def_struct(type_def_id)) => { Some(&ast::def_struct(type_def_id)) => {
check_struct_constructor(fcx, id, expr.span, type_def_id, check_struct_constructor(fcx, id, expr.span, type_def_id,
*fields, base_expr); *fields, base_expr);
} }
Some(ast::def_variant(enum_id, variant_id)) => { Some(&ast::def_variant(enum_id, variant_id)) => {
check_struct_enum_variant(fcx, id, expr.span, enum_id, check_struct_enum_variant(fcx, id, expr.span, enum_id,
variant_id, *fields); variant_id, *fields);
} }
@ -2834,7 +2842,7 @@ pub fn check_decl_local(fcx: @mut FnCtxt, local: @ast::local) {
} }
let region = let region =
ty::re_scope(tcx.region_map.get(&local.node.id)); ty::re_scope(*tcx.region_map.get(&local.node.id));
let pcx = pat_ctxt { let pcx = pat_ctxt {
fcx: fcx, fcx: fcx,
map: pat_id_map(tcx.def_map, local.node.pat), map: pat_id_map(tcx.def_map, local.node.pat),
@ -3350,7 +3358,7 @@ pub fn may_break(cx: ty::ctxt, id: ast::node_id, b: &ast::blk) -> bool {
match e.node { match e.node {
ast::expr_break(Some(_)) => ast::expr_break(Some(_)) =>
match cx.def_map.find(&e.id) { match cx.def_map.find(&e.id) {
Some(ast::def_label(loop_id)) if id == loop_id => true, Some(&ast::def_label(loop_id)) if id == loop_id => true,
_ => false, _ => false,
}, },
_ => false _ => false

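Note: the hunks above all follow the same mechanical change. oldmap's find handed the value back by copy, while LinearMap::find returns Option<&V>, so call sites either destructure through the reference (Some(&t) => t) or copy the value out explicitly with map_consume(|x| *x); likewise get now returns a borrow that must be dereferenced (*map.get(&k)). A minimal sketch of the same distinction against today's std::collections::HashMap; the key and value types are illustrative only, not taken from the patch:

use std::collections::HashMap;

fn main() {
    let mut node_types: HashMap<i64, u32> = HashMap::new();
    node_types.insert(7, 42);

    // The lookup hands back a borrow, not an owned value...
    let by_ref: Option<&u32> = node_types.get(&7);

    // ...so callers either pattern-match through the reference,
    match node_types.get(&7) {
        Some(&t) => println!("node 7 has type {}", t),
        None => println!("no type recorded"),
    }

    // ...or copy the value out explicitly (the patch spells this
    // map_consume(|x| *x) in the dialect of the time).
    let copied: Option<u32> = by_ref.copied();
    assert_eq!(copied, Some(42));
}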

@ -180,7 +180,7 @@ pub fn visit_block(b: &ast::blk, &&rcx: @mut Rcx, v: rvt) {
pub fn visit_expr(expr: @ast::expr, &&rcx: @mut Rcx, v: rvt) { pub fn visit_expr(expr: @ast::expr, &&rcx: @mut Rcx, v: rvt) {
debug!("visit_expr(e=%s)", rcx.fcx.expr_to_str(expr)); debug!("visit_expr(e=%s)", rcx.fcx.expr_to_str(expr));
for rcx.fcx.inh.adjustments.find(&expr.id).each |adjustment| { for rcx.fcx.inh.adjustments.find(&expr.id).each |&adjustment| {
match *adjustment { match *adjustment {
@ty::AutoDerefRef( @ty::AutoDerefRef(
ty::AutoDerefRef { ty::AutoDerefRef {
@ -331,7 +331,7 @@ pub fn constrain_auto_ref(rcx: @mut Rcx, expr: @ast::expr) {
let adjustment = rcx.fcx.inh.adjustments.find(&expr.id); let adjustment = rcx.fcx.inh.adjustments.find(&expr.id);
let region = match adjustment { let region = match adjustment {
Some(@ty::AutoDerefRef( Some(&@ty::AutoDerefRef(
ty::AutoDerefRef { ty::AutoDerefRef {
autoref: Some(ref auto_ref), _})) => { autoref: Some(ref auto_ref), _})) => {
auto_ref.region auto_ref.region
@ -727,7 +727,7 @@ pub mod guarantor {
debug!("before adjustments, cat=%?", expr_ct.cat); debug!("before adjustments, cat=%?", expr_ct.cat);
match rcx.fcx.inh.adjustments.find(&expr.id) { match rcx.fcx.inh.adjustments.find(&expr.id) {
Some(@ty::AutoAddEnv(*)) => { Some(&@ty::AutoAddEnv(*)) => {
// This is basically an rvalue, not a pointer, no regions // This is basically an rvalue, not a pointer, no regions
// involved. // involved.
expr_ct.cat = ExprCategorization { expr_ct.cat = ExprCategorization {
@ -736,7 +736,7 @@ pub mod guarantor {
}; };
} }
Some(@ty::AutoDerefRef(ref adjustment)) => { Some(&@ty::AutoDerefRef(ref adjustment)) => {
debug!("adjustment=%?", adjustment); debug!("adjustment=%?", adjustment);
expr_ct = apply_autoderefs( expr_ct = apply_autoderefs(


@ -192,7 +192,7 @@ pub fn lookup_vtable(vcx: &VtableContext,
match ty::get(ty).sty { match ty::get(ty).sty {
ty::ty_param(param_ty {idx: n, def_id: did}) => { ty::ty_param(param_ty {idx: n, def_id: did}) => {
let mut n_bound = 0; let mut n_bound = 0;
let bounds = tcx.ty_param_bounds.get(&did.node); let bounds = *tcx.ty_param_bounds.get(&did.node);
for ty::iter_bound_traits_and_supertraits( for ty::iter_bound_traits_and_supertraits(
tcx, bounds) |ity| { tcx, bounds) |ity| {
debug!("checking bounds trait %?", debug!("checking bounds trait %?",
@ -511,9 +511,8 @@ pub fn early_resolve_expr(ex: @ast::expr,
let cx = fcx.ccx; let cx = fcx.ccx;
match ex.node { match ex.node {
ast::expr_path(*) => { ast::expr_path(*) => {
match fcx.opt_node_ty_substs(ex.id) { for fcx.opt_node_ty_substs(ex.id) |substs| {
Some(ref substs) => { let def = *cx.tcx.def_map.get(&ex.id);
let def = cx.tcx.def_map.get(&ex.id);
let did = ast_util::def_id_of_def(def); let did = ast_util::def_id_of_def(def);
let item_ty = ty::lookup_item_type(cx.tcx, did); let item_ty = ty::lookup_item_type(cx.tcx, did);
debug!("early resolve expr: def %? %?, %?, %?", ex.id, did, def, debug!("early resolve expr: def %? %?, %?, %?", ex.id, did, def,
@ -532,8 +531,6 @@ pub fn early_resolve_expr(ex: @ast::expr,
} }
} }
} }
_ => ()
}
} }
ast::expr_paren(e) => { ast::expr_paren(e) => {

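Note: with the substitutions now held in a LinearMap, returning them by value from opt_node_ty_substs would mean copying them out of the map, so the accessor is rewritten in callback style and call sites move from match fcx.opt_node_ty_substs(id) { Some(ref substs) => ... } to for fcx.opt_node_ty_substs(id) |substs| { ... }. A rough modern-Rust sketch of the same idea; Substs and node_type_substs here are hypothetical stand-ins for the compiler's types, not part of the patch:

use std::collections::HashMap;

#[derive(Debug)]
struct Substs { tps: Vec<String> } // illustrative stand-in for ty::substs

struct FnCtxt { node_type_substs: HashMap<i64, Substs> }

impl FnCtxt {
    // Hand the borrowed entry to a closure instead of returning a copy.
    fn opt_node_ty_substs<F: FnOnce(&Substs)>(&self, id: i64, f: F) {
        if let Some(s) = self.node_type_substs.get(&id) {
            f(s);
        }
    }
}

fn main() {
    let mut map = HashMap::new();
    map.insert(1, Substs { tps: vec!["int".to_string()] });
    let fcx = FnCtxt { node_type_substs: map };
    fcx.opt_node_ty_substs(1, |substs| println!("substs for node 1: {:?}", substs));
}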

@ -119,7 +119,7 @@ fn resolve_type_vars_for_node(wbcx: @mut WbCtxt, sp: span, id: ast::node_id)
match fcx.inh.adjustments.find(&id) { match fcx.inh.adjustments.find(&id) {
None => (), None => (),
Some(@ty::AutoAddEnv(r, s)) => { Some(&@ty::AutoAddEnv(r, s)) => {
match resolve_region(fcx.infcx(), r, resolve_all | force_all) { match resolve_region(fcx.infcx(), r, resolve_all | force_all) {
Err(e) => { Err(e) => {
// This should not, I think, happen: // This should not, I think, happen:
@ -135,7 +135,7 @@ fn resolve_type_vars_for_node(wbcx: @mut WbCtxt, sp: span, id: ast::node_id)
} }
} }
Some(@ty::AutoDerefRef(adj)) => { Some(&@ty::AutoDerefRef(adj)) => {
let resolved_autoref = match adj.autoref { let resolved_autoref = match adj.autoref {
Some(ref autoref) => { Some(ref autoref) => {
match resolve_region(fcx.infcx(), autoref.region, match resolve_region(fcx.infcx(), autoref.region,
@ -176,10 +176,9 @@ fn resolve_type_vars_for_node(wbcx: @mut WbCtxt, sp: span, id: ast::node_id)
debug!("resolve_type_vars_for_node(id=%d, n_ty=%s, t=%s)", debug!("resolve_type_vars_for_node(id=%d, n_ty=%s, t=%s)",
id, ppaux::ty_to_str(tcx, n_ty), ppaux::ty_to_str(tcx, t)); id, ppaux::ty_to_str(tcx, n_ty), ppaux::ty_to_str(tcx, t));
write_ty_to_tcx(tcx, id, t); write_ty_to_tcx(tcx, id, t);
match fcx.opt_node_ty_substs(id) { for fcx.opt_node_ty_substs(id) |substs| {
Some(ref substs) => {
let mut new_tps = ~[]; let mut new_tps = ~[];
for (*substs).tps.each |subst| { for substs.tps.each |subst| {
match resolve_type_vars_in_type(fcx, sp, *subst) { match resolve_type_vars_in_type(fcx, sp, *subst) {
Some(t) => new_tps.push(t), Some(t) => new_tps.push(t),
None => { wbcx.success = false; return None; } None => { wbcx.success = false; return None; }
@ -187,8 +186,6 @@ fn resolve_type_vars_for_node(wbcx: @mut WbCtxt, sp: span, id: ast::node_id)
} }
write_substs_to_tcx(tcx, id, new_tps); write_substs_to_tcx(tcx, id, new_tps);
} }
None => ()
}
return Some(t); return Some(t);
} }
} }


@ -58,7 +58,6 @@ use util::ppaux::ty_to_str;
use core::result::Ok; use core::result::Ok;
use core::hashmap::linear::{LinearMap, LinearSet}; use core::hashmap::linear::{LinearMap, LinearSet};
use core::uint; use core::uint;
use std::oldmap::HashMap;
pub struct UniversalQuantificationResult { pub struct UniversalQuantificationResult {
monotype: t, monotype: t,
@ -187,7 +186,7 @@ pub fn CoherenceChecker(crate_context: @mut CrateCtxt) -> CoherenceChecker {
crate_context: crate_context, crate_context: crate_context,
inference_context: new_infer_ctxt(crate_context.tcx), inference_context: new_infer_ctxt(crate_context.tcx),
base_type_def_ids: HashMap() base_type_def_ids: @mut LinearMap::new(),
} }
} }
@ -198,7 +197,7 @@ pub struct CoherenceChecker {
// A mapping from implementations to the corresponding base type // A mapping from implementations to the corresponding base type
// definition ID. // definition ID.
base_type_def_ids: HashMap<def_id,def_id>, base_type_def_ids: @mut LinearMap<def_id,def_id>,
} }
pub impl CoherenceChecker { pub impl CoherenceChecker {
@ -475,7 +474,7 @@ pub impl CoherenceChecker {
ty_to_str(self.crate_context.tcx, self_t)); ty_to_str(self.crate_context.tcx, self_t));
match self.crate_context.tcx.trait_impls.find(&trait_t) { match self.crate_context.tcx.trait_impls.find(&trait_t) {
None => { None => {
let m = HashMap(); let m = @mut LinearMap::new();
m.insert(self_t, the_impl); m.insert(self_t, the_impl);
self.crate_context.tcx.trait_impls.insert(trait_t, m); self.crate_context.tcx.trait_impls.insert(trait_t, m);
} }
@ -505,14 +504,14 @@ pub impl CoherenceChecker {
f: &fn(x: &ty::method) -> bool) { f: &fn(x: &ty::method) -> bool) {
// Make a list of all the names of the provided methods. // Make a list of all the names of the provided methods.
// XXX: This is horrible. // XXX: This is horrible.
let provided_method_idents = HashMap(); let mut provided_method_idents = LinearSet::new();
let tcx = self.crate_context.tcx; let tcx = self.crate_context.tcx;
for ty::provided_trait_methods(tcx, trait_did).each |ident| { for ty::provided_trait_methods(tcx, trait_did).each |ident| {
provided_method_idents.insert(*ident, ()); provided_method_idents.insert(*ident);
} }
for ty::trait_methods(tcx, trait_did).each |method| { for ty::trait_methods(tcx, trait_did).each |method| {
if provided_method_idents.contains_key(&method.ident) { if provided_method_idents.contains(&method.ident) {
if !f(method) { if !f(method) {
break; break;
} }
@ -622,7 +621,7 @@ pub impl CoherenceChecker {
fn get_self_type_for_implementation(&self, implementation: @Impl) fn get_self_type_for_implementation(&self, implementation: @Impl)
-> ty_param_bounds_and_ty { -> ty_param_bounds_and_ty {
return self.crate_context.tcx.tcache.get(&implementation.did); return *self.crate_context.tcx.tcache.get(&implementation.did);
} }
// Privileged scope checking // Privileged scope checking
@ -694,7 +693,7 @@ pub impl CoherenceChecker {
fn trait_ref_to_trait_def_id(&self, trait_ref: @trait_ref) -> def_id { fn trait_ref_to_trait_def_id(&self, trait_ref: @trait_ref) -> def_id {
let def_map = self.crate_context.tcx.def_map; let def_map = self.crate_context.tcx.def_map;
let trait_def = def_map.get(&trait_ref.ref_id); let trait_def = *def_map.get(&trait_ref.ref_id);
let trait_id = def_id_of_def(trait_def); let trait_id = def_id_of_def(trait_def);
return trait_id; return trait_id;
} }
@ -773,7 +772,7 @@ pub impl CoherenceChecker {
has no provided methods", trait_did.node); has no provided methods", trait_did.node);
/* fall through */ /* fall through */
} }
Some(all_provided) => { Some(&all_provided) => {
debug!("(creating impl) trait with node_id `%d` \ debug!("(creating impl) trait with node_id `%d` \
has provided methods", trait_did.node); has provided methods", trait_did.node);
// Add all provided methods. // Add all provided methods.
@ -815,7 +814,7 @@ pub impl CoherenceChecker {
// External crate handling // External crate handling
fn add_impls_for_module(&self, impls_seen: HashMap<def_id,()>, fn add_impls_for_module(&self, impls_seen: &mut LinearSet<def_id>,
crate_store: @mut CStore, crate_store: @mut CStore,
module_def_id: def_id) { module_def_id: def_id) {
let implementations = get_impls_for_mod(crate_store, let implementations = get_impls_for_mod(crate_store,
@ -828,16 +827,11 @@ pub impl CoherenceChecker {
// Make sure we don't visit the same implementation // Make sure we don't visit the same implementation
// multiple times. // multiple times.
match impls_seen.find(&implementation.did) { if !impls_seen.insert(implementation.did) {
None => {
// Good. Continue.
impls_seen.insert(implementation.did, ());
}
Some(_) => {
// Skip this one. // Skip this one.
loop; loop;
} }
} // Good. Continue.
let self_type = lookup_item_type(self.crate_context.tcx, let self_type = lookup_item_type(self.crate_context.tcx,
implementation.did); implementation.did);
@ -939,11 +933,11 @@ pub impl CoherenceChecker {
// Adds implementations and traits from external crates to the coherence // Adds implementations and traits from external crates to the coherence
// info. // info.
fn add_external_crates(&self) { fn add_external_crates(&self) {
let impls_seen = HashMap(); let mut impls_seen = LinearSet::new();
let crate_store = self.crate_context.tcx.sess.cstore; let crate_store = self.crate_context.tcx.sess.cstore;
do iter_crate_data(crate_store) |crate_number, _crate_metadata| { do iter_crate_data(crate_store) |crate_number, _crate_metadata| {
self.add_impls_for_module(impls_seen, self.add_impls_for_module(&mut impls_seen,
crate_store, crate_store,
def_id { crate: crate_number, def_id { crate: crate_number,
node: 0 }); node: 0 });
@ -951,7 +945,7 @@ pub impl CoherenceChecker {
for each_path(crate_store, crate_number) |_p, def_like| { for each_path(crate_store, crate_number) |_p, def_like| {
match def_like { match def_like {
dl_def(def_mod(def_id)) => { dl_def(def_mod(def_id)) => {
self.add_impls_for_module(impls_seen, self.add_impls_for_module(&mut impls_seen,
crate_store, crate_store,
def_id); def_id);
} }
@ -1003,7 +997,7 @@ pub impl CoherenceChecker {
ty::ty_struct(type_def_id, _) => { ty::ty_struct(type_def_id, _) => {
tcx.destructor_for_type.insert(type_def_id, tcx.destructor_for_type.insert(type_def_id,
method_def_id); method_def_id);
tcx.destructors.insert(method_def_id, ()); tcx.destructors.insert(method_def_id);
} }
_ => { _ => {
// Destructors only work on nominal types. // Destructors only work on nominal types.

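Note: several of the coherence maps only ever stored () values, so they become sets, and the dedup logic in add_impls_for_module collapses to a single call because insert reports whether the element was actually new. A small sketch of that idiom with today's HashSet; the DefId alias is illustrative:

use std::collections::HashSet;

type DefId = (u32, u32); // illustrative stand-in for ast::def_id

fn visit_impls(impls: &[DefId]) {
    let mut impls_seen: HashSet<DefId> = HashSet::new();
    for &did in impls {
        // insert returns false when the id was already present,
        // replacing the old find-then-insert two-step.
        if !impls_seen.insert(did) {
            continue; // skip this one
        }
        println!("processing impl {:?} for the first time", did);
    }
}

fn main() {
    visit_impls(&[(0, 1), (0, 2), (0, 1)]);
}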

@ -280,7 +280,7 @@ pub fn ensure_trait_methods(ccx: &CrateCtxt,
let tcx = ccx.tcx; let tcx = ccx.tcx;
let region_paramd = tcx.region_paramd_items.find(&id); let region_paramd = tcx.region_paramd_items.find(&id).map_consume(|x| *x);
match tcx.items.get(&id) { match tcx.items.get(&id) {
ast_map::node_item(@ast::item { ast_map::node_item(@ast::item {
node: ast::item_trait(ref generics, _, ref ms), node: ast::item_trait(ref generics, _, ref ms),
@ -615,7 +615,7 @@ pub fn ensure_no_ty_param_bounds(ccx: &CrateCtxt,
pub fn convert(ccx: &CrateCtxt, it: @ast::item) { pub fn convert(ccx: &CrateCtxt, it: @ast::item) {
let tcx = ccx.tcx; let tcx = ccx.tcx;
let rp = tcx.region_paramd_items.find(&it.id); let rp = tcx.region_paramd_items.find(&it.id).map_consume(|x| *x);
debug!("convert: item %s with id %d rp %?", debug!("convert: item %s with id %d rp %?",
*tcx.sess.str_of(it.ident), it.id, rp); *tcx.sess.str_of(it.ident), it.id, rp);
match it.node { match it.node {
@ -828,10 +828,10 @@ pub fn ty_of_item(ccx: &CrateCtxt, it: @ast::item)
let def_id = local_def(it.id); let def_id = local_def(it.id);
let tcx = ccx.tcx; let tcx = ccx.tcx;
match tcx.tcache.find(&def_id) { match tcx.tcache.find(&def_id) {
Some(tpt) => return tpt, Some(&tpt) => return tpt,
_ => {} _ => {}
} }
let rp = tcx.region_paramd_items.find(&it.id); let rp = tcx.region_paramd_items.find(&it.id).map_consume(|x| *x);
match it.node { match it.node {
ast::item_const(t, _) => { ast::item_const(t, _) => {
let typ = ccx.to_ty(&empty_rscope, t); let typ = ccx.to_ty(&empty_rscope, t);
@ -857,11 +857,11 @@ pub fn ty_of_item(ccx: &CrateCtxt, it: @ast::item)
} }
ast::item_ty(t, ref generics) => { ast::item_ty(t, ref generics) => {
match tcx.tcache.find(&local_def(it.id)) { match tcx.tcache.find(&local_def(it.id)) {
Some(tpt) => return tpt, Some(&tpt) => return tpt,
None => { } None => { }
} }
let rp = tcx.region_paramd_items.find(&it.id); let rp = tcx.region_paramd_items.find(&it.id).map_consume(|x| *x);
let tpt = { let tpt = {
let ty = { let ty = {
let t0 = ccx.to_ty(&type_rscope(rp), t); let t0 = ccx.to_ty(&type_rscope(rp), t);
@ -991,7 +991,7 @@ pub fn ty_param_bounds(ccx: &CrateCtxt,
-> @~[ty::param_bounds] { -> @~[ty::param_bounds] {
@do generics.ty_params.map_to_vec |param| { @do generics.ty_params.map_to_vec |param| {
match ccx.tcx.ty_param_bounds.find(&param.id) { match ccx.tcx.ty_param_bounds.find(&param.id) {
Some(bs) => bs, Some(&bs) => bs,
None => { None => {
let bounds = compute_bounds(ccx, param.bounds); let bounds = compute_bounds(ccx, param.bounds);
ccx.tcx.ty_param_bounds.insert(param.id, bounds); ccx.tcx.ty_param_bounds.insert(param.id, bounds);


@ -549,11 +549,11 @@ use util::ppaux::note_and_explain_region;
use core::cell::{Cell, empty_cell}; use core::cell::{Cell, empty_cell};
use core::cmp; use core::cmp;
use core::hashmap::linear::{LinearMap, LinearSet};
use core::result::{Err, Ok, Result}; use core::result::{Err, Ok, Result};
use core::to_bytes; use core::to_bytes;
use core::uint; use core::uint;
use core::vec; use core::vec;
use std::oldmap::HashMap;
use syntax::codemap::span; use syntax::codemap::span;
enum Constraint { enum Constraint {
@ -619,15 +619,19 @@ enum UndoLogEntry {
Snapshot, Snapshot,
AddVar(RegionVid), AddVar(RegionVid),
AddConstraint(Constraint), AddConstraint(Constraint),
AddCombination(CombineMap, TwoRegions) AddCombination(CombineMapType, TwoRegions)
} }
type CombineMap = HashMap<TwoRegions, RegionVid>; enum CombineMapType {
Lub, Glb
}
type CombineMap = LinearMap<TwoRegions, RegionVid>;
pub struct RegionVarBindings { pub struct RegionVarBindings {
tcx: ty::ctxt, tcx: ty::ctxt,
var_spans: ~[span], var_spans: ~[span],
constraints: HashMap<Constraint, span>, constraints: LinearMap<Constraint, span>,
lubs: CombineMap, lubs: CombineMap,
glbs: CombineMap, glbs: CombineMap,
skolemization_count: uint, skolemization_count: uint,
@ -654,22 +658,15 @@ pub fn RegionVarBindings(tcx: ty::ctxt) -> RegionVarBindings {
tcx: tcx, tcx: tcx,
var_spans: ~[], var_spans: ~[],
values: empty_cell(), values: empty_cell(),
constraints: HashMap(), constraints: LinearMap::new(),
lubs: CombineMap(), lubs: LinearMap::new(),
glbs: CombineMap(), glbs: LinearMap::new(),
skolemization_count: 0, skolemization_count: 0,
bound_count: 0, bound_count: 0,
undo_log: ~[] undo_log: ~[]
} }
} }
// Note: takes two regions but doesn't care which is `a` and which is
// `b`! Not obvious that this is the most efficient way to go about
// it.
fn CombineMap() -> CombineMap {
return HashMap();
}
pub impl RegionVarBindings { pub impl RegionVarBindings {
fn in_snapshot(&mut self) -> bool { fn in_snapshot(&mut self) -> bool {
self.undo_log.len() > 0 self.undo_log.len() > 0
@ -706,8 +703,11 @@ pub impl RegionVarBindings {
AddConstraint(ref constraint) => { AddConstraint(ref constraint) => {
self.constraints.remove(constraint); self.constraints.remove(constraint);
} }
AddCombination(map, ref regions) => { AddCombination(Glb, ref regions) => {
map.remove(regions); self.glbs.remove(regions);
}
AddCombination(Lub, ref regions) => {
self.lubs.remove(regions);
} }
} }
} }
@ -825,7 +825,7 @@ pub impl RegionVarBindings {
(re_infer(ReVar(*)), _) | (_, re_infer(ReVar(*))) => { (re_infer(ReVar(*)), _) | (_, re_infer(ReVar(*))) => {
self.combine_vars( self.combine_vars(
self.lubs, a, b, span, Lub, a, b, span,
|this, old_r, new_r| this.make_subregion(span, old_r, new_r)) |this, old_r, new_r| this.make_subregion(span, old_r, new_r))
} }
@ -852,7 +852,7 @@ pub impl RegionVarBindings {
(re_infer(ReVar(*)), _) | (_, re_infer(ReVar(*))) => { (re_infer(ReVar(*)), _) | (_, re_infer(ReVar(*))) => {
self.combine_vars( self.combine_vars(
self.glbs, a, b, span, Glb, a, b, span,
|this, old_r, new_r| this.make_subregion(span, new_r, old_r)) |this, old_r, new_r| this.make_subregion(span, new_r, old_r))
} }
@ -905,7 +905,7 @@ pub impl RegionVarBindings {
} }
fn combine_vars(&mut self, fn combine_vars(&mut self,
combines: CombineMap, t: CombineMapType,
a: Region, a: Region,
b: Region, b: Region,
span: span, span: span,
@ -914,13 +914,29 @@ pub impl RegionVarBindings {
new_r: Region) -> cres<()>) new_r: Region) -> cres<()>)
-> cres<Region> { -> cres<Region> {
let vars = TwoRegions { a: a, b: b }; let vars = TwoRegions { a: a, b: b };
let c;
{
// FIXME (#3850): shouldn't need a scope, nor should this need to be
// done twice to get the maps out
{
let combines = match t {
Glb => &self.glbs, Lub => &self.lubs
};
match combines.find(&vars) { match combines.find(&vars) {
Some(c) => Ok(re_infer(ReVar(c))), Some(&c) => return Ok(re_infer(ReVar(c))),
None => { None => ()
let c = self.new_region_var(span); }
}
c = self.new_region_var(span);
{
let combines = match t {
Glb => &mut self.glbs, Lub => &mut self.lubs
};
combines.insert(vars, c); combines.insert(vars, c);
}
}
if self.in_snapshot() { if self.in_snapshot() {
self.undo_log.push(AddCombination(combines, vars)); self.undo_log.push(AddCombination(t, vars));
} }
do relate(self, a, re_infer(ReVar(c))).then { do relate(self, a, re_infer(ReVar(c))).then {
do relate(self, b, re_infer(ReVar(c))).then { do relate(self, b, re_infer(ReVar(c))).then {
@ -929,8 +945,6 @@ pub impl RegionVarBindings {
} }
} }
} }
}
}
fn vars_created_since_snapshot(&mut self, fn vars_created_since_snapshot(&mut self,
snapshot: uint) snapshot: uint)
@ -1206,11 +1220,7 @@ struct SpannedRegion {
span: span, span: span,
} }
type TwoRegionsMap = HashMap<TwoRegions, ()>; type TwoRegionsMap = LinearSet<TwoRegions>;
fn TwoRegionsMap() -> TwoRegionsMap {
return HashMap();
}
pub impl RegionVarBindings { pub impl RegionVarBindings {
fn infer_variable_values(&mut self) -> ~[GraphNodeValue] { fn infer_variable_values(&mut self) -> ~[GraphNodeValue] {
@ -1239,7 +1249,7 @@ pub impl RegionVarBindings {
// It would be nice to write this using map(): // It would be nice to write this using map():
let mut edges = vec::with_capacity(num_edges); let mut edges = vec::with_capacity(num_edges);
for self.constraints.each |constraint, span| { for self.constraints.each |&(constraint, span)| {
edges.push(GraphEdge { edges.push(GraphEdge {
next_edge: [uint::max_value, uint::max_value], next_edge: [uint::max_value, uint::max_value],
constraint: *constraint, constraint: *constraint,
@ -1439,7 +1449,7 @@ pub impl RegionVarBindings {
&mut self, &mut self,
graph: &Graph) -> ~[GraphNodeValue] graph: &Graph) -> ~[GraphNodeValue]
{ {
let dup_map = TwoRegionsMap(); let mut dup_map = LinearSet::new();
graph.nodes.mapi(|idx, node| { graph.nodes.mapi(|idx, node| {
match node.value { match node.value {
Value(_) => { Value(_) => {
@ -1478,11 +1488,11 @@ pub impl RegionVarBindings {
match node.classification { match node.classification {
Expanding => { Expanding => {
self.report_error_for_expanding_node( self.report_error_for_expanding_node(
graph, dup_map, node_vid); graph, &mut dup_map, node_vid);
} }
Contracting => { Contracting => {
self.report_error_for_contracting_node( self.report_error_for_contracting_node(
graph, dup_map, node_vid); graph, &mut dup_map, node_vid);
} }
} }
} }
@ -1494,17 +1504,17 @@ pub impl RegionVarBindings {
// Used to suppress reporting the same basic error over and over // Used to suppress reporting the same basic error over and over
fn is_reported(&mut self, fn is_reported(&mut self,
dup_map: TwoRegionsMap, dup_map: &mut TwoRegionsMap,
r_a: Region, r_a: Region,
r_b: Region) r_b: Region)
-> bool { -> bool {
let key = TwoRegions { a: r_a, b: r_b }; let key = TwoRegions { a: r_a, b: r_b };
!dup_map.insert(key, ()) !dup_map.insert(key)
} }
fn report_error_for_expanding_node(&mut self, fn report_error_for_expanding_node(&mut self,
graph: &Graph, graph: &Graph,
dup_map: TwoRegionsMap, dup_map: &mut TwoRegionsMap,
node_idx: RegionVid) { node_idx: RegionVid) {
// Errors in expanding nodes result from a lower-bound that is // Errors in expanding nodes result from a lower-bound that is
// not contained by an upper-bound. // not contained by an upper-bound.
@ -1557,7 +1567,7 @@ pub impl RegionVarBindings {
fn report_error_for_contracting_node(&mut self, fn report_error_for_contracting_node(&mut self,
graph: &Graph, graph: &Graph,
dup_map: TwoRegionsMap, dup_map: &mut TwoRegionsMap,
node_idx: RegionVid) { node_idx: RegionVid) {
// Errors in contracting nodes result from two upper-bounds // Errors in contracting nodes result from two upper-bounds
// that have no intersection. // that have no intersection.
@ -1614,9 +1624,9 @@ pub impl RegionVarBindings {
orig_node_idx: RegionVid, orig_node_idx: RegionVid,
dir: Direction) dir: Direction)
-> ~[SpannedRegion] { -> ~[SpannedRegion] {
let set = HashMap(); let mut set = LinearSet::new();
let mut stack = ~[orig_node_idx]; let mut stack = ~[orig_node_idx];
set.insert(orig_node_idx.to_uint(), ()); set.insert(orig_node_idx.to_uint());
let mut result = ~[]; let mut result = ~[];
while !vec::is_empty(stack) { while !vec::is_empty(stack) {
let node_idx = stack.pop(); let node_idx = stack.pop();
@ -1627,7 +1637,7 @@ pub impl RegionVarBindings {
Incoming => from_vid, Incoming => from_vid,
Outgoing => to_vid Outgoing => to_vid
}; };
if set.insert(vid.to_uint(), ()) { if set.insert(vid.to_uint()) {
stack.push(vid); stack.push(vid);
} }
} }

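Note: because RegionVarBindings now owns its lubs and glbs maps directly instead of sharing @-boxed oldmaps, combine_vars can no longer be handed one of the maps while also taking &mut self; the patch threads a small CombineMapType tag instead and re-selects the map at each use, with an extra scope (and the FIXME for #3850) to keep the borrows disjoint. A rough modern-Rust sketch of the tag-selection shape, with simplified key and value types that are not from the patch:

use std::collections::HashMap;

#[derive(Clone, Copy)]
enum CombineMapType { Lub, Glb }

type TwoRegions = (u32, u32);

struct RegionVarBindings {
    lubs: HashMap<TwoRegions, u32>,
    glbs: HashMap<TwoRegions, u32>,
    next_var: u32,
}

impl RegionVarBindings {
    fn combine_vars(&mut self, t: CombineMapType, vars: TwoRegions) -> u32 {
        // Look up in the map named by the tag; the borrow ends before we mutate self.
        let cached = match t {
            CombineMapType::Lub => self.lubs.get(&vars).copied(),
            CombineMapType::Glb => self.glbs.get(&vars).copied(),
        };
        if let Some(c) = cached {
            return c;
        }
        let c = self.next_var;
        self.next_var += 1;
        match t {
            CombineMapType::Lub => { self.lubs.insert(vars, c); }
            CombineMapType::Glb => { self.glbs.insert(vars, c); }
        }
        c
    }
}

fn main() {
    let mut rvb = RegionVarBindings { lubs: HashMap::new(), glbs: HashMap::new(), next_var: 0 };
    assert_eq!(rvb.combine_vars(CombineMapType::Lub, (1, 2)), 0);
    assert_eq!(rvb.combine_vars(CombineMapType::Lub, (1, 2)), 0); // cached
    assert_eq!(rvb.combine_vars(CombineMapType::Glb, (1, 2)), 1);
}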

@ -55,12 +55,12 @@ use middle::ty;
use util::common::time; use util::common::time;
use util::ppaux; use util::ppaux;
use core::hashmap::linear::LinearMap;
use core::result; use core::result;
use core::vec; use core::vec;
use std::list::{List, Nil, Cons}; use std::list::{List, Nil, Cons};
use std::list; use std::list;
use std::oldmap::HashMap; use std::oldmap::HashMap;
use std::oldmap;
use syntax::codemap::{span, spanned, respan}; use syntax::codemap::{span, spanned, respan};
use syntax::print::pprust::*; use syntax::print::pprust::*;
use syntax::{ast, ast_util, ast_map}; use syntax::{ast, ast_util, ast_map};
@ -171,7 +171,7 @@ pub impl vtable_origin {
} }
} }
pub type vtable_map = HashMap<ast::node_id, vtable_res>; pub type vtable_map = @mut LinearMap<ast::node_id, vtable_res>;
pub struct CrateCtxt { pub struct CrateCtxt {
// A mapping from method call sites to traits that have that method. // A mapping from method call sites to traits that have that method.
@ -209,7 +209,7 @@ pub fn write_tpt_to_tcx(tcx: ty::ctxt,
pub fn lookup_def_tcx(tcx: ty::ctxt, sp: span, id: ast::node_id) -> ast::def { pub fn lookup_def_tcx(tcx: ty::ctxt, sp: span, id: ast::node_id) -> ast::def {
match tcx.def_map.find(&id) { match tcx.def_map.find(&id) {
Some(x) => x, Some(&x) => x,
_ => { _ => {
tcx.sess.span_fatal(sp, ~"internal error looking up a definition") tcx.sess.span_fatal(sp, ~"internal error looking up a definition")
} }
@ -337,14 +337,14 @@ fn check_for_main_fn(ccx: @mut CrateCtxt) {
} }
pub fn check_crate(tcx: ty::ctxt, pub fn check_crate(tcx: ty::ctxt,
trait_map: resolve::TraitMap, +trait_map: resolve::TraitMap,
crate: @ast::crate) crate: @ast::crate)
-> (method_map, vtable_map) { -> (method_map, vtable_map) {
let time_passes = tcx.sess.time_passes(); let time_passes = tcx.sess.time_passes();
let ccx = @mut CrateCtxt { let ccx = @mut CrateCtxt {
trait_map: trait_map, trait_map: trait_map,
method_map: oldmap::HashMap(), method_map: HashMap(),
vtable_map: oldmap::HashMap(), vtable_map: @mut LinearMap::new(),
coherence_info: @coherence::CoherenceInfo(), coherence_info: @coherence::CoherenceInfo(),
tcx: tcx tcx: tcx
}; };


@ -123,6 +123,8 @@ pub mod lib {
pub mod llvm; pub mod llvm;
} }
use core::prelude::*;
use driver::driver::{host_triple, optgroups, early_error}; use driver::driver::{host_triple, optgroups, early_error};
use driver::driver::{str_input, file_input, build_session_options}; use driver::driver::{str_input, file_input, build_session_options};
use driver::driver::{build_session, build_configuration, parse_pretty}; use driver::driver::{build_session, build_configuration, parse_pretty};
@ -166,7 +168,7 @@ Available lint options:
let lint_dict = lint::get_lint_dict(); let lint_dict = lint::get_lint_dict();
let mut max_key = 0; let mut max_key = 0;
for lint_dict.each_key |&k| { max_key = uint::max(k.len(), max_key); } for lint_dict.each_key |k| { max_key = uint::max(k.len(), max_key); }
fn padded(max: uint, s: &str) -> ~str { fn padded(max: uint, s: &str) -> ~str {
str::from_bytes(vec::from_elem(max - s.len(), ' ' as u8)) + s str::from_bytes(vec::from_elem(max - s.len(), ' ' as u8)) + s
} }
@ -175,7 +177,7 @@ Available lint options:
padded(max_key, ~"name"), ~"default", ~"meaning")); padded(max_key, ~"name"), ~"default", ~"meaning"));
io::println(fmt!(" %s %7.7s %s\n", io::println(fmt!(" %s %7.7s %s\n",
padded(max_key, ~"----"), ~"-------", ~"-------")); padded(max_key, ~"----"), ~"-------", ~"-------"));
for lint_dict.each |&k, &v| { for lint_dict.each |&(k, v)| {
let k = str::replace(*k, ~"_", ~"-"); let k = str::replace(*k, ~"_", ~"-");
io::println(fmt!(" %s %7.7s %s", io::println(fmt!(" %s %7.7s %s",
padded(max_key, k), padded(max_key, k),

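Note: LinearMap's each and each_key iterators yield a single tuple (or key) argument rather than the two-argument form oldmap used, hence the |&(k, v)| and |k| closures in the lint listing above. The modern equivalent simply iterates (key, value) pairs; a small sketch with made-up lint names:

use std::collections::HashMap;

fn main() {
    let mut lint_dict: HashMap<&str, &str> = HashMap::new();
    lint_dict.insert("unused_variable", "warn");
    lint_dict.insert("dead_code", "allow");

    // Width of the longest lint name, for padding the listing.
    let max_key = lint_dict.keys().map(|k| k.len()).max().unwrap_or(0);

    for (k, v) in &lint_dict {
        // Display names use dashes, as in the patch's str::replace call.
        let name = k.replace('_', "-");
        println!("  {:width$}  {}", name, v, width = max_key);
    }
}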

@ -14,8 +14,8 @@ use syntax::ast;
use syntax::codemap::{span}; use syntax::codemap::{span};
use syntax::visit; use syntax::visit;
use core::hashmap::linear::LinearSet;
use core::str; use core::str;
use std::oldmap::HashMap;
use std; use std;
pub fn time<T>(do_it: bool, what: ~str, thunk: &fn() -> T) -> T { pub fn time<T>(do_it: bool, what: ~str, thunk: &fn() -> T) -> T {
@ -55,8 +55,6 @@ pub fn indenter() -> _indenter {
_indenter(()) _indenter(())
} }
pub type flag = HashMap<~str, ()>;
pub fn field_expr(f: ast::field) -> @ast::expr { return f.node.expr; } pub fn field_expr(f: ast::field) -> @ast::expr { return f.node.expr; }
pub fn field_exprs(fields: ~[ast::field]) -> ~[@ast::expr] { pub fn field_exprs(fields: ~[ast::field]) -> ~[@ast::expr] {
@ -116,7 +114,7 @@ pub fn pluralize(n: uint, +s: ~str) -> ~str {
} }
// A set of node IDs (used to keep track of which node IDs are for statements) // A set of node IDs (used to keep track of which node IDs are for statements)
pub type stmt_set = HashMap<ast::node_id, ()>; pub type stmt_set = @mut LinearSet<ast::node_id>;
// //
// Local Variables: // Local Variables: