Revert "oldmap: use &K instead of K in find and get"

This reverts commit 8e643525d4.
Graydon Hoare 2013-02-05 14:30:53 -08:00
parent 0110dfb27c
commit a4250a96fd
78 changed files with 479 additions and 482 deletions
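
The change being undone had made `oldmap`'s `find` and `get` take their key as `&K` rather than `K`; reverting it means call sites go back to passing the key by value, which is why the hunks below drop a leading `&` or regain `*name` and `/*bad*/ copy` at the lookup site. The `oldmap` module itself is long gone, so the following is only a minimal modern-Rust sketch of the same signature difference; `find_by_ref` and `find_by_val` are hypothetical stand-ins, not the actual 2013 API.

    use std::collections::HashMap;

    // Stand-in for the reverted signature: the key is borrowed, so callers
    // write `find_by_ref(&key)` and keep ownership of the key.
    fn find_by_ref<'a>(map: &'a HashMap<String, i32>, key: &String) -> Option<&'a i32> {
        map.get(key)
    }

    // Stand-in for the restored signature: the key is taken by value, so
    // callers must move or copy it (the `*name` / `copy` pattern in the diff).
    fn find_by_val(map: &HashMap<String, i32>, key: String) -> Option<&i32> {
        map.get(&key)
    }

    fn main() {
        let mut sources = HashMap::new();
        sources.insert(String::from("core"), 1);

        let key = String::from("core");
        assert_eq!(find_by_ref(&sources, &key), Some(&1)); // key still usable afterwards
        assert_eq!(find_by_val(&sources, key.clone()), Some(&1)); // needs its own copy of the key
    }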

View file

@@ -731,7 +731,7 @@ pub fn configure(opts: Options) -> Cargo {
 need_dir(&c.bindir);
 for sources.each_key_ref |&k| {
-let mut s = sources.get(&k);
+let mut s = sources.get(k);
 load_source_packages(&c, s);
 sources.insert(k, s);
 }
@@ -981,7 +981,7 @@ pub fn install_named(c: &mut Cargo, wd: &Path, name: ~str) {
 pub fn install_uuid_specific(c: &mut Cargo, wd: &Path, src: ~str,
 uuid: ~str) {
-match c.sources.find(&src) {
+match c.sources.find(src) {
 Some(s) => {
 for s.packages.each |p| {
 if p.uuid == uuid {
@@ -997,7 +997,7 @@ pub fn install_uuid_specific(c: &mut Cargo, wd: &Path, src: ~str,
 pub fn install_named_specific(c: &mut Cargo, wd: &Path, src: ~str,
 name: ~str) {
-match c.sources.find(&src) {
+match c.sources.find(src) {
 Some(s) => {
 for s.packages.each |p| {
 if p.name == name {
@@ -1064,7 +1064,7 @@ pub fn cmd_uninstall(c: &Cargo) {
 }
 pub fn install_query(c: &mut Cargo, wd: &Path, target: ~str) {
-match c.dep_cache.find(&target) {
+match c.dep_cache.find(target) {
 Some(inst) => {
 if inst {
 return;
@@ -1156,7 +1156,7 @@ pub fn cmd_install(c: &mut Cargo) {
 pub fn sync(c: &Cargo) {
 for c.sources.each_key_ref |&k| {
-let mut s = c.sources.get(&k);
+let mut s = c.sources.get(k);
 sync_one(c, s);
 c.sources.insert(k, s);
 }
@@ -1558,7 +1558,7 @@ pub fn cmd_list(c: &Cargo) {
 if !valid_pkg_name(*name) {
 error(fmt!("'%s' is an invalid source name", *name));
 } else {
-match c.sources.find(name) {
+match c.sources.find(*name) {
 Some(source) => {
 print_source(source);
 }
@@ -1754,7 +1754,7 @@ pub fn cmd_sources(c: &Cargo) {
 return;
 }
-match c.sources.find(&name) {
+match c.sources.find(name) {
 Some(source) => {
 let old = copy source.url;
 let method = assume_source_method(url);
@@ -1785,7 +1785,7 @@ pub fn cmd_sources(c: &Cargo) {
 return;
 }
-match c.sources.find(&name) {
+match c.sources.find(name) {
 Some(source) => {
 let old = copy source.method;
@@ -1823,7 +1823,7 @@ pub fn cmd_sources(c: &Cargo) {
 return;
 }
-match c.sources.find(&name) {
+match c.sources.find(name) {
 Some(source) => {
 c.sources.remove(&name);
 c.sources.insert(newn, source);

View file

@@ -596,7 +596,7 @@ pub fn symbol_hash(tcx: ty::ctxt, symbol_hasher: &hash::State, t: ty::t,
 }
 pub fn get_symbol_hash(ccx: @crate_ctxt, t: ty::t) -> @str {
-match ccx.type_hashcodes.find(&t) {
+match ccx.type_hashcodes.find(t) {
 Some(h) => h,
 None => {
 let hash = symbol_hash(ccx.tcx, ccx.symbol_hasher, t, ccx.link_meta);

View file

@@ -538,7 +538,7 @@ pub fn build_session_options(+binary: ~str,
 getopts::opt_strs(matches, level_name));
 for flags.each |lint_name| {
 let lint_name = str::replace(*lint_name, ~"-", ~"_");
-match lint_dict.find(&lint_name) {
+match lint_dict.find(/*bad*/ copy lint_name) {
 None => {
 early_error(demitter, fmt!("unknown %s flag: %s",
 level_name, lint_name));

View file

@@ -1331,11 +1331,11 @@ pub fn associate_type(tn: type_names, s: @str, t: TypeRef) {
 }
 pub fn type_has_name(tn: type_names, t: TypeRef) -> Option<@str> {
-return tn.type_names.find(&t);
+return tn.type_names.find(t);
 }
 pub fn name_has_type(tn: type_names, s: @str) -> Option<TypeRef> {
-return tn.named_types.find(&s);
+return tn.named_types.find(s);
 }
 pub fn mk_type_names() -> type_names {

View file

@@ -74,7 +74,7 @@ pub fn mk_cstore(intr: @ident_interner) -> CStore {
 pub fn get_crate_data(cstore: CStore, cnum: ast::crate_num)
 -> crate_metadata {
-return p(cstore).metas.get(&cnum);
+return p(cstore).metas.get(cnum);
 }
 pub fn get_crate_hash(cstore: CStore, cnum: ast::crate_num) -> ~str {
@@ -139,7 +139,7 @@ pub fn add_use_stmt_cnum(cstore: CStore, use_id: ast::node_id,
 pub fn find_use_stmt_cnum(cstore: CStore,
 use_id: ast::node_id) -> Option<ast::crate_num> {
-p(cstore).use_crate_map.find(&use_id)
+p(cstore).use_crate_map.find(use_id)
 }
 // returns hashes of crates directly used by this crate. Hashes are

View file

@@ -1099,7 +1099,7 @@ pub fn translate_def_id(cdata: cmd, did: ast::def_id) -> ast::def_id {
 return ast::def_id { crate: cdata.cnum, node: did.node };
 }
-match cdata.cnum_map.find(&did.crate) {
+match cdata.cnum_map.find(did.crate) {
 option::Some(n) => ast::def_id { crate: n, node: did.node },
 option::None => die!(~"didn't find a crate in the cnum_map")
 }

View file

@@ -118,7 +118,7 @@ pub fn encode_def_id(ebml_w: writer::Encoder, id: def_id) {
 fn encode_region_param(ecx: @encode_ctxt, ebml_w: writer::Encoder,
 it: @ast::item) {
-let opt_rp = ecx.tcx.region_paramd_items.find(&it.id);
+let opt_rp = ecx.tcx.region_paramd_items.find(it.id);
 for opt_rp.each |rp| {
 do ebml_w.wr_tag(tag_region_param) {
 (*rp).encode(&ebml_w);
@@ -184,7 +184,7 @@ fn encode_ty_type_param_bounds(ebml_w: writer::Encoder, ecx: @encode_ctxt,
 fn encode_type_param_bounds(ebml_w: writer::Encoder, ecx: @encode_ctxt,
 params: &[ty_param]) {
 let ty_param_bounds =
-@params.map(|param| ecx.tcx.ty_param_bounds.get(&param.id));
+@params.map(|param| ecx.tcx.ty_param_bounds.get(param.id));
 encode_ty_type_param_bounds(ebml_w, ecx, ty_param_bounds);
 }
@@ -224,7 +224,7 @@ fn encode_type(ecx: @encode_ctxt, ebml_w: writer::Encoder, typ: ty::t) {
 fn encode_symbol(ecx: @encode_ctxt, ebml_w: writer::Encoder, id: node_id) {
 ebml_w.start_tag(tag_items_data_item_symbol);
-let sym = match ecx.item_symbols.find(&id) {
+let sym = match ecx.item_symbols.find(id) {
 Some(ref x) => (/*bad*/copy *x),
 None => {
 ecx.diag.handler().bug(
@@ -238,7 +238,7 @@ fn encode_symbol(ecx: @encode_ctxt, ebml_w: writer::Encoder, id: node_id) {
 fn encode_discriminant(ecx: @encode_ctxt, ebml_w: writer::Encoder,
 id: node_id) {
 ebml_w.start_tag(tag_items_data_item_symbol);
-ebml_w.writer.write(str::to_bytes(ecx.discrim_symbols.get(&id)));
+ebml_w.writer.write(str::to_bytes(ecx.discrim_symbols.get(id)));
 ebml_w.end_tag();
 }
@@ -349,7 +349,7 @@ fn encode_info_for_mod(ecx: @encode_ctxt, ebml_w: writer::Encoder,
 // Encode the reexports of this module.
 debug!("(encoding info for module) encoding reexports for %d", id);
-match ecx.reexports2.find(&id) {
+match ecx.reexports2.find(id) {
 Some(ref exports) => {
 debug!("(encoding info for module) found reexports for %d", id);
 for (*exports).each |exp| {
@@ -813,7 +813,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: writer::Encoder,
 encode_name(ecx, ebml_w, ty_m.ident);
 encode_family(ebml_w,
 purity_static_method_family(ty_m.purity));
-let polyty = ecx.tcx.tcache.get(&local_def(ty_m.id));
+let polyty = ecx.tcx.tcache.get(local_def(ty_m.id));
 encode_ty_type_param_bounds(ebml_w, ecx, polyty.bounds);
 encode_type(ecx, ebml_w, polyty.ty);
 let mut m_path = vec::append(~[], path); // :-(
@@ -881,7 +881,7 @@ fn encode_info_for_items(ecx: @encode_ctxt, ebml_w: writer::Encoder,
 let ebml_w = copy ebml_w;
 |i, cx, v| {
 visit::visit_item(i, cx, v);
-match ecx.tcx.items.get(&i.id) {
+match ecx.tcx.items.get(i.id) {
 ast_map::node_item(_, pt) => {
 encode_info_for_item(ecx, ebml_w, i,
 index, *pt);
@@ -894,7 +894,7 @@ fn encode_info_for_items(ecx: @encode_ctxt, ebml_w: writer::Encoder,
 let ebml_w = copy ebml_w;
 |ni, cx, v| {
 visit::visit_foreign_item(ni, cx, v);
-match ecx.tcx.items.get(&ni.id) {
+match ecx.tcx.items.get(ni.id) {
 ast_map::node_foreign_item(_, abi, pt) => {
 encode_info_for_foreign_item(ecx, ebml_w, ni,
 index, /*bad*/copy *pt,

View file

@@ -329,7 +329,7 @@ fn parse_ty(st: @pstate, conv: conv_did) -> ty::t {
 let len = parse_hex(st);
 assert (next(st) == '#');
 let key = creader_cache_key { cnum: st.crate, pos: pos, len: len };
-match st.tcx.rcache.find(&key) {
+match st.tcx.rcache.find(key) {
 Some(tt) => return tt,
 None => {
 let ps = @{pos: pos ,.. copy *st};

View file

@@ -56,7 +56,7 @@ fn cx_uses_abbrevs(cx: @ctxt) -> bool {
 pub fn enc_ty(w: io::Writer, cx: @ctxt, t: ty::t) {
 match cx.abbrevs {
 ac_no_abbrevs => {
-let result_str = match cx.tcx.short_names_cache.find(&t) {
+let result_str = match cx.tcx.short_names_cache.find(t) {
 Some(s) => /*bad*/copy *s,
 None => {
 let s = do io::with_str_writer |wr| {
@@ -69,7 +69,7 @@ pub fn enc_ty(w: io::Writer, cx: @ctxt, t: ty::t) {
 w.write_str(result_str);
 }
 ac_use_abbrevs(abbrevs) => {
-match abbrevs.find(&t) {
+match abbrevs.find(t) {
 Some(a) => { w.write_str(*a.s); return; }
 None => {
 let pos = w.tell();

View file

@@ -838,7 +838,7 @@ fn encode_side_tables_for_id(ecx: @e::encode_ctxt,
 debug!("Encoding side tables for id %d", id);
-do option::iter(&tcx.def_map.find(&id)) |def| {
+do option::iter(&tcx.def_map.find(id)) |def| {
 do ebml_w.tag(c::tag_table_def) {
 ebml_w.id(id);
 do ebml_w.tag(c::tag_table_val) {
@@ -855,7 +855,7 @@ fn encode_side_tables_for_id(ecx: @e::encode_ctxt,
 }
 }
-do option::iter(&tcx.node_type_substs.find(&id)) |tys| {
+do option::iter(&tcx.node_type_substs.find(id)) |tys| {
 do ebml_w.tag(c::tag_table_node_type_subst) {
 ebml_w.id(id);
 do ebml_w.tag(c::tag_table_val) {
@@ -864,7 +864,7 @@ fn encode_side_tables_for_id(ecx: @e::encode_ctxt,
 }
 }
-do option::iter(&tcx.freevars.find(&id)) |fv| {
+do option::iter(&tcx.freevars.find(id)) |fv| {
 do ebml_w.tag(c::tag_table_freevars) {
 ebml_w.id(id);
 do ebml_w.tag(c::tag_table_val) {
@@ -876,7 +876,7 @@ fn encode_side_tables_for_id(ecx: @e::encode_ctxt,
 }
 let lid = ast::def_id { crate: ast::local_crate, node: id };
-do option::iter(&tcx.tcache.find(&lid)) |tpbt| {
+do option::iter(&tcx.tcache.find(lid)) |tpbt| {
 do ebml_w.tag(c::tag_table_tcache) {
 ebml_w.id(id);
 do ebml_w.tag(c::tag_table_val) {
@@ -885,7 +885,7 @@ fn encode_side_tables_for_id(ecx: @e::encode_ctxt,
 }
 }
-do option::iter(&tcx.ty_param_bounds.find(&id)) |pbs| {
+do option::iter(&tcx.ty_param_bounds.find(id)) |pbs| {
 do ebml_w.tag(c::tag_table_param_bounds) {
 ebml_w.id(id);
 do ebml_w.tag(c::tag_table_val) {
@@ -899,7 +899,7 @@ fn encode_side_tables_for_id(ecx: @e::encode_ctxt,
 // is what we actually use in trans, all modes will have been
 // resolved.
 //
-//option::iter(tcx.inferred_modes.find(&id)) {|m|
+//option::iter(tcx.inferred_modes.find(id)) {|m|
 // ebml_w.tag(c::tag_table_inferred_modes) {||
 // ebml_w.id(id);
 // ebml_w.tag(c::tag_table_val) {||
@@ -908,13 +908,13 @@ fn encode_side_tables_for_id(ecx: @e::encode_ctxt,
 // }
 //}
-do option::iter(&maps.mutbl_map.find(&id)) |_m| {
+do option::iter(&maps.mutbl_map.find(id)) |_m| {
 do ebml_w.tag(c::tag_table_mutbl) {
 ebml_w.id(id);
 }
 }
-do option::iter(&maps.last_use_map.find(&id)) |m| {
+do option::iter(&maps.last_use_map.find(id)) |m| {
 do ebml_w.tag(c::tag_table_last_use) {
 ebml_w.id(id);
 do ebml_w.tag(c::tag_table_val) {
@@ -925,7 +925,7 @@ fn encode_side_tables_for_id(ecx: @e::encode_ctxt,
 }
 }
-do option::iter(&maps.method_map.find(&id)) |mme| {
+do option::iter(&maps.method_map.find(id)) |mme| {
 do ebml_w.tag(c::tag_table_method_map) {
 ebml_w.id(id);
 do ebml_w.tag(c::tag_table_val) {
@@ -934,7 +934,7 @@ fn encode_side_tables_for_id(ecx: @e::encode_ctxt,
 }
 }
-do option::iter(&maps.vtable_map.find(&id)) |dr| {
+do option::iter(&maps.vtable_map.find(id)) |dr| {
 do ebml_w.tag(c::tag_table_vtable_map) {
 ebml_w.id(id);
 do ebml_w.tag(c::tag_table_val) {
@@ -943,7 +943,7 @@ fn encode_side_tables_for_id(ecx: @e::encode_ctxt,
 }
 }
-do option::iter(&tcx.adjustments.find(&id)) |adj| {
+do option::iter(&tcx.adjustments.find(id)) |adj| {
 do ebml_w.tag(c::tag_table_adjustments) {
 ebml_w.id(id);
 do ebml_w.tag(c::tag_table_val) {
@@ -952,19 +952,19 @@ fn encode_side_tables_for_id(ecx: @e::encode_ctxt,
 }
 }
-do option::iter(&tcx.legacy_boxed_traits.find(&id)) |_x| {
+do option::iter(&tcx.legacy_boxed_traits.find(id)) |_x| {
 do ebml_w.tag(c::tag_table_legacy_boxed_trait) {
 ebml_w.id(id);
 }
 }
-for maps.moves_map.find(&id).each |_| {
+for maps.moves_map.find(id).each |_| {
 do ebml_w.tag(c::tag_table_moves_map) {
 ebml_w.id(id);
 }
 }
-for maps.capture_map.find(&id).each |cap_vars| {
+for maps.capture_map.find(id).each |cap_vars| {
 do ebml_w.tag(c::tag_table_capture_map) {
 ebml_w.id(id);
 do ebml_w.tag(c::tag_table_val) {

View file

@@ -126,12 +126,12 @@ impl check_loan_ctxt {
 let region_map = self.tcx().region_map;
 let pure_map = self.req_maps.pure_map;
 loop {
-match pure_map.find(&scope_id) {
+match pure_map.find(scope_id) {
 None => (),
 Some(ref e) => return Some(pc_cmt((*e)))
 }
-match region_map.find(&scope_id) {
+match region_map.find(scope_id) {
 None => return default_purity,
 Some(next_scope_id) => scope_id = next_scope_id
 }
@@ -144,13 +144,13 @@ impl check_loan_ctxt {
 let req_loan_map = self.req_maps.req_loan_map;
 loop {
-for req_loan_map.find(&scope_id).each |loans| {
+for req_loan_map.find(scope_id).each |loans| {
 for loans.each |loan| {
 if !f(loan) { return; }
 }
 }
-match region_map.find(&scope_id) {
+match region_map.find(scope_id) {
 None => return,
 Some(next_scope_id) => scope_id = next_scope_id,
 }
@@ -199,7 +199,7 @@ impl check_loan_ctxt {
 Some(expr) => {
 match expr.node {
 ast::expr_path(_) if pc == pc_pure_fn => {
-let def = self.tcx().def_map.get(&expr.id);
+let def = self.tcx().def_map.get(expr.id);
 let did = ast_util::def_id_of_def(def);
 let is_fn_arg =
 did.crate == ast::local_crate &&
@@ -247,7 +247,7 @@ impl check_loan_ctxt {
 fn is_allowed_pure_arg(expr: @ast::expr) -> bool {
 return match expr.node {
 ast::expr_path(_) => {
-let def = self.tcx().def_map.get(&expr.id);
+let def = self.tcx().def_map.get(expr.id);
 let did = ast_util::def_id_of_def(def);
 did.crate == ast::local_crate &&
 (*self.fn_args).contains(&(did.node))
@@ -262,14 +262,14 @@ impl check_loan_ctxt {
 fn check_for_conflicting_loans(scope_id: ast::node_id) {
 debug!("check_for_conflicting_loans(scope_id=%?)", scope_id);
-let new_loans = match self.req_maps.req_loan_map.find(&scope_id) {
+let new_loans = match self.req_maps.req_loan_map.find(scope_id) {
 None => return,
 Some(loans) => loans
 };
 debug!("new_loans has length %?", new_loans.len());
-let par_scope_id = self.tcx().region_map.get(&scope_id);
+let par_scope_id = self.tcx().region_map.get(scope_id);
 for self.walk_loans(par_scope_id) |old_loan| {
 debug!("old_loan=%?", self.bccx.loan_to_repr(old_loan));
@@ -325,7 +325,7 @@ impl check_loan_ctxt {
 fn check_assignment(at: assignment_type, ex: @ast::expr) {
 // We don't use cat_expr() here because we don't want to treat
 // auto-ref'd parameters in overloaded operators as rvalues.
-let cmt = match self.bccx.tcx.adjustments.find(&ex.id) {
+let cmt = match self.bccx.tcx.adjustments.find(ex.id) {
 None => self.bccx.cat_expr_unadjusted(ex),
 Some(adj) => self.bccx.cat_expr_autoderefd(ex, adj)
 };
@@ -612,7 +612,7 @@ fn check_loans_in_fn(fk: visit::fn_kind, decl: ast::fn_decl, body: ast::blk,
 {
 match fty_proto {
 ast::ProtoBox | ast::ProtoUniq => {
-let cap_vars = self.bccx.capture_map.get(&id);
+let cap_vars = self.bccx.capture_map.get(id);
 for cap_vars.each |cap_var| {
 match cap_var.mode {
 moves::CapRef | moves::CapCopy => { loop; }

View file

@@ -126,7 +126,7 @@ fn req_loans_in_expr(ex: @ast::expr,
 // If this expression is borrowed, have to ensure it remains valid:
 if !self.ignore_adjustments.contains(&ex.id) {
-for tcx.adjustments.find(&ex.id).each |adjustments| {
+for tcx.adjustments.find(ex.id).each |adjustments| {
 self.guarantee_adjustments(ex, *adjustments);
 }
 }
@@ -172,7 +172,7 @@ fn req_loans_in_expr(ex: @ast::expr,
 }
 }
-match self.bccx.method_map.find(&ex.id) {
+match self.bccx.method_map.find(ex.id) {
 Some(ref method_map_entry) => {
 match (*method_map_entry).explicit_self {
 ast::sty_by_ref => {
@@ -250,7 +250,7 @@ fn req_loans_in_expr(ex: @ast::expr,
 // (if used like `a.b(...)`), the call where it's an argument
 // (if used like `x(a.b)`), or the block (if used like `let x
 // = a.b`).
-let scope_r = ty::re_scope(self.tcx().region_map.get(&ex.id));
+let scope_r = ty::re_scope(self.tcx().region_map.get(ex.id));
 let rcvr_cmt = self.bccx.cat_expr(rcvr);
 self.guarantee_valid(rcvr_cmt, m_imm, scope_r);
 visit::visit_expr(ex, self, vt);
@@ -536,7 +536,7 @@ impl gather_loan_ctxt {
 fn add_loans_to_scope_id(&self, scope_id: ast::node_id, +loans: ~[Loan]) {
 debug!("adding %u loans to scope_id %?", loans.len(), scope_id);
-match self.req_maps.req_loan_map.find(&scope_id) {
+match self.req_maps.req_loan_map.find(scope_id) {
 Some(req_loans) => {
 req_loans.push_all(loans);
 }

View file

@@ -129,7 +129,7 @@ impl LoanContext {
 ~"rvalue with a non-none lp");
 }
 cat_local(local_id) | cat_arg(local_id) | cat_self(local_id) => {
-let local_scope_id = self.tcx().region_map.get(&local_id);
+let local_scope_id = self.tcx().region_map.get(local_id);
 self.issue_loan(cmt, ty::re_scope(local_scope_id), req_mutbl,
 owns_lent_data)
 }

View file

@@ -109,7 +109,7 @@ impl PreserveCtxt {
 // Maybe if we pass in the parent instead here,
 // we can prevent the "scope not found" error
 debug!("scope_region thing: %? ", cmt.id);
-ty::re_scope(self.tcx().region_map.get(&cmt.id))
+ty::re_scope(self.tcx().region_map.get(cmt.id))
 };
 self.compare_scope(cmt, scope_region)
@@ -129,14 +129,14 @@ impl PreserveCtxt {
 cmt.span,
 ~"preserve() called with local and !root_managed_data");
 }
-let local_scope_id = self.tcx().region_map.get(&local_id);
+let local_scope_id = self.tcx().region_map.get(local_id);
 self.compare_scope(cmt, ty::re_scope(local_scope_id))
 }
 cat_binding(local_id) => {
 // Bindings are these kind of weird implicit pointers (cc
 // #2329). We require (in gather_loans) that they be
 // rooted in an immutable location.
-let local_scope_id = self.tcx().region_map.get(&local_id);
+let local_scope_id = self.tcx().region_map.get(local_id);
 self.compare_scope(cmt, ty::re_scope(local_scope_id))
 }
 cat_arg(local_id) => {
@@ -144,11 +144,11 @@ impl PreserveCtxt {
 // modes). In that case, the caller guarantees stability
 // for at least the scope of the fn. This is basically a
 // deref of a region ptr.
-let local_scope_id = self.tcx().region_map.get(&local_id);
+let local_scope_id = self.tcx().region_map.get(local_id);
 self.compare_scope(cmt, ty::re_scope(local_scope_id))
 }
 cat_self(local_id) => {
-let local_scope_id = self.tcx().region_map.get(&local_id);
+let local_scope_id = self.tcx().region_map.get(local_id);
 self.compare_scope(cmt, ty::re_scope(local_scope_id))
 }
 cat_comp(cmt_base, comp_field(*)) |

View file

@@ -126,7 +126,7 @@ pub fn check_expr(sess: Session,
 e.span, ~"paths in constants may only refer to \
 items without type parameters");
 }
-match def_map.find(&e.id) {
+match def_map.find(e.id) {
 Some(def_const(def_id)) |
 Some(def_fn(def_id, _)) |
 Some(def_variant(_, def_id)) |
@@ -151,7 +151,7 @@ pub fn check_expr(sess: Session,
 }
 }
 expr_call(callee, _, false) => {
-match def_map.find(&callee.id) {
+match def_map.find(callee.id) {
 Some(def_struct(*)) => {} // OK.
 Some(def_variant(*)) => {} // OK.
 _ => {
@@ -247,9 +247,9 @@ pub fn check_item_recursion(sess: Session,
 fn visit_expr(e: @expr, &&env: env, v: visit::vt<env>) {
 match e.node {
 expr_path(*) => {
-match env.def_map.find(&e.id) {
+match env.def_map.find(e.id) {
 Some(def_const(def_id)) => {
-match env.ast_map.get(&def_id.node) {
+match env.ast_map.get(def_id.node) {
 ast_map::node_item(it, _) => {
 (v.visit_item)(it, env, v);
 }

View file

@@ -292,7 +292,7 @@ pub fn pat_ctor_id(cx: @MatchCheckCtxt, p: @pat) -> Option<ctor> {
 match /*bad*/copy pat.node {
 pat_wild => { None }
 pat_ident(_, _, _) | pat_enum(_, _) => {
-match cx.tcx.def_map.find(&pat.id) {
+match cx.tcx.def_map.find(pat.id) {
 Some(def_variant(_, id)) => Some(variant(id)),
 Some(def_const(did)) => {
 let const_expr = lookup_const_by_id(cx.tcx, did).get();
@@ -306,7 +306,7 @@ pub fn pat_ctor_id(cx: @MatchCheckCtxt, p: @pat) -> Option<ctor> {
 Some(range(eval_const_expr(cx.tcx, lo), eval_const_expr(cx.tcx, hi)))
 }
 pat_struct(*) => {
-match cx.tcx.def_map.find(&pat.id) {
+match cx.tcx.def_map.find(pat.id) {
 Some(def_variant(_, id)) => Some(variant(id)),
 _ => Some(single)
 }
@@ -329,7 +329,7 @@ pub fn is_wild(cx: @MatchCheckCtxt, p: @pat) -> bool {
 match pat.node {
 pat_wild => { true }
 pat_ident(_, _, _) => {
-match cx.tcx.def_map.find(&pat.id) {
+match cx.tcx.def_map.find(pat.id) {
 Some(def_variant(_, _)) | Some(def_const(*)) => { false }
 _ => { true }
 }
@@ -480,7 +480,7 @@ pub fn specialize(cx: @MatchCheckCtxt,
 pat_wild => Some(vec::append(vec::from_elem(arity, wild()),
 vec::tail(r))),
 pat_ident(_, _, _) => {
-match cx.tcx.def_map.find(&pat_id) {
+match cx.tcx.def_map.find(pat_id) {
 Some(def_variant(_, id)) => {
 if variant(id) == ctor_id { Some(vec::tail(r)) }
 else { None }
@@ -505,7 +505,7 @@ pub fn specialize(cx: @MatchCheckCtxt,
 }
 }
 pat_enum(_, args) => {
-match cx.tcx.def_map.get(&pat_id) {
+match cx.tcx.def_map.get(pat_id) {
 def_variant(_, id) if variant(id) == ctor_id => {
 let args = match args {
 Some(args) => args,
@@ -541,7 +541,7 @@ pub fn specialize(cx: @MatchCheckCtxt,
 }
 pat_struct(_, ref flds, _) => {
 // Is this a struct or an enum variant?
-match cx.tcx.def_map.get(&pat_id) {
+match cx.tcx.def_map.get(pat_id) {
 def_variant(_, variant_id) => {
 if variant(variant_id) == ctor_id {
 // FIXME #4731: Is this right? --pcw
@@ -678,7 +678,7 @@ pub fn check_fn(cx: @MatchCheckCtxt,
 }
 pub fn is_refutable(cx: @MatchCheckCtxt, pat: &pat) -> bool {
-match cx.tcx.def_map.find(&pat.id) {
+match cx.tcx.def_map.find(pat.id) {
 Some(def_variant(enum_id, _)) => {
 if vec::len(*ty::enum_variants(cx.tcx, enum_id)) != 1u {
 return true;

View file

@@ -78,7 +78,7 @@ pub fn classify(e: @expr,
 tcx: ty::ctxt)
 -> constness {
 let did = ast_util::local_def(e.id);
-match tcx.ccache.find(&did) {
+match tcx.ccache.find(did) {
 Some(x) => x,
 None => {
 let cn =
@@ -170,7 +170,7 @@ pub fn classify(e: @expr,
 }
 pub fn lookup_const(tcx: ty::ctxt, e: @expr) -> Option<@expr> {
-match tcx.def_map.find(&e.id) {
+match tcx.def_map.find(e.id) {
 Some(ast::def_const(def_id)) => lookup_const_by_id(tcx, def_id),
 _ => None
 }
@@ -180,7 +180,7 @@ pub fn lookup_const_by_id(tcx: ty::ctxt,
 def_id: ast::def_id)
 -> Option<@expr> {
 if ast_util::is_local(def_id) {
-match tcx.items.find(&def_id.node) {
+match tcx.items.find(def_id.node) {
 None => None,
 Some(ast_map::node_item(it, _)) => match it.node {
 item_const(_, const_expr) => Some(const_expr),

View file

@@ -58,7 +58,7 @@ fn collect_freevars(def_map: resolve::DefMap, blk: ast::blk)
 }
 ast::expr_path(*) => {
 let mut i = 0;
-match def_map.find(&expr.id) {
+match def_map.find(expr.id) {
 None => die!(~"path not found"),
 Some(df) => {
 let mut def = df;
@@ -118,7 +118,7 @@ pub fn annotate_freevars(def_map: resolve::DefMap, crate: @ast::crate) ->
 }
 pub fn get_freevars(tcx: ty::ctxt, fid: ast::node_id) -> freevar_info {
-match tcx.freevars.find(&fid) {
+match tcx.freevars.find(fid) {
 None => die!(~"get_freevars: " + int::str(fid) + ~" has no freevars"),
 Some(d) => return d
 }

View file

@@ -195,10 +195,10 @@ pub fn check_expr(e: @expr, cx: ctx, v: visit::vt<ctx>) {
 expr_unary(*)|expr_binary(*)|expr_method_call(*) => e.callee_id,
 _ => e.id
 };
-do option::iter(&cx.tcx.node_type_substs.find(&type_parameter_id)) |ts| {
+do option::iter(&cx.tcx.node_type_substs.find(type_parameter_id)) |ts| {
 let bounds = match e.node {
 expr_path(_) => {
-let did = ast_util::def_id_of_def(cx.tcx.def_map.get(&e.id));
+let did = ast_util::def_id_of_def(cx.tcx.def_map.get(e.id));
 ty::lookup_item_type(cx.tcx, did).bounds
 }
 _ => {
@@ -292,8 +292,8 @@ pub fn check_expr(e: @expr, cx: ctx, v: visit::vt<ctx>) {
 fn check_ty(aty: @Ty, cx: ctx, v: visit::vt<ctx>) {
 match aty.node {
 ty_path(_, id) => {
-do option::iter(&cx.tcx.node_type_substs.find(&id)) |ts| {
-let did = ast_util::def_id_of_def(cx.tcx.def_map.get(&id));
+do option::iter(&cx.tcx.node_type_substs.find(id)) |ts| {
+let did = ast_util::def_id_of_def(cx.tcx.def_map.get(id));
 let bounds = ty::lookup_item_type(cx.tcx, did).bounds;
 for vec::each2(*ts, *bounds) |ty, bound| {
 check_bounds(cx, aty.id, aty.span, *ty, *bound)
@@ -334,7 +334,7 @@ pub fn check_bounds(cx: ctx, id: node_id, sp: span,
 fn is_nullary_variant(cx: ctx, ex: @expr) -> bool {
 match ex.node {
 expr_path(_) => {
-match cx.tcx.def_map.get(&ex.id) {
+match cx.tcx.def_map.get(ex.id) {
 def_variant(edid, vdid) => {
 vec::len(ty::enum_variant_with_id(cx.tcx, edid, vdid).args) == 0u
 }

View file

@@ -352,7 +352,7 @@ impl LanguageItemCollector {
 return; // Didn't match.
 }
-match self.item_refs.find(&value) {
+match self.item_refs.find(/*bad*/copy value) {
 None => {
 // Didn't match.
 }

View file

@@ -265,7 +265,7 @@ pub fn get_lint_settings_level(settings: lint_settings,
 _expr_id: ast::node_id,
 item_id: ast::node_id)
 -> level {
-match settings.settings_map.find(&item_id) {
+match settings.settings_map.find(item_id) {
 Some(modes) => get_lint_level(modes, lint_mode),
 None => get_lint_level(settings.default_settings, lint_mode)
 }
@@ -346,7 +346,7 @@ impl ctxt {
 for triples.each |pair| {
 let (meta, level, lintname) = /*bad*/copy *pair;
-match self.dict.find(&lintname) {
+match self.dict.find(/*bad*/ copy lintname) {
 None => {
 self.span_lint(
 new_ctxt.get_level(unrecognized_lint),
@@ -684,7 +684,7 @@ fn check_item_ctypes(cx: ty::ctxt, it: @ast::item) {
 for vec::each(vec::append_one(tys, decl.output)) |ty| {
 match ty.node {
 ast::ty_path(_, id) => {
-match cx.def_map.get(&id) {
+match cx.def_map.get(id) {
 ast::def_prim_ty(ast::ty_int(ast::ty_i)) => {
 cx.sess.span_lint(
 ctypes, id, fn_id,

View file

@@ -368,7 +368,7 @@ impl IrMaps {
 }
 fn variable(node_id: node_id, span: span) -> Variable {
-match self.variable_map.find(&node_id) {
+match self.variable_map.find(node_id) {
 Some(var) => var,
 None => {
 self.tcx.sess.span_bug(
@@ -390,7 +390,7 @@ impl IrMaps {
 }
 fn captures(expr: @expr) -> @~[CaptureInfo] {
-match self.capture_info_map.find(&expr.id) {
+match self.capture_info_map.find(expr.id) {
 Some(caps) => caps,
 None => {
 self.tcx.sess.span_bug(expr.span, ~"no registered caps");
@@ -410,7 +410,7 @@ impl IrMaps {
 Local(LocalInfo {id: id, kind: FromLetNoInitializer, _}) |
 Local(LocalInfo {id: id, kind: FromLetWithInitializer, _}) |
 Local(LocalInfo {id: id, kind: FromMatch(_), _}) => {
-let v = match self.last_use_map.find(&expr_id) {
+let v = match self.last_use_map.find(expr_id) {
 Some(v) => v,
 None => {
 let v = @DVec();
@@ -552,7 +552,7 @@ fn visit_expr(expr: @expr, &&self: @IrMaps, vt: vt<@IrMaps>) {
 match expr.node {
 // live nodes required for uses or definitions of variables:
 expr_path(_) => {
-let def = self.tcx.def_map.get(&expr.id);
+let def = self.tcx.def_map.get(expr.id);
 debug!("expr %d: path that leads to %?", expr.id, def);
 if relevant_def(def).is_some() {
 self.add_live_node_for_node(expr.id, ExprNode(expr.span));
@@ -569,7 +569,7 @@ fn visit_expr(expr: @expr, &&self: @IrMaps, vt: vt<@IrMaps>) {
 // being the location that the variable is used. This results
 // in better error messages than just pointing at the closure
 // construction site.
-let cvs = self.capture_map.get(&expr.id);
+let cvs = self.capture_map.get(expr.id);
 let mut call_caps = ~[];
 for cvs.each |cv| {
 match relevant_def(cv.def) {
@@ -685,7 +685,7 @@ fn Liveness(ir: @IrMaps, specials: Specials) -> Liveness {
 impl Liveness {
 fn live_node(node_id: node_id, span: span) -> LiveNode {
-match self.ir.live_node_map.find(&node_id) {
+match self.ir.live_node_map.find(node_id) {
 Some(ln) => ln,
 None => {
 // This must be a mismatch between the ir_map construction
@@ -702,7 +702,7 @@ impl Liveness {
 fn variable_from_path(expr: @expr) -> Option<Variable> {
 match expr.node {
 expr_path(_) => {
-let def = self.tcx.def_map.get(&expr.id);
+let def = self.tcx.def_map.get(expr.id);
 relevant_def(def).map(
 |rdef| self.variable(*rdef, expr.span)
 )
@@ -717,7 +717,7 @@ impl Liveness {
 fn variable_from_def_map(node_id: node_id,
 span: span) -> Option<Variable> {
-match self.tcx.def_map.find(&node_id) {
+match self.tcx.def_map.find(node_id) {
 Some(def) => {
 relevant_def(def).map(
 |rdef| self.variable(*rdef, span)
@@ -837,7 +837,7 @@ impl Liveness {
 match opt_label {
 Some(_) => // Refers to a labeled loop. Use the results of resolve
 // to find with one
-match self.tcx.def_map.find(&id) {
+match self.tcx.def_map.find(id) {
 Some(def_label(loop_id)) => loop_id,
 _ => self.tcx.sess.span_bug(sp, ~"Label on break/loop \
 doesn't refer to a loop")
@@ -1203,7 +1203,7 @@ impl Liveness {
 // Now that we know the label we're going to,
 // look it up in the break loop nodes table
-match self.break_ln.find(&sc) {
+match self.break_ln.find(sc) {
 Some(b) => b,
 None => self.tcx.sess.span_bug(expr.span,
 ~"Break to unknown label")
@@ -1217,7 +1217,7 @@ impl Liveness {
 // Now that we know the label we're going to,
 // look it up in the continue loop nodes table
-match self.cont_ln.find(&sc) {
+match self.cont_ln.find(sc) {
 Some(b) => b,
 None => self.tcx.sess.span_bug(expr.span,
 ~"Loop to unknown label")
@@ -1424,7 +1424,7 @@ impl Liveness {
 }
 fn access_path(expr: @expr, succ: LiveNode, acc: uint) -> LiveNode {
-let def = self.tcx.def_map.get(&expr.id);
+let def = self.tcx.def_map.get(expr.id);
 match relevant_def(def) {
 Some(nid) => {
 let ln = self.live_node(expr.id, expr.span);
@@ -1560,7 +1560,7 @@ fn check_expr(expr: @expr, &&self: @Liveness, vt: vt<@Liveness>) {
 let ln = self.live_node(expr.id, expr.span);
 self.consider_last_use(expr, ln, *var);
-match self.ir.variable_moves_map.find(&expr.id) {
+match self.ir.variable_moves_map.find(expr.id) {
 None => {}
 Some(entire_expr) => {
 debug!("(checking expr) is a move: `%s`",
@@ -1689,7 +1689,7 @@ impl @Liveness {
 fn check_lvalue(expr: @expr, vt: vt<@Liveness>) {
 match expr.node {
 expr_path(_) => {
-match self.tcx.def_map.get(&expr.id) {
+match self.tcx.def_map.get(expr.id) {
 def_local(nid, false) => {
 // Assignment to an immutable variable or argument:
 // only legal if there is no later assignment.

View file

@@ -308,7 +308,7 @@ pub struct mem_categorization_ctxt {
 pub impl &mem_categorization_ctxt {
 fn cat_expr(expr: @ast::expr) -> cmt {
-match self.tcx.adjustments.find(&expr.id) {
+match self.tcx.adjustments.find(expr.id) {
 None => {
 // No adjustments.
 self.cat_expr_unadjusted(expr)
@@ -375,7 +375,7 @@ pub impl &mem_categorization_ctxt {
 }
 ast::expr_path(_) => {
-let def = self.tcx.def_map.get(&expr.id);
+let def = self.tcx.def_map.get(expr.id);
 self.cat_def(expr.id, expr.span, expr_ty, def)
 }
@@ -840,7 +840,7 @@ pub impl &mem_categorization_ctxt {
 // variant(*)
 }
 ast::pat_enum(_, Some(ref subpats)) => {
-match self.tcx.def_map.find(&pat.id) {
+match self.tcx.def_map.find(pat.id) {
 Some(ast::def_variant(enum_did, _)) => {
 // variant(x, y, z)
 for subpats.each |subpat| {
@@ -1063,7 +1063,7 @@ pub fn field_mutbl(tcx: ty::ctxt,
 }
 }
 ty::ty_enum(*) => {
-match tcx.def_map.get(&node_id) {
+match tcx.def_map.get(node_id) {
 ast::def_variant(_, variant_id) => {
 for ty::lookup_struct_fields(tcx, variant_id).each |fld| {
 if fld.ident == f_name {

View file

@@ -409,7 +409,7 @@ impl VisitContext {
 // `expr_mode` refers to the post-adjustment value. If one of
 // those adjustments is to take a reference, then it's only
 // reading the underlying expression, not moving it.
-let comp_mode = match self.tcx.adjustments.find(&expr.id) {
+let comp_mode = match self.tcx.adjustments.find(expr.id) {
 Some(adj) if adj.autoref.is_some() => Read,
 _ => expr_mode.component_mode(expr)
 };
@@ -713,7 +713,7 @@ impl VisitContext {
 receiver_expr: @expr,
 visitor: vt<VisitContext>)
 {
-let callee_mode = match self.method_map.find(&expr_id) {
+let callee_mode = match self.method_map.find(expr_id) {
 Some(ref method_map_entry) => {
 match method_map_entry.explicit_self {
 sty_by_ref => by_ref,

View file

@@ -35,7 +35,7 @@ pub fn pat_id_map(dm: resolve::DefMap, pat: @pat) -> PatIdMap {
 pub fn pat_is_variant_or_struct(dm: resolve::DefMap, pat: @pat) -> bool {
 match pat.node {
 pat_enum(_, _) | pat_ident(_, _, None) | pat_struct(*) => {
-match dm.find(&pat.id) {
+match dm.find(pat.id) {
 Some(def_variant(*)) | Some(def_struct(*)) => true,
 _ => false
 }
@@ -47,7 +47,7 @@ pub fn pat_is_variant_or_struct(dm: resolve::DefMap, pat: @pat) -> bool {
 pub fn pat_is_const(dm: resolve::DefMap, pat: &pat) -> bool {
 match pat.node {
 pat_ident(_, _, None) => {
-match dm.find(&pat.id) {
+match dm.find(pat.id) {
 Some(def_const(*)) => true,
 _ => false
 }

View file

@@ -105,7 +105,7 @@ pub fn check_crate(tcx: ty::ctxt,
 match *origin {
 method_static(method_id) => {
 if method_id.crate == local_crate {
-match tcx.items.find(&method_id.node) {
+match tcx.items.find(method_id.node) {
 Some(node_method(method, impl_id, _)) => {
 if method.vis == private &&
 (impl_id.crate != local_crate ||
@@ -143,7 +143,7 @@ pub fn check_crate(tcx: ty::ctxt,
 method_self(trait_id, method_num) |
 method_super(trait_id, method_num) => {
 if trait_id.crate == local_crate {
-match tcx.items.find(&trait_id.node) {
+match tcx.items.find(trait_id.node) {
 Some(node_item(item, _)) => {
 match item.node {
 item_trait(_, _, ref methods) => {
@@ -221,7 +221,7 @@ pub fn check_crate(tcx: ty::ctxt,
 ty_struct(id, _)
 if id.crate != local_crate ||
 !privileged_items.contains(&(id.node)) => {
-match method_map.find(&expr.id) {
+match method_map.find(expr.id) {
 None => {
 debug!("(privacy checking) checking \
 field access");
@@ -244,7 +244,7 @@ pub fn check_crate(tcx: ty::ctxt,
 ty_struct(id, _)
 if id.crate != local_crate ||
 !privileged_items.contains(&(id.node)) => {
-match method_map.find(&expr.id) {
+match method_map.find(expr.id) {
 None => {
 tcx.sess.span_bug(expr.span,
 ~"method call not in \
@@ -276,7 +276,7 @@ pub fn check_crate(tcx: ty::ctxt,
 ty_enum(id, _) => {
 if id.crate != local_crate ||
 !privileged_items.contains(&(id.node)) {
-match tcx.def_map.get(&expr.id) {
+match tcx.def_map.get(expr.id) {
 def_variant(_, variant_id) => {
 for (*fields).each |field| {
 debug!("(privacy checking) \
@@ -343,7 +343,7 @@ pub fn check_crate(tcx: ty::ctxt,
 if enum_id.crate != local_crate ||
 !privileged_items.contains(
 &enum_id.node) {
-match tcx.def_map.find(&pattern.id) {
+match tcx.def_map.find(pattern.id) {
 Some(def_variant(_, variant_id)) => {
 for fields.each |field| {
 debug!("(privacy checking) \

View file

@@ -115,7 +115,7 @@ pub fn scope_contains(region_map: region_map, superscope: ast::node_id,
 subscope: ast::node_id) -> bool {
 let mut subscope = subscope;
 while superscope != subscope {
-match region_map.find(&subscope) {
+match region_map.find(subscope) {
 None => return false,
 Some(scope) => subscope = scope
 }
@@ -159,7 +159,7 @@ pub fn nearest_common_ancestor(region_map: region_map,
 let mut result = ~[scope];
 let mut scope = scope;
 loop {
-match region_map.find(&scope) {
+match region_map.find(scope) {
 None => return result,
 Some(superscope) => {
 result.push(superscope);
@@ -237,7 +237,7 @@ pub fn resolve_arm(arm: ast::arm, cx: ctxt, visitor: visit::vt<ctxt>) {
 pub fn resolve_pat(pat: @ast::pat, cx: ctxt, visitor: visit::vt<ctxt>) {
 match pat.node {
 ast::pat_ident(*) => {
-let defn_opt = cx.def_map.find(&pat.id);
+let defn_opt = cx.def_map.find(pat.id);
 match defn_opt {
 Some(ast::def_variant(_,_)) => {
 /* Nothing to do; this names a variant. */
@@ -475,7 +475,7 @@ pub impl determine_rp_ctxt {
 /// the new variance is joined with the old variance.
 fn add_rp(id: ast::node_id, variance: region_variance) {
 assert id != 0;
-let old_variance = self.region_paramd_items.find(&id);
+let old_variance = self.region_paramd_items.find(id);
 let joined_variance = match old_variance {
 None => variance,
 Some(v) => join_variance(v, variance)
@@ -505,7 +505,7 @@ pub impl determine_rp_ctxt {
 ast_map::node_id_to_str(self.ast_map, self.item_id,
 self.sess.parse_sess.interner),
 copy self.ambient_variance);
-let vec = match self.dep_map.find(&from) {
+let vec = match self.dep_map.find(from) {
 Some(vec) => vec,
 None => {
 let vec = @DVec();
@@ -685,7 +685,7 @@ pub fn determine_rp_in_ty(ty: @ast::Ty,
 // that as a direct dependency.
 match ty.node {
 ast::ty_path(path, id) => {
-match cx.def_map.find(&id) {
+match cx.def_map.find(id) {
 Some(ast::def_ty(did)) | Some(ast::def_struct(did)) => {
 if did.crate == ast::local_crate {
 if cx.opt_region_is_relevant(path.rp) {
@@ -818,9 +818,9 @@ pub fn determine_rp_in_crate(sess: Session,
 // update the region-parameterization of D to reflect the result.
 while cx.worklist.len() != 0 {
 let c_id = cx.worklist.pop();
-let c_variance = cx.region_paramd_items.get(&c_id);
+let c_variance = cx.region_paramd_items.get(c_id);
 debug!("popped %d from worklist", c_id);
-match cx.dep_map.find(&c_id) {
+match cx.dep_map.find(c_id) {
 None => {}
 Some(deps) => {
 for deps.each |dep| {

View file

@ -947,7 +947,7 @@ pub impl Resolver {
// Add or reuse the child. // Add or reuse the child.
let new_parent = ModuleReducedGraphParent(module_); let new_parent = ModuleReducedGraphParent(module_);
match module_.children.find(&name) { match module_.children.find(name) {
None => { None => {
let child = @NameBindings(); let child = @NameBindings();
module_.children.insert(name, child); module_.children.insert(name, child);
@ -1558,7 +1558,7 @@ pub impl Resolver {
%s", final_ident); %s", final_ident);
let parent_link = self.get_parent_link(new_parent, ident); let parent_link = self.get_parent_link(new_parent, ident);
match modules.find(&def_id) { match modules.find(def_id) {
None => { None => {
child_name_bindings.define_module(Public, child_name_bindings.define_module(Public,
parent_link, parent_link,
@ -1582,8 +1582,7 @@ pub impl Resolver {
die!(~"can't happen"); die!(~"can't happen");
} }
ModuleParentLink(parent_module, ident) => { ModuleParentLink(parent_module, ident) => {
let name_bindings = parent_module.children.get( let name_bindings = parent_module.children.get(ident);
&ident);
resolution.type_target = resolution.type_target =
Some(Target(parent_module, name_bindings)); Some(Target(parent_module, name_bindings));
} }
@ -1842,7 +1841,7 @@ pub impl Resolver {
self.idents_to_str(module_path.get()), self.idents_to_str(module_path.get()),
self.session.str_of(target)); self.session.str_of(target));
match module_.import_resolutions.find(&target) { match module_.import_resolutions.find(target) {
Some(resolution) => { Some(resolution) => {
debug!("(building import directive) bumping \ debug!("(building import directive) bumping \
reference"); reference");
@ -2113,7 +2112,7 @@ pub impl Resolver {
let mut type_result = UnknownResult; let mut type_result = UnknownResult;
// Search for direct children of the containing module. // Search for direct children of the containing module.
match containing_module.children.find(&source) { match containing_module.children.find(source) {
None => { None => {
// Continue. // Continue.
} }
@ -2149,7 +2148,7 @@ pub impl Resolver {
// Now search the exported imports within the containing // Now search the exported imports within the containing
// module. // module.
match containing_module.import_resolutions.find(&source) { match containing_module.import_resolutions.find(source) {
None => { None => {
// The containing module definitely doesn't have an // The containing module definitely doesn't have an
// exported import with the name in question. We can // exported import with the name in question. We can
@ -2213,7 +2212,7 @@ pub impl Resolver {
// We've successfully resolved the import. Write the results in. // We've successfully resolved the import. Write the results in.
assert module_.import_resolutions.contains_key_ref(&target); assert module_.import_resolutions.contains_key_ref(&target);
let import_resolution = module_.import_resolutions.get(&target); let import_resolution = module_.import_resolutions.get(target);
match value_result { match value_result {
BoundResult(target_module, name_bindings) => { BoundResult(target_module, name_bindings) => {
@@ -2298,7 +2297,7 @@ pub impl Resolver {
let mut module_result = UnknownResult; let mut module_result = UnknownResult;
// Search for direct children of the containing module. // Search for direct children of the containing module.
match containing_module.children.find(&source) { match containing_module.children.find(source) {
None => { None => {
// Continue. // Continue.
} }
@@ -2328,7 +2327,7 @@ pub impl Resolver {
// Now search the exported imports within the containing // Now search the exported imports within the containing
// module. // module.
match containing_module.import_resolutions.find(&source) { match containing_module.import_resolutions.find(source) {
None => { None => {
// The containing module definitely doesn't have an // The containing module definitely doesn't have an
// exported import with the name in question. We can // exported import with the name in question. We can
@@ -2372,7 +2371,7 @@ pub impl Resolver {
// We've successfully resolved the import. Write the results in. // We've successfully resolved the import. Write the results in.
assert module_.import_resolutions.contains_key_ref(&target); assert module_.import_resolutions.contains_key_ref(&target);
let import_resolution = module_.import_resolutions.get(&target); let import_resolution = module_.import_resolutions.get(target);
match module_result { match module_result {
BoundResult(target_module, name_bindings) => { BoundResult(target_module, name_bindings) => {
@@ -2440,7 +2439,7 @@ pub impl Resolver {
self.module_to_str(module_)); self.module_to_str(module_));
// Here we merge two import resolutions. // Here we merge two import resolutions.
match module_.import_resolutions.find(&ident) { match module_.import_resolutions.find(ident) {
None => { None => {
// Simple: just copy the old import resolution. // Simple: just copy the old import resolution.
let new_import_resolution = let new_import_resolution =
@@ -2483,7 +2482,7 @@ pub impl Resolver {
// Add all children from the containing module. // Add all children from the containing module.
for containing_module.children.each_ref |&ident, &name_bindings| { for containing_module.children.each_ref |&ident, &name_bindings| {
let mut dest_import_resolution; let mut dest_import_resolution;
match module_.import_resolutions.find(&ident) { match module_.import_resolutions.find(ident) {
None => { None => {
// Create a new import resolution from this child. // Create a new import resolution from this child.
dest_import_resolution = @ImportResolution(privacy, span); dest_import_resolution = @ImportResolution(privacy, span);
@@ -2687,7 +2686,7 @@ pub impl Resolver {
// The current module node is handled specially. First, check for // The current module node is handled specially. First, check for
// its immediate children. // its immediate children.
match module_.children.find(&name) { match module_.children.find(name) {
Some(name_bindings) Some(name_bindings)
if (*name_bindings).defined_in_namespace(namespace) => { if (*name_bindings).defined_in_namespace(namespace) => {
return Success(Target(module_, name_bindings)); return Success(Target(module_, name_bindings));
@@ -2700,7 +2699,7 @@ pub impl Resolver {
// adjacent import statements are processed as though they mutated the // adjacent import statements are processed as though they mutated the
// current scope. // current scope.
match module_.import_resolutions.find(&name) { match module_.import_resolutions.find(name) {
None => { None => {
// Not found; continue. // Not found; continue.
} }
@@ -2924,7 +2923,7 @@ pub impl Resolver {
self.module_to_str(module_)); self.module_to_str(module_));
// First, check the direct children of the module. // First, check the direct children of the module.
match module_.children.find(&name) { match module_.children.find(name) {
Some(name_bindings) Some(name_bindings)
if (*name_bindings).defined_in_namespace(namespace) => { if (*name_bindings).defined_in_namespace(namespace) => {
@@ -2945,7 +2944,7 @@ pub impl Resolver {
} }
// Otherwise, we check the list of resolved imports. // Otherwise, we check the list of resolved imports.
match module_.import_resolutions.find(&name) { match module_.import_resolutions.find(name) {
Some(import_resolution) => { Some(import_resolution) => {
if import_resolution.outstanding_references != 0 { if import_resolution.outstanding_references != 0 {
debug!("(resolving name in module) import unresolved; \ debug!("(resolving name in module) import unresolved; \
@@ -3115,7 +3114,7 @@ pub impl Resolver {
} }
// Otherwise, proceed and write in the bindings. // Otherwise, proceed and write in the bindings.
match module_.import_resolutions.find(&target_name) { match module_.import_resolutions.find(target_name) {
None => { None => {
die!(~"(resolving one-level renaming import) reduced graph \ die!(~"(resolving one-level renaming import) reduced graph \
construction or glob importing should have created the \ construction or glob importing should have created the \
@@ -3329,7 +3328,7 @@ pub impl Resolver {
// Nothing to do. // Nothing to do.
} }
Some(name) => { Some(name) => {
match orig_module.children.find(&name) { match orig_module.children.find(name) {
None => { None => {
debug!("!!! (with scope) didn't find `%s` in `%s`", debug!("!!! (with scope) didn't find `%s` in `%s`",
self.session.str_of(name), self.session.str_of(name),
@@ -3406,7 +3405,7 @@ pub impl Resolver {
// If the def is a ty param, and came from the parent // If the def is a ty param, and came from the parent
// item, it's ok // item, it's ok
match def { match def {
def_ty_param(did, _) if self.def_map.find(&did.node) def_ty_param(did, _) if self.def_map.find(copy(did.node))
== Some(def_typaram_binder(item_id)) => { == Some(def_typaram_binder(item_id)) => {
// ok // ok
} }
@@ -3478,7 +3477,7 @@ pub impl Resolver {
while i != 0 { while i != 0 {
i -= 1; i -= 1;
let rib = (*ribs).get_elt(i); let rib = (*ribs).get_elt(i);
match rib.bindings.find(&name) { match rib.bindings.find(name) {
Some(def_like) => { Some(def_like) => {
return self.upvarify(ribs, i, def_like, span, return self.upvarify(ribs, i, def_like, span,
allow_capturing_self); allow_capturing_self);
@@ -4070,7 +4069,7 @@ pub impl Resolver {
let map_i = self.binding_mode_map(*p); let map_i = self.binding_mode_map(*p);
for map_0.each_ref |&key, &binding_0| { for map_0.each_ref |&key, &binding_0| {
match map_i.find(&key) { match map_i.find(key) {
None => { None => {
self.session.span_err( self.session.span_err(
p.span, p.span,
@@ -4127,7 +4126,7 @@ pub impl Resolver {
// Move down in the graph, if there's an anonymous module rooted here. // Move down in the graph, if there's an anonymous module rooted here.
let orig_module = self.current_module; let orig_module = self.current_module;
match self.current_module.anonymous_children.find(&block.node.id) { match self.current_module.anonymous_children.find(block.node.id) {
None => { /* Nothing to do. */ } None => { /* Nothing to do. */ }
Some(anonymous_module) => { Some(anonymous_module) => {
debug!("(resolving block) found anonymous module, moving \ debug!("(resolving block) found anonymous module, moving \
@@ -4162,7 +4161,7 @@ pub impl Resolver {
match self.primitive_type_table match self.primitive_type_table
.primitive_types .primitive_types
.find(&name) { .find(name) {
Some(primitive_type) => { Some(primitive_type) => {
result_def = result_def =
@@ -4328,7 +4327,7 @@ pub impl Resolver {
bindings_list.insert(ident, pat_id); bindings_list.insert(ident, pat_id);
} }
Some(b) => { Some(b) => {
if b.find(&ident) == Some(pat_id) { if b.find(ident) == Some(pat_id) {
// Then this is a duplicate variable // Then this is a duplicate variable
// in the same disjunct, which is an // in the same disjunct, which is an
// error // error
@@ -4527,7 +4526,7 @@ pub impl Resolver {
-> NameDefinition { -> NameDefinition {
// First, search children. // First, search children.
match containing_module.children.find(&name) { match containing_module.children.find(name) {
Some(child_name_bindings) => { Some(child_name_bindings) => {
match (child_name_bindings.def_for_namespace(namespace), match (child_name_bindings.def_for_namespace(namespace),
child_name_bindings.privacy_for_namespace(namespace)) { child_name_bindings.privacy_for_namespace(namespace)) {
@@ -4550,7 +4549,7 @@ pub impl Resolver {
} }
// Next, search import resolutions. // Next, search import resolutions.
match containing_module.import_resolutions.find(&name) { match containing_module.import_resolutions.find(name) {
Some(import_resolution) if import_resolution.privacy == Public || Some(import_resolution) if import_resolution.privacy == Public ||
xray == Xray => { xray == Xray => {
match (*import_resolution).target_for_namespace(namespace) { match (*import_resolution).target_for_namespace(namespace) {
@@ -5077,7 +5076,7 @@ pub impl Resolver {
trait_def_id.node, trait_def_id.node,
self.session.str_of(name)); self.session.str_of(name));
match self.trait_info.find(&trait_def_id) { match self.trait_info.find(trait_def_id) {
Some(trait_info) if trait_info.contains_key_ref(&name) => { Some(trait_info) if trait_info.contains_key_ref(&name) => {
debug!("(adding trait info if containing method) found trait \ debug!("(adding trait info if containing method) found trait \
%d:%d for method '%s'", %d:%d for method '%s'",

View file

@@ -282,7 +282,7 @@ pub fn trans_opt(bcx: block, o: &Opt) -> opt_result {
} }
pub fn variant_opt(tcx: ty::ctxt, pat_id: ast::node_id) -> Opt { pub fn variant_opt(tcx: ty::ctxt, pat_id: ast::node_id) -> Opt {
match tcx.def_map.get(&pat_id) { match tcx.def_map.get(pat_id) {
ast::def_variant(enum_id, var_id) => { ast::def_variant(enum_id, var_id) => {
let variants = ty::enum_variants(tcx, enum_id); let variants = ty::enum_variants(tcx, enum_id);
for vec::each(*variants) |v| { for vec::each(*variants) |v| {
@@ -377,7 +377,7 @@ pub fn expand_nested_bindings(bcx: block, m: &[@Match/&r],
vec::view(br.pats, col + 1u, br.pats.len()))); vec::view(br.pats, col + 1u, br.pats.len())));
let binding_info = let binding_info =
br.data.bindings_map.get(&path_to_ident(path)); br.data.bindings_map.get(path_to_ident(path));
Store(bcx, val, binding_info.llmatch); Store(bcx, val, binding_info.llmatch);
@Match {pats: pats, data: br.data} @Match {pats: pats, data: br.data}
@@ -424,8 +424,7 @@ pub fn enter_match(bcx: block, dm: DefMap, m: &[@Match/&r],
ast::pat_ident(_, path, None) => { ast::pat_ident(_, path, None) => {
if pat_is_binding(dm, self) { if pat_is_binding(dm, self) {
let binding_info = let binding_info =
br.data.bindings_map.get( br.data.bindings_map.get(path_to_ident(path));
&path_to_ident(path));
Store(bcx, val, binding_info.llmatch); Store(bcx, val, binding_info.llmatch);
} }
} }
@@ -519,7 +518,7 @@ pub fn enter_opt(bcx: block, m: &[@Match/&r], opt: &Opt, col: uint,
} }
} }
ast::pat_ident(_, _, None) if pat_is_const(tcx.def_map, p) => { ast::pat_ident(_, _, None) if pat_is_const(tcx.def_map, p) => {
let const_def = tcx.def_map.get(&p.id); let const_def = tcx.def_map.get(p.id);
let const_def_id = ast_util::def_id_of_def(const_def); let const_def_id = ast_util::def_id_of_def(const_def);
if opt_eq(tcx, &lit(ConstLit(const_def_id)), opt) { if opt_eq(tcx, &lit(ConstLit(const_def_id)), opt) {
Some(~[]) Some(~[])
@@ -537,7 +536,7 @@ pub fn enter_opt(bcx: block, m: &[@Match/&r], opt: &Opt, col: uint,
if opt_eq(tcx, &variant_opt(tcx, p.id), opt) { if opt_eq(tcx, &variant_opt(tcx, p.id), opt) {
// Look up the struct variant ID. // Look up the struct variant ID.
let struct_id; let struct_id;
match tcx.def_map.get(&p.id) { match tcx.def_map.get(p.id) {
ast::def_variant(_, found_struct_id) => { ast::def_variant(_, found_struct_id) => {
struct_id = found_struct_id; struct_id = found_struct_id;
} }
@@ -775,7 +774,7 @@ pub fn get_options(ccx: @crate_ctxt, m: &[@Match], col: uint) -> ~[Opt] {
ast::pat_ident(*) => { ast::pat_ident(*) => {
// This is one of: an enum variant, a unit-like struct, or a // This is one of: an enum variant, a unit-like struct, or a
// variable binding. // variable binding.
match ccx.tcx.def_map.find(&cur.id) { match ccx.tcx.def_map.find(cur.id) {
Some(ast::def_variant(*)) => { Some(ast::def_variant(*)) => {
add_to_set(ccx.tcx, &found, add_to_set(ccx.tcx, &found,
variant_opt(ccx.tcx, cur.id)); variant_opt(ccx.tcx, cur.id));
@@ -794,7 +793,7 @@ pub fn get_options(ccx: @crate_ctxt, m: &[@Match], col: uint) -> ~[Opt] {
ast::pat_enum(*) | ast::pat_struct(*) => { ast::pat_enum(*) | ast::pat_struct(*) => {
// This could be one of: a tuple-like enum variant, a // This could be one of: a tuple-like enum variant, a
// struct-like enum variant, or a struct. // struct-like enum variant, or a struct.
match ccx.tcx.def_map.find(&cur.id) { match ccx.tcx.def_map.find(cur.id) {
Some(ast::def_variant(*)) => { Some(ast::def_variant(*)) => {
add_to_set(ccx.tcx, &found, add_to_set(ccx.tcx, &found,
variant_opt(ccx.tcx, cur.id)); variant_opt(ccx.tcx, cur.id));
@@ -927,7 +926,7 @@ pub fn root_pats_as_necessary(bcx: block,
let pat_id = br.pats[col].id; let pat_id = br.pats[col].id;
let key = root_map_key {id: pat_id, derefs: 0u }; let key = root_map_key {id: pat_id, derefs: 0u };
match bcx.ccx().maps.root_map.find(&key) { match bcx.ccx().maps.root_map.find(key) {
None => (), None => (),
Some(root_info) => { Some(root_info) => {
// Note: the scope_id will always be the id of the match. See // Note: the scope_id will always be the id of the match. See
@@ -982,7 +981,7 @@ pub fn any_tuple_struct_pat(bcx: block, m: &[@Match], col: uint) -> bool {
let pat = br.pats[col]; let pat = br.pats[col];
match pat.node { match pat.node {
ast::pat_enum(_, Some(_)) => { ast::pat_enum(_, Some(_)) => {
match bcx.tcx().def_map.find(&pat.id) { match bcx.tcx().def_map.find(pat.id) {
Some(ast::def_struct(*)) => true, Some(ast::def_struct(*)) => true,
_ => false _ => false
} }
@@ -1718,9 +1717,9 @@ pub fn bind_irrefutable_pat(bcx: block,
} }
} }
ast::pat_enum(_, sub_pats) => { ast::pat_enum(_, sub_pats) => {
match bcx.tcx().def_map.find(&pat.id) { match bcx.tcx().def_map.find(pat.id) {
Some(ast::def_variant(*)) => { Some(ast::def_variant(*)) => {
let pat_def = ccx.tcx.def_map.get(&pat.id); let pat_def = ccx.tcx.def_map.get(pat.id);
let vdefs = ast_util::variant_def_ids(pat_def); let vdefs = ast_util::variant_def_ids(pat_def);
let args = extract_variant_args(bcx, pat.id, vdefs, val); let args = extract_variant_args(bcx, pat.id, vdefs, val);
for sub_pats.each |sub_pat| { for sub_pats.each |sub_pat| {

View file

@@ -169,7 +169,7 @@ pub fn get_extern_fn(externs: HashMap<~str, ValueRef>,
+name: ~str, +name: ~str,
cc: lib::llvm::CallConv, cc: lib::llvm::CallConv,
ty: TypeRef) -> ValueRef { ty: TypeRef) -> ValueRef {
if externs.contains_key_ref(&name) { return externs.get(&name); } if externs.contains_key_ref(&name) { return externs.get(name); }
// XXX: Bad copy. // XXX: Bad copy.
let f = decl_fn(llmod, copy name, cc, ty); let f = decl_fn(llmod, copy name, cc, ty);
externs.insert(name, f); externs.insert(name, f);
@@ -179,7 +179,7 @@ pub fn get_extern_fn(externs: HashMap<~str, ValueRef>,
pub fn get_extern_const(externs: HashMap<~str, ValueRef>, llmod: ModuleRef, pub fn get_extern_const(externs: HashMap<~str, ValueRef>, llmod: ModuleRef,
+name: ~str, ty: TypeRef) -> ValueRef { +name: ~str, ty: TypeRef) -> ValueRef {
unsafe { unsafe {
if externs.contains_key_ref(&name) { return externs.get(&name); } if externs.contains_key_ref(&name) { return externs.get(name); }
let c = str::as_c_str(name, |buf| { let c = str::as_c_str(name, |buf| {
llvm::LLVMAddGlobal(llmod, ty, buf) llvm::LLVMAddGlobal(llmod, ty, buf)
}); });
@@ -372,7 +372,7 @@ pub fn get_tydesc_simple(ccx: @crate_ctxt, t: ty::t) -> ValueRef {
} }
pub fn get_tydesc(ccx: @crate_ctxt, t: ty::t) -> @tydesc_info { pub fn get_tydesc(ccx: @crate_ctxt, t: ty::t) -> @tydesc_info {
match ccx.tydescs.find(&t) { match ccx.tydescs.find(t) {
Some(inf) => inf, Some(inf) => inf,
_ => { _ => {
ccx.stats.n_static_tydescs += 1u; ccx.stats.n_static_tydescs += 1u;
@@ -807,7 +807,7 @@ pub fn get_discrim_val(cx: @crate_ctxt, span: span, enum_did: ast::def_id,
pub fn lookup_discriminant(ccx: @crate_ctxt, vid: ast::def_id) -> ValueRef { pub fn lookup_discriminant(ccx: @crate_ctxt, vid: ast::def_id) -> ValueRef {
unsafe { unsafe {
let _icx = ccx.insn_ctxt("lookup_discriminant"); let _icx = ccx.insn_ctxt("lookup_discriminant");
match ccx.discrims.find(&vid) { match ccx.discrims.find(vid) {
None => { None => {
// It's an external discriminant that we haven't seen yet. // It's an external discriminant that we haven't seen yet.
assert (vid.crate != ast::local_crate); assert (vid.crate != ast::local_crate);
@@ -1095,7 +1095,7 @@ pub fn init_local(bcx: block, local: @ast::local) -> block {
} }
} }
let llptr = match bcx.fcx.lllocals.find(&local.node.id) { let llptr = match bcx.fcx.lllocals.find(local.node.id) {
Some(local_mem(v)) => v, Some(local_mem(v)) => v,
_ => { bcx.tcx().sess.span_bug(local.span, _ => { bcx.tcx().sess.span_bug(local.span,
~"init_local: Someone forgot to document why it's\ ~"init_local: Someone forgot to document why it's\
@@ -1437,7 +1437,7 @@ pub fn call_memcpy(cx: block, dst: ValueRef, src: ValueRef,
session::arch_x86 | session::arch_arm => ~"llvm.memcpy.p0i8.p0i8.i32", session::arch_x86 | session::arch_arm => ~"llvm.memcpy.p0i8.p0i8.i32",
session::arch_x86_64 => ~"llvm.memcpy.p0i8.p0i8.i64" session::arch_x86_64 => ~"llvm.memcpy.p0i8.p0i8.i64"
}; };
let memcpy = ccx.intrinsics.get(&key); let memcpy = ccx.intrinsics.get(key);
let src_ptr = PointerCast(cx, src, T_ptr(T_i8())); let src_ptr = PointerCast(cx, src, T_ptr(T_i8()));
let dst_ptr = PointerCast(cx, dst, T_ptr(T_i8())); let dst_ptr = PointerCast(cx, dst, T_ptr(T_i8()));
let size = IntCast(cx, n_bytes, ccx.int_type); let size = IntCast(cx, n_bytes, ccx.int_type);
@@ -1484,7 +1484,7 @@ pub fn memzero(cx: block, llptr: ValueRef, llty: TypeRef) {
} }
} }
let llintrinsicfn = ccx.intrinsics.get(&intrinsic_key); let llintrinsicfn = ccx.intrinsics.get(intrinsic_key);
let llptr = PointerCast(cx, llptr, T_ptr(T_i8())); let llptr = PointerCast(cx, llptr, T_ptr(T_i8()));
let llzeroval = C_u8(0); let llzeroval = C_u8(0);
let size = IntCast(cx, machine::llsize_of(ccx, llty), ccx.int_type); let size = IntCast(cx, machine::llsize_of(ccx, llty), ccx.int_type);
@@ -1884,7 +1884,7 @@ pub fn trans_enum_variant(ccx: @crate_ctxt,
// If this argument to this function is a enum, it'll have come in to // If this argument to this function is a enum, it'll have come in to
// this function as an opaque blob due to the way that type_of() // this function as an opaque blob due to the way that type_of()
// works. So we have to cast to the destination's view of the type. // works. So we have to cast to the destination's view of the type.
let llarg = match fcx.llargs.find(&va.id) { let llarg = match fcx.llargs.find(va.id) {
Some(local_mem(x)) => x, Some(local_mem(x)) => x,
_ => die!(~"trans_enum_variant: how do we know this works?"), _ => die!(~"trans_enum_variant: how do we know this works?"),
}; };
@@ -1930,7 +1930,7 @@ pub fn trans_tuple_struct(ccx: @crate_ctxt,
for fields.eachi |i, field| { for fields.eachi |i, field| {
let lldestptr = GEPi(bcx, fcx.llretptr, [0, 0, i]); let lldestptr = GEPi(bcx, fcx.llretptr, [0, 0, i]);
let llarg = match fcx.llargs.get(&field.node.id) { let llarg = match fcx.llargs.get(field.node.id) {
local_mem(x) => x, local_mem(x) => x,
_ => { _ => {
ccx.tcx.sess.bug(~"trans_tuple_struct: llarg wasn't \ ccx.tcx.sess.bug(~"trans_tuple_struct: llarg wasn't \
@@ -2019,7 +2019,7 @@ pub fn trans_enum_def(ccx: @crate_ctxt, enum_definition: ast::enum_def,
pub fn trans_item(ccx: @crate_ctxt, item: ast::item) { pub fn trans_item(ccx: @crate_ctxt, item: ast::item) {
let _icx = ccx.insn_ctxt("trans_item"); let _icx = ccx.insn_ctxt("trans_item");
let path = match ccx.tcx.items.get(&item.id) { let path = match ccx.tcx.items.get(item.id) {
ast_map::node_item(_, p) => p, ast_map::node_item(_, p) => p,
// tjc: ? // tjc: ?
_ => die!(~"trans_item"), _ => die!(~"trans_item"),
@@ -2275,7 +2275,7 @@ pub fn fill_fn_pair(bcx: block, pair: ValueRef, llfn: ValueRef,
pub fn item_path(ccx: @crate_ctxt, i: @ast::item) -> path { pub fn item_path(ccx: @crate_ctxt, i: @ast::item) -> path {
vec::append( vec::append(
/*bad*/copy *match ccx.tcx.items.get(&i.id) { /*bad*/copy *match ccx.tcx.items.get(i.id) {
ast_map::node_item(_, p) => p, ast_map::node_item(_, p) => p,
// separate map for paths? // separate map for paths?
_ => die!(~"item_path") _ => die!(~"item_path")
@@ -2291,7 +2291,7 @@ pub fn get_dtor_symbol(ccx: @crate_ctxt,
+substs: Option<param_substs>) +substs: Option<param_substs>)
-> ~str { -> ~str {
let t = ty::node_id_to_type(ccx.tcx, id); let t = ty::node_id_to_type(ccx.tcx, id);
match ccx.item_symbols.find(&id) { match ccx.item_symbols.find(id) {
Some(ref s) => (/*bad*/copy *s), Some(ref s) => (/*bad*/copy *s),
None if substs.is_none() => { None if substs.is_none() => {
let s = mangle_exported_name( let s = mangle_exported_name(
@@ -2326,12 +2326,12 @@ pub fn get_dtor_symbol(ccx: @crate_ctxt,
pub fn get_item_val(ccx: @crate_ctxt, id: ast::node_id) -> ValueRef { pub fn get_item_val(ccx: @crate_ctxt, id: ast::node_id) -> ValueRef {
debug!("get_item_val(id=`%?`)", id); debug!("get_item_val(id=`%?`)", id);
let tcx = ccx.tcx; let tcx = ccx.tcx;
match ccx.item_vals.find(&id) { match ccx.item_vals.find(id) {
Some(v) => v, Some(v) => v,
None => { None => {
let mut exprt = false; let mut exprt = false;
let val = match ccx.tcx.items.get(&id) { let val = match ccx.tcx.items.get(id) {
ast_map::node_item(i, pth) => { ast_map::node_item(i, pth) => {
let my_path = vec::append(/*bad*/copy *pth, let my_path = vec::append(/*bad*/copy *pth,
~[path_name(i.ident)]); ~[path_name(i.ident)]);
@@ -2770,7 +2770,7 @@ pub fn declare_dbg_intrinsics(llmod: ModuleRef,
pub fn trap(bcx: block) { pub fn trap(bcx: block) {
let v: ~[ValueRef] = ~[]; let v: ~[ValueRef] = ~[];
match bcx.ccx().intrinsics.find(&~"llvm.trap") { match bcx.ccx().intrinsics.find(~"llvm.trap") {
Some(x) => { Call(bcx, x, v); }, Some(x) => { Call(bcx, x, v); },
_ => bcx.sess().bug(~"unbound llvm.trap in trap") _ => bcx.sess().bug(~"unbound llvm.trap in trap")
} }

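Several call sites in the file above (get_extern_fn, get_extern_const) keep a contains_key_ref check immediately followed by a get, which hashes the key twice. A short sketch of collapsing that into one lookup, assuming a present-day std HashMap rather than the old externs map; declare_fn here is a hypothetical stand-in for the real decl_fn / LLVMAddGlobal calls:

    use std::collections::HashMap;

    // Hypothetical declaration step standing in for decl_fn / LLVMAddGlobal.
    fn declare_fn(name: &str) -> u64 {
        name.len() as u64 // placeholder handle
    }

    // One lookup instead of contains_key followed by get: the entry API
    // finds the slot once and fills it only if the name is new.
    fn get_or_declare(externs: &mut HashMap<String, u64>, name: &str) -> u64 {
        *externs.entry(name.to_string()).or_insert_with(|| declare_fn(name))
    }

This is only a sketch of the lookup pattern; the code in the diff naturally has to keep the oldmap interface it is written against.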
View file

@@ -70,7 +70,7 @@ pub fn count_insn(cx: block, category: &str) {
i = 0u; i = 0u;
while i < len { while i < len {
let e = /*bad*/copy v[i]; let e = /*bad*/copy v[i];
i = mm.get(&e); i = mm.get(/*bad*/ copy e);
s += ~"/"; s += ~"/";
s += e; s += e;
i += 1u; i += 1u;
@@ -79,7 +79,7 @@ pub fn count_insn(cx: block, category: &str) {
s += ~"/"; s += ~"/";
s += category; s += category;
let n = match h.find(&s) { let n = match h.find(/*bad*/ copy s) {
Some(n) => n, Some(n) => n,
_ => 0u _ => 0u
}; };

View file

@@ -70,7 +70,7 @@ pub fn trans(bcx: block, expr: @ast::expr) -> Callee {
return trans_def(bcx, bcx.def(expr.id), expr); return trans_def(bcx, bcx.def(expr.id), expr);
} }
ast::expr_field(base, _, _) => { ast::expr_field(base, _, _) => {
match bcx.ccx().maps.method_map.find(&expr.id) { match bcx.ccx().maps.method_map.find(expr.id) {
Some(ref origin) => { // An impl method Some(ref origin) => { // An impl method
return meth::trans_method_callee(bcx, expr.id, return meth::trans_method_callee(bcx, expr.id,
base, (*origin)); base, (*origin));
@@ -208,7 +208,7 @@ pub fn trans_fn_ref_with_vtables(
// Modify the def_id if this is a default method; we want to be // Modify the def_id if this is a default method; we want to be
// monomorphizing the trait's code. // monomorphizing the trait's code.
let (def_id, opt_impl_did) = let (def_id, opt_impl_did) =
match tcx.provided_method_sources.find(&def_id) { match tcx.provided_method_sources.find(def_id) {
None => (def_id, None), None => (def_id, None),
Some(source) => (source.method_id, Some(source.impl_id)) Some(source) => (source.method_id, Some(source.impl_id))
}; };
@@ -234,7 +234,7 @@ pub fn trans_fn_ref_with_vtables(
} else if def_id.crate == ast::local_crate { } else if def_id.crate == ast::local_crate {
let map_node = session::expect( let map_node = session::expect(
ccx.sess, ccx.sess,
ccx.tcx.items.find(&def_id.node), ccx.tcx.items.find(def_id.node),
|| fmt!("local item should be in ast map")); || fmt!("local item should be in ast map"));
match map_node { match map_node {
@@ -313,7 +313,7 @@ pub fn trans_method_call(in_cx: block,
node_id_type(in_cx, call_ex.callee_id), node_id_type(in_cx, call_ex.callee_id),
expr_ty(in_cx, call_ex), expr_ty(in_cx, call_ex),
|cx| { |cx| {
match cx.ccx().maps.method_map.find(&call_ex.id) { match cx.ccx().maps.method_map.find(call_ex.id) {
Some(ref origin) => { Some(ref origin) => {
meth::trans_method_callee(cx, meth::trans_method_callee(cx,
call_ex.callee_id, call_ex.callee_id,

View file

@@ -419,7 +419,7 @@ pub fn trans_expr_fn(bcx: block,
let Result {bcx: bcx, val: closure} = match proto { let Result {bcx: bcx, val: closure} = match proto {
ast::ProtoBorrowed | ast::ProtoBox | ast::ProtoUniq => { ast::ProtoBorrowed | ast::ProtoBox | ast::ProtoUniq => {
let cap_vars = ccx.maps.capture_map.get(&user_id); let cap_vars = ccx.maps.capture_map.get(user_id);
let ret_handle = match is_loop_body {Some(x) => x, let ret_handle = match is_loop_body {Some(x) => x,
None => None}; None => None};
let {llbox, cdata_ty, bcx} = build_closure(bcx, cap_vars, proto, let {llbox, cdata_ty, bcx} = build_closure(bcx, cap_vars, proto,

View file

@@ -700,7 +700,7 @@ pub impl block {
} }
fn def(nid: ast::node_id) -> ast::def { fn def(nid: ast::node_id) -> ast::def {
match self.tcx().def_map.find(&nid) { match self.tcx().def_map.find(nid) {
Some(v) => v, Some(v) => v,
None => { None => {
self.tcx().sess.bug(fmt!( self.tcx().sess.bug(fmt!(
@@ -1134,7 +1134,7 @@ pub fn C_u8(i: uint) -> ValueRef {
// our boxed-and-length-annotated strings. // our boxed-and-length-annotated strings.
pub fn C_cstr(cx: @crate_ctxt, +s: ~str) -> ValueRef { pub fn C_cstr(cx: @crate_ctxt, +s: ~str) -> ValueRef {
unsafe { unsafe {
match cx.const_cstr_cache.find(&s) { match cx.const_cstr_cache.find(/*bad*/copy s) {
Some(llval) => return llval, Some(llval) => return llval,
None => () None => ()
} }
@@ -1356,7 +1356,7 @@ pub fn node_id_type_params(bcx: block, id: ast::node_id) -> ~[ty::t] {
pub fn node_vtables(bcx: block, id: ast::node_id) pub fn node_vtables(bcx: block, id: ast::node_id)
-> Option<typeck::vtable_res> { -> Option<typeck::vtable_res> {
let raw_vtables = bcx.ccx().maps.vtable_map.find(&id); let raw_vtables = bcx.ccx().maps.vtable_map.find(id);
raw_vtables.map( raw_vtables.map(
|vts| resolve_vtables_in_fn_ctxt(bcx.fcx, *vts)) |vts| resolve_vtables_in_fn_ctxt(bcx.fcx, *vts))
} }

View file

@@ -82,7 +82,7 @@ pub fn const_vec(cx: @crate_ctxt, e: @ast::expr, es: &[@ast::expr])
pub fn const_deref(cx: @crate_ctxt, v: ValueRef) -> ValueRef { pub fn const_deref(cx: @crate_ctxt, v: ValueRef) -> ValueRef {
unsafe { unsafe {
let v = match cx.const_globals.find(&(v as int)) { let v = match cx.const_globals.find(v as int) {
Some(v) => v, Some(v) => v,
None => v None => v
}; };
@@ -127,7 +127,7 @@ pub fn get_const_val(cx: @crate_ctxt, def_id: ast::def_id) -> ValueRef {
cx.tcx.sess.bug(~"cross-crate constants"); cx.tcx.sess.bug(~"cross-crate constants");
} }
if !cx.const_values.contains_key_ref(&def_id.node) { if !cx.const_values.contains_key_ref(&def_id.node) {
match cx.tcx.items.get(&def_id.node) { match cx.tcx.items.get(def_id.node) {
ast_map::node_item(@ast::item { ast_map::node_item(@ast::item {
node: ast::item_const(_, subexpr), _ node: ast::item_const(_, subexpr), _
}, _) => { }, _) => {
@@ -136,7 +136,7 @@ pub fn get_const_val(cx: @crate_ctxt, def_id: ast::def_id) -> ValueRef {
_ => cx.tcx.sess.bug(~"expected a const to be an item") _ => cx.tcx.sess.bug(~"expected a const to be an item")
} }
} }
cx.const_values.get(&def_id.node) cx.const_values.get(def_id.node)
} }
pub fn const_expr(cx: @crate_ctxt, e: @ast::expr) -> ValueRef { pub fn const_expr(cx: @crate_ctxt, e: @ast::expr) -> ValueRef {
@@ -402,7 +402,7 @@ pub fn const_expr(cx: @crate_ctxt, e: @ast::expr) -> ValueRef {
} }
ast::expr_path(pth) => { ast::expr_path(pth) => {
assert pth.types.len() == 0; assert pth.types.len() == 0;
match cx.tcx.def_map.find(&e.id) { match cx.tcx.def_map.find(e.id) {
Some(ast::def_fn(def_id, purity)) => { Some(ast::def_fn(def_id, purity)) => {
assert ast_util::is_local(def_id); assert ast_util::is_local(def_id);
let f = base::get_item_val(cx, def_id.node); let f = base::get_item_val(cx, def_id.node);
@@ -437,7 +437,7 @@ pub fn const_expr(cx: @crate_ctxt, e: @ast::expr) -> ValueRef {
} }
} }
ast::expr_call(callee, args, _) => { ast::expr_call(callee, args, _) => {
match cx.tcx.def_map.find(&callee.id) { match cx.tcx.def_map.find(callee.id) {
Some(ast::def_struct(def_id)) => { Some(ast::def_struct(def_id)) => {
let ety = ty::expr_ty(cx.tcx, e); let ety = ty::expr_ty(cx.tcx, e);
let llty = type_of::type_of(cx, ety); let llty = type_of::type_of(cx, ety);
@@ -482,7 +482,7 @@ pub fn trans_const(ccx: @crate_ctxt, _e: @ast::expr, id: ast::node_id) {
let g = base::get_item_val(ccx, id); let g = base::get_item_val(ccx, id);
// At this point, get_item_val has already translated the // At this point, get_item_val has already translated the
// constant's initializer to determine its LLVM type. // constant's initializer to determine its LLVM type.
let v = ccx.const_values.get(&id); let v = ccx.const_values.get(id);
llvm::LLVMSetInitializer(g, v); llvm::LLVMSetInitializer(g, v);
llvm::LLVMSetGlobalConstant(g, True); llvm::LLVMSetGlobalConstant(g, True);
} }

View file

@@ -184,7 +184,7 @@ pub fn trans_log(log_ex: @ast::expr,
let modname = path_str(ccx.sess, copy modpath); let modname = path_str(ccx.sess, copy modpath);
let global = if ccx.module_data.contains_key_ref(&modname) { let global = if ccx.module_data.contains_key_ref(&modname) {
ccx.module_data.get(&modname) ccx.module_data.get(modname)
} else { } else {
let s = link::mangle_internal_name_by_path_and_seq( let s = link::mangle_internal_name_by_path_and_seq(
ccx, modpath, ~"loglevel"); ccx, modpath, ~"loglevel");

View file

@@ -627,7 +627,7 @@ pub impl Datum {
// //
// (Note: root'd values are always boxes) // (Note: root'd values are always boxes)
let key = root_map_key { id: expr_id, derefs: derefs }; let key = root_map_key { id: expr_id, derefs: derefs };
let bcx = match ccx.maps.root_map.find(&key) { let bcx = match ccx.maps.root_map.find(key) {
None => bcx, None => bcx,
Some(root_info) => self.root(bcx, root_info) Some(root_info) => self.root(bcx, root_info)
}; };
@@ -635,7 +635,7 @@ pub impl Datum {
// Perform the write guard, if necessary. // Perform the write guard, if necessary.
// //
// (Note: write-guarded values are always boxes) // (Note: write-guarded values are always boxes)
let bcx = match ccx.maps.write_guard_map.find(&key) { let bcx = match ccx.maps.write_guard_map.find(key) {
None => bcx, None => bcx,
Some(_) => self.perform_write_guard(bcx) Some(_) => self.perform_write_guard(bcx)
}; };

View file

@@ -118,7 +118,7 @@ pub fn mk_ctxt(+crate: ~str, intr: @ident_interner) -> debug_ctxt {
fn update_cache(cache: metadata_cache, mdtag: int, val: debug_metadata) { fn update_cache(cache: metadata_cache, mdtag: int, val: debug_metadata) {
let existing = if cache.contains_key_ref(&mdtag) { let existing = if cache.contains_key_ref(&mdtag) {
cache.get(&mdtag) cache.get(mdtag)
} else { } else {
~[] ~[]
}; };
@@ -177,7 +177,7 @@ fn cached_metadata<T: Copy>(cache: metadata_cache,
-> Option<T> { -> Option<T> {
unsafe { unsafe {
if cache.contains_key_ref(&mdtag) { if cache.contains_key_ref(&mdtag) {
let items = cache.get(&mdtag); let items = cache.get(mdtag);
for items.each |item| { for items.each |item| {
let md: T = md_from_metadata::<T>(*item); let md: T = md_from_metadata::<T>(*item);
if eq_fn(md) { if eq_fn(md) {
@@ -289,7 +289,7 @@ fn create_block(cx: block) -> @metadata<block_md> {
Some(bcx) => create_block(bcx).node Some(bcx) => create_block(bcx).node
}; };
let file_node = create_file(cx.ccx(), fname); let file_node = create_file(cx.ccx(), fname);
let unique_id = match cache.find(&LexicalBlockTag) { let unique_id = match cache.find(LexicalBlockTag) {
option::Some(v) => vec::len(v) as int, option::Some(v) => vec::len(v) as int,
option::None => 0 option::None => 0
}; };
@@ -679,14 +679,14 @@ pub fn create_local_var(bcx: block, local: @ast::local)
let mdval = @{node: mdnode, data: {id: local.node.id}}; let mdval = @{node: mdnode, data: {id: local.node.id}};
update_cache(cache, AutoVariableTag, local_var_metadata(mdval)); update_cache(cache, AutoVariableTag, local_var_metadata(mdval));
let llptr = match bcx.fcx.lllocals.find(&local.node.id) { let llptr = match bcx.fcx.lllocals.find(local.node.id) {
option::Some(local_mem(v)) => v, option::Some(local_mem(v)) => v,
option::Some(_) => { option::Some(_) => {
bcx.tcx().sess.span_bug(local.span, ~"local is bound to \ bcx.tcx().sess.span_bug(local.span, ~"local is bound to \
something weird"); something weird");
} }
option::None => { option::None => {
match bcx.fcx.lllocals.get(&local.node.pat.id) { match bcx.fcx.lllocals.get(local.node.pat.id) {
local_imm(v) => v, local_imm(v) => v,
_ => bcx.tcx().sess.span_bug(local.span, ~"local is bound to \ _ => bcx.tcx().sess.span_bug(local.span, ~"local is bound to \
something weird") something weird")
@@ -694,7 +694,7 @@ pub fn create_local_var(bcx: block, local: @ast::local)
} }
}; };
let declargs = ~[llmdnode(~[llptr]), mdnode]; let declargs = ~[llmdnode(~[llptr]), mdnode];
trans::build::Call(bcx, cx.intrinsics.get(&~"llvm.dbg.declare"), trans::build::Call(bcx, cx.intrinsics.get(~"llvm.dbg.declare"),
declargs); declargs);
return mdval; return mdval;
} }
@@ -731,12 +731,12 @@ pub fn create_arg(bcx: block, arg: ast::arg, sp: span)
let mdval = @{node: mdnode, data: {id: arg.id}}; let mdval = @{node: mdnode, data: {id: arg.id}};
update_cache(cache, tg, argument_metadata(mdval)); update_cache(cache, tg, argument_metadata(mdval));
let llptr = match fcx.llargs.get(&arg.id) { let llptr = match fcx.llargs.get(arg.id) {
local_mem(v) | local_imm(v) => v, local_mem(v) | local_imm(v) => v,
}; };
let declargs = ~[llmdnode(~[llptr]), mdnode]; let declargs = ~[llmdnode(~[llptr]), mdnode];
trans::build::Call(bcx, trans::build::Call(bcx,
cx.intrinsics.get(&~"llvm.dbg.declare"), cx.intrinsics.get(~"llvm.dbg.declare"),
declargs); declargs);
return Some(mdval); return Some(mdval);
} }
@@ -774,7 +774,7 @@ pub fn create_function(fcx: fn_ctxt) -> @metadata<subprogram_md> {
let sp = fcx.span.get(); let sp = fcx.span.get();
log(debug, cx.sess.codemap.span_to_str(sp)); log(debug, cx.sess.codemap.span_to_str(sp));
let (ident, ret_ty, id) = match cx.tcx.items.get(&fcx.id) { let (ident, ret_ty, id) = match cx.tcx.items.get(fcx.id) {
ast_map::node_item(item, _) => { ast_map::node_item(item, _) => {
match /*bad*/copy item.node { match /*bad*/copy item.node {
ast::item_fn(decl, _, _, _) => { ast::item_fn(decl, _, _, _) => {

View file

@@ -185,7 +185,7 @@ fn drop_and_cancel_clean(bcx: block, dat: Datum) -> block {
pub fn trans_to_datum(bcx: block, expr: @ast::expr) -> DatumBlock { pub fn trans_to_datum(bcx: block, expr: @ast::expr) -> DatumBlock {
debug!("trans_to_datum(expr=%s)", bcx.expr_to_str(expr)); debug!("trans_to_datum(expr=%s)", bcx.expr_to_str(expr));
return match bcx.tcx().adjustments.find(&expr.id) { return match bcx.tcx().adjustments.find(expr.id) {
None => { None => {
trans_to_datum_unadjusted(bcx, expr) trans_to_datum_unadjusted(bcx, expr)
} }
@@ -329,7 +329,7 @@ fn trans_lvalue(bcx: block, expr: @ast::expr) -> DatumBlock {
* instead, but sometimes we call trans_lvalue() directly as a * instead, but sometimes we call trans_lvalue() directly as a
* means of asserting that a particular expression is an lvalue. */ * means of asserting that a particular expression is an lvalue. */
return match bcx.tcx().adjustments.find(&expr.id) { return match bcx.tcx().adjustments.find(expr.id) {
None => trans_lvalue_unadjusted(bcx, expr), None => trans_lvalue_unadjusted(bcx, expr),
Some(_) => { Some(_) => {
bcx.sess().span_bug( bcx.sess().span_bug(
@@ -742,7 +742,7 @@ fn trans_lvalue_unadjusted(bcx: block, expr: @ast::expr) -> DatumBlock {
// the lvalue in there, and then arrange for it to be cleaned up // the lvalue in there, and then arrange for it to be cleaned up
// at the end of the scope with id `scope_id`: // at the end of the scope with id `scope_id`:
let root_key = root_map_key { id: expr.id, derefs: 0u }; let root_key = root_map_key { id: expr.id, derefs: 0u };
for bcx.ccx().maps.root_map.find(&root_key).each |&root_info| { for bcx.ccx().maps.root_map.find(root_key).each |&root_info| {
bcx = unrooted_datum.root(bcx, root_info); bcx = unrooted_datum.root(bcx, root_info);
} }
@@ -934,7 +934,7 @@ pub fn trans_local_var(bcx: block, def: ast::def) -> Datum {
ast::def_upvar(nid, _, _, _) => { ast::def_upvar(nid, _, _, _) => {
// Can't move upvars, so this is never a ZeroMemLastUse. // Can't move upvars, so this is never a ZeroMemLastUse.
let local_ty = node_id_type(bcx, nid); let local_ty = node_id_type(bcx, nid);
match bcx.fcx.llupvars.find(&nid) { match bcx.fcx.llupvars.find(nid) {
Some(val) => { Some(val) => {
Datum { Datum {
val: val, val: val,
@@ -987,7 +987,7 @@ pub fn trans_local_var(bcx: block, def: ast::def) -> Datum {
fn take_local(bcx: block, fn take_local(bcx: block,
table: HashMap<ast::node_id, local_val>, table: HashMap<ast::node_id, local_val>,
nid: ast::node_id) -> Datum { nid: ast::node_id) -> Datum {
let (v, mode) = match table.find(&nid) { let (v, mode) = match table.find(nid) {
Some(local_mem(v)) => (v, ByRef), Some(local_mem(v)) => (v, ByRef),
Some(local_imm(v)) => (v, ByValue), Some(local_imm(v)) => (v, ByValue),
None => { None => {
@@ -1066,7 +1066,7 @@ pub fn with_field_tys<R>(tcx: ty::ctxt,
ty_to_str(tcx, ty))); ty_to_str(tcx, ty)));
} }
Some(node_id) => { Some(node_id) => {
match tcx.def_map.get(&node_id) { match tcx.def_map.get(node_id) {
ast::def_variant(_, variant_id) => { ast::def_variant(_, variant_id) => {
op(false, struct_mutable_fields( op(false, struct_mutable_fields(
tcx, variant_id, substs)) tcx, variant_id, substs))
@@ -1120,7 +1120,7 @@ fn trans_rec_or_struct(bcx: block,
let tcx = bcx.tcx(); let tcx = bcx.tcx();
let addr = match ty::get(ty).sty { let addr = match ty::get(ty).sty {
ty::ty_enum(_, ref substs) => { ty::ty_enum(_, ref substs) => {
match tcx.def_map.get(&id) { match tcx.def_map.get(id) {
ast::def_variant(enum_id, variant_id) => { ast::def_variant(enum_id, variant_id) => {
let variant_info = ty::enum_variant_with_id( let variant_info = ty::enum_variant_with_id(
tcx, enum_id, variant_id); tcx, enum_id, variant_id);
@@ -1479,7 +1479,7 @@ fn trans_overloaded_op(bcx: block,
dest: Dest, dest: Dest,
+autoref_arg: AutorefArg) -> block +autoref_arg: AutorefArg) -> block
{ {
let origin = bcx.ccx().maps.method_map.get(&expr.id); let origin = bcx.ccx().maps.method_map.get(expr.id);
let fty = node_id_type(bcx, expr.callee_id); let fty = node_id_type(bcx, expr.callee_id);
return callee::trans_call_inner( return callee::trans_call_inner(
bcx, expr.info(), fty, bcx, expr.info(), fty,
@@ -1636,7 +1636,7 @@ fn trans_assign_op(bcx: block,
let dst_datum = unpack_datum!(bcx, trans_lvalue_unadjusted(bcx, dst)); let dst_datum = unpack_datum!(bcx, trans_lvalue_unadjusted(bcx, dst));
// A user-defined operator method // A user-defined operator method
if bcx.ccx().maps.method_map.find(&expr.id).is_some() { if bcx.ccx().maps.method_map.find(expr.id).is_some() {
// FIXME(#2528) evaluates the receiver twice!! // FIXME(#2528) evaluates the receiver twice!!
let scratch = scratch_datum(bcx, dst_datum.ty, false); let scratch = scratch_datum(bcx, dst_datum.ty, false);
let bcx = trans_overloaded_op(bcx, expr, dst, ~[src], let bcx = trans_overloaded_op(bcx, expr, dst, ~[src],

View file

@@ -498,7 +498,7 @@ pub fn trans_intrinsic(ccx: @crate_ctxt,
let tp_sz = machine::llbitsize_of_real(ccx, lltp_ty), let tp_sz = machine::llbitsize_of_real(ccx, lltp_ty),
out_sz = machine::llbitsize_of_real(ccx, llout_ty); out_sz = machine::llbitsize_of_real(ccx, llout_ty);
if tp_sz != out_sz { if tp_sz != out_sz {
let sp = match ccx.tcx.items.get(&ref_id.get()) { let sp = match ccx.tcx.items.get(ref_id.get()) {
ast_map::node_expr(e) => e.span, ast_map::node_expr(e) => e.span,
_ => die!(~"reinterpret_cast or forget has non-expr arg") _ => die!(~"reinterpret_cast or forget has non-expr arg")
}; };
@@ -535,7 +535,7 @@ pub fn trans_intrinsic(ccx: @crate_ctxt,
abi::tydesc_field_visit_glue, None); abi::tydesc_field_visit_glue, None);
} }
~"frame_address" => { ~"frame_address" => {
let frameaddress = ccx.intrinsics.get(&~"llvm.frameaddress"); let frameaddress = ccx.intrinsics.get(~"llvm.frameaddress");
let frameaddress_val = Call(bcx, frameaddress, ~[C_i32(0i32)]); let frameaddress_val = Call(bcx, frameaddress, ~[C_i32(0i32)]);
let star_u8 = ty::mk_imm_ptr( let star_u8 = ty::mk_imm_ptr(
bcx.tcx(), bcx.tcx(),
@@ -576,7 +576,7 @@ pub fn trans_intrinsic(ccx: @crate_ctxt,
let align = C_i32(1); let align = C_i32(1);
let volatile = C_bool(false); let volatile = C_bool(false);
let llfn = bcx.ccx().intrinsics.get( let llfn = bcx.ccx().intrinsics.get(
&~"llvm.memmove.p0i8.p0i8.i32"); ~"llvm.memmove.p0i8.p0i8.i32");
Call(bcx, llfn, ~[dst_ptr, src_ptr, size, align, volatile]); Call(bcx, llfn, ~[dst_ptr, src_ptr, size, align, volatile]);
} }
~"memmove64" => { ~"memmove64" => {
@@ -586,248 +586,248 @@ pub fn trans_intrinsic(ccx: @crate_ctxt,
let align = C_i32(1); let align = C_i32(1);
let volatile = C_bool(false); let volatile = C_bool(false);
let llfn = bcx.ccx().intrinsics.get( let llfn = bcx.ccx().intrinsics.get(
&~"llvm.memmove.p0i8.p0i8.i64"); ~"llvm.memmove.p0i8.p0i8.i64");
Call(bcx, llfn, ~[dst_ptr, src_ptr, size, align, volatile]); Call(bcx, llfn, ~[dst_ptr, src_ptr, size, align, volatile]);
} }
~"sqrtf32" => { ~"sqrtf32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let sqrtf = ccx.intrinsics.get(&~"llvm.sqrt.f32"); let sqrtf = ccx.intrinsics.get(~"llvm.sqrt.f32");
Store(bcx, Call(bcx, sqrtf, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, sqrtf, ~[x]), fcx.llretptr);
} }
~"sqrtf64" => { ~"sqrtf64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let sqrtf = ccx.intrinsics.get(&~"llvm.sqrt.f64"); let sqrtf = ccx.intrinsics.get(~"llvm.sqrt.f64");
Store(bcx, Call(bcx, sqrtf, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, sqrtf, ~[x]), fcx.llretptr);
} }
~"powif32" => { ~"powif32" => {
let a = get_param(decl, first_real_arg); let a = get_param(decl, first_real_arg);
let x = get_param(decl, first_real_arg + 1u); let x = get_param(decl, first_real_arg + 1u);
let powif = ccx.intrinsics.get(&~"llvm.powi.f32"); let powif = ccx.intrinsics.get(~"llvm.powi.f32");
Store(bcx, Call(bcx, powif, ~[a, x]), fcx.llretptr); Store(bcx, Call(bcx, powif, ~[a, x]), fcx.llretptr);
} }
~"powif64" => { ~"powif64" => {
let a = get_param(decl, first_real_arg); let a = get_param(decl, first_real_arg);
let x = get_param(decl, first_real_arg + 1u); let x = get_param(decl, first_real_arg + 1u);
let powif = ccx.intrinsics.get(&~"llvm.powi.f64"); let powif = ccx.intrinsics.get(~"llvm.powi.f64");
Store(bcx, Call(bcx, powif, ~[a, x]), fcx.llretptr); Store(bcx, Call(bcx, powif, ~[a, x]), fcx.llretptr);
} }
~"sinf32" => { ~"sinf32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let sinf = ccx.intrinsics.get(&~"llvm.sin.f32"); let sinf = ccx.intrinsics.get(~"llvm.sin.f32");
Store(bcx, Call(bcx, sinf, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, sinf, ~[x]), fcx.llretptr);
} }
~"sinf64" => { ~"sinf64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let sinf = ccx.intrinsics.get(&~"llvm.sin.f64"); let sinf = ccx.intrinsics.get(~"llvm.sin.f64");
Store(bcx, Call(bcx, sinf, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, sinf, ~[x]), fcx.llretptr);
} }
~"cosf32" => { ~"cosf32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let cosf = ccx.intrinsics.get(&~"llvm.cos.f32"); let cosf = ccx.intrinsics.get(~"llvm.cos.f32");
Store(bcx, Call(bcx, cosf, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, cosf, ~[x]), fcx.llretptr);
} }
~"cosf64" => { ~"cosf64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let cosf = ccx.intrinsics.get(&~"llvm.cos.f64"); let cosf = ccx.intrinsics.get(~"llvm.cos.f64");
Store(bcx, Call(bcx, cosf, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, cosf, ~[x]), fcx.llretptr);
} }
~"powf32" => { ~"powf32" => {
let a = get_param(decl, first_real_arg); let a = get_param(decl, first_real_arg);
let x = get_param(decl, first_real_arg + 1u); let x = get_param(decl, first_real_arg + 1u);
let powf = ccx.intrinsics.get(&~"llvm.pow.f32"); let powf = ccx.intrinsics.get(~"llvm.pow.f32");
Store(bcx, Call(bcx, powf, ~[a, x]), fcx.llretptr); Store(bcx, Call(bcx, powf, ~[a, x]), fcx.llretptr);
} }
~"powf64" => { ~"powf64" => {
let a = get_param(decl, first_real_arg); let a = get_param(decl, first_real_arg);
let x = get_param(decl, first_real_arg + 1u); let x = get_param(decl, first_real_arg + 1u);
let powf = ccx.intrinsics.get(&~"llvm.pow.f64"); let powf = ccx.intrinsics.get(~"llvm.pow.f64");
Store(bcx, Call(bcx, powf, ~[a, x]), fcx.llretptr); Store(bcx, Call(bcx, powf, ~[a, x]), fcx.llretptr);
} }
~"expf32" => { ~"expf32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let expf = ccx.intrinsics.get(&~"llvm.exp.f32"); let expf = ccx.intrinsics.get(~"llvm.exp.f32");
Store(bcx, Call(bcx, expf, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, expf, ~[x]), fcx.llretptr);
} }
~"expf64" => { ~"expf64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let expf = ccx.intrinsics.get(&~"llvm.exp.f64"); let expf = ccx.intrinsics.get(~"llvm.exp.f64");
Store(bcx, Call(bcx, expf, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, expf, ~[x]), fcx.llretptr);
} }
~"exp2f32" => { ~"exp2f32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let exp2f = ccx.intrinsics.get(&~"llvm.exp2.f32"); let exp2f = ccx.intrinsics.get(~"llvm.exp2.f32");
Store(bcx, Call(bcx, exp2f, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, exp2f, ~[x]), fcx.llretptr);
} }
~"exp2f64" => { ~"exp2f64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let exp2f = ccx.intrinsics.get(&~"llvm.exp2.f64"); let exp2f = ccx.intrinsics.get(~"llvm.exp2.f64");
Store(bcx, Call(bcx, exp2f, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, exp2f, ~[x]), fcx.llretptr);
} }
~"logf32" => { ~"logf32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let logf = ccx.intrinsics.get(&~"llvm.log.f32"); let logf = ccx.intrinsics.get(~"llvm.log.f32");
Store(bcx, Call(bcx, logf, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, logf, ~[x]), fcx.llretptr);
} }
~"logf64" => { ~"logf64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let logf = ccx.intrinsics.get(&~"llvm.log.f64"); let logf = ccx.intrinsics.get(~"llvm.log.f64");
Store(bcx, Call(bcx, logf, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, logf, ~[x]), fcx.llretptr);
} }
~"log10f32" => { ~"log10f32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let log10f = ccx.intrinsics.get(&~"llvm.log10.f32"); let log10f = ccx.intrinsics.get(~"llvm.log10.f32");
Store(bcx, Call(bcx, log10f, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, log10f, ~[x]), fcx.llretptr);
} }
~"log10f64" => { ~"log10f64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let log10f = ccx.intrinsics.get(&~"llvm.log10.f64"); let log10f = ccx.intrinsics.get(~"llvm.log10.f64");
Store(bcx, Call(bcx, log10f, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, log10f, ~[x]), fcx.llretptr);
} }
~"log2f32" => { ~"log2f32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let log2f = ccx.intrinsics.get(&~"llvm.log2.f32"); let log2f = ccx.intrinsics.get(~"llvm.log2.f32");
Store(bcx, Call(bcx, log2f, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, log2f, ~[x]), fcx.llretptr);
} }
~"log2f64" => { ~"log2f64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let log2f = ccx.intrinsics.get(&~"llvm.log2.f64"); let log2f = ccx.intrinsics.get(~"llvm.log2.f64");
Store(bcx, Call(bcx, log2f, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, log2f, ~[x]), fcx.llretptr);
} }
~"fmaf32" => { ~"fmaf32" => {
let a = get_param(decl, first_real_arg); let a = get_param(decl, first_real_arg);
let b = get_param(decl, first_real_arg + 1u); let b = get_param(decl, first_real_arg + 1u);
let c = get_param(decl, first_real_arg + 2u); let c = get_param(decl, first_real_arg + 2u);
let fmaf = ccx.intrinsics.get(&~"llvm.fma.f32"); let fmaf = ccx.intrinsics.get(~"llvm.fma.f32");
Store(bcx, Call(bcx, fmaf, ~[a, b, c]), fcx.llretptr); Store(bcx, Call(bcx, fmaf, ~[a, b, c]), fcx.llretptr);
} }
~"fmaf64" => { ~"fmaf64" => {
let a = get_param(decl, first_real_arg); let a = get_param(decl, first_real_arg);
let b = get_param(decl, first_real_arg + 1u); let b = get_param(decl, first_real_arg + 1u);
let c = get_param(decl, first_real_arg + 2u); let c = get_param(decl, first_real_arg + 2u);
let fmaf = ccx.intrinsics.get(&~"llvm.fma.f64"); let fmaf = ccx.intrinsics.get(~"llvm.fma.f64");
Store(bcx, Call(bcx, fmaf, ~[a, b, c]), fcx.llretptr); Store(bcx, Call(bcx, fmaf, ~[a, b, c]), fcx.llretptr);
} }
~"fabsf32" => { ~"fabsf32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let fabsf = ccx.intrinsics.get(&~"llvm.fabs.f32"); let fabsf = ccx.intrinsics.get(~"llvm.fabs.f32");
Store(bcx, Call(bcx, fabsf, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, fabsf, ~[x]), fcx.llretptr);
} }
~"fabsf64" => { ~"fabsf64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let fabsf = ccx.intrinsics.get(&~"llvm.fabs.f64"); let fabsf = ccx.intrinsics.get(~"llvm.fabs.f64");
Store(bcx, Call(bcx, fabsf, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, fabsf, ~[x]), fcx.llretptr);
} }
~"floorf32" => { ~"floorf32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let floorf = ccx.intrinsics.get(&~"llvm.floor.f32"); let floorf = ccx.intrinsics.get(~"llvm.floor.f32");
Store(bcx, Call(bcx, floorf, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, floorf, ~[x]), fcx.llretptr);
} }
~"floorf64" => { ~"floorf64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let floorf = ccx.intrinsics.get(&~"llvm.floor.f64"); let floorf = ccx.intrinsics.get(~"llvm.floor.f64");
Store(bcx, Call(bcx, floorf, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, floorf, ~[x]), fcx.llretptr);
} }
~"ceilf32" => { ~"ceilf32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let ceilf = ccx.intrinsics.get(&~"llvm.ceil.f32"); let ceilf = ccx.intrinsics.get(~"llvm.ceil.f32");
Store(bcx, Call(bcx, ceilf, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, ceilf, ~[x]), fcx.llretptr);
} }
~"ceilf64" => { ~"ceilf64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let ceilf = ccx.intrinsics.get(&~"llvm.ceil.f64"); let ceilf = ccx.intrinsics.get(~"llvm.ceil.f64");
Store(bcx, Call(bcx, ceilf, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, ceilf, ~[x]), fcx.llretptr);
} }
~"truncf32" => { ~"truncf32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let truncf = ccx.intrinsics.get(&~"llvm.trunc.f32"); let truncf = ccx.intrinsics.get(~"llvm.trunc.f32");
Store(bcx, Call(bcx, truncf, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, truncf, ~[x]), fcx.llretptr);
} }
~"truncf64" => { ~"truncf64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let truncf = ccx.intrinsics.get(&~"llvm.trunc.f64"); let truncf = ccx.intrinsics.get(~"llvm.trunc.f64");
Store(bcx, Call(bcx, truncf, ~[x]), fcx.llretptr); Store(bcx, Call(bcx, truncf, ~[x]), fcx.llretptr);
} }
~"ctpop8" => { ~"ctpop8" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let ctpop = ccx.intrinsics.get(&~"llvm.ctpop.i8"); let ctpop = ccx.intrinsics.get(~"llvm.ctpop.i8");
Store(bcx, Call(bcx, ctpop, ~[x]), fcx.llretptr) Store(bcx, Call(bcx, ctpop, ~[x]), fcx.llretptr)
} }
~"ctpop16" => { ~"ctpop16" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let ctpop = ccx.intrinsics.get(&~"llvm.ctpop.i16"); let ctpop = ccx.intrinsics.get(~"llvm.ctpop.i16");
Store(bcx, Call(bcx, ctpop, ~[x]), fcx.llretptr) Store(bcx, Call(bcx, ctpop, ~[x]), fcx.llretptr)
} }
~"ctpop32" => { ~"ctpop32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let ctpop = ccx.intrinsics.get(&~"llvm.ctpop.i32"); let ctpop = ccx.intrinsics.get(~"llvm.ctpop.i32");
Store(bcx, Call(bcx, ctpop, ~[x]), fcx.llretptr) Store(bcx, Call(bcx, ctpop, ~[x]), fcx.llretptr)
} }
~"ctpop64" => { ~"ctpop64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let ctpop = ccx.intrinsics.get(&~"llvm.ctpop.i64"); let ctpop = ccx.intrinsics.get(~"llvm.ctpop.i64");
Store(bcx, Call(bcx, ctpop, ~[x]), fcx.llretptr) Store(bcx, Call(bcx, ctpop, ~[x]), fcx.llretptr)
} }
~"ctlz8" => { ~"ctlz8" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let y = C_bool(false); let y = C_bool(false);
let ctlz = ccx.intrinsics.get(&~"llvm.ctlz.i8"); let ctlz = ccx.intrinsics.get(~"llvm.ctlz.i8");
Store(bcx, Call(bcx, ctlz, ~[x, y]), fcx.llretptr) Store(bcx, Call(bcx, ctlz, ~[x, y]), fcx.llretptr)
} }
~"ctlz16" => { ~"ctlz16" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let y = C_bool(false); let y = C_bool(false);
let ctlz = ccx.intrinsics.get(&~"llvm.ctlz.i16"); let ctlz = ccx.intrinsics.get(~"llvm.ctlz.i16");
Store(bcx, Call(bcx, ctlz, ~[x, y]), fcx.llretptr) Store(bcx, Call(bcx, ctlz, ~[x, y]), fcx.llretptr)
} }
~"ctlz32" => { ~"ctlz32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let y = C_bool(false); let y = C_bool(false);
let ctlz = ccx.intrinsics.get(&~"llvm.ctlz.i32"); let ctlz = ccx.intrinsics.get(~"llvm.ctlz.i32");
Store(bcx, Call(bcx, ctlz, ~[x, y]), fcx.llretptr) Store(bcx, Call(bcx, ctlz, ~[x, y]), fcx.llretptr)
} }
~"ctlz64" => { ~"ctlz64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let y = C_bool(false); let y = C_bool(false);
let ctlz = ccx.intrinsics.get(&~"llvm.ctlz.i64"); let ctlz = ccx.intrinsics.get(~"llvm.ctlz.i64");
Store(bcx, Call(bcx, ctlz, ~[x, y]), fcx.llretptr) Store(bcx, Call(bcx, ctlz, ~[x, y]), fcx.llretptr)
} }
~"cttz8" => { ~"cttz8" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let y = C_bool(false); let y = C_bool(false);
let cttz = ccx.intrinsics.get(&~"llvm.cttz.i8"); let cttz = ccx.intrinsics.get(~"llvm.cttz.i8");
Store(bcx, Call(bcx, cttz, ~[x, y]), fcx.llretptr) Store(bcx, Call(bcx, cttz, ~[x, y]), fcx.llretptr)
} }
~"cttz16" => { ~"cttz16" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let y = C_bool(false); let y = C_bool(false);
let cttz = ccx.intrinsics.get(&~"llvm.cttz.i16"); let cttz = ccx.intrinsics.get(~"llvm.cttz.i16");
Store(bcx, Call(bcx, cttz, ~[x, y]), fcx.llretptr) Store(bcx, Call(bcx, cttz, ~[x, y]), fcx.llretptr)
} }
~"cttz32" => { ~"cttz32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let y = C_bool(false); let y = C_bool(false);
let cttz = ccx.intrinsics.get(&~"llvm.cttz.i32"); let cttz = ccx.intrinsics.get(~"llvm.cttz.i32");
Store(bcx, Call(bcx, cttz, ~[x, y]), fcx.llretptr) Store(bcx, Call(bcx, cttz, ~[x, y]), fcx.llretptr)
} }
~"cttz64" => { ~"cttz64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let y = C_bool(false); let y = C_bool(false);
let cttz = ccx.intrinsics.get(&~"llvm.cttz.i64"); let cttz = ccx.intrinsics.get(~"llvm.cttz.i64");
Store(bcx, Call(bcx, cttz, ~[x, y]), fcx.llretptr) Store(bcx, Call(bcx, cttz, ~[x, y]), fcx.llretptr)
} }
~"bswap16" => { ~"bswap16" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let cttz = ccx.intrinsics.get(&~"llvm.bswap.i16"); let cttz = ccx.intrinsics.get(~"llvm.bswap.i16");
Store(bcx, Call(bcx, cttz, ~[x]), fcx.llretptr) Store(bcx, Call(bcx, cttz, ~[x]), fcx.llretptr)
} }
~"bswap32" => { ~"bswap32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let cttz = ccx.intrinsics.get(&~"llvm.bswap.i32"); let cttz = ccx.intrinsics.get(~"llvm.bswap.i32");
Store(bcx, Call(bcx, cttz, ~[x]), fcx.llretptr) Store(bcx, Call(bcx, cttz, ~[x]), fcx.llretptr)
} }
~"bswap64" => { ~"bswap64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let cttz = ccx.intrinsics.get(&~"llvm.bswap.i64"); let cttz = ccx.intrinsics.get(~"llvm.bswap.i64");
Store(bcx, Call(bcx, cttz, ~[x]), fcx.llretptr) Store(bcx, Call(bcx, cttz, ~[x]), fcx.llretptr)
} }
_ => { _ => {
@ -955,7 +955,7 @@ pub fn register_foreign_fn(ccx: @crate_ctxt,
fn abi_of_foreign_fn(ccx: @crate_ctxt, i: @ast::foreign_item) fn abi_of_foreign_fn(ccx: @crate_ctxt, i: @ast::foreign_item)
-> ast::foreign_abi { -> ast::foreign_abi {
match attr::first_attr_value_str_by_name(i.attrs, ~"abi") { match attr::first_attr_value_str_by_name(i.attrs, ~"abi") {
None => match ccx.tcx.items.get(&i.id) { None => match ccx.tcx.items.get(i.id) {
ast_map::node_foreign_item(_, abi, _) => abi, ast_map::node_foreign_item(_, abi, _) => abi,
// ?? // ??
_ => die!(~"abi_of_foreign_fn: not foreign") _ => die!(~"abi_of_foreign_fn: not foreign")


@ -381,7 +381,7 @@ pub fn make_visit_glue(bcx: block, v: ValueRef, t: ty::t) {
let mut bcx = bcx; let mut bcx = bcx;
let ty_visitor_name = special_idents::ty_visitor; let ty_visitor_name = special_idents::ty_visitor;
assert bcx.ccx().tcx.intrinsic_defs.contains_key_ref(&ty_visitor_name); assert bcx.ccx().tcx.intrinsic_defs.contains_key_ref(&ty_visitor_name);
let (trait_id, ty) = bcx.ccx().tcx.intrinsic_defs.get(&ty_visitor_name); let (trait_id, ty) = bcx.ccx().tcx.intrinsic_defs.get(ty_visitor_name);
let v = PointerCast(bcx, v, T_ptr(type_of::type_of(bcx.ccx(), ty))); let v = PointerCast(bcx, v, T_ptr(type_of::type_of(bcx.ccx(), ty)));
bcx = reflect::emit_calls_to_trait_visit_ty(bcx, t, v, trait_id); bcx = reflect::emit_calls_to_trait_visit_ty(bcx, t, v, trait_id);
build_return(bcx); build_return(bcx);


@ -31,7 +31,7 @@ pub fn maybe_instantiate_inline(ccx: @crate_ctxt, fn_id: ast::def_id,
translate: bool) translate: bool)
-> ast::def_id { -> ast::def_id {
let _icx = ccx.insn_ctxt("maybe_instantiate_inline"); let _icx = ccx.insn_ctxt("maybe_instantiate_inline");
match ccx.external.find(&fn_id) { match ccx.external.find(fn_id) {
Some(Some(node_id)) => { Some(Some(node_id)) => {
// Already inline // Already inline
debug!("maybe_instantiate_inline(%s): already inline as node id %d", debug!("maybe_instantiate_inline(%s): already inline as node id %d",


@ -178,7 +178,7 @@ pub fn llalign_of(cx: @crate_ctxt, t: TypeRef) -> ValueRef {
// Computes the size of the data part of an enum. // Computes the size of the data part of an enum.
pub fn static_size_of_enum(cx: @crate_ctxt, t: ty::t) -> uint { pub fn static_size_of_enum(cx: @crate_ctxt, t: ty::t) -> uint {
if cx.enum_sizes.contains_key_ref(&t) { return cx.enum_sizes.get(&t); } if cx.enum_sizes.contains_key_ref(&t) { return cx.enum_sizes.get(t); }
match ty::get(t).sty { match ty::get(t).sty {
ty::ty_enum(tid, ref substs) => { ty::ty_enum(tid, ref substs) => {
// Compute max(variant sizes). // Compute max(variant sizes).


@ -308,7 +308,7 @@ pub fn trans_static_method_callee(bcx: block,
}; };
let mname = if method_id.crate == ast::local_crate { let mname = if method_id.crate == ast::local_crate {
match bcx.tcx().items.get(&method_id.node) { match bcx.tcx().items.get(method_id.node) {
ast_map::node_trait_method(trait_method, _, _) => { ast_map::node_trait_method(trait_method, _, _) => {
ast_util::trait_method_to_ty_method(*trait_method).ident ast_util::trait_method_to_ty_method(*trait_method).ident
} }
@ -325,7 +325,7 @@ pub fn trans_static_method_callee(bcx: block,
name=%s", method_id, callee_id, ccx.sess.str_of(mname)); name=%s", method_id, callee_id, ccx.sess.str_of(mname));
let vtbls = resolve_vtables_in_fn_ctxt( let vtbls = resolve_vtables_in_fn_ctxt(
bcx.fcx, ccx.maps.vtable_map.get(&callee_id)); bcx.fcx, ccx.maps.vtable_map.get(callee_id));
match /*bad*/copy vtbls[bound_index] { match /*bad*/copy vtbls[bound_index] {
typeck::vtable_static(impl_did, rcvr_substs, rcvr_origins) => { typeck::vtable_static(impl_did, rcvr_substs, rcvr_origins) => {
@ -362,7 +362,7 @@ pub fn method_from_methods(ms: ~[@ast::method], name: ast::ident)
pub fn method_with_name(ccx: @crate_ctxt, impl_id: ast::def_id, pub fn method_with_name(ccx: @crate_ctxt, impl_id: ast::def_id,
name: ast::ident) -> ast::def_id { name: ast::ident) -> ast::def_id {
if impl_id.crate == ast::local_crate { if impl_id.crate == ast::local_crate {
match ccx.tcx.items.get(&impl_id.node) { match ccx.tcx.items.get(impl_id.node) {
ast_map::node_item(@ast::item { ast_map::node_item(@ast::item {
node: ast::item_impl(_, _, _, ref ms), node: ast::item_impl(_, _, _, ref ms),
_ _
@ -379,7 +379,7 @@ pub fn method_with_name(ccx: @crate_ctxt, impl_id: ast::def_id,
pub fn method_with_name_or_default(ccx: @crate_ctxt, impl_id: ast::def_id, pub fn method_with_name_or_default(ccx: @crate_ctxt, impl_id: ast::def_id,
name: ast::ident) -> ast::def_id { name: ast::ident) -> ast::def_id {
if impl_id.crate == ast::local_crate { if impl_id.crate == ast::local_crate {
match ccx.tcx.items.get(&impl_id.node) { match ccx.tcx.items.get(impl_id.node) {
ast_map::node_item(@ast::item { ast_map::node_item(@ast::item {
node: ast::item_impl(_, _, _, ref ms), _ node: ast::item_impl(_, _, _, ref ms), _
}, _) => { }, _) => {
@ -389,7 +389,7 @@ pub fn method_with_name_or_default(ccx: @crate_ctxt, impl_id: ast::def_id,
} else { } else {
// Look for a default method // Look for a default method
let pmm = ccx.tcx.provided_methods; let pmm = ccx.tcx.provided_methods;
match pmm.find(&impl_id) { match pmm.find(impl_id) {
Some(pmis) => { Some(pmis) => {
for pmis.each |pmi| { for pmis.each |pmi| {
if pmi.method_info.ident == name { if pmi.method_info.ident == name {
@ -414,10 +414,10 @@ pub fn method_ty_param_count(ccx: @crate_ctxt, m_id: ast::def_id,
i_id: ast::def_id) -> uint { i_id: ast::def_id) -> uint {
debug!("method_ty_param_count: m_id: %?, i_id: %?", m_id, i_id); debug!("method_ty_param_count: m_id: %?, i_id: %?", m_id, i_id);
if m_id.crate == ast::local_crate { if m_id.crate == ast::local_crate {
match ccx.tcx.items.find(&m_id.node) { match ccx.tcx.items.find(m_id.node) {
Some(ast_map::node_method(m, _, _)) => m.tps.len(), Some(ast_map::node_method(m, _, _)) => m.tps.len(),
None => { None => {
match ccx.tcx.provided_method_sources.find(&m_id) { match ccx.tcx.provided_method_sources.find(m_id) {
Some(source) => { Some(source) => {
method_ty_param_count( method_ty_param_count(
ccx, source.method_id, source.impl_id) ccx, source.method_id, source.impl_id)
@ -779,7 +779,7 @@ pub fn get_vtable(ccx: @crate_ctxt,
-> ValueRef { -> ValueRef {
// XXX: Bad copy. // XXX: Bad copy.
let hash_id = vtable_id(ccx, copy origin); let hash_id = vtable_id(ccx, copy origin);
match ccx.vtables.find(&hash_id) { match ccx.vtables.find(hash_id) {
Some(val) => val, Some(val) => val,
None => match origin { None => match origin {
typeck::vtable_static(id, substs, sub_vtables) => { typeck::vtable_static(id, substs, sub_vtables) => {
@ -916,7 +916,7 @@ pub fn trans_trait_cast(bcx: block,
} }
// Store the vtable into the pair or triple. // Store the vtable into the pair or triple.
let orig = /*bad*/copy ccx.maps.vtable_map.get(&id)[0]; let orig = /*bad*/copy ccx.maps.vtable_map.get(id)[0];
let orig = resolve_vtable_in_fn_ctxt(bcx.fcx, orig); let orig = resolve_vtable_in_fn_ctxt(bcx.fcx, orig);
let vtable = get_vtable(bcx.ccx(), orig); let vtable = get_vtable(bcx.ccx(), orig);
Store(bcx, vtable, PointerCast(bcx, Store(bcx, vtable, PointerCast(bcx,


@ -69,7 +69,7 @@ pub fn monomorphic_fn(ccx: @crate_ctxt,
real_substs.map(|s| ty_to_str(ccx.tcx, *s)), real_substs.map(|s| ty_to_str(ccx.tcx, *s)),
substs.map(|s| ty_to_str(ccx.tcx, *s)), hash_id); substs.map(|s| ty_to_str(ccx.tcx, *s)), hash_id);
match ccx.monomorphized.find(&hash_id) { match ccx.monomorphized.find(hash_id) {
Some(val) => { Some(val) => {
debug!("leaving monomorphic fn %s", debug!("leaving monomorphic fn %s",
ty::item_path_str(ccx.tcx, fn_id)); ty::item_path_str(ccx.tcx, fn_id));
@ -81,7 +81,7 @@ pub fn monomorphic_fn(ccx: @crate_ctxt,
let tpt = ty::lookup_item_type(ccx.tcx, fn_id); let tpt = ty::lookup_item_type(ccx.tcx, fn_id);
let mut llitem_ty = tpt.ty; let mut llitem_ty = tpt.ty;
let map_node = session::expect(ccx.sess, ccx.tcx.items.find(&fn_id.node), let map_node = session::expect(ccx.sess, ccx.tcx.items.find(fn_id.node),
|| fmt!("While monomorphizing %?, couldn't find it in the item map \ || fmt!("While monomorphizing %?, couldn't find it in the item map \
(may have attempted to monomorphize an item defined in a different \ (may have attempted to monomorphize an item defined in a different \
crate?)", fn_id)); crate?)", fn_id));
@ -136,7 +136,7 @@ pub fn monomorphic_fn(ccx: @crate_ctxt,
ccx.stats.n_monos += 1; ccx.stats.n_monos += 1;
let depth = option::get_or_default(ccx.monomorphizing.find(&fn_id), 0u); let depth = option::get_or_default(ccx.monomorphizing.find(fn_id), 0u);
// Random cut-off -- code that needs to instantiate the same function // Random cut-off -- code that needs to instantiate the same function
// recursively more than ten times can probably safely be assumed to be // recursively more than ten times can probably safely be assumed to be
// causing an infinite expansion. // causing an infinite expansion.


@ -54,7 +54,7 @@ pub fn find_reachable(crate_mod: _mod, exp_map2: resolve::ExportMap2,
fn traverse_exports(cx: ctx, mod_id: node_id) -> bool { fn traverse_exports(cx: ctx, mod_id: node_id) -> bool {
let mut found_export = false; let mut found_export = false;
match cx.exp_map2.find(&mod_id) { match cx.exp_map2.find(mod_id) {
Some(ref exp2s) => { Some(ref exp2s) => {
for (*exp2s).each |e2| { for (*exp2s).each |e2| {
found_export = true; found_export = true;
@ -68,7 +68,7 @@ fn traverse_exports(cx: ctx, mod_id: node_id) -> bool {
fn traverse_def_id(cx: ctx, did: def_id) { fn traverse_def_id(cx: ctx, did: def_id) {
if did.crate != local_crate { return; } if did.crate != local_crate { return; }
let n = match cx.tcx.items.find(&did.node) { let n = match cx.tcx.items.find(did.node) {
None => return, // This can happen for self, for example None => return, // This can happen for self, for example
Some(ref n) => (/*bad*/copy *n) Some(ref n) => (/*bad*/copy *n)
}; };
@ -150,7 +150,7 @@ fn traverse_ty(ty: @Ty, cx: ctx, v: visit::vt<ctx>) {
match ty.node { match ty.node {
ty_path(p, p_id) => { ty_path(p, p_id) => {
match cx.tcx.def_map.find(&p_id) { match cx.tcx.def_map.find(p_id) {
// Kind of a hack to check this here, but I'm not sure what else // Kind of a hack to check this here, but I'm not sure what else
// to do // to do
Some(def_prim_ty(_)) => { /* do nothing */ } Some(def_prim_ty(_)) => { /* do nothing */ }
@ -169,7 +169,7 @@ fn traverse_inline_body(cx: ctx, body: blk) {
fn traverse_expr(e: @expr, cx: ctx, v: visit::vt<ctx>) { fn traverse_expr(e: @expr, cx: ctx, v: visit::vt<ctx>) {
match e.node { match e.node {
expr_path(_) => { expr_path(_) => {
match cx.tcx.def_map.find(&e.id) { match cx.tcx.def_map.find(e.id) {
Some(d) => { Some(d) => {
traverse_def_id(cx, def_id_of_def(d)); traverse_def_id(cx, def_id_of_def(d));
} }
@ -179,7 +179,7 @@ fn traverse_inline_body(cx: ctx, body: blk) {
} }
} }
expr_field(_, _, _) => { expr_field(_, _, _) => {
match cx.method_map.find(&e.id) { match cx.method_map.find(e.id) {
Some(typeck::method_map_entry { Some(typeck::method_map_entry {
origin: typeck::method_static(did), origin: typeck::method_static(did),
_ _
@ -190,7 +190,7 @@ fn traverse_inline_body(cx: ctx, body: blk) {
} }
} }
expr_method_call(*) => { expr_method_call(*) => {
match cx.method_map.find(&e.id) { match cx.method_map.find(e.id) {
Some(typeck::method_map_entry { Some(typeck::method_map_entry {
origin: typeck::method_static(did), origin: typeck::method_static(did),
_ _


@ -317,7 +317,7 @@ pub fn emit_calls_to_trait_visit_ty(bcx: block,
use syntax::parse::token::special_idents::tydesc; use syntax::parse::token::special_idents::tydesc;
let final = sub_block(bcx, ~"final"); let final = sub_block(bcx, ~"final");
assert bcx.ccx().tcx.intrinsic_defs.contains_key_ref(&tydesc); assert bcx.ccx().tcx.intrinsic_defs.contains_key_ref(&tydesc);
let (_, tydesc_ty) = bcx.ccx().tcx.intrinsic_defs.get(&tydesc); let (_, tydesc_ty) = bcx.ccx().tcx.intrinsic_defs.get(tydesc);
let tydesc_ty = type_of::type_of(bcx.ccx(), tydesc_ty); let tydesc_ty = type_of::type_of(bcx.ccx(), tydesc_ty);
let r = reflector({ let r = reflector({
visitor_val: visitor_val, visitor_val: visitor_val,


@ -87,7 +87,7 @@ pub fn type_of(cx: @crate_ctxt, t: ty::t) -> TypeRef {
debug!("type_of %?: %?", t, ty::get(t)); debug!("type_of %?: %?", t, ty::get(t));
// Check the cache. // Check the cache.
if cx.lltypes.contains_key_ref(&t) { return cx.lltypes.get(&t); } if cx.lltypes.contains_key_ref(&t) { return cx.lltypes.get(t); }
// Replace any typedef'd types with their equivalent non-typedef // Replace any typedef'd types with their equivalent non-typedef
// type. This ensures that all LLVM nominal types that contain // type. This ensures that all LLVM nominal types that contain


@ -53,7 +53,7 @@ pub type ctx = {ccx: @crate_ctxt, uses: ~[mut type_uses]};
pub fn type_uses_for(ccx: @crate_ctxt, fn_id: def_id, n_tps: uint) pub fn type_uses_for(ccx: @crate_ctxt, fn_id: def_id, n_tps: uint)
-> ~[type_uses] { -> ~[type_uses] {
match ccx.type_use_cache.find(&fn_id) { match ccx.type_use_cache.find(fn_id) {
Some(uses) => return uses, Some(uses) => return uses,
None => () None => ()
} }
@ -87,7 +87,7 @@ pub fn type_uses_for(ccx: @crate_ctxt, fn_id: def_id, n_tps: uint)
ccx.type_use_cache.insert(fn_id, copy uses); ccx.type_use_cache.insert(fn_id, copy uses);
return uses; return uses;
} }
let map_node = match ccx.tcx.items.find(&fn_id_loc.node) { let map_node = match ccx.tcx.items.find(fn_id_loc.node) {
Some(ref x) => (/*bad*/copy *x), Some(ref x) => (/*bad*/copy *x),
None => ccx.sess.bug(fmt!("type_uses_for: unbound item ID %?", None => ccx.sess.bug(fmt!("type_uses_for: unbound item ID %?",
fn_id_loc)) fn_id_loc))
@ -226,10 +226,10 @@ pub fn node_type_needs(cx: ctx, use_: uint, id: node_id) {
} }
pub fn mark_for_method_call(cx: ctx, e_id: node_id, callee_id: node_id) { pub fn mark_for_method_call(cx: ctx, e_id: node_id, callee_id: node_id) {
do option::iter(&cx.ccx.maps.method_map.find(&e_id)) |mth| { do option::iter(&cx.ccx.maps.method_map.find(e_id)) |mth| {
match mth.origin { match mth.origin {
typeck::method_static(did) => { typeck::method_static(did) => {
do cx.ccx.tcx.node_type_substs.find(&callee_id).iter |ts| { do cx.ccx.tcx.node_type_substs.find(callee_id).iter |ts| {
let type_uses = type_uses_for(cx.ccx, did, ts.len()); let type_uses = type_uses_for(cx.ccx, did, ts.len());
for vec::each2(type_uses, *ts) |uses, subst| { for vec::each2(type_uses, *ts) |uses, subst| {
type_needs(cx, *uses, *subst) type_needs(cx, *uses, *subst)
@ -278,8 +278,8 @@ pub fn mark_for_expr(cx: ctx, e: @expr) {
} }
} }
expr_path(_) => { expr_path(_) => {
do cx.ccx.tcx.node_type_substs.find(&e.id).iter |ts| { do cx.ccx.tcx.node_type_substs.find(e.id).iter |ts| {
let id = ast_util::def_id_of_def(cx.ccx.tcx.def_map.get(&e.id)); let id = ast_util::def_id_of_def(cx.ccx.tcx.def_map.get(e.id));
let uses_for_ts = type_uses_for(cx.ccx, id, ts.len()); let uses_for_ts = type_uses_for(cx.ccx, id, ts.len());
for vec::each2(uses_for_ts, *ts) |uses, subst| { for vec::each2(uses_for_ts, *ts) |uses, subst| {
type_needs(cx, *uses, *subst) type_needs(cx, *uses, *subst)


@ -876,7 +876,7 @@ fn mk_t(cx: ctxt, +st: sty) -> t { mk_t_with_id(cx, st, None) }
// and returns the box as cast to an unsafe ptr (see comments for t above). // and returns the box as cast to an unsafe ptr (see comments for t above).
fn mk_t_with_id(cx: ctxt, +st: sty, o_def_id: Option<ast::def_id>) -> t { fn mk_t_with_id(cx: ctxt, +st: sty, o_def_id: Option<ast::def_id>) -> t {
let key = intern_key { sty: to_unsafe_ptr(&st), o_def_id: o_def_id }; let key = intern_key { sty: to_unsafe_ptr(&st), o_def_id: o_def_id };
match cx.interner.find(&key) { match cx.interner.find(key) {
Some(t) => unsafe { return cast::reinterpret_cast(&t); }, Some(t) => unsafe { return cast::reinterpret_cast(&t); },
_ => () _ => ()
} }
@ -1142,7 +1142,7 @@ pub fn default_arg_mode_for_ty(tcx: ctxt, ty: ty::t) -> ast::rmode {
// Returns the narrowest lifetime enclosing the evaluation of the expression // Returns the narrowest lifetime enclosing the evaluation of the expression
// with id `id`. // with id `id`.
pub fn encl_region(cx: ctxt, id: ast::node_id) -> ty::Region { pub fn encl_region(cx: ctxt, id: ast::node_id) -> ty::Region {
match cx.region_map.find(&id) { match cx.region_map.find(id) {
Some(encl_scope) => ty::re_scope(encl_scope), Some(encl_scope) => ty::re_scope(encl_scope),
None => ty::re_static None => ty::re_static
} }
@ -1653,7 +1653,7 @@ pub fn type_is_immediate(ty: t) -> bool {
} }
pub fn type_needs_drop(cx: ctxt, ty: t) -> bool { pub fn type_needs_drop(cx: ctxt, ty: t) -> bool {
match cx.needs_drop_cache.find(&ty) { match cx.needs_drop_cache.find(ty) {
Some(result) => return result, Some(result) => return result,
None => {/* fall through */ } None => {/* fall through */ }
} }
@ -1732,7 +1732,7 @@ pub fn type_needs_drop(cx: ctxt, ty: t) -> bool {
// that only contain scalars and shared boxes can avoid unwind // that only contain scalars and shared boxes can avoid unwind
// cleanups. // cleanups.
pub fn type_needs_unwind_cleanup(cx: ctxt, ty: t) -> bool { pub fn type_needs_unwind_cleanup(cx: ctxt, ty: t) -> bool {
match cx.needs_unwind_cleanup_cache.find(&ty) { match cx.needs_unwind_cleanup_cache.find(ty) {
Some(result) => return result, Some(result) => return result,
None => () None => ()
} }
@ -1749,7 +1749,7 @@ fn type_needs_unwind_cleanup_(cx: ctxt, ty: t,
encountered_box: bool) -> bool { encountered_box: bool) -> bool {
// Prevent infinite recursion // Prevent infinite recursion
match tycache.find(&ty) { match tycache.find(ty) {
Some(_) => return false, Some(_) => return false,
None => { tycache.insert(ty, ()); } None => { tycache.insert(ty, ()); }
} }
@ -2011,7 +2011,7 @@ pub fn type_kind(cx: ctxt, ty: t) -> Kind {
// If `allow_ty_var` is true, then this is a conservative assumption; we // If `allow_ty_var` is true, then this is a conservative assumption; we
// assume that type variables *do* have all kinds. // assume that type variables *do* have all kinds.
pub fn type_kind_ext(cx: ctxt, ty: t, allow_ty_var: bool) -> Kind { pub fn type_kind_ext(cx: ctxt, ty: t, allow_ty_var: bool) -> Kind {
match cx.kind_cache.find(&ty) { match cx.kind_cache.find(ty) {
Some(result) => return result, Some(result) => return result,
None => {/* fall through */ } None => {/* fall through */ }
} }
@ -2165,7 +2165,7 @@ pub fn type_kind_ext(cx: ctxt, ty: t, allow_ty_var: bool) -> Kind {
// cross-crate inlining code to translate a def-id. // cross-crate inlining code to translate a def-id.
assert p.def_id.crate == ast::local_crate; assert p.def_id.crate == ast::local_crate;
param_bounds_to_kind(cx.ty_param_bounds.get(&p.def_id.node)) param_bounds_to_kind(cx.ty_param_bounds.get(p.def_id.node))
} }
// self is a special type parameter that can only appear in traits; it // self is a special type parameter that can only appear in traits; it
@ -2810,7 +2810,7 @@ pub fn node_id_to_type(cx: ctxt, id: ast::node_id) -> t {
} }
pub fn node_id_to_type_params(cx: ctxt, id: ast::node_id) -> ~[t] { pub fn node_id_to_type_params(cx: ctxt, id: ast::node_id) -> ~[t] {
match cx.node_type_substs.find(&id) { match cx.node_type_substs.find(id) {
None => return ~[], None => return ~[],
Some(ts) => return ts Some(ts) => return ts
} }
@ -2950,7 +2950,7 @@ pub fn expr_ty_adjusted(cx: ctxt, expr: @ast::expr) -> t {
let unadjusted_ty = expr_ty(cx, expr); let unadjusted_ty = expr_ty(cx, expr);
return match cx.adjustments.find(&expr.id) { return match cx.adjustments.find(expr.id) {
None => unadjusted_ty, None => unadjusted_ty,
Some(adj) => { Some(adj) => {
@ -3054,7 +3054,7 @@ pub fn expr_has_ty_params(cx: ctxt, expr: @ast::expr) -> bool {
pub fn method_call_bounds(tcx: ctxt, method_map: typeck::method_map, pub fn method_call_bounds(tcx: ctxt, method_map: typeck::method_map,
id: ast::node_id) id: ast::node_id)
-> Option<@~[param_bounds]> { -> Option<@~[param_bounds]> {
do method_map.find(&id).map |method| { do method_map.find(id).map |method| {
match method.origin { match method.origin {
typeck::method_static(did) => { typeck::method_static(did) => {
// n.b.: When we encode impl methods, the bounds // n.b.: When we encode impl methods, the bounds
@ -3081,7 +3081,7 @@ pub fn method_call_bounds(tcx: ctxt, method_map: typeck::method_map,
} }
fn resolve_expr(tcx: ctxt, expr: @ast::expr) -> ast::def { fn resolve_expr(tcx: ctxt, expr: @ast::expr) -> ast::def {
match tcx.def_map.find(&expr.id) { match tcx.def_map.find(expr.id) {
Some(def) => def, Some(def) => def,
None => { None => {
tcx.sess.span_bug(expr.span, fmt!( tcx.sess.span_bug(expr.span, fmt!(
@ -3335,7 +3335,7 @@ pub fn occurs_check(tcx: ctxt, sp: span, vid: TyVid, rt: t) {
fn canon<T:Copy cmp::Eq>(tbl: HashMap<ast::node_id, ast::inferable<T>>, fn canon<T:Copy cmp::Eq>(tbl: HashMap<ast::node_id, ast::inferable<T>>,
+m0: ast::inferable<T>) -> ast::inferable<T> { +m0: ast::inferable<T>) -> ast::inferable<T> {
match m0 { match m0 {
ast::infer(id) => match tbl.find(&id) { ast::infer(id) => match tbl.find(id) {
None => m0, None => m0,
Some(ref m1) => { Some(ref m1) => {
let cm1 = canon(tbl, (*m1)); let cm1 = canon(tbl, (*m1));
@ -3597,7 +3597,7 @@ pub fn store_trait_methods(cx: ctxt, id: ast::node_id, ms: @~[method]) {
pub fn provided_trait_methods(cx: ctxt, id: ast::def_id) -> ~[ast::ident] { pub fn provided_trait_methods(cx: ctxt, id: ast::def_id) -> ~[ast::ident] {
if is_local(id) { if is_local(id) {
match cx.items.find(&id.node) { match cx.items.find(id.node) {
Some(ast_map::node_item(@ast::item { Some(ast_map::node_item(@ast::item {
node: item_trait(_, _, ref ms), node: item_trait(_, _, ref ms),
_ _
@ -3617,7 +3617,7 @@ pub fn trait_supertraits(cx: ctxt,
id: ast::def_id) id: ast::def_id)
-> @~[InstantiatedTraitRef] { -> @~[InstantiatedTraitRef] {
// Check the cache. // Check the cache.
match cx.supertraits.find(&id) { match cx.supertraits.find(id) {
Some(instantiated_trait_info) => { return instantiated_trait_info; } Some(instantiated_trait_info) => { return instantiated_trait_info; }
None => {} // Continue. None => {} // Continue.
} }
@ -3646,7 +3646,7 @@ pub fn trait_supertraits(cx: ctxt,
} }
pub fn trait_methods(cx: ctxt, id: ast::def_id) -> @~[method] { pub fn trait_methods(cx: ctxt, id: ast::def_id) -> @~[method] {
match cx.trait_method_cache.find(&id) { match cx.trait_method_cache.find(id) {
// Local traits are supposed to have been added explicitly. // Local traits are supposed to have been added explicitly.
Some(ms) => ms, Some(ms) => ms,
_ => { _ => {
@ -3680,7 +3680,7 @@ pub fn impl_traits(cx: ctxt, id: ast::def_id, vstore: vstore) -> ~[t] {
if id.crate == ast::local_crate { if id.crate == ast::local_crate {
debug!("(impl_traits) searching for trait impl %?", id); debug!("(impl_traits) searching for trait impl %?", id);
match cx.items.find(&id.node) { match cx.items.find(id.node) {
Some(ast_map::node_item(@ast::item { Some(ast_map::node_item(@ast::item {
node: ast::item_impl(_, opt_trait, _, _), node: ast::item_impl(_, opt_trait, _, _),
_}, _},
@ -3716,7 +3716,7 @@ fn struct_ctor_id(cx: ctxt, struct_did: ast::def_id) -> Option<ast::def_id> {
cx.sess.unimpl(~"constructor ID of cross-crate tuple structs"); cx.sess.unimpl(~"constructor ID of cross-crate tuple structs");
} }
match cx.items.find(&struct_did.node) { match cx.items.find(struct_did.node) {
Some(ast_map::node_item(item, _)) => { Some(ast_map::node_item(item, _)) => {
match item.node { match item.node {
ast::item_struct(struct_def, _) => { ast::item_struct(struct_def, _) => {
@ -3782,13 +3782,13 @@ impl DtorKind {
/* If struct_id names a struct with a dtor, return Some(the dtor's id). /* If struct_id names a struct with a dtor, return Some(the dtor's id).
Otherwise return none. */ Otherwise return none. */
pub fn ty_dtor(cx: ctxt, struct_id: def_id) -> DtorKind { pub fn ty_dtor(cx: ctxt, struct_id: def_id) -> DtorKind {
match cx.destructor_for_type.find(&struct_id) { match cx.destructor_for_type.find(struct_id) {
Some(method_def_id) => return TraitDtor(method_def_id), Some(method_def_id) => return TraitDtor(method_def_id),
None => {} // Continue. None => {} // Continue.
} }
if is_local(struct_id) { if is_local(struct_id) {
match cx.items.find(&struct_id.node) { match cx.items.find(struct_id.node) {
Some(ast_map::node_item(@ast::item { Some(ast_map::node_item(@ast::item {
node: ast::item_struct(@ast::struct_def { dtor: Some(ref dtor), node: ast::item_struct(@ast::struct_def { dtor: Some(ref dtor),
_ }, _ },
@ -3816,7 +3816,7 @@ pub fn item_path(cx: ctxt, id: ast::def_id) -> ast_map::path {
if id.crate != ast::local_crate { if id.crate != ast::local_crate {
csearch::get_item_path(cx, id) csearch::get_item_path(cx, id)
} else { } else {
let node = cx.items.get(&id.node); let node = cx.items.get(id.node);
match node { match node {
ast_map::node_item(item, path) => { ast_map::node_item(item, path) => {
let item_elt = match item.node { let item_elt = match item.node {
@ -3880,7 +3880,7 @@ pub fn type_is_empty(cx: ctxt, t: t) -> bool {
} }
pub fn enum_variants(cx: ctxt, id: ast::def_id) -> @~[VariantInfo] { pub fn enum_variants(cx: ctxt, id: ast::def_id) -> @~[VariantInfo] {
match cx.enum_var_cache.find(&id) { match cx.enum_var_cache.find(id) {
Some(variants) => return variants, Some(variants) => return variants,
_ => { /* fallthrough */ } _ => { /* fallthrough */ }
} }
@ -3893,7 +3893,7 @@ pub fn enum_variants(cx: ctxt, id: ast::def_id) -> @~[VariantInfo] {
call eval_const_expr, it should never get called twice for the same call eval_const_expr, it should never get called twice for the same
expr, since check_enum_variants also updates the enum_var_cache expr, since check_enum_variants also updates the enum_var_cache
*/ */
match cx.items.get(&id.node) { match cx.items.get(id.node) {
ast_map::node_item(@ast::item { ast_map::node_item(@ast::item {
node: ast::item_enum(ref enum_definition, _), node: ast::item_enum(ref enum_definition, _),
_ _
@ -3967,7 +3967,7 @@ pub fn enum_variant_with_id(cx: ctxt,
pub fn lookup_item_type(cx: ctxt, pub fn lookup_item_type(cx: ctxt,
did: ast::def_id) did: ast::def_id)
-> ty_param_bounds_and_ty { -> ty_param_bounds_and_ty {
match cx.tcache.find(&did) { match cx.tcache.find(did) {
Some(tpt) => { Some(tpt) => {
// The item is in this crate. The caller should have added it to the // The item is in this crate. The caller should have added it to the
// type cache already // type cache already
@ -3993,7 +3993,7 @@ pub fn lookup_field_type(tcx: ctxt,
node_id_to_type(tcx, id.node) node_id_to_type(tcx, id.node)
} }
else { else {
match tcx.tcache.find(&id) { match tcx.tcache.find(id) {
Some(tpt) => tpt.ty, Some(tpt) => tpt.ty,
None => { None => {
let tpt = csearch::get_field_type(tcx, struct_id, id); let tpt = csearch::get_field_type(tcx, struct_id, id);
@ -4009,7 +4009,7 @@ pub fn lookup_field_type(tcx: ctxt,
// Fails if the id is not bound to a struct. // Fails if the id is not bound to a struct.
pub fn lookup_struct_fields(cx: ctxt, did: ast::def_id) -> ~[field_ty] { pub fn lookup_struct_fields(cx: ctxt, did: ast::def_id) -> ~[field_ty] {
if did.crate == ast::local_crate { if did.crate == ast::local_crate {
match cx.items.find(&did.node) { match cx.items.find(did.node) {
Some(ast_map::node_item(i,_)) => { Some(ast_map::node_item(i,_)) => {
match i.node { match i.node {
ast::item_struct(struct_def, _) => { ast::item_struct(struct_def, _) => {
@ -4214,7 +4214,7 @@ pub fn normalize_ty(cx: ctxt, t: t) -> t {
} }
} }
match cx.normalized_cache.find(&t) { match cx.normalized_cache.find(t) {
Some(t) => return t, Some(t) => return t,
None => () None => ()
} }
@ -4397,8 +4397,8 @@ pub fn count_traits_and_supertraits(tcx: ctxt,
// Given a trait and a type, returns the impl of that type // Given a trait and a type, returns the impl of that type
pub fn get_impl_id(tcx: ctxt, trait_id: def_id, self_ty: t) -> def_id { pub fn get_impl_id(tcx: ctxt, trait_id: def_id, self_ty: t) -> def_id {
match tcx.trait_impls.find(&trait_id) { match tcx.trait_impls.find(trait_id) {
Some(ty_to_impl) => match ty_to_impl.find(&self_ty) { Some(ty_to_impl) => match ty_to_impl.find(self_ty) {
Some(the_impl) => the_impl.did, Some(the_impl) => the_impl.did,
None => // try autoderef! None => // try autoderef!
match deref(tcx, self_ty, false) { match deref(tcx, self_ty, false) {


@ -211,7 +211,7 @@ pub fn ast_ty_to_ty<AC: ast_conv, RS: region_scope Copy Durable>(
return ty::mk_evec(tcx, mt, vst); return ty::mk_evec(tcx, mt, vst);
} }
ast::ty_path(path, id) if a_seq_ty.mutbl == ast::m_imm => { ast::ty_path(path, id) if a_seq_ty.mutbl == ast::m_imm => {
match tcx.def_map.find(&id) { match tcx.def_map.find(id) {
Some(ast::def_prim_ty(ast::ty_str)) => { Some(ast::def_prim_ty(ast::ty_str)) => {
check_path_args(tcx, path, NO_TPS | NO_REGIONS); check_path_args(tcx, path, NO_TPS | NO_REGIONS);
return ty::mk_estr(tcx, vst); return ty::mk_estr(tcx, vst);
@ -273,7 +273,7 @@ pub fn ast_ty_to_ty<AC: ast_conv, RS: region_scope Copy Durable>(
let tcx = self.tcx(); let tcx = self.tcx();
match tcx.ast_ty_to_ty_cache.find(&ast_ty) { match tcx.ast_ty_to_ty_cache.find(ast_ty) {
Some(ty::atttce_resolved(ty)) => return ty, Some(ty::atttce_resolved(ty)) => return ty,
Some(ty::atttce_unresolved) => { Some(ty::atttce_unresolved) => {
tcx.sess.span_fatal(ast_ty.span, ~"illegal recursive type; \ tcx.sess.span_fatal(ast_ty.span, ~"illegal recursive type; \
@ -330,7 +330,7 @@ pub fn ast_ty_to_ty<AC: ast_conv, RS: region_scope Copy Durable>(
ty::mk_fn(tcx, fn_decl) ty::mk_fn(tcx, fn_decl)
} }
ast::ty_path(path, id) => { ast::ty_path(path, id) => {
let a_def = match tcx.def_map.find(&id) { let a_def = match tcx.def_map.find(id) {
None => tcx.sess.span_fatal( None => tcx.sess.span_fatal(
ast_ty.span, fmt!("unbound path %s", ast_ty.span, fmt!("unbound path %s",
path_to_str(path, tcx.sess.intr()))), path_to_str(path, tcx.sess.intr()))),


@ -216,7 +216,7 @@ pub fn check_struct_pat_fields(pcx: pat_ctxt,
// Typecheck each field. // Typecheck each field.
let found_fields = HashMap(); let found_fields = HashMap();
for fields.each |field| { for fields.each |field| {
match field_map.find(&field.ident) { match field_map.find(field.ident) {
Some(index) => { Some(index) => {
let class_field = class_fields[index]; let class_field = class_fields[index];
let field_type = ty::lookup_field_type(tcx, let field_type = ty::lookup_field_type(tcx,
@ -259,7 +259,7 @@ pub fn check_struct_pat(pcx: pat_ctxt, pat_id: ast::node_id, span: span,
let class_fields = ty::lookup_struct_fields(tcx, class_id); let class_fields = ty::lookup_struct_fields(tcx, class_id);
// Check to ensure that the struct is the one specified. // Check to ensure that the struct is the one specified.
match tcx.def_map.find(&pat_id) { match tcx.def_map.find(pat_id) {
Some(ast::def_struct(supplied_def_id)) Some(ast::def_struct(supplied_def_id))
if supplied_def_id == class_id => { if supplied_def_id == class_id => {
// OK. // OK.
@ -300,7 +300,7 @@ pub fn check_struct_like_enum_variant_pat(pcx: pat_ctxt,
let tcx = pcx.fcx.ccx.tcx; let tcx = pcx.fcx.ccx.tcx;
// Find the variant that was specified. // Find the variant that was specified.
match tcx.def_map.find(&pat_id) { match tcx.def_map.find(pat_id) {
Some(ast::def_variant(found_enum_id, variant_id)) Some(ast::def_variant(found_enum_id, variant_id))
if found_enum_id == enum_id => { if found_enum_id == enum_id => {
// Get the struct fields from this struct-like enum variant. // Get the struct fields from this struct-like enum variant.
@ -360,7 +360,7 @@ pub fn check_pat(pcx: pat_ctxt, pat: @ast::pat, expected: ty::t) {
fcx.write_ty(pat.id, b_ty); fcx.write_ty(pat.id, b_ty);
} }
ast::pat_ident(*) if pat_is_const(tcx.def_map, pat) => { ast::pat_ident(*) if pat_is_const(tcx.def_map, pat) => {
let const_did = ast_util::def_id_of_def(tcx.def_map.get(&pat.id)); let const_did = ast_util::def_id_of_def(tcx.def_map.get(pat.id));
let const_tpt = ty::lookup_item_type(tcx, const_did); let const_tpt = ty::lookup_item_type(tcx, const_did);
fcx.write_ty(pat.id, const_tpt.ty); fcx.write_ty(pat.id, const_tpt.ty);
} }
@ -386,7 +386,7 @@ pub fn check_pat(pcx: pat_ctxt, pat: @ast::pat, expected: ty::t) {
} }
} }
let canon_id = pcx.map.get(&ast_util::path_to_ident(name)); let canon_id = pcx.map.get(ast_util::path_to_ident(name));
if canon_id != pat.id { if canon_id != pat.id {
let ct = fcx.local_ty(pat.span, canon_id); let ct = fcx.local_ty(pat.span, canon_id);
demand::eqtype(fcx, pat.span, ct, typ); demand::eqtype(fcx, pat.span, ct, typ);


@ -323,15 +323,14 @@ pub impl LookupContext {
// If the method being called is associated with a trait, then // If the method being called is associated with a trait, then
// find all the impls of that trait. Each of those are // find all the impls of that trait. Each of those are
// candidates. // candidates.
let opt_applicable_traits = self.fcx.ccx.trait_map.find( let opt_applicable_traits = self.fcx.ccx.trait_map.find(self.expr.id);
&self.expr.id);
for opt_applicable_traits.each |applicable_traits| { for opt_applicable_traits.each |applicable_traits| {
for applicable_traits.each |trait_did| { for applicable_traits.each |trait_did| {
let coherence_info = self.fcx.ccx.coherence_info; let coherence_info = self.fcx.ccx.coherence_info;
// Look for explicit implementations. // Look for explicit implementations.
let opt_impl_infos = let opt_impl_infos =
coherence_info.extension_methods.find(trait_did); coherence_info.extension_methods.find(*trait_did);
for opt_impl_infos.each |impl_infos| { for opt_impl_infos.each |impl_infos| {
for impl_infos.each |impl_info| { for impl_infos.each |impl_info| {
self.push_candidates_from_impl( self.push_candidates_from_impl(
@ -340,7 +339,7 @@ pub impl LookupContext {
} }
// Look for default methods. // Look for default methods.
match self.tcx().provided_methods.find(trait_did) { match self.tcx().provided_methods.find(*trait_did) {
Some(methods) => { Some(methods) => {
self.push_candidates_from_provided_methods( self.push_candidates_from_provided_methods(
&self.extension_candidates, self_ty, *trait_did, &self.extension_candidates, self_ty, *trait_did,
@ -361,7 +360,7 @@ pub impl LookupContext {
let tcx = self.tcx(); let tcx = self.tcx();
let mut next_bound_idx = 0; // count only trait bounds let mut next_bound_idx = 0; // count only trait bounds
let bounds = tcx.ty_param_bounds.get(&param_ty.def_id.node); let bounds = tcx.ty_param_bounds.get(param_ty.def_id.node);
for vec::each(*bounds) |bound| { for vec::each(*bounds) |bound| {
let bound_trait_ty = match *bound { let bound_trait_ty = match *bound {
@ -608,7 +607,7 @@ pub impl LookupContext {
fn push_inherent_impl_candidates_for_type(did: def_id) { fn push_inherent_impl_candidates_for_type(did: def_id) {
let opt_impl_infos = let opt_impl_infos =
self.fcx.ccx.coherence_info.inherent_methods.find(&did); self.fcx.ccx.coherence_info.inherent_methods.find(did);
for opt_impl_infos.each |impl_infos| { for opt_impl_infos.each |impl_infos| {
for impl_infos.each |impl_info| { for impl_infos.each |impl_info| {
self.push_candidates_from_impl( self.push_candidates_from_impl(
@ -1233,7 +1232,7 @@ pub impl LookupContext {
fn report_static_candidate(&self, idx: uint, did: def_id) { fn report_static_candidate(&self, idx: uint, did: def_id) {
let span = if did.crate == ast::local_crate { let span = if did.crate == ast::local_crate {
match self.tcx().items.find(&did.node) { match self.tcx().items.find(did.node) {
Some(ast_map::node_method(m, _, _)) => m.span, Some(ast_map::node_method(m, _, _)) => m.span,
_ => die!(fmt!("report_static_candidate: bad item %?", did)) _ => die!(fmt!("report_static_candidate: bad item %?", did))
} }


@ -405,7 +405,7 @@ pub fn check_fn(ccx: @crate_ctxt,
assign(self_info.self_id, Some(self_info.self_ty)); assign(self_info.self_id, Some(self_info.self_ty));
debug!("self is assigned to %s", debug!("self is assigned to %s",
fcx.infcx().ty_to_str( fcx.infcx().ty_to_str(
fcx.inh.locals.get(&self_info.self_id))); fcx.inh.locals.get(self_info.self_id)));
} }
// Add formal parameters. // Add formal parameters.
@ -438,7 +438,7 @@ pub fn check_fn(ccx: @crate_ctxt,
debug!("Local variable %s is assigned type %s", debug!("Local variable %s is assigned type %s",
fcx.pat_to_str(local.node.pat), fcx.pat_to_str(local.node.pat),
fcx.infcx().ty_to_str( fcx.infcx().ty_to_str(
fcx.inh.locals.get(&local.node.id))); fcx.inh.locals.get(local.node.id)));
visit::visit_local(local, e, v); visit::visit_local(local, e, v);
}; };
@ -451,7 +451,7 @@ pub fn check_fn(ccx: @crate_ctxt,
debug!("Pattern binding %s is assigned to %s", debug!("Pattern binding %s is assigned to %s",
tcx.sess.str_of(path.idents[0]), tcx.sess.str_of(path.idents[0]),
fcx.infcx().ty_to_str( fcx.infcx().ty_to_str(
fcx.inh.locals.get(&p.id))); fcx.inh.locals.get(p.id)));
} }
_ => {} _ => {}
} }
@ -501,7 +501,7 @@ pub fn check_no_duplicate_fields(tcx: ty::ctxt,
for fields.each |p| { for fields.each |p| {
let (id, sp) = *p; let (id, sp) = *p;
match field_names.find(&id) { match field_names.find(id) {
Some(orig_sp) => { Some(orig_sp) => {
tcx.sess.span_err(sp, fmt!("Duplicate field \ tcx.sess.span_err(sp, fmt!("Duplicate field \
name %s in record type declaration", name %s in record type declaration",
@ -558,7 +558,7 @@ pub fn check_item(ccx: @crate_ctxt, it: @ast::item) {
check_bare_fn(ccx, decl, (*body), it.id, None); check_bare_fn(ccx, decl, (*body), it.id, None);
} }
ast::item_impl(_, _, ty, ms) => { ast::item_impl(_, _, ty, ms) => {
let rp = ccx.tcx.region_paramd_items.find(&it.id); let rp = ccx.tcx.region_paramd_items.find(it.id);
debug!("item_impl %s with id %d rp %?", debug!("item_impl %s with id %d rp %?",
ccx.tcx.sess.str_of(it.ident), it.id, rp); ccx.tcx.sess.str_of(it.ident), it.id, rp);
let self_ty = ccx.to_ty(rscope::type_rscope(rp), ty); let self_ty = ccx.to_ty(rscope::type_rscope(rp), ty);
@ -664,7 +664,7 @@ pub impl @fn_ctxt {
fn tag() -> ~str { fmt!("%x", ptr::addr_of(&(*self)) as uint) } fn tag() -> ~str { fmt!("%x", ptr::addr_of(&(*self)) as uint) }
fn local_ty(span: span, nid: ast::node_id) -> ty::t { fn local_ty(span: span, nid: ast::node_id) -> ty::t {
match self.inh.locals.find(&nid) { match self.inh.locals.find(nid) {
Some(t) => t, Some(t) => t,
None => { None => {
self.tcx().sess.span_bug( self.tcx().sess.span_bug(
@ -740,7 +740,7 @@ pub impl @fn_ctxt {
} }
fn expr_ty(ex: @ast::expr) -> ty::t { fn expr_ty(ex: @ast::expr) -> ty::t {
match self.inh.node_types.find(&ex.id) { match self.inh.node_types.find(ex.id) {
Some(t) => t, Some(t) => t,
None => { None => {
self.tcx().sess.bug( self.tcx().sess.bug(
@ -750,7 +750,7 @@ pub impl @fn_ctxt {
} }
} }
fn node_ty(id: ast::node_id) -> ty::t { fn node_ty(id: ast::node_id) -> ty::t {
match self.inh.node_types.find(&id) { match self.inh.node_types.find(id) {
Some(t) => t, Some(t) => t,
None => { None => {
self.tcx().sess.bug( self.tcx().sess.bug(
@ -763,7 +763,7 @@ pub impl @fn_ctxt {
} }
} }
fn node_ty_substs(id: ast::node_id) -> ty::substs { fn node_ty_substs(id: ast::node_id) -> ty::substs {
match self.inh.node_type_substs.find(&id) { match self.inh.node_type_substs.find(id) {
Some(ref ts) => (/*bad*/copy *ts), Some(ref ts) => (/*bad*/copy *ts),
None => { None => {
self.tcx().sess.bug( self.tcx().sess.bug(
@ -776,7 +776,7 @@ pub impl @fn_ctxt {
} }
} }
fn opt_node_ty_substs(id: ast::node_id) -> Option<ty::substs> { fn opt_node_ty_substs(id: ast::node_id) -> Option<ty::substs> {
self.inh.node_type_substs.find(&id) self.inh.node_type_substs.find(id)
} }
@ -1001,8 +1001,8 @@ pub fn impl_self_ty(vcx: &VtableContext,
let tcx = vcx.tcx(); let tcx = vcx.tcx();
let {n_tps, region_param, raw_ty} = if did.crate == ast::local_crate { let {n_tps, region_param, raw_ty} = if did.crate == ast::local_crate {
let region_param = tcx.region_paramd_items.find(&did.node); let region_param = tcx.region_paramd_items.find(did.node);
match tcx.items.find(&did.node) { match tcx.items.find(did.node) {
Some(ast_map::node_item(@ast::item { Some(ast_map::node_item(@ast::item {
node: ast::item_impl(ref ts, _, st, _), node: ast::item_impl(ref ts, _, st, _),
_ _
@ -1698,7 +1698,7 @@ pub fn check_expr_with_unifier(fcx: @fn_ctxt,
// Typecheck each field. // Typecheck each field.
for ast_fields.each |field| { for ast_fields.each |field| {
match class_field_map.find(&field.node.ident) { match class_field_map.find(field.node.ident) {
None => { None => {
tcx.sess.span_err( tcx.sess.span_err(
field.span, field.span,
@ -1734,7 +1734,7 @@ pub fn check_expr_with_unifier(fcx: @fn_ctxt,
let mut missing_fields = ~[]; let mut missing_fields = ~[];
for field_types.each |class_field| { for field_types.each |class_field| {
let name = class_field.ident; let name = class_field.ident;
let (_, seen) = class_field_map.get(&name); let (_, seen) = class_field_map.get(name);
if !seen { if !seen {
missing_fields.push( missing_fields.push(
~"`" + tcx.sess.str_of(name) + ~"`"); ~"`" + tcx.sess.str_of(name) + ~"`");
@ -1769,8 +1769,8 @@ pub fn check_expr_with_unifier(fcx: @fn_ctxt,
let type_parameter_count, region_parameterized, raw_type; let type_parameter_count, region_parameterized, raw_type;
if class_id.crate == ast::local_crate { if class_id.crate == ast::local_crate {
region_parameterized = region_parameterized =
tcx.region_paramd_items.find(&class_id.node); tcx.region_paramd_items.find(class_id.node);
match tcx.items.find(&class_id.node) { match tcx.items.find(class_id.node) {
Some(ast_map::node_item(@ast::item { Some(ast_map::node_item(@ast::item {
node: ast::item_struct(_, ref type_parameters), node: ast::item_struct(_, ref type_parameters),
_ _
@ -1852,8 +1852,8 @@ pub fn check_expr_with_unifier(fcx: @fn_ctxt,
let type_parameter_count, region_parameterized, raw_type; let type_parameter_count, region_parameterized, raw_type;
if enum_id.crate == ast::local_crate { if enum_id.crate == ast::local_crate {
region_parameterized = region_parameterized =
tcx.region_paramd_items.find(&enum_id.node); tcx.region_paramd_items.find(enum_id.node);
match tcx.items.find(&enum_id.node) { match tcx.items.find(enum_id.node) {
Some(ast_map::node_item(@ast::item { Some(ast_map::node_item(@ast::item {
node: ast::item_enum(_, ref type_parameters), node: ast::item_enum(_, ref type_parameters),
_ _
@ -2435,7 +2435,7 @@ pub fn check_expr_with_unifier(fcx: @fn_ctxt,
} }
ast::expr_struct(path, ref fields, base_expr) => { ast::expr_struct(path, ref fields, base_expr) => {
// Resolve the path. // Resolve the path.
match tcx.def_map.find(&id) { match tcx.def_map.find(id) {
Some(ast::def_struct(type_def_id)) => { Some(ast::def_struct(type_def_id)) => {
check_struct_constructor(fcx, id, expr.span, type_def_id, check_struct_constructor(fcx, id, expr.span, type_def_id,
(/*bad*/copy *fields), base_expr); (/*bad*/copy *fields), base_expr);
@ -2532,7 +2532,7 @@ pub fn check_decl_local(fcx: @fn_ctxt, local: @ast::local) -> bool {
} }
let region = let region =
ty::re_scope(tcx.region_map.get(&local.node.id)); ty::re_scope(tcx.region_map.get(local.node.id));
let pcx = pat_ctxt { let pcx = pat_ctxt {
fcx: fcx, fcx: fcx,
map: pat_id_map(tcx.def_map, local.node.pat), map: pat_id_map(tcx.def_map, local.node.pat),
@ -2633,7 +2633,7 @@ pub fn check_const(ccx: @crate_ctxt,
id: ast::node_id) { id: ast::node_id) {
let rty = ty::node_id_to_type(ccx.tcx, id); let rty = ty::node_id_to_type(ccx.tcx, id);
let fcx = blank_fn_ctxt(ccx, rty, e.id); let fcx = blank_fn_ctxt(ccx, rty, e.id);
let declty = fcx.ccx.tcx.tcache.get(&local_def(id)).ty; let declty = fcx.ccx.tcx.tcache.get(local_def(id)).ty;
check_const_with_ty(fcx, _sp, e, declty); check_const_with_ty(fcx, _sp, e, declty);
} }
@ -2984,7 +2984,7 @@ pub fn may_break(cx: ty::ctxt, id: ast::node_id, b: ast::blk) -> bool {
(block_query(b, |e| { (block_query(b, |e| {
match e.node { match e.node {
ast::expr_break(Some(_)) => ast::expr_break(Some(_)) =>
match cx.def_map.find(&e.id) { match cx.def_map.find(e.id) {
Some(ast::def_label(loop_id)) if id == loop_id => true, Some(ast::def_label(loop_id)) if id == loop_id => true,
_ => false, _ => false,
}, },
@ -3080,8 +3080,8 @@ pub fn check_intrinsic_type(ccx: @crate_ctxt, it: @ast::foreign_item) {
let ty_visitor_name = tcx.sess.ident_of(~"TyVisitor"); let ty_visitor_name = tcx.sess.ident_of(~"TyVisitor");
assert tcx.intrinsic_defs.contains_key_ref(&tydesc_name); assert tcx.intrinsic_defs.contains_key_ref(&tydesc_name);
assert ccx.tcx.intrinsic_defs.contains_key_ref(&ty_visitor_name); assert ccx.tcx.intrinsic_defs.contains_key_ref(&ty_visitor_name);
let (_, tydesc_ty) = tcx.intrinsic_defs.get(&tydesc_name); let (_, tydesc_ty) = tcx.intrinsic_defs.get(tydesc_name);
let (_, visitor_trait) = tcx.intrinsic_defs.get(&ty_visitor_name); let (_, visitor_trait) = tcx.intrinsic_defs.get(ty_visitor_name);
let td_ptr = ty::mk_ptr(ccx.tcx, ty::mt {ty: tydesc_ty, let td_ptr = ty::mk_ptr(ccx.tcx, ty::mt {ty: tydesc_ty,
mutbl: ast::m_imm}); mutbl: ast::m_imm});
(0u, ~[arg(ast::by_val, td_ptr), (0u, ~[arg(ast::by_val, td_ptr),


@ -181,7 +181,7 @@ pub fn visit_block(b: ast::blk, &&rcx: @rcx, v: rvt) {
pub fn visit_expr(expr: @ast::expr, &&rcx: @rcx, v: rvt) { pub fn visit_expr(expr: @ast::expr, &&rcx: @rcx, v: rvt) {
debug!("visit_expr(e=%s)", rcx.fcx.expr_to_str(expr)); debug!("visit_expr(e=%s)", rcx.fcx.expr_to_str(expr));
for rcx.fcx.inh.adjustments.find(&expr.id).each |adjustment| { for rcx.fcx.inh.adjustments.find(expr.id).each |adjustment| {
for adjustment.autoref.each |autoref| { for adjustment.autoref.each |autoref| {
guarantor::for_autoref(rcx, expr, *adjustment, autoref); guarantor::for_autoref(rcx, expr, *adjustment, autoref);
} }
@ -327,7 +327,7 @@ pub fn constrain_auto_ref(rcx: @rcx, expr: @ast::expr) {
debug!("constrain_auto_ref(expr=%s)", rcx.fcx.expr_to_str(expr)); debug!("constrain_auto_ref(expr=%s)", rcx.fcx.expr_to_str(expr));
let adjustment = rcx.fcx.inh.adjustments.find(&expr.id); let adjustment = rcx.fcx.inh.adjustments.find(expr.id);
let region = match adjustment { let region = match adjustment {
Some(@ty::AutoAdjustment { autoref: Some(ref auto_ref), _ }) => { Some(@ty::AutoAdjustment { autoref: Some(ref auto_ref), _ }) => {
auto_ref.region auto_ref.region
@ -725,7 +725,7 @@ pub mod guarantor {
let mut expr_ct = categorize_unadjusted(rcx, expr); let mut expr_ct = categorize_unadjusted(rcx, expr);
debug!("before adjustments, cat=%?", expr_ct.cat); debug!("before adjustments, cat=%?", expr_ct.cat);
for rcx.fcx.inh.adjustments.find(&expr.id).each |adjustment| { for rcx.fcx.inh.adjustments.find(expr.id).each |adjustment| {
debug!("adjustment=%?", adjustment); debug!("adjustment=%?", adjustment);
expr_ct = apply_autoderefs( expr_ct = apply_autoderefs(


@ -194,7 +194,7 @@ pub fn lookup_vtable(vcx: &VtableContext,
match ty::get(ty).sty { match ty::get(ty).sty {
ty::ty_param(param_ty {idx: n, def_id: did}) => { ty::ty_param(param_ty {idx: n, def_id: did}) => {
let mut n_bound = 0; let mut n_bound = 0;
let bounds = tcx.ty_param_bounds.get(&did.node); let bounds = tcx.ty_param_bounds.get(did.node);
for ty::iter_bound_traits_and_supertraits( for ty::iter_bound_traits_and_supertraits(
tcx, bounds) |ity| { tcx, bounds) |ity| {
debug!("checking bounds trait %?", debug!("checking bounds trait %?",
@ -255,7 +255,7 @@ pub fn lookup_vtable(vcx: &VtableContext,
let mut impls_seen = HashMap(); let mut impls_seen = HashMap();
match vcx.ccx.coherence_info.extension_methods.find(&trait_id) { match vcx.ccx.coherence_info.extension_methods.find(trait_id) {
None => { None => {
// Nothing found. Continue. // Nothing found. Continue.
} }
@ -525,7 +525,7 @@ pub fn early_resolve_expr(ex: @ast::expr, &&fcx: @fn_ctxt, is_early: bool) {
ast::expr_path(*) => { ast::expr_path(*) => {
match fcx.opt_node_ty_substs(ex.id) { match fcx.opt_node_ty_substs(ex.id) {
Some(ref substs) => { Some(ref substs) => {
let def = cx.tcx.def_map.get(&ex.id); let def = cx.tcx.def_map.get(ex.id);
let did = ast_util::def_id_of_def(def); let did = ast_util::def_id_of_def(def);
let item_ty = ty::lookup_item_type(cx.tcx, did); let item_ty = ty::lookup_item_type(cx.tcx, did);
debug!("early resolve expr: def %? %?, %?, %?", ex.id, did, def, debug!("early resolve expr: def %? %?, %?, %?", ex.id, did, def,


@ -55,7 +55,7 @@ fn resolve_type_vars_in_type(fcx: @fn_ctxt, sp: span, typ: ty::t)
fn resolve_method_map_entry(fcx: @fn_ctxt, sp: span, id: ast::node_id) fn resolve_method_map_entry(fcx: @fn_ctxt, sp: span, id: ast::node_id)
{ {
// Resolve any method map entry // Resolve any method map entry
match fcx.ccx.method_map.find(&id) { match fcx.ccx.method_map.find(id) {
None => {} None => {}
Some(ref mme) => { Some(ref mme) => {
for resolve_type_vars_in_type(fcx, sp, mme.self_arg.ty).each |t| { for resolve_type_vars_in_type(fcx, sp, mme.self_arg.ty).each |t| {
@ -77,7 +77,7 @@ fn resolve_type_vars_for_node(wbcx: wb_ctxt, sp: span, id: ast::node_id)
let fcx = wbcx.fcx, tcx = fcx.ccx.tcx; let fcx = wbcx.fcx, tcx = fcx.ccx.tcx;
// Resolve any borrowings for the node with id `id` // Resolve any borrowings for the node with id `id`
match fcx.inh.adjustments.find(&id) { match fcx.inh.adjustments.find(id) {
None => (), None => (),
Some(adj) => { Some(adj) => {
let resolved_autoref = match adj.autoref { let resolved_autoref = match adj.autoref {


@ -236,8 +236,7 @@ pub impl CoherenceChecker {
} }
fn check_implementation(item: @item, associated_traits: ~[@trait_ref]) { fn check_implementation(item: @item, associated_traits: ~[@trait_ref]) {
let self_type = self.crate_context.tcx.tcache.get( let self_type = self.crate_context.tcx.tcache.get(local_def(item.id));
&local_def(item.id));
// If there are no traits, then this implementation must have a // If there are no traits, then this implementation must have a
// base type. // base type.
@ -355,7 +354,7 @@ pub impl CoherenceChecker {
}; };
let pmm = self.crate_context.tcx.provided_methods; let pmm = self.crate_context.tcx.provided_methods;
match pmm.find(&local_def(impl_id)) { match pmm.find(local_def(impl_id)) {
Some(mis) => { Some(mis) => {
// If the trait already has an entry in the // If the trait already has an entry in the
// provided_methods_map, we just need to add this // provided_methods_map, we just need to add this
@ -383,7 +382,7 @@ pub impl CoherenceChecker {
fn add_inherent_method(base_def_id: def_id, implementation: @Impl) { fn add_inherent_method(base_def_id: def_id, implementation: @Impl) {
let implementation_list; let implementation_list;
match self.crate_context.coherence_info.inherent_methods match self.crate_context.coherence_info.inherent_methods
.find(&base_def_id) { .find(base_def_id) {
None => { None => {
implementation_list = @DVec(); implementation_list = @DVec();
self.crate_context.coherence_info.inherent_methods self.crate_context.coherence_info.inherent_methods
@ -400,7 +399,7 @@ pub impl CoherenceChecker {
fn add_trait_method(trait_id: def_id, implementation: @Impl) { fn add_trait_method(trait_id: def_id, implementation: @Impl) {
let implementation_list; let implementation_list;
match self.crate_context.coherence_info.extension_methods match self.crate_context.coherence_info.extension_methods
.find(&trait_id) { .find(trait_id) {
None => { None => {
implementation_list = @DVec(); implementation_list = @DVec();
self.crate_context.coherence_info.extension_methods self.crate_context.coherence_info.extension_methods
@ -463,7 +462,7 @@ pub impl CoherenceChecker {
debug!("Adding impl %? of %? for %s", debug!("Adding impl %? of %? for %s",
the_impl.did, trait_t, the_impl.did, trait_t,
ty_to_str(self.crate_context.tcx, self_t)); ty_to_str(self.crate_context.tcx, self_t));
match self.crate_context.tcx.trait_impls.find(&trait_t) { match self.crate_context.tcx.trait_impls.find(trait_t) {
None => { None => {
let m = HashMap(); let m = HashMap();
m.insert(self_t, the_impl); m.insert(self_t, the_impl);
@ -481,7 +480,7 @@ pub impl CoherenceChecker {
let coherence_info = &self.crate_context.coherence_info; let coherence_info = &self.crate_context.coherence_info;
let extension_methods = &coherence_info.extension_methods; let extension_methods = &coherence_info.extension_methods;
match extension_methods.find(&trait_def_id) { match extension_methods.find(trait_def_id) {
Some(impls) => { Some(impls) => {
for uint::range(0, impls.len()) |i| { for uint::range(0, impls.len()) |i| {
f(impls[i]); f(impls[i]);
@ -613,7 +612,7 @@ pub impl CoherenceChecker {
fn get_self_type_for_implementation(implementation: @Impl) fn get_self_type_for_implementation(implementation: @Impl)
-> ty_param_bounds_and_ty { -> ty_param_bounds_and_ty {
return self.crate_context.tcx.tcache.get(&implementation.did); return self.crate_context.tcx.tcache.get(implementation.did);
} }
// Privileged scope checking // Privileged scope checking
@ -628,7 +627,7 @@ pub impl CoherenceChecker {
item_impl(_, opt_trait, _, _) => { item_impl(_, opt_trait, _, _) => {
let mut ok = false; let mut ok = false;
match self.base_type_def_ids.find( match self.base_type_def_ids.find(
&local_def(item.id)) { local_def(item.id)) {
None => { None => {
// Nothing to do. // Nothing to do.
@ -702,7 +701,7 @@ pub impl CoherenceChecker {
fn trait_ref_to_trait_def_id(trait_ref: @trait_ref) -> def_id { fn trait_ref_to_trait_def_id(trait_ref: @trait_ref) -> def_id {
let def_map = self.crate_context.tcx.def_map; let def_map = self.crate_context.tcx.def_map;
let trait_def = def_map.get(&trait_ref.ref_id); let trait_def = def_map.get(trait_ref.ref_id);
let trait_id = def_id_of_def(trait_def); let trait_id = def_id_of_def(trait_def);
return trait_id; return trait_id;
} }
@ -775,7 +774,7 @@ pub impl CoherenceChecker {
match self.crate_context.tcx match self.crate_context.tcx
.provided_methods .provided_methods
.find(&local_def(item.id)) { .find(local_def(item.id)) {
None => { None => {
debug!("(creating impl) trait with node_id `%d` \ debug!("(creating impl) trait with node_id `%d` \
has no provided methods", trait_did.node); has no provided methods", trait_did.node);
@ -809,7 +808,7 @@ pub impl CoherenceChecker {
fn span_of_impl(implementation: @Impl) -> span { fn span_of_impl(implementation: @Impl) -> span {
assert implementation.did.crate == local_crate; assert implementation.did.crate == local_crate;
match self.crate_context.tcx.items.find(&implementation.did.node) { match self.crate_context.tcx.items.find(implementation.did.node) {
Some(node_item(item, _)) => { Some(node_item(item, _)) => {
return item.span; return item.span;
} }
@ -837,7 +836,7 @@ pub impl CoherenceChecker {
// Make sure we don't visit the same implementation // Make sure we don't visit the same implementation
// multiple times. // multiple times.
match impls_seen.find(&implementation.did) { match impls_seen.find(implementation.did) {
None => { None => {
// Good. Continue. // Good. Continue.
impls_seen.insert(implementation.did, ()); impls_seen.insert(implementation.did, ());
@ -989,7 +988,7 @@ pub impl CoherenceChecker {
let coherence_info = &self.crate_context.coherence_info; let coherence_info = &self.crate_context.coherence_info;
let tcx = self.crate_context.tcx; let tcx = self.crate_context.tcx;
let drop_trait = tcx.lang_items.drop_trait(); let drop_trait = tcx.lang_items.drop_trait();
let impls_opt = coherence_info.extension_methods.find(&drop_trait); let impls_opt = coherence_info.extension_methods.find(drop_trait);
let impls; let impls;
match impls_opt { match impls_opt {
@ -1014,7 +1013,7 @@ pub impl CoherenceChecker {
_ => { _ => {
// Destructors only work on nominal types. // Destructors only work on nominal types.
if impl_info.did.crate == ast::local_crate { if impl_info.did.crate == ast::local_crate {
match tcx.items.find(&impl_info.did.node) { match tcx.items.find(impl_info.did.node) {
Some(ast_map::node_item(@ref item, _)) => { Some(ast_map::node_item(@ref item, _)) => {
tcx.sess.span_err((*item).span, tcx.sess.span_err((*item).span,
~"the Drop trait may only \ ~"the Drop trait may only \


@ -130,7 +130,7 @@ pub impl @crate_ctxt: ast_conv {
if id.crate != ast::local_crate { if id.crate != ast::local_crate {
csearch::get_type(self.tcx, id) csearch::get_type(self.tcx, id)
} else { } else {
match self.tcx.items.find(&id.node) { match self.tcx.items.find(id.node) {
Some(ast_map::node_item(item, _)) => { Some(ast_map::node_item(item, _)) => {
ty_of_item(self, item) ty_of_item(self, item)
} }
@ -290,8 +290,8 @@ pub fn ensure_trait_methods(ccx: @crate_ctxt,
let tcx = ccx.tcx; let tcx = ccx.tcx;
let region_paramd = tcx.region_paramd_items.find(&id); let region_paramd = tcx.region_paramd_items.find(id);
match tcx.items.get(&id) { match tcx.items.get(id) {
ast_map::node_item(@ast::item { ast_map::node_item(@ast::item {
node: ast::item_trait(ref params, _, ref ms), node: ast::item_trait(ref params, _, ref ms),
_ _
@@ -521,7 +521,7 @@ pub fn check_methods_against_trait(ccx: @crate_ctxt,
// trait ref. Otherwise, we will potentially overwrite the types of // trait ref. Otherwise, we will potentially overwrite the types of
// the methods within the trait with bogus results. (See issue #3903.) // the methods within the trait with bogus results. (See issue #3903.)
match tcx.items.find(&did.node) { match tcx.items.find(did.node) {
Some(ast_map::node_item(item, _)) => { Some(ast_map::node_item(item, _)) => {
let tpt = ty_of_item(ccx, item); let tpt = ty_of_item(ccx, item);
ensure_trait_methods(ccx, did.node, tpt.ty); ensure_trait_methods(ccx, did.node, tpt.ty);
@@ -616,7 +616,7 @@ pub fn ensure_no_ty_param_bounds(ccx: @crate_ctxt,
pub fn convert(ccx: @crate_ctxt, it: @ast::item) { pub fn convert(ccx: @crate_ctxt, it: @ast::item) {
let tcx = ccx.tcx; let tcx = ccx.tcx;
let rp = tcx.region_paramd_items.find(&it.id); let rp = tcx.region_paramd_items.find(it.id);
debug!("convert: item %s with id %d rp %?", debug!("convert: item %s with id %d rp %?",
tcx.sess.str_of(it.ident), it.id, rp); tcx.sess.str_of(it.ident), it.id, rp);
match /*bad*/copy it.node { match /*bad*/copy it.node {
@@ -740,7 +740,7 @@ pub fn convert_struct(ccx: @crate_ctxt,
arg { arg {
mode: ast::expl(ast::by_copy), mode: ast::expl(ast::by_copy),
ty: ccx.tcx.tcache.get ty: ccx.tcx.tcache.get
(&local_def(field.node.id)).ty (local_def(field.node.id)).ty
} }
}, },
output: selfty output: selfty
@@ -830,11 +830,11 @@ pub fn ty_of_item(ccx: @crate_ctxt, it: @ast::item)
let def_id = local_def(it.id); let def_id = local_def(it.id);
let tcx = ccx.tcx; let tcx = ccx.tcx;
match tcx.tcache.find(&def_id) { match tcx.tcache.find(def_id) {
Some(tpt) => return tpt, Some(tpt) => return tpt,
_ => {} _ => {}
} }
let rp = tcx.region_paramd_items.find(&it.id); let rp = tcx.region_paramd_items.find(it.id);
match /*bad*/copy it.node { match /*bad*/copy it.node {
ast::item_const(t, _) => { ast::item_const(t, _) => {
let typ = ccx.to_ty(empty_rscope, t); let typ = ccx.to_ty(empty_rscope, t);
@@ -859,12 +859,12 @@ pub fn ty_of_item(ccx: @crate_ctxt, it: @ast::item)
return tpt; return tpt;
} }
ast::item_ty(t, tps) => { ast::item_ty(t, tps) => {
match tcx.tcache.find(&local_def(it.id)) { match tcx.tcache.find(local_def(it.id)) {
Some(tpt) => return tpt, Some(tpt) => return tpt,
None => { } None => { }
} }
let rp = tcx.region_paramd_items.find(&it.id); let rp = tcx.region_paramd_items.find(it.id);
let tpt = { let tpt = {
let ty = { let ty = {
let t0 = ccx.to_ty(type_rscope(rp), t); let t0 = ccx.to_ty(type_rscope(rp), t);
@@ -978,7 +978,7 @@ pub fn compute_bounds(ccx: @crate_ctxt,
pub fn ty_param_bounds(ccx: @crate_ctxt, pub fn ty_param_bounds(ccx: @crate_ctxt,
params: ~[ast::ty_param]) -> @~[ty::param_bounds] { params: ~[ast::ty_param]) -> @~[ty::param_bounds] {
@do params.map |param| { @do params.map |param| {
match ccx.tcx.ty_param_bounds.find(&param.id) { match ccx.tcx.ty_param_bounds.find(param.id) {
Some(bs) => bs, Some(bs) => bs,
None => { None => {
let bounds = compute_bounds(ccx, param.bounds); let bounds = compute_bounds(ccx, param.bounds);

View file

@@ -912,7 +912,7 @@ pub impl RegionVarBindings {
-> cres<Region> { -> cres<Region> {
let vars = TwoRegions { a: a, b: b }; let vars = TwoRegions { a: a, b: b };
match combines.find(&vars) { match combines.find(vars) {
Some(c) => Ok(re_infer(ReVar(c))), Some(c) => Ok(re_infer(ReVar(c))),
None => { None => {
let c = self.new_region_var(span); let c = self.new_region_var(span);

View file

@@ -225,7 +225,7 @@ pub fn write_substs_to_tcx(tcx: ty::ctxt,
} }
pub fn lookup_def_tcx(tcx: ty::ctxt, sp: span, id: ast::node_id) -> ast::def { pub fn lookup_def_tcx(tcx: ty::ctxt, sp: span, id: ast::node_id) -> ast::def {
match tcx.def_map.find(&id) { match tcx.def_map.find(id) {
Some(x) => x, Some(x) => x,
_ => { _ => {
tcx.sess.span_fatal(sp, ~"internal error looking up a definition") tcx.sess.span_fatal(sp, ~"internal error looking up a definition")
@@ -325,7 +325,7 @@ fn check_main_fn_ty(ccx: @crate_ctxt,
let main_t = ty::node_id_to_type(tcx, main_id); let main_t = ty::node_id_to_type(tcx, main_id);
match ty::get(main_t).sty { match ty::get(main_t).sty {
ty::ty_fn(ref fn_ty) => { ty::ty_fn(ref fn_ty) => {
match tcx.items.find(&main_id) { match tcx.items.find(main_id) {
Some(ast_map::node_item(it,_)) => { Some(ast_map::node_item(it,_)) => {
match it.node { match it.node {
ast::item_fn(_, _, ref ps, _) ast::item_fn(_, _, ref ps, _)

View file

@@ -67,7 +67,7 @@ pub fn explain_region_and_span(cx: ctxt, region: ty::Region)
-> (~str, Option<span>) { -> (~str, Option<span>) {
return match region { return match region {
re_scope(node_id) => { re_scope(node_id) => {
match cx.items.find(&node_id) { match cx.items.find(node_id) {
Some(ast_map::node_block(ref blk)) => { Some(ast_map::node_block(ref blk)) => {
explain_span(cx, "block", (*blk).span) explain_span(cx, "block", (*blk).span)
} }
@@ -105,7 +105,7 @@ pub fn explain_region_and_span(cx: ctxt, region: ty::Region)
bound_region_to_str(cx, br)) bound_region_to_str(cx, br))
}; };
match cx.items.find(&id) { match cx.items.find(id) {
Some(ast_map::node_block(ref blk)) => { Some(ast_map::node_block(ref blk)) => {
let (msg, opt_span) = explain_span(cx, "block", (*blk).span); let (msg, opt_span) = explain_span(cx, "block", (*blk).span);
(fmt!("%s %s", prefix, msg), opt_span) (fmt!("%s %s", prefix, msg), opt_span)
@@ -154,7 +154,7 @@ pub fn bound_region_to_str_adorned(cx: ctxt, prefix: &str,
} }
pub fn re_scope_id_to_str(cx: ctxt, node_id: ast::node_id) -> ~str { pub fn re_scope_id_to_str(cx: ctxt, node_id: ast::node_id) -> ~str {
match cx.items.find(&node_id) { match cx.items.find(node_id) {
Some(ast_map::node_block(ref blk)) => { Some(ast_map::node_block(ref blk)) => {
fmt!("<block at %s>", fmt!("<block at %s>",
cx.sess.codemap.span_to_str((*blk).span)) cx.sess.codemap.span_to_str((*blk).span))

View file

@@ -116,7 +116,7 @@ fn parse_item_attrs<T:Owned>(
id: doc::AstId, id: doc::AstId,
parse_attrs: fn~(a: ~[ast::attribute]) -> T) -> T { parse_attrs: fn~(a: ~[ast::attribute]) -> T) -> T {
do astsrv::exec(srv) |move parse_attrs, ctxt| { do astsrv::exec(srv) |move parse_attrs, ctxt| {
let attrs = match ctxt.ast_map.get(&id) { let attrs = match ctxt.ast_map.get(id) {
ast_map::node_item(item, _) => copy item.attrs, ast_map::node_item(item, _) => copy item.attrs,
ast_map::node_foreign_item(item, _, _) => copy item.attrs, ast_map::node_foreign_item(item, _, _) => copy item.attrs,
_ => die!(~"parse_item_attrs: not an item") _ => die!(~"parse_item_attrs: not an item")
@@ -170,7 +170,7 @@ fn fold_enum(
let desc = { let desc = {
let variant = copy variant; let variant = copy variant;
do astsrv::exec(srv.clone()) |ctxt| { do astsrv::exec(srv.clone()) |ctxt| {
match ctxt.ast_map.get(&doc_id) { match ctxt.ast_map.get(doc_id) {
ast_map::node_item(@ast::item { ast_map::node_item(@ast::item {
node: ast::item_enum(ref enum_definition, _), _ node: ast::item_enum(ref enum_definition, _), _
}, _) => { }, _) => {
@@ -234,7 +234,7 @@ fn merge_method_attrs(
// Create an assoc list from method name to attributes // Create an assoc list from method name to attributes
let attrs: ~[(~str, Option<~str>)] = do astsrv::exec(srv) |ctxt| { let attrs: ~[(~str, Option<~str>)] = do astsrv::exec(srv) |ctxt| {
match ctxt.ast_map.get(&item_id) { match ctxt.ast_map.get(item_id) {
ast_map::node_item(@ast::item { ast_map::node_item(@ast::item {
node: ast::item_trait(_, _, ref methods), _ node: ast::item_trait(_, _, ref methods), _
}, _) => { }, _) => {

View file

@@ -56,7 +56,7 @@ fn is_hidden(srv: astsrv::Srv, doc: doc::ItemDoc) -> bool {
let id = doc.id; let id = doc.id;
do astsrv::exec(srv) |ctxt| { do astsrv::exec(srv) |ctxt| {
let attrs = match ctxt.ast_map.get(&id) { let attrs = match ctxt.ast_map.get(id) {
ast_map::node_item(item, _) => copy item.attrs, ast_map::node_item(item, _) => copy item.attrs,
_ => ~[] _ => ~[]
}; };

View file

@@ -59,7 +59,7 @@ fn is_visible(srv: astsrv::Srv, doc: doc::ItemDoc) -> bool {
let id = doc.id; let id = doc.id;
do astsrv::exec(srv) |ctxt| { do astsrv::exec(srv) |ctxt| {
match ctxt.ast_map.get(&id) { match ctxt.ast_map.get(id) {
ast_map::node_item(item, _) => { ast_map::node_item(item, _) => {
item.vis == ast::public item.vis == ast::public
} }

View file

@@ -68,7 +68,7 @@ fn fold_fn(
fn get_fn_sig(srv: astsrv::Srv, fn_id: doc::AstId) -> Option<~str> { fn get_fn_sig(srv: astsrv::Srv, fn_id: doc::AstId) -> Option<~str> {
do astsrv::exec(srv) |ctxt| { do astsrv::exec(srv) |ctxt| {
match ctxt.ast_map.get(&fn_id) { match ctxt.ast_map.get(fn_id) {
ast_map::node_item(@ast::item { ast_map::node_item(@ast::item {
ident: ident, ident: ident,
node: ast::item_fn(ref decl, _, ref tys, _), _ node: ast::item_fn(ref decl, _, ref tys, _), _
@@ -107,7 +107,7 @@ fn fold_const(
sig: Some({ sig: Some({
let doc = copy doc; let doc = copy doc;
do astsrv::exec(srv) |ctxt| { do astsrv::exec(srv) |ctxt| {
match ctxt.ast_map.get(&doc.id()) { match ctxt.ast_map.get(doc.id()) {
ast_map::node_item(@ast::item { ast_map::node_item(@ast::item {
node: ast::item_const(ty, _), _ node: ast::item_const(ty, _), _
}, _) => { }, _) => {
@@ -138,7 +138,7 @@ fn fold_enum(
let sig = { let sig = {
let variant = copy *variant; let variant = copy *variant;
do astsrv::exec(srv.clone()) |copy variant, ctxt| { do astsrv::exec(srv.clone()) |copy variant, ctxt| {
match ctxt.ast_map.get(&doc_id) { match ctxt.ast_map.get(doc_id) {
ast_map::node_item(@ast::item { ast_map::node_item(@ast::item {
node: ast::item_enum(ref enum_definition, _), _ node: ast::item_enum(ref enum_definition, _), _
}, _) => { }, _) => {
@@ -199,7 +199,7 @@ fn get_method_sig(
method_name: ~str method_name: ~str
) -> Option<~str> { ) -> Option<~str> {
do astsrv::exec(srv) |copy method_name, ctxt| { do astsrv::exec(srv) |copy method_name, ctxt| {
match ctxt.ast_map.get(&item_id) { match ctxt.ast_map.get(item_id) {
ast_map::node_item(@ast::item { ast_map::node_item(@ast::item {
node: ast::item_trait(_, _, ref methods), _ node: ast::item_trait(_, _, ref methods), _
}, _) => { }, _) => {
@@ -271,7 +271,7 @@ fn fold_impl(
let (trait_types, self_ty) = { let (trait_types, self_ty) = {
let doc = copy doc; let doc = copy doc;
do astsrv::exec(srv) |ctxt| { do astsrv::exec(srv) |ctxt| {
match ctxt.ast_map.get(&doc.id()) { match ctxt.ast_map.get(doc.id()) {
ast_map::node_item(@ast::item { ast_map::node_item(@ast::item {
node: ast::item_impl(_, opt_trait_type, self_ty, _), _ node: ast::item_impl(_, opt_trait_type, self_ty, _), _
}, _) => { }, _) => {
@@ -331,7 +331,7 @@ fn fold_type(
sig: { sig: {
let doc = copy doc; let doc = copy doc;
do astsrv::exec(srv) |ctxt| { do astsrv::exec(srv) |ctxt| {
match ctxt.ast_map.get(&doc.id()) { match ctxt.ast_map.get(doc.id()) {
ast_map::node_item(@ast::item { ast_map::node_item(@ast::item {
ident: ident, ident: ident,
node: ast::item_ty(ty, ref params), _ node: ast::item_ty(ty, ref params), _
@@ -369,7 +369,7 @@ fn fold_struct(
sig: { sig: {
let doc = copy doc; let doc = copy doc;
do astsrv::exec(srv) |ctxt| { do astsrv::exec(srv) |ctxt| {
match ctxt.ast_map.get(&doc.id()) { match ctxt.ast_map.get(doc.id()) {
ast_map::node_item(item, _) => { ast_map::node_item(item, _) => {
let item = strip_struct_extra_stuff(item); let item = strip_struct_extra_stuff(item);
Some(pprust::item_to_str(item, Some(pprust::item_to_str(item,

View file

@@ -226,9 +226,9 @@ pub mod chained {
} }
} }
pure fn find(&self, k: &K) -> Option<V> { pure fn find(&self, k: K) -> Option<V> {
unsafe { unsafe {
match self.search_tbl(k, k.hash_keyed(0,0) as uint) { match self.search_tbl(&k, k.hash_keyed(0,0) as uint) {
NotFound => None, NotFound => None,
FoundFirst(_, entry) => Some(entry.value), FoundFirst(_, entry) => Some(entry.value),
FoundAfter(_, entry) => Some(entry.value) FoundAfter(_, entry) => Some(entry.value)
@@ -291,7 +291,7 @@ pub mod chained {
return self.update_with_key(key, newval, |_k, v, v1| ff(v,v1)); return self.update_with_key(key, newval, |_k, v, v1| ff(v,v1));
} }
pure fn get(&self, k: &K) -> V { pure fn get(&self, k: K) -> V {
let opt_v = self.find(k); let opt_v = self.find(k);
if opt_v.is_none() { if opt_v.is_none() {
die!(fmt!("Key not found in table: %?", k)); die!(fmt!("Key not found in table: %?", k));
@@ -364,7 +364,7 @@ pub mod chained {
impl<K:Eq IterBytes Hash Copy, V: Copy> T<K, V>: ops::Index<K, V> { impl<K:Eq IterBytes Hash Copy, V: Copy> T<K, V>: ops::Index<K, V> {
pure fn index(&self, k: K) -> V { pure fn index(&self, k: K) -> V {
unsafe { unsafe {
self.get(&k) self.get(k)
} }
} }
} }
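The three hunks above are the heart of the revert: in `pub mod chained`, `find` and `get` go back to taking the key by value (`k: K`) rather than by reference (`k: &K`), and the `Index` impl follows suit. As a rough illustration of the two calling conventions, here is a minimal sketch in modern Rust against `std::collections::HashMap`; the wrapper `Table` and the method `find_ref` are hypothetical names for illustration, not part of the oldmap code.

```rust
use std::collections::HashMap;

// Hypothetical wrapper used only to show both conventions side by side.
struct Table {
    inner: HashMap<u32, u32>,
}

impl Table {
    // Post-revert convention: the key is taken by value (cheap for small Copy keys).
    fn find(&self, k: u32) -> Option<u32> {
        self.inner.get(&k).copied()
    }

    // The convention being reverted: the key is taken by reference.
    fn find_ref(&self, k: &u32) -> Option<u32> {
        self.inner.get(k).copied()
    }
}

fn main() {
    let mut t = Table { inner: HashMap::new() };
    t.inner.insert(10, 12);
    assert_eq!(t.find(10), Some(12));       // call site reads `find(k)`, as in this diff
    assert_eq!(t.find_ref(&10), Some(12));  // call site reads `find(&k)`, the reverted style
    println!("both lookups agree");
}
```

Every other hunk in this commit is the mechanical call-site change that follows: `get(&k)` becomes `get(k)` and `find(&k)` becomes `find(k)`.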
@@ -421,6 +421,7 @@ pub fn hash_from_vec<K: Eq IterBytes Hash Const Copy, V: Copy>(
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use core::option::None; use core::option::None;
use core::option;
use core::uint; use core::uint;
use super::*; use super::*;
@@ -436,13 +437,13 @@ mod tests {
assert (hm_uu.insert(10u, 12u)); assert (hm_uu.insert(10u, 12u));
assert (hm_uu.insert(11u, 13u)); assert (hm_uu.insert(11u, 13u));
assert (hm_uu.insert(12u, 14u)); assert (hm_uu.insert(12u, 14u));
assert (hm_uu.get(&11) == 13u); assert (hm_uu.get(11u) == 13u);
assert (hm_uu.get(&12) == 14u); assert (hm_uu.get(12u) == 14u);
assert (hm_uu.get(&10) == 12u); assert (hm_uu.get(10u) == 12u);
assert (!hm_uu.insert(12u, 14u)); assert (!hm_uu.insert(12u, 14u));
assert (hm_uu.get(&12) == 14u); assert (hm_uu.get(12u) == 14u);
assert (!hm_uu.insert(12u, 12u)); assert (!hm_uu.insert(12u, 12u));
assert (hm_uu.get(&12) == 12u); assert (hm_uu.get(12u) == 12u);
let ten: ~str = ~"ten"; let ten: ~str = ~"ten";
let eleven: ~str = ~"eleven"; let eleven: ~str = ~"eleven";
let twelve: ~str = ~"twelve"; let twelve: ~str = ~"twelve";
@@ -452,40 +453,40 @@ mod tests {
assert (hm_su.insert(~"ten", 12u)); assert (hm_su.insert(~"ten", 12u));
assert (hm_su.insert(eleven, 13u)); assert (hm_su.insert(eleven, 13u));
assert (hm_su.insert(~"twelve", 14u)); assert (hm_su.insert(~"twelve", 14u));
assert (hm_su.get(&eleven) == 13u); assert (hm_su.get(eleven) == 13u);
assert (hm_su.get(&~"eleven") == 13u); assert (hm_su.get(~"eleven") == 13u);
assert (hm_su.get(&~"twelve") == 14u); assert (hm_su.get(~"twelve") == 14u);
assert (hm_su.get(&~"ten") == 12u); assert (hm_su.get(~"ten") == 12u);
assert (!hm_su.insert(~"twelve", 14u)); assert (!hm_su.insert(~"twelve", 14u));
assert (hm_su.get(&~"twelve") == 14u); assert (hm_su.get(~"twelve") == 14u);
assert (!hm_su.insert(~"twelve", 12u)); assert (!hm_su.insert(~"twelve", 12u));
assert (hm_su.get(&~"twelve") == 12u); assert (hm_su.get(~"twelve") == 12u);
debug!("uint -> str"); debug!("uint -> str");
let hm_us: HashMap<uint, ~str> = let hm_us: HashMap<uint, ~str> =
HashMap::<uint, ~str>(); HashMap::<uint, ~str>();
assert (hm_us.insert(10u, ~"twelve")); assert (hm_us.insert(10u, ~"twelve"));
assert (hm_us.insert(11u, ~"thirteen")); assert (hm_us.insert(11u, ~"thirteen"));
assert (hm_us.insert(12u, ~"fourteen")); assert (hm_us.insert(12u, ~"fourteen"));
assert hm_us.get(&11) == ~"thirteen"; assert hm_us.get(11u) == ~"thirteen";
assert hm_us.get(&12) == ~"fourteen"; assert hm_us.get(12u) == ~"fourteen";
assert hm_us.get(&10) == ~"twelve"; assert hm_us.get(10u) == ~"twelve";
assert (!hm_us.insert(12u, ~"fourteen")); assert (!hm_us.insert(12u, ~"fourteen"));
assert hm_us.get(&12) == ~"fourteen"; assert hm_us.get(12u) == ~"fourteen";
assert (!hm_us.insert(12u, ~"twelve")); assert (!hm_us.insert(12u, ~"twelve"));
assert hm_us.get(&12) == ~"twelve"; assert hm_us.get(12u) == ~"twelve";
debug!("str -> str"); debug!("str -> str");
let hm_ss: HashMap<~str, ~str> = let hm_ss: HashMap<~str, ~str> =
HashMap::<~str, ~str>(); HashMap::<~str, ~str>();
assert (hm_ss.insert(ten, ~"twelve")); assert (hm_ss.insert(ten, ~"twelve"));
assert (hm_ss.insert(eleven, ~"thirteen")); assert (hm_ss.insert(eleven, ~"thirteen"));
assert (hm_ss.insert(twelve, ~"fourteen")); assert (hm_ss.insert(twelve, ~"fourteen"));
assert hm_ss.get(&~"eleven") == ~"thirteen"; assert hm_ss.get(~"eleven") == ~"thirteen";
assert hm_ss.get(&~"twelve") == ~"fourteen"; assert hm_ss.get(~"twelve") == ~"fourteen";
assert hm_ss.get(&~"ten") == ~"twelve"; assert hm_ss.get(~"ten") == ~"twelve";
assert (!hm_ss.insert(~"twelve", ~"fourteen")); assert (!hm_ss.insert(~"twelve", ~"fourteen"));
assert hm_ss.get(&~"twelve") == ~"fourteen"; assert hm_ss.get(~"twelve") == ~"fourteen";
assert (!hm_ss.insert(~"twelve", ~"twelve")); assert (!hm_ss.insert(~"twelve", ~"twelve"));
assert hm_ss.get(&~"twelve") == ~"twelve"; assert hm_ss.get(~"twelve") == ~"twelve";
debug!("*** finished test_simple"); debug!("*** finished test_simple");
} }
@@ -511,17 +512,17 @@ mod tests {
debug!("-----"); debug!("-----");
i = 0u; i = 0u;
while i < num_to_insert { while i < num_to_insert {
debug!("get(%u) = %u", i, hm_uu.get(&i)); debug!("get(%u) = %u", i, hm_uu.get(i));
assert (hm_uu.get(&i) == i * i); assert (hm_uu.get(i) == i * i);
i += 1u; i += 1u;
} }
assert (hm_uu.insert(num_to_insert, 17u)); assert (hm_uu.insert(num_to_insert, 17u));
assert (hm_uu.get(&num_to_insert) == 17u); assert (hm_uu.get(num_to_insert) == 17u);
debug!("-----"); debug!("-----");
i = 0u; i = 0u;
while i < num_to_insert { while i < num_to_insert {
debug!("get(%u) = %u", i, hm_uu.get(&i)); debug!("get(%u) = %u", i, hm_uu.get(i));
assert (hm_uu.get(&i) == i * i); assert (hm_uu.get(i) == i * i);
i += 1u; i += 1u;
} }
debug!("str -> str"); debug!("str -> str");
@@ -541,22 +542,22 @@ mod tests {
while i < num_to_insert { while i < num_to_insert {
debug!("get(\"%s\") = \"%s\"", debug!("get(\"%s\") = \"%s\"",
uint::to_str_radix(i, 2u), uint::to_str_radix(i, 2u),
hm_ss.get(&uint::to_str_radix(i, 2u))); hm_ss.get(uint::to_str_radix(i, 2u)));
assert hm_ss.get(&uint::to_str_radix(i, 2u)) == assert hm_ss.get(uint::to_str_radix(i, 2u)) ==
uint::to_str_radix(i * i, 2u); uint::to_str_radix(i * i, 2u);
i += 1u; i += 1u;
} }
assert (hm_ss.insert(uint::to_str_radix(num_to_insert, 2u), assert (hm_ss.insert(uint::to_str_radix(num_to_insert, 2u),
uint::to_str_radix(17u, 2u))); uint::to_str_radix(17u, 2u)));
assert hm_ss.get(&uint::to_str_radix(num_to_insert, 2u)) == assert hm_ss.get(uint::to_str_radix(num_to_insert, 2u)) ==
uint::to_str_radix(17u, 2u); uint::to_str_radix(17u, 2u);
debug!("-----"); debug!("-----");
i = 0u; i = 0u;
while i < num_to_insert { while i < num_to_insert {
debug!("get(\"%s\") = \"%s\"", debug!("get(\"%s\") = \"%s\"",
uint::to_str_radix(i, 2u), uint::to_str_radix(i, 2u),
hm_ss.get(&uint::to_str_radix(i, 2u))); hm_ss.get(uint::to_str_radix(i, 2u)));
assert hm_ss.get(&uint::to_str_radix(i, 2u)) == assert hm_ss.get(uint::to_str_radix(i, 2u)) ==
uint::to_str_radix(i * i, 2u); uint::to_str_radix(i * i, 2u);
i += 1u; i += 1u;
} }
@@ -588,15 +589,15 @@ mod tests {
debug!("-----"); debug!("-----");
i = 1u; i = 1u;
while i < num_to_insert { while i < num_to_insert {
debug!("get(%u) = %u", i, hm.get(&i)); debug!("get(%u) = %u", i, hm.get(i));
assert (hm.get(&i) == i * i); assert (hm.get(i) == i * i);
i += 2u; i += 2u;
} }
debug!("-----"); debug!("-----");
i = 1u; i = 1u;
while i < num_to_insert { while i < num_to_insert {
debug!("get(%u) = %u", i, hm.get(&i)); debug!("get(%u) = %u", i, hm.get(i));
assert (hm.get(&i) == i * i); assert (hm.get(i) == i * i);
i += 2u; i += 2u;
} }
debug!("-----"); debug!("-----");
@@ -610,16 +611,16 @@ mod tests {
debug!("-----"); debug!("-----");
i = 0u; i = 0u;
while i < num_to_insert { while i < num_to_insert {
debug!("get(%u) = %u", i, hm.get(&i)); debug!("get(%u) = %u", i, hm.get(i));
assert (hm.get(&i) == i * i); assert (hm.get(i) == i * i);
i += 1u; i += 1u;
} }
debug!("-----"); debug!("-----");
assert (hm.len() == num_to_insert); assert (hm.len() == num_to_insert);
i = 0u; i = 0u;
while i < num_to_insert { while i < num_to_insert {
debug!("get(%u) = %u", i, hm.get(&i)); debug!("get(%u) = %u", i, hm.get(i));
assert (hm.get(&i) == i * i); assert (hm.get(i) == i * i);
i += 1u; i += 1u;
} }
debug!("*** finished test_removal"); debug!("*** finished test_removal");
@@ -638,9 +639,9 @@ mod tests {
fn test_find() { fn test_find() {
let key = ~"k"; let key = ~"k";
let map = HashMap::<~str, ~str>(); let map = HashMap::<~str, ~str>();
assert map.find(&key).is_none(); assert (option::is_none(&map.find(key)));
map.insert(key, ~"val"); map.insert(key, ~"val");
assert map.find(&key).get() == ~"val"; assert (option::get(map.find(key)) == ~"val");
} }
#[test] #[test]
@@ -663,9 +664,9 @@ mod tests {
(~"c", 3) (~"c", 3)
]); ]);
assert map.len() == 3u; assert map.len() == 3u;
assert map.get(&~"a") == 1; assert map.get(~"a") == 1;
assert map.get(&~"b") == 2; assert map.get(~"b") == 2;
assert map.get(&~"c") == 3; assert map.get(~"c") == 3;
} }
#[test] #[test]
@@ -691,11 +692,11 @@ mod tests {
map.update_with_key(~"cat", 2, addMoreToCount); map.update_with_key(~"cat", 2, addMoreToCount);
// check the total counts // check the total counts
assert map.find(&~"cat").get() == 10; assert 10 == option::get(map.find(~"cat"));
assert map.find(&~"ferret").get() == 3; assert 3 == option::get(map.find(~"ferret"));
assert map.find(&~"mongoose").get() == 1; assert 1 == option::get(map.find(~"mongoose"));
// sadly, no mythical animals were counted! // sadly, no mythical animals were counted!
assert map.find(&~"unicorn").is_none(); assert None == map.find(~"unicorn");
} }
} }

View file

@@ -333,7 +333,7 @@ pub fn map_stmt(stmt: @stmt, cx: ctx, v: vt) {
} }
pub fn node_id_to_str(map: map, id: node_id, itr: @ident_interner) -> ~str { pub fn node_id_to_str(map: map, id: node_id, itr: @ident_interner) -> ~str {
match map.find(&id) { match map.find(id) {
None => { None => {
fmt!("unknown node (id=%d)", id) fmt!("unknown node (id=%d)", id)
} }
@@ -398,7 +398,7 @@ pub fn node_id_to_str(map: map, id: node_id, itr: @ident_interner) -> ~str {
pub fn node_item_query<Result>(items: map, id: node_id, pub fn node_item_query<Result>(items: map, id: node_id,
query: fn(@item) -> Result, query: fn(@item) -> Result,
error_msg: ~str) -> Result { error_msg: ~str) -> Result {
match items.find(&id) { match items.find(id) {
Some(node_item(it, _)) => query(it), Some(node_item(it, _)) => query(it),
_ => die!(error_msg) _ => die!(error_msg)
} }

View file

@@ -40,7 +40,7 @@ pub fn expand_expr(exts: HashMap<~str, SyntaxExtension>, cx: ext_ctxt,
/* using idents and token::special_idents would make the /* using idents and token::special_idents would make the
the macro names be hygienic */ the macro names be hygienic */
let extname = cx.parse_sess().interner.get(pth.idents[0]); let extname = cx.parse_sess().interner.get(pth.idents[0]);
match exts.find(extname) { match exts.find(*extname) {
None => { None => {
cx.span_fatal(pth.span, cx.span_fatal(pth.span,
fmt!("macro undefined: '%s'", *extname)) fmt!("macro undefined: '%s'", *extname))
@@ -104,7 +104,7 @@ pub fn expand_mod_items(exts: HashMap<~str, SyntaxExtension>, cx: ext_ctxt,
ast::meta_name_value(ref n, _) => (*n), ast::meta_name_value(ref n, _) => (*n),
ast::meta_list(ref n, _) => (*n) ast::meta_list(ref n, _) => (*n)
}; };
match exts.find(&mname) { match exts.find(mname) {
None | Some(NormalTT(_)) | Some(ItemTT(*)) => items, None | Some(NormalTT(_)) | Some(ItemTT(*)) => items,
Some(ItemDecorator(dec_fn)) => { Some(ItemDecorator(dec_fn)) => {
cx.bt_push(ExpandedFrom({call_site: attr.span, cx.bt_push(ExpandedFrom({call_site: attr.span,
@@ -161,7 +161,7 @@ pub fn expand_item_mac(exts: HashMap<~str, SyntaxExtension>,
}; };
let extname = cx.parse_sess().interner.get(pth.idents[0]); let extname = cx.parse_sess().interner.get(pth.idents[0]);
let expanded = match exts.find(extname) { let expanded = match exts.find(*extname) {
None => cx.span_fatal(pth.span, None => cx.span_fatal(pth.span,
fmt!("macro undefined: '%s!'", *extname)), fmt!("macro undefined: '%s!'", *extname)),
@@ -224,7 +224,7 @@ pub fn expand_stmt(exts: HashMap<~str, SyntaxExtension>, cx: ext_ctxt,
assert(vec::len(pth.idents) == 1u); assert(vec::len(pth.idents) == 1u);
let extname = cx.parse_sess().interner.get(pth.idents[0]); let extname = cx.parse_sess().interner.get(pth.idents[0]);
let (fully_expanded, sp) = match exts.find(extname) { let (fully_expanded, sp) = match exts.find(*extname) {
None => None =>
cx.span_fatal(pth.span, fmt!("macro undefined: '%s'", *extname)), cx.span_fatal(pth.span, fmt!("macro undefined: '%s'", *extname)),

View file

@@ -59,11 +59,11 @@ pub fn add_new_extension(cx: ext_ctxt, sp: span, name: ident,
arg_reader as reader, argument_gram); arg_reader as reader, argument_gram);
// Extract the arguments: // Extract the arguments:
let lhses:~[@named_match] = match argument_map.get(&lhs_nm) { let lhses:~[@named_match] = match argument_map.get(lhs_nm) {
@matched_seq(s, _) => s, @matched_seq(s, _) => s,
_ => cx.span_bug(sp, ~"wrong-structured lhs") _ => cx.span_bug(sp, ~"wrong-structured lhs")
}; };
let rhses:~[@named_match] = match argument_map.get(&rhs_nm) { let rhses:~[@named_match] = match argument_map.get(rhs_nm) {
@matched_seq(s, _) => s, @matched_seq(s, _) => s,
_ => cx.span_bug(sp, ~"wrong-structured rhs") _ => cx.span_bug(sp, ~"wrong-structured rhs")
}; };

View file

@@ -111,7 +111,7 @@ pure fn lookup_cur_matched_by_matched(r: tt_reader,
} }
fn lookup_cur_matched(r: tt_reader, name: ident) -> @named_match { fn lookup_cur_matched(r: tt_reader, name: ident) -> @named_match {
lookup_cur_matched_by_matched(r, r.interpolations.get(&name)) lookup_cur_matched_by_matched(r, r.interpolations.get(name))
} }
enum lis { enum lis {
lis_unconstrained, lis_constraint(uint, ident), lis_contradiction(~str) lis_unconstrained, lis_constraint(uint, ident), lis_contradiction(~str)

View file

@@ -44,7 +44,7 @@ pub trait Interner<T:Eq IterBytes Hash Const Copy> {
pub impl <T:Eq IterBytes Hash Const Copy> hash_interner<T>: Interner<T> { pub impl <T:Eq IterBytes Hash Const Copy> hash_interner<T>: Interner<T> {
fn intern(val: T) -> uint { fn intern(val: T) -> uint {
match self.map.find(&val) { match self.map.find(val) {
Some(idx) => return idx, Some(idx) => return idx,
None => { None => {
let new_idx = self.vect.len(); let new_idx = self.vect.len();

View file

@@ -20,5 +20,5 @@ pub type header_map = HashMap<~str, @DVec<@~str>>;
// the unused ty param is necessary so this gets monomorphized // the unused ty param is necessary so this gets monomorphized
pub fn request<T: Copy>(req: header_map) { pub fn request<T: Copy>(req: header_map) {
let _x = copy *(copy *req.get(&~"METHOD"))[0u]; let _x = copy *(copy *req.get(~"METHOD"))[0u];
} }

View file

@@ -42,7 +42,7 @@ fn old_int_benchmarks(rng: @rand::Rng, num_keys: uint, results: &mut Results) {
} }
for uint::range(0, num_keys) |i| { for uint::range(0, num_keys) |i| {
assert map.get(&i) == i+1; assert map.get(i) == i+1;
} }
} }
} }
@@ -81,7 +81,7 @@ fn old_str_benchmarks(rng: @rand::Rng, num_keys: uint, results: &mut Results) {
for uint::range(0, num_keys) |i| { for uint::range(0, num_keys) |i| {
let s = uint::to_str(i); let s = uint::to_str(i);
assert map.get(&s) == i; assert map.get(s) == i;
} }
} }
} }

View file

@@ -83,7 +83,7 @@ fn str_set() {
let mut found = 0; let mut found = 0;
for int::range(0, 1000) |_i| { for int::range(0, 1000) |_i| {
match s.find(&r.gen_str(10)) { match s.find(r.gen_str(10)) {
Some(_) => { found += 1; } Some(_) => { found += 1; }
None => { } None => { }
} }

View file

@@ -69,7 +69,7 @@ fn sort_and_fmt(mm: HashMap<~[u8], uint>, total: uint) -> ~str {
// given a map, search for the frequency of a pattern // given a map, search for the frequency of a pattern
fn find(mm: HashMap<~[u8], uint>, key: ~str) -> uint { fn find(mm: HashMap<~[u8], uint>, key: ~str) -> uint {
match mm.find(&str::to_bytes(str::to_lower(key))) { match mm.find(str::to_bytes(str::to_lower(key))) {
option::None => { return 0u; } option::None => { return 0u; }
option::Some(num) => { return num; } option::Some(num) => { return num; }
} }

View file

@@ -136,7 +136,7 @@ fn writer(path: ~str, pport: pipes::Port<Line>, size: uint)
while prev <= i { while prev <= i {
if lines.contains_key_ref(&prev) { if lines.contains_key_ref(&prev) {
debug!("WS %u", prev); debug!("WS %u", prev);
cout.write(lines.get(&prev)); cout.write(lines.get(prev));
done += 1_u; done += 1_u;
lines.remove(&prev); lines.remove(&prev);
prev += 1_u; prev += 1_u;

View file

@@ -49,7 +49,7 @@ mod map_reduce {
fn emit(im: oldmap::HashMap<~str, int>, ctrl: SharedChan<ctrl_proto>, key: ~str, fn emit(im: oldmap::HashMap<~str, int>, ctrl: SharedChan<ctrl_proto>, key: ~str,
val: ~str) { val: ~str) {
let mut c; let mut c;
match im.find(&key) { match im.find(copy key) {
Some(_c) => { c = _c } Some(_c) => { c = _c }
None => { None => {
let (pp, cc) = stream(); let (pp, cc) = stream();
@@ -88,7 +88,7 @@ mod map_reduce {
mapper_done => { num_mappers -= 1; } mapper_done => { num_mappers -= 1; }
find_reducer(k, cc) => { find_reducer(k, cc) => {
let mut c; let mut c;
match reducers.find(&str::from_bytes(k)) { match reducers.find(str::from_bytes(k)) {
Some(_c) => { c = _c; } Some(_c) => { c = _c; }
None => { c = 0; } None => { c = 0; }
} }
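One caller-side consequence shows up in the map_reduce hunk just above: with by-value keys, looking up an owned `~str` consumes it, so the call site grows a `copy key`. Below is a hedged modern-Rust analogue of that cost, using `String`/`clone` in place of `~str`/`copy`; the helper names `find_by_value` and `find_by_ref` are made up for illustration and are not part of this diff.

```rust
use std::collections::HashMap;

// By-value key, mirroring the post-revert `fn find(&self, k: K)`:
// the caller gives up (or must clone) an owned key just to do a lookup.
fn find_by_value(map: &HashMap<String, i32>, k: String) -> Option<i32> {
    map.get(&k).copied()
}

// By-reference key, mirroring the `fn find(&self, k: &K)` style being reverted:
// the caller keeps ownership and no clone is needed.
fn find_by_ref(map: &HashMap<String, i32>, k: &str) -> Option<i32> {
    map.get(k).copied()
}

fn main() {
    let mut map = HashMap::new();
    map.insert("cat".to_string(), 10);

    let key = "cat".to_string();
    assert_eq!(find_by_ref(&map, &key), Some(10));
    // With the by-value style we must clone `key` to keep using it afterwards,
    // just as the hunk above reintroduces `copy key` at the call site.
    assert_eq!(find_by_value(&map, key.clone()), Some(10));
    println!("{} -> {:?}", key, map.get(&key)); // key is still usable here
}
```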