Port the compiler to the expr foo::<T> syntax.
Parent: d9327a61bb
Commit: af21a27002
37 changed files with 335 additions and 329 deletions
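The change is mechanical throughout: wherever an expression supplies explicit type arguments, the bracketed form gives way to `::<T>`. A minimal before/after pair, lifted directly from the hunks below (nothing here is invented):

    // before this commit: type arguments in expression position use square brackets
    let n_inputs = vec::len[str](match.free);
    ti.drop_glue = some[ValueRef](glue_fn);

    // after this commit: the same calls use the ::<T> form
    let n_inputs = vec::len::<str>(match.free);
    ti.drop_glue = some::<ValueRef>(glue_fn);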
@@ -150,9 +150,9 @@ fn compile_input(sess: session::session, cfg: ast::crate_cfg, input: str,
 time(time_passes, "freevar finding",
 bind freevars::annotate_freevars(sess, d, crate));
 let ty_cx = ty::mk_ctxt(sess, d, ast_map, freevars);
-time[()](time_passes, "typechecking",
+time::<()>(time_passes, "typechecking",
 bind typeck::check_crate(ty_cx, crate));
-time[()](time_passes, "alt checking",
+time::<()>(time_passes, "alt checking",
 bind middle::check_alt::check_crate(ty_cx, crate));
 if sess.get_opts().run_typestate {
 time(time_passes, "typestate checking",
@@ -160,15 +160,15 @@ fn compile_input(sess: session::session, cfg: ast::crate_cfg, input: str,
 }
 time(time_passes, "alias checking",
 bind middle::alias::check_crate(ty_cx, crate));
-time[()](time_passes, "kind checking",
+time::<()>(time_passes, "kind checking",
 bind kind::check_crate(ty_cx, crate));
 if sess.get_opts().no_trans { ret; }
 let llmod =
-time[llvm::llvm::ModuleRef](time_passes, "translation",
+time::<llvm::llvm::ModuleRef>(time_passes, "translation",
 bind trans::trans_crate(sess, crate,
 ty_cx, output,
 ast_map));
-time[()](time_passes, "LLVM passes",
+time::<()>(time_passes, "LLVM passes",
 bind link::write::run_passes(sess, llmod, output));
 }
@@ -459,14 +459,14 @@ fn main(args: vec<str>) {
 }
 let sopts = build_session_options(binary, match, binary_dir);
 let sess = build_session(sopts);
-let n_inputs = vec::len[str](match.free);
+let n_inputs = vec::len::<str>(match.free);
 let output_file = getopts::opt_maybe_str(match, "o");
 let glue = opt_present(match, "glue");
 if glue {
 if n_inputs > 0u {
 sess.fatal("No input files allowed with --glue.");
 }
-let out = option::from_maybe[str]("glue.bc", output_file);
+let out = option::from_maybe::<str>("glue.bc", output_file);
 middle::trans::make_common_glue(sess, out);
 ret;
 }
@@ -479,26 +479,28 @@ fn main(args: vec<str>) {
 let saved_out_filename: str = "";
 let cfg = build_configuration(sess, binary, ifile);
 let expand =
-option::map[str,
-pp_mode](bind parse_pretty(sess, _),
-getopts::opt_default(match, "expand", "normal"));
+option::map::<str,
+pp_mode>(bind parse_pretty(sess, _),
+getopts::opt_default(match, "expand",
+"normal"));
 alt expand {
-some[pp_mode](ppm) {
+some::<pp_mode>(ppm) {
 pretty_print_input(sess, cfg, ifile, ppm, true);
 ret;
 }
-none[pp_mode]. {/* continue */ }
+none::<pp_mode>. {/* continue */ }
 }
 let pretty =
-option::map[str,
-pp_mode](bind parse_pretty(sess, _),
-getopts::opt_default(match, "pretty", "normal"));
+option::map::<str,
+pp_mode>(bind parse_pretty(sess, _),
+getopts::opt_default(match, "pretty",
+"normal"));
 alt pretty {
-some[pp_mode](ppm) {
+some::<pp_mode>(ppm) {
 pretty_print_input(sess, cfg, ifile, ppm, false);
 ret;
 }
-none[pp_mode]. {/* continue */ }
+none::<pp_mode>. {/* continue */ }
 }
 let ls = opt_present(match, "ls");
 if ls {
@@ -178,7 +178,7 @@ fn remove_meta_items_by_name(items: &[@ast::meta_item], name: str) ->
 fn require_unique_names(sess: &session::session,
 metas: &[@ast::meta_item]) {
-let map = map::mk_hashmap[str, ()](str::hash, str::eq);
+let map = map::mk_hashmap::<str, ()>(str::hash, str::eq);
 for meta: @ast::meta_item in metas {
 let name = get_meta_item_name(meta);
 if map.contains_key(name) {
@@ -1303,7 +1303,7 @@ obj builder(B: BuilderRef, terminated: @mutable bool,
 ValueRef {
 assert (!*terminated);
 let phi = llvm::LLVMBuildPhi(B, Ty, str::buf(""));
-assert (vec::len[ValueRef](vals) == vec::len[BasicBlockRef](bbs));
+assert (vec::len::<ValueRef>(vals) == vec::len::<BasicBlockRef>(bbs));
 llvm::LLVMAddIncoming(phi, vec::to_ptr(vals), vec::to_ptr(bbs),
 vec::len(vals));
 ret phi;
@@ -1311,7 +1311,7 @@ obj builder(B: BuilderRef, terminated: @mutable bool,
 fn AddIncomingToPhi(phi: ValueRef, vals: &[ValueRef],
 bbs: &[BasicBlockRef]) {
-assert (vec::len[ValueRef](vals) == vec::len[BasicBlockRef](bbs));
+assert (vec::len::<ValueRef>(vals) == vec::len::<BasicBlockRef>(bbs));
 llvm::LLVMAddIncoming(phi, vec::to_ptr(vals), vec::to_ptr(bbs),
 vec::len(vals));
 }
@@ -1440,7 +1440,7 @@ obj type_names(type_names: std::map::hashmap<TypeRef, str>,
 }
 fn mk_type_names() -> type_names {
-let nt = std::map::new_str_hash[TypeRef]();
+let nt = std::map::new_str_hash::<TypeRef>();
 fn hash(t: &TypeRef) -> uint { ret t as uint; }
@@ -1448,7 +1448,7 @@ fn mk_type_names() -> type_names {
 let hasher: std::map::hashfn<TypeRef> = hash;
 let eqer: std::map::eqfn<TypeRef> = eq;
-let tn = std::map::mk_hashmap[TypeRef, str](hasher, eqer);
+let tn = std::map::mk_hashmap::<TypeRef, str>(hasher, eqer);
 ret type_names(tn, nt);
 }
@@ -1503,7 +1503,7 @@ fn type_to_str_inner(names: type_names, outer0: &[TypeRef], ty: TypeRef) ->
 let s = "fn(";
 let out_ty: TypeRef = llvm::LLVMGetReturnType(ty);
 let n_args: uint = llvm::LLVMCountParamTypes(ty);
-let args: [TypeRef] = vec::init_elt[TypeRef](0 as TypeRef, n_args);
+let args: [TypeRef] = vec::init_elt::<TypeRef>(0 as TypeRef, n_args);
 llvm::LLVMGetParamTypes(ty, vec::to_ptr(args));
 s += tys_str(names, outer, args);
 s += ") -> ";
@@ -1515,7 +1515,7 @@ fn type_to_str_inner(names: type_names, outer0: &[TypeRef], ty: TypeRef) ->
 9 {
 let s: str = "{";
 let n_elts: uint = llvm::LLVMCountStructElementTypes(ty);
-let elts: [TypeRef] = vec::init_elt[TypeRef](0 as TypeRef, n_elts);
+let elts: [TypeRef] = vec::init_elt::<TypeRef>(0 as TypeRef, n_elts);
 llvm::LLVMGetStructElementTypes(ty, vec::to_ptr(elts));
 s += tys_str(names, outer, elts);
 s += "}";
@@ -1534,7 +1534,7 @@ fn type_to_str_inner(names: type_names, outer0: &[TypeRef], ty: TypeRef) ->
 for tout: TypeRef in outer0 {
 i += 1u;
 if tout as int == ty as int {
-let n: uint = vec::len[TypeRef](outer0) - i;
+let n: uint = vec::len::<TypeRef>(outer0) - i;
 ret "*\\" + std::int::str(n as int);
 }
 }
@@ -32,7 +32,7 @@ export list_file_metadata;
 fn read_crates(sess: session::session, crate: &ast::crate) {
 let e =
 @{sess: sess,
-crate_cache: @std::map::new_str_hash[int](),
+crate_cache: @std::map::new_str_hash::<int>(),
 library_search_paths: sess.get_opts().library_search_paths,
 mutable next_crate_num: 1};
 let v =
@@ -186,7 +186,7 @@ fn find_library_crate_aux(nn: &{prefix: str, suffix: str}, crate_name: str,
 fn get_metadata_section(filename: str) -> option::t<@[u8]> {
 let b = str::buf(filename);
 let mb = llvm::LLVMRustCreateMemoryBufferWithContentsOfFile(b);
-if mb as int == 0 { ret option::none[@[u8]]; }
+if mb as int == 0 { ret option::none::<@[u8]>; }
 let of = mk_object_file(mb);
 let si = mk_section_iter(of.llof);
 while llvm::LLVMIsSectionIteratorAtEnd(of.llof, si.llsi) == False {
@@ -196,11 +196,11 @@ fn get_metadata_section(filename: str) -> option::t<@[u8]> {
 let cbuf = llvm::LLVMGetSectionContents(si.llsi);
 let csz = llvm::LLVMGetSectionSize(si.llsi);
 let cvbuf: *u8 = std::unsafe::reinterpret_cast(cbuf);
-ret option::some[@[u8]](@vec::unsafe::from_buf(cvbuf, csz));
+ret option::some::<@[u8]>(@vec::unsafe::from_buf(cvbuf, csz));
 }
 llvm::LLVMMoveToNextSection(si.llsi);
 }
-ret option::none[@[u8]];
+ret option::none::<@[u8]>;
 }
 fn load_library_crate(sess: &session::session, span: span, ident: &ast::ident,
@@ -249,7 +249,7 @@ fn resolve_crate_deps(e: env, cdata: &@[u8]) -> cstore::cnum_map {
 log "resolving deps of external crate";
 // The map from crate numbers in the crate we're resolving to local crate
 // numbers
-let cnum_map = new_int_hash[ast::crate_num]();
+let cnum_map = new_int_hash::<ast::crate_num>();
 for dep: decoder::crate_dep in decoder::get_crate_deps(cdata) {
 let extrn_cnum = dep.cnum;
 let cname = dep.ident;
@@ -52,8 +52,8 @@ type use_crate_map = map::hashmap<ast::node_id, ast::crate_num>;
 fn p(cstore: &cstore) -> cstore_private { alt cstore { private(p) { p } } }
 fn mk_cstore() -> cstore {
-let meta_cache = map::new_int_hash[crate_metadata]();
-let crate_map = map::new_int_hash[ast::crate_num]();
+let meta_cache = map::new_int_hash::<crate_metadata>();
+let crate_map = map::new_int_hash::<ast::crate_num>();
 ret private(@{metas: meta_cache,
 use_crate_map: crate_map,
 mutable used_crate_files: ~[],
@@ -48,7 +48,7 @@ fn lookup_hash(d: &ebml::doc, eq_fn: fn(&[u8]) -> bool , hash: uint) ->
 let belt = tag_index_buckets_bucket_elt;
 for each elt: ebml::doc in ebml::tagged_docs(bucket, belt) {
 let pos = ebml::be_uint_from_bytes(elt.data, elt.start, 4u);
-if eq_fn(vec::slice[u8](*elt.data, elt.start + 4u, elt.end)) {
+if eq_fn(vec::slice::<u8>(*elt.data, elt.start + 4u, elt.end)) {
 result += ~[ebml::doc_at(d.data, pos)];
 }
 }
@@ -63,8 +63,8 @@ fn maybe_find_item(item_id: int, items: &ebml::doc) ->
 let eqer = bind eq_item(_, item_id);
 let found = lookup_hash(items, eqer, hash_node_id(item_id));
 if vec::len(found) == 0u {
-ret option::none[ebml::doc];
-} else { ret option::some[ebml::doc](found.(0)); }
+ret option::none::<ebml::doc>;
+} else { ret option::some::<ebml::doc>(found.(0)); }
 }
 fn find_item(item_id: int, items: &ebml::doc) -> ebml::doc {
@@ -269,7 +269,7 @@ fn family_has_type_params(fam_ch: u8) -> bool {
 fn read_path(d: &ebml::doc) -> {path: str, pos: uint} {
 let desc = ebml::doc_data(d);
 let pos = ebml::be_uint_from_bytes(@desc, 0u, 4u);
-let pathbytes = vec::slice[u8](desc, 4u, vec::len[u8](desc));
+let pathbytes = vec::slice::<u8>(desc, 4u, vec::len::<u8>(desc));
 let path = str::unsafe_from_bytes(pathbytes);
 ret {path: path, pos: pos};
 }
@@ -176,7 +176,7 @@ fn def_to_str(did: &def_id) -> str { ret #fmt("%d:%d", did.crate, did.node); }
 fn encode_type_param_kinds(ebml_w: &ebml::writer, tps: &[ty_param]) {
 ebml::start_tag(ebml_w, tag_items_data_item_ty_param_kinds);
-ebml::write_vint(ebml_w.writer, vec::len[ty_param](tps));
+ebml::write_vint(ebml_w.writer, vec::len::<ty_param>(tps));
 for tp: ty_param in tps {
 let c = alt tp.kind {
 kind_unique. { 'u' }
@@ -236,7 +236,7 @@ fn encode_tag_variant_info(ecx: &@encode_ctxt, ebml_w: &ebml::writer,
 encode_tag_id(ebml_w, local_def(id));
 encode_type(ecx, ebml_w,
 node_id_to_monotype(ecx.ccx.tcx, variant.node.id));
-if vec::len[variant_arg](variant.node.args) > 0u {
+if vec::len::<variant_arg>(variant.node.args) > 0u {
 encode_symbol(ecx, ebml_w, variant.node.id);
 }
 encode_discriminant(ecx, ebml_w, variant.node.id);
@@ -71,7 +71,7 @@ fn parse_constrs(st: @pstate, sd: str_def) -> [@ty::constr] {
 do {
 next(st);
 let one: @ty::constr =
-parse_constr[uint](st, sd, parse_constr_arg);
+parse_constr::<uint>(st, sd, parse_constr_arg);
 rslt += ~[one];
 } while peek(st) as char == ';'
 }
@@ -88,7 +88,7 @@ fn parse_ty_constrs(st: @pstate, sd: str_def) -> [@ty::type_constr] {
 do {
 next(st);
 let one: @ty::type_constr =
-parse_constr[path](st, sd, parse_ty_constr_arg);
+parse_constr::<path>(st, sd, parse_ty_constr_arg);
 rslt += ~[one];
 } while peek(st) as char == ';'
 }
@@ -406,14 +406,14 @@ fn parse_ty_fn(st: @pstate, sd: str_def) ->
 // Rust metadata parsing
 fn parse_def_id(buf: &[u8]) -> ast::def_id {
 let colon_idx = 0u;
-let len = vec::len[u8](buf);
+let len = vec::len::<u8>(buf);
 while colon_idx < len && buf.(colon_idx) != ':' as u8 { colon_idx += 1u; }
 if colon_idx == len {
 log_err "didn't find ':' when parsing def id";
 fail;
 }
-let crate_part = vec::slice[u8](buf, 0u, colon_idx);
-let def_part = vec::slice[u8](buf, colon_idx + 1u, len);
+let crate_part = vec::slice::<u8>(buf, 0u, colon_idx);
+let def_part = vec::slice::<u8>(buf, colon_idx + 1u, len);
 let crate_part_vec = ~[];
 let def_part_vec = ~[];
@@ -46,7 +46,7 @@ fn check_crate(tcx: ty::ctxt, crate: &@ast::crate) {
 visit_item: bind visit_item(cx, _, _, _),
 visit_expr: bind visit_expr(cx, _, _, _),
 visit_decl: bind visit_decl(cx, _, _, _)
-with *visit::default_visitor[scope]()};
+with *visit::default_visitor::<scope>()};
 visit::visit_crate(*crate, @~[], visit::mk_vt(v));
 tcx.sess.abort_if_errors();
 }
@@ -18,13 +18,13 @@ fn map_crate(c: &crate) -> map {
 // FIXME: This is using an adapter to convert the smallintmap
 // interface to the hashmap interface. It would be better to just
 // convert everything to use the smallintmap.
-let map = new_smallintmap_int_adapter[ast_node]();
+let map = new_smallintmap_int_adapter::<ast_node>();
 let v_map =
 @{visit_item: bind map_item(map, _, _, _),
 visit_native_item: bind map_native_item(map, _, _, _),
 visit_expr: bind map_expr(map, _, _, _)
-with *visit::default_visitor[()]()};
+with *visit::default_visitor::<()>()};
 visit::visit_crate(c, (), visit::mk_vt(v_map));
 ret map;
 }
@@ -110,7 +110,7 @@ fn new_smallintmap_adapter<@K,
 }
 }
-let map = smallintmap::mk[V]();
+let map = smallintmap::mk::<V>();
 ret adapter(map, key_idx, idx_key);
 }
@@ -5,7 +5,7 @@ fn check_crate(tcx: &ty::ctxt, crate: &@crate) {
 let v =
 @{visit_expr: bind check_expr(tcx, _, _, _),
 visit_local: bind check_local(tcx, _, _, _)
-with *visit::default_visitor[()]()};
+with *visit::default_visitor::<()>()};
 visit::visit_crate(*crate, (), visit::mk_vt(v));
 tcx.sess.abort_if_errors();
 }
@@ -86,7 +86,7 @@ fn new_ext_hash() -> ext_hash {
 ret util::common::def_eq(v1.did, v2.did) &&
 str::eq(v1.ident, v2.ident) && v1.ns == v2.ns;
 }
-ret std::map::mk_hashmap[key, def](hash, eq);
+ret std::map::mk_hashmap::<key, def>(hash, eq);
 }
 tag mod_index_entry {
@@ -135,11 +135,11 @@ fn resolve_crate(sess: session, amap: &ast_map::map, crate: @ast::crate) ->
 def_map {
 let e =
 @{cstore: sess.get_cstore(),
-def_map: new_int_hash[def](),
+def_map: new_int_hash::<def>(),
 ast_map: amap,
-imports: new_int_hash[import_state](),
-mod_map: new_int_hash[@indexed_mod](),
-ext_map: new_def_hash[[ident]](),
+imports: new_int_hash::<import_state>(),
+mod_map: new_int_hash::<@indexed_mod>(),
+ext_map: new_def_hash::<[ident]>(),
 ext_cache: new_ext_hash(),
 mutable reported: ~[],
 sess: sess};
@@ -159,7 +159,7 @@ fn map_crate(e: &@env, c: &@ast::crate) {
 let v_map_mod =
 @{visit_view_item: bind index_vi(e, _, _, _),
 visit_item: bind index_i(e, _, _, _)
-with *visit::default_visitor[scopes]()};
+with *visit::default_visitor::<scopes>()};
 visit::visit_crate(*c, cons(scope_crate, @nil), visit::mk_vt(v_map_mod));
 // Register the top-level mod
@@ -167,7 +167,7 @@ fn map_crate(e: &@env, c: &@ast::crate) {
 @{m: some(c.node.module),
 index: index_mod(c.node.module),
 mutable glob_imports: ~[],
-glob_imported_names: new_str_hash[import_state]()});
+glob_imported_names: new_str_hash::<import_state>()});
 fn index_vi(e: @env, i: &@ast::view_item, sc: &scopes, v: &vt<scopes>) {
 alt i.node {
 ast::view_item_import(_, ids, id) {
@@ -180,7 +180,7 @@ fn map_crate(e: &@env, c: &@ast::crate) {
 visit_item_with_scope(i, sc, v);
 alt i.node {
 ast::item_mod(md) {
-let s = new_str_hash[import_state]();
+let s = new_str_hash::<import_state>();
 e.mod_map.insert(i.id,
 @{m: some(md),
 index: index_mod(md),
@@ -188,9 +188,9 @@ fn map_crate(e: &@env, c: &@ast::crate) {
 glob_imported_names: s});
 }
 ast::item_native_mod(nmd) {
-let s = new_str_hash[import_state]();
+let s = new_str_hash::<import_state>();
 e.mod_map.insert(i.id,
-@{m: none[ast::_mod],
+@{m: none::<ast::_mod>,
 index: index_nmod(nmd),
 mutable glob_imports: ~[],
 glob_imported_names: s});
@@ -203,7 +203,7 @@ fn map_crate(e: &@env, c: &@ast::crate) {
 let v_link_glob =
 @{visit_view_item: bind link_glob(e, _, _, _),
 visit_item: visit_item_with_scope
-with *visit::default_visitor[scopes]()};
+with *visit::default_visitor::<scopes>()};
 visit::visit_crate(*c, cons(scope_crate, @nil),
 visit::mk_vt(v_link_glob));
 fn link_glob(e: @env, vi: &@ast::view_item, sc: &scopes, v: &vt<scopes>) {
@@ -387,7 +387,7 @@ fn visit_arm_with_scope(a: &ast::arm, sc: &scopes, v: &vt<scopes>) {
 fn visit_expr_with_scope(x: &@ast::expr, sc: &scopes, v: &vt<scopes>) {
 alt x.node {
 ast::expr_for(decl, coll, blk) | ast::expr_for_each(decl, coll, blk) {
-let new_sc = cons[scope](scope_loop(decl), @sc);
+let new_sc = cons::<scope>(scope_loop(decl), @sc);
 v.visit_expr(coll, sc, v);
 v.visit_local(decl, new_sc, v);
 v.visit_block(blk, new_sc, v);
@@ -684,7 +684,7 @@ fn lookup_in_scope(e: &env, sc: scopes, sp: &span, name: &ident,
 }
 }
 }
-ret none[def];
+ret none::<def>;
 }
 let left_fn = false;
 // Used to determine whether obj fields are in scope
@@ -692,7 +692,7 @@ fn lookup_in_scope(e: &env, sc: scopes, sp: &span, name: &ident,
 let left_fn_level2 = false;
 while true {
 alt { sc } {
-nil. { ret none[def]; }
+nil. { ret none::<def>; }
 cons(hd, tl) {
 let fnd = in_scope(e, sp, name, hd, ns);
 if !is_none(fnd) {
@@ -734,7 +734,7 @@ fn lookup_in_ty_params(name: &ident, ty_params: &[ast::ty_param]) ->
 if str::eq(tp.ident, name) { ret some(ast::def_ty_arg(i,tp.kind)); }
 i += 1u;
 }
-ret none[def];
+ret none::<def>;
 }
 fn lookup_in_pat(name: &ident, pat: &@ast::pat) -> option::t<def_id> {
@@ -758,10 +758,10 @@ fn lookup_in_fn(name: &ident, decl: &ast::fn_decl,
 ret some(ast::def_arg(local_def(a.id)));
 }
 }
-ret none[def];
+ret none::<def>;
 }
 ns_type. { ret lookup_in_ty_params(name, ty_params); }
-_ { ret none[def]; }
+_ { ret none::<def>; }
 }
 }
@@ -774,10 +774,10 @@ fn lookup_in_obj(name: &ident, ob: &ast::_obj, ty_params: &[ast::ty_param],
 ret some(ast::def_obj_field(local_def(f.id)));
 }
 }
-ret none[def];
+ret none::<def>;
 }
 ns_type. { ret lookup_in_ty_params(name, ty_params); }
-_ { ret none[def]; }
+_ { ret none::<def>; }
 }
 }
@@ -835,7 +835,7 @@ fn lookup_in_block(name: &ident, b: &ast::blk_, pos: uint, loc_pos: uint,
 _ { }
 }
 }
-ret none[def];
+ret none::<def>;
 }
 fn found_def_item(i: &@ast::item, ns: namespace) -> option::t<def> {
@@ -880,7 +880,7 @@ fn found_def_item(i: &@ast::item, ns: namespace) -> option::t<def> {
 }
 _ { }
 }
-ret none[def];
+ret none::<def>;
 }
 fn lookup_in_mod_strict(e: &env, sc: &scopes, m: def, sp: &span, name: &ident,
@@ -929,7 +929,7 @@ fn found_view_item(e: &env, vi: @ast::view_item, ns: namespace) ->
 ret lookup_import(e, local_def(id), ns);
 }
 ast::view_item_import_glob(_, defid) {
-ret none[def]; //will be handled in the fallback glob pass
+ret none::<def>; //will be handled in the fallback glob pass
 }
 }
@@ -959,7 +959,7 @@ fn lookup_in_local_mod(e: &env, node_id: node_id, sp: &span, id: &ident,
 if dr == outside && !ast::is_exported(id, option::get(info.m)) {
 // if we're in a native mod, then dr==inside, so info.m is some _mod
-ret none[def]; // name is not visible
+ret none::<def>; // name is not visible
 }
 alt info.index.find(id) {
@@ -1027,7 +1027,7 @@ fn lookup_glob_in_mod(e: &env, info: @indexed_mod, sp: &span, id: &ident,
 alt info.glob_imported_names.get(id) {
 todo(_, _) { e.sess.bug("Shouldn't've put a todo in."); }
 resolving(sp) {
-ret none[def]; //circularity is okay in import globs
+ret none::<def>; //circularity is okay in import globs
 }
 resolved(val, typ, md) {
@@ -1052,7 +1052,7 @@ fn lookup_in_mie(e: &env, mie: &mod_index_entry, ns: namespace) ->
 let vid = variants.(variant_idx).node.id;
 ret some(ast::def_variant(local_def(item.id),
 local_def(vid)));
-} else { ret none[def]; }
+} else { ret none::<def>; }
 }
 }
 }
@@ -1072,7 +1072,7 @@ fn lookup_in_mie(e: &env, mie: &mod_index_entry, ns: namespace) ->
 }
 _ { }
 }
-ret none[def];
+ret none::<def>;
 }
@@ -1080,13 +1080,13 @@ fn lookup_in_mie(e: &env, mie: &mod_index_entry, ns: namespace) ->
 fn add_to_index(index: &hashmap<ident, list<mod_index_entry>>, id: &ident,
 ent: &mod_index_entry) {
 alt index.find(id) {
-none. { index.insert(id, cons(ent, @nil[mod_index_entry])); }
+none. { index.insert(id, cons(ent, @nil::<mod_index_entry>)); }
 some(prev) { index.insert(id, cons(ent, @prev)); }
 }
 }
 fn index_mod(md: &ast::_mod) -> mod_index {
-let index = new_str_hash[list<mod_index_entry>]();
+let index = new_str_hash::<list<mod_index_entry>>();
 for it: @ast::view_item in md.view_items {
 alt it.node {
 ast::view_item_import(ident, _, _) | ast::view_item_use(ident, _, _)
@@ -1121,7 +1121,7 @@ fn index_mod(md: &ast::_mod) -> mod_index {
 }
 fn index_nmod(md: &ast::native_mod) -> mod_index {
-let index = new_str_hash[list<mod_index_entry>]();
+let index = new_str_hash::<list<mod_index_entry>>();
 for it: @ast::view_item in md.view_items {
 alt it.node {
 ast::view_item_use(ident, _, _) | ast::view_item_import(ident, _, _)
@@ -1163,7 +1163,7 @@ fn lookup_external(e: &env, cnum: int, ids: &[ident], ns: namespace) ->
 e.ext_map.insert(ast::def_id_of_def(d), ids);
 if ns == ns_for_def(d) { ret some(d); }
 }
-ret none[def];
+ret none::<def>;
 }
@@ -379,7 +379,7 @@ fn get_extern_const(externs: &hashmap<str, ValueRef>, llmod: ModuleRef,
 fn get_simple_extern_fn(externs: &hashmap<str, ValueRef>, llmod: ModuleRef,
 name: &str, n_args: int) -> ValueRef {
-let inputs = std::vec::init_elt[TypeRef](T_int(), n_args as uint);
+let inputs = std::vec::init_elt::<TypeRef>(T_int(), n_args as uint);
 let output = T_int();
 let t = T_fn(inputs, output);
 ret get_extern_fn(externs, llmod, name, lib::llvm::LLVMCCallConv, t);
@@ -389,7 +389,7 @@ fn trans_native_call(b: &builder, glues: @glue_fns, lltaskptr: ValueRef,
 externs: &hashmap<str, ValueRef>, tn: &type_names,
 llmod: ModuleRef, name: &str, pass_task: bool,
 args: &[ValueRef]) -> ValueRef {
-let n: int = std::vec::len[ValueRef](args) as int;
+let n: int = std::vec::len::<ValueRef>(args) as int;
 let llnative: ValueRef = get_simple_extern_fn(externs, llmod, name, n);
 let call_args: [ValueRef] = ~[];
 for a: ValueRef in args { call_args += ~[b.ZExtOrBitCast(a, T_int())]; }
@@ -712,7 +712,7 @@ fn GEP_tup_like(cx: &@block_ctxt, t: &ty::t, base: ValueRef, ixs: &[int]) ->
 fn split_type(ccx: &@crate_ctxt, t: &ty::t, ixs: &[int], n: uint) ->
 {prefix: [ty::t], target: ty::t} {
-let len: uint = std::vec::len[int](ixs);
+let len: uint = std::vec::len::<int>(ixs);
 // We don't support 0-index or 1-index GEPs: The former is nonsense
 // and the latter would only be meaningful if we supported non-0
 // values for the 0th index (we don't).
@@ -878,7 +878,7 @@ fn trans_malloc_boxed(cx: &@block_ctxt, t: ty::t) ->
 // tydesc if necessary.
 fn field_of_tydesc(cx: &@block_ctxt, t: &ty::t, escapes: bool, field: int) ->
 result {
-let ti = none[@tydesc_info];
+let ti = none::<@tydesc_info>;
 let tydesc = get_tydesc(cx, t, escapes, ti);
 ret rslt(tydesc.bcx,
 tydesc.bcx.build.GEP(tydesc.val, ~[C_int(0), C_int(field)]));
@@ -954,10 +954,10 @@ fn get_derived_tydesc(cx: &@block_ctxt, t: &ty::t, escapes: bool,
 let bcx = new_raw_block_ctxt(cx.fcx, cx.fcx.llderivedtydescs);
 let n_params: uint = ty::count_ty_params(bcx_tcx(bcx), t);
 let tys = linearize_ty_params(bcx, t);
-assert (n_params == std::vec::len[uint](tys.params));
-assert (n_params == std::vec::len[ValueRef](tys.descs));
+assert (n_params == std::vec::len::<uint>(tys.params));
+assert (n_params == std::vec::len::<ValueRef>(tys.descs));
 let root_ti = get_static_tydesc(bcx, t, tys.params);
-static_ti = some[@tydesc_info](root_ti);
+static_ti = some::<@tydesc_info>(root_ti);
 lazily_emit_all_tydesc_glue(cx, static_ti);
 let root = root_ti.tydesc;
 let sz = size_of(bcx, t);
@@ -1033,7 +1033,7 @@ fn get_tydesc(cx: &@block_ctxt, orig_t: &ty::t, escapes: bool,
 // Otherwise, generate a tydesc if necessary, and return it.
 let info = get_static_tydesc(cx, t, ~[]);
-static_ti = some[@tydesc_info](info);
+static_ti = some::<@tydesc_info>(info);
 ret rslt(cx, info.tydesc);
 }
@@ -1110,10 +1110,10 @@ fn declare_tydesc(cx: &@local_ctxt, sp: &span, t: &ty::t, ty_params: &[uint])
 tydesc: gvar,
 size: llsize,
 align: llalign,
-mutable copy_glue: none[ValueRef],
-mutable drop_glue: none[ValueRef],
-mutable free_glue: none[ValueRef],
-mutable cmp_glue: none[ValueRef],
+mutable copy_glue: none::<ValueRef>,
+mutable drop_glue: none::<ValueRef>,
+mutable free_glue: none::<ValueRef>,
+mutable cmp_glue: none::<ValueRef>,
 ty_params: ty_params};
 log "--- declare_tydesc " + ty_to_str(cx.ccx.tcx, t);
 ret info;
@@ -1150,7 +1150,7 @@ fn make_generic_glue_inner(cx: &@local_ctxt, sp: &span, t: &ty::t,
 if ty::type_has_dynamic_size(cx.ccx.tcx, t) {
 llty = T_ptr(T_i8());
 } else { llty = T_ptr(type_of(cx.ccx, sp, t)); }
-let ty_param_count = std::vec::len[uint](ty_params);
+let ty_param_count = std::vec::len::<uint>(ty_params);
 let lltyparams = llvm::LLVMGetParam(llfn, 3u);
 let copy_args_bcx = new_raw_block_ctxt(fcx, fcx.llcopyargs);
 let lltydescs = ~[mutable];
@@ -1341,7 +1341,7 @@ fn make_free_glue(cx: &@block_ctxt, v0: ValueRef, t: &ty::t) {
 cx.build.GEP(body,
 ~[C_int(0), C_int(abi::obj_body_elt_tydesc)]);
 let tydesc = cx.build.Load(tydescptr);
-let ti = none[@tydesc_info];
+let ti = none::<@tydesc_info>;
 call_tydesc_glue_full(cx, body, tydesc,
 abi::tydesc_field_drop_glue, ti);
 if (!bcx_ccx(cx).sess.get_opts().do_gc) {
@@ -1362,7 +1362,7 @@ fn make_free_glue(cx: &@block_ctxt, v0: ValueRef, t: &ty::t) {
 let tydescptr =
 cx.build.GEP(body,
 ~[C_int(0), C_int(abi::closure_elt_tydesc)]);
-let ti = none[@tydesc_info];
+let ti = none::<@tydesc_info>;
 call_tydesc_glue_full(cx, bindings, cx.build.Load(tydescptr),
 abi::tydesc_field_drop_glue, ti);
 if (!bcx_ccx(cx).sess.get_opts().do_gc) {
@@ -1571,7 +1571,7 @@ fn compare_scalar_types(cx: @block_ctxt, lhs: ValueRef, rhs: ValueRef,
 ret rslt(new_sub_block_ctxt(cx, "after_fail_dummy"), C_bool(false));
 }
 ty::ty_native(_) {
-trans_fail(cx, none[span],
+trans_fail(cx, none::<span>,
 "attempt to compare values of type native");
 ret rslt(new_sub_block_ctxt(cx, "after_fail_dummy"), C_bool(false));
 }
@@ -1757,7 +1757,7 @@ fn iter_structural_ty_full(cx: &@block_ctxt, av: ValueRef, t: &ty::t,
 fn iter_variant(cx: @block_ctxt, a_tup: ValueRef,
 variant: &ty::variant_info, tps: &[ty::t],
 tid: &ast::def_id, f: &val_and_ty_fn) -> result {
-if std::vec::len[ty::t](variant.args) == 0u {
+if std::vec::len::<ty::t>(variant.args) == 0u {
 ret rslt(cx, C_nil());
 }
 let fn_ty = variant.ctor_ty;
@@ -2012,7 +2012,7 @@ fn lazily_emit_tydesc_glue(cx: &@block_ctxt, field: int,
 let glue_fn =
 declare_generic_glue(lcx, ti.ty, T_glue_fn(*lcx.ccx),
 "copy");
-ti.copy_glue = some[ValueRef](glue_fn);
+ti.copy_glue = some::<ValueRef>(glue_fn);
 make_generic_glue(lcx, cx.sp, ti.ty, glue_fn,
 make_copy_glue, ti.ty_params,
 "take");
@@ -2030,7 +2030,7 @@ fn lazily_emit_tydesc_glue(cx: &@block_ctxt, field: int,
 let glue_fn =
 declare_generic_glue(lcx, ti.ty, T_glue_fn(*lcx.ccx),
 "drop");
-ti.drop_glue = some[ValueRef](glue_fn);
+ti.drop_glue = some::<ValueRef>(glue_fn);
 make_generic_glue(lcx, cx.sp, ti.ty, glue_fn,
 make_drop_glue, ti.ty_params,
 "drop");
@@ -2048,7 +2048,7 @@ fn lazily_emit_tydesc_glue(cx: &@block_ctxt, field: int,
 let glue_fn =
 declare_generic_glue(lcx, ti.ty, T_glue_fn(*lcx.ccx),
 "free");
-ti.free_glue = some[ValueRef](glue_fn);
+ti.free_glue = some::<ValueRef>(glue_fn);
 make_generic_glue(lcx, cx.sp, ti.ty, glue_fn,
 make_free_glue, ti.ty_params,
 "free");
@@ -2114,7 +2114,7 @@ fn call_tydesc_glue_full(cx: &@block_ctxt, v: ValueRef, tydesc: ValueRef,
 fn call_tydesc_glue(cx: &@block_ctxt, v: ValueRef, t: &ty::t, field: int) ->
 result {
-let ti: option::t<@tydesc_info> = none[@tydesc_info];
+let ti: option::t<@tydesc_info> = none::<@tydesc_info>;
 let td = get_tydesc(cx, t, false, ti);
 call_tydesc_glue_full(td.bcx, spill_if_immediate(td.bcx, v, t), td.val,
 field, ti);
@@ -2130,7 +2130,7 @@ fn call_cmp_glue(cx: &@block_ctxt, lhs: ValueRef, rhs: ValueRef, t: &ty::t,
 let llrhs = spill_if_immediate(cx, rhs, t);
 let llrawlhsptr = cx.build.BitCast(lllhs, T_ptr(T_i8()));
 let llrawrhsptr = cx.build.BitCast(llrhs, T_ptr(T_i8()));
-let ti = none[@tydesc_info];
+let ti = none::<@tydesc_info>;
 let r = get_tydesc(cx, t, false, ti);
 lazily_emit_tydesc_glue(cx, abi::tydesc_field_cmp_glue, ti);
 let lltydesc = r.val;
@@ -2545,10 +2545,10 @@ fn trans_vec_append(cx: &@block_ctxt, t: &ty::t, lhs: ValueRef, rhs: ValueRef)
 _ { }
 }
 let bcx = cx;
-let ti = none[@tydesc_info];
+let ti = none::<@tydesc_info>;
 let llvec_tydesc = get_tydesc(bcx, t, false, ti);
 bcx = llvec_tydesc.bcx;
-ti = none[@tydesc_info];
+ti = none::<@tydesc_info>;
 let llelt_tydesc = get_tydesc(bcx, elt_ty, false, ti);
 lazily_emit_tydesc_glue(cx, abi::tydesc_field_copy_glue, ti);
 lazily_emit_tydesc_glue(cx, abi::tydesc_field_drop_glue, ti);
@ -3346,13 +3346,13 @@ fn join_results(parent_cx: &@block_ctxt, t: TypeRef, ins: &[result]) ->
|
||||||
bbs += ~[r.bcx.llbb];
|
bbs += ~[r.bcx.llbb];
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
alt std::vec::len[result](live) {
|
alt std::vec::len::<result>(live) {
|
||||||
0u {
|
0u {
|
||||||
// No incoming edges are live, so we're in dead-code-land.
|
// No incoming edges are live, so we're in dead-code-land.
|
||||||
// Arbitrarily pick the first dead edge, since the caller
|
// Arbitrarily pick the first dead edge, since the caller
|
||||||
// is just going to propagate it outward.
|
// is just going to propagate it outward.
|
||||||
|
|
||||||
assert (std::vec::len[result](ins) >= 1u);
|
assert (std::vec::len::<result>(ins) >= 1u);
|
||||||
ret ins.(0);
|
ret ins.(0);
|
||||||
}
|
}
|
||||||
_ {/* fall through */ }
|
_ {/* fall through */ }
|
||||||
|
@ -3427,7 +3427,8 @@ fn trans_for(cx: &@block_ctxt, local: &@ast::local, seq: &@ast::expr,
|
||||||
body: &ast::blk, outer_next_cx: @block_ctxt) -> result {
|
body: &ast::blk, outer_next_cx: @block_ctxt) -> result {
|
||||||
let next_cx = new_sub_block_ctxt(cx, "next");
|
let next_cx = new_sub_block_ctxt(cx, "next");
|
||||||
let scope_cx =
|
let scope_cx =
|
||||||
new_loop_scope_block_ctxt(cx, option::some[@block_ctxt](next_cx),
|
new_loop_scope_block_ctxt(cx,
|
||||||
|
option::some::<@block_ctxt>(next_cx),
|
||||||
outer_next_cx, "for loop scope");
|
outer_next_cx, "for loop scope");
|
||||||
cx.build.Br(scope_cx.llbb);
|
cx.build.Br(scope_cx.llbb);
|
||||||
let local_res = alloc_local(scope_cx, local);
|
let local_res = alloc_local(scope_cx, local);
|
||||||
|
@ -3728,7 +3729,7 @@ fn trans_for_each(cx: &@block_ctxt, local: &@ast::local, seq: &@ast::expr,
|
||||||
ast::expr_call(f, args) {
|
ast::expr_call(f, args) {
|
||||||
let pair =
|
let pair =
|
||||||
create_real_fn_pair(cx, iter_body_llty, lliterbody, llenv.ptr);
|
create_real_fn_pair(cx, iter_body_llty, lliterbody, llenv.ptr);
|
||||||
r = trans_call(cx, f, some[ValueRef](cx.build.Load(pair)), args,
|
r = trans_call(cx, f, some::<ValueRef>(cx.build.Load(pair)), args,
|
||||||
seq.id);
|
seq.id);
|
||||||
ret rslt(r.bcx, C_nil());
|
ret rslt(r.bcx, C_nil());
|
||||||
}
|
}
|
||||||
|
@ -3740,7 +3741,7 @@ fn trans_while(cx: &@block_ctxt, cond: &@ast::expr, body: &ast::blk) ->
|
||||||
let cond_cx = new_scope_block_ctxt(cx, "while cond");
|
let cond_cx = new_scope_block_ctxt(cx, "while cond");
|
||||||
let next_cx = new_sub_block_ctxt(cx, "next");
|
let next_cx = new_sub_block_ctxt(cx, "next");
|
||||||
let body_cx =
|
let body_cx =
|
||||||
new_loop_scope_block_ctxt(cx, option::none[@block_ctxt], next_cx,
|
new_loop_scope_block_ctxt(cx, option::none::<@block_ctxt>, next_cx,
|
||||||
"while loop body");
|
"while loop body");
|
||||||
let body_res = trans_block(body_cx, body, return);
|
let body_res = trans_block(body_cx, body, return);
|
||||||
let cond_res = trans_expr(cond_cx, cond);
|
let cond_res = trans_expr(cond_cx, cond);
|
||||||
|
@ -3755,7 +3756,7 @@ fn trans_do_while(cx: &@block_ctxt, body: &ast::blk, cond: &@ast::expr) ->
|
||||||
result {
|
result {
|
||||||
let next_cx = new_sub_block_ctxt(cx, "next");
|
let next_cx = new_sub_block_ctxt(cx, "next");
|
||||||
let body_cx =
|
let body_cx =
|
||||||
new_loop_scope_block_ctxt(cx, option::none[@block_ctxt], next_cx,
|
new_loop_scope_block_ctxt(cx, option::none::<@block_ctxt>, next_cx,
|
||||||
"do-while loop body");
|
"do-while loop body");
|
||||||
let body_res = trans_block(body_cx, body, return);
|
let body_res = trans_block(body_cx, body, return);
|
||||||
if is_terminated(body_res.bcx) {
|
if is_terminated(body_res.bcx) {
|
||||||
|
@ -3791,17 +3792,17 @@ type lval_result =
|
||||||
fn lval_mem(cx: &@block_ctxt, val: ValueRef) -> lval_result {
|
fn lval_mem(cx: &@block_ctxt, val: ValueRef) -> lval_result {
|
||||||
ret {res: rslt(cx, val),
|
ret {res: rslt(cx, val),
|
||||||
is_mem: true,
|
is_mem: true,
|
||||||
generic: none[generic_info],
|
generic: none::<generic_info>,
|
||||||
llobj: none[ValueRef],
|
llobj: none::<ValueRef>,
|
||||||
method_ty: none[ty::t]};
|
method_ty: none::<ty::t>};
|
||||||
}
|
}
|
||||||
|
|
||||||
fn lval_val(cx: &@block_ctxt, val: ValueRef) -> lval_result {
|
fn lval_val(cx: &@block_ctxt, val: ValueRef) -> lval_result {
|
||||||
ret {res: rslt(cx, val),
|
ret {res: rslt(cx, val),
|
||||||
is_mem: false,
|
is_mem: false,
|
||||||
generic: none[generic_info],
|
generic: none::<generic_info>,
|
||||||
llobj: none[ValueRef],
|
llobj: none::<ValueRef>,
|
||||||
method_ty: none[ty::t]};
|
method_ty: none::<ty::t>};
|
||||||
}
|
}
|
||||||
|
|
||||||
fn trans_external_path(cx: &@block_ctxt, did: &ast::def_id,
|
fn trans_external_path(cx: &@block_ctxt, did: &ast::def_id,
|
||||||
|
@ -3824,14 +3825,14 @@ fn lval_generic_fn(cx: &@block_ctxt, tpt: &ty::ty_param_kinds_and_ty,
|
||||||
lv = lval_val(cx, trans_external_path(cx, fn_id, tpt));
|
lv = lval_val(cx, trans_external_path(cx, fn_id, tpt));
|
||||||
}
|
}
|
||||||
let tys = ty::node_id_to_type_params(bcx_tcx(cx), id);
|
let tys = ty::node_id_to_type_params(bcx_tcx(cx), id);
|
||||||
if std::vec::len[ty::t](tys) != 0u {
|
if std::vec::len::<ty::t>(tys) != 0u {
|
||||||
let bcx = lv.res.bcx;
|
let bcx = lv.res.bcx;
|
||||||
let tydescs: [ValueRef] = ~[];
|
let tydescs: [ValueRef] = ~[];
|
||||||
let tis: [option::t<@tydesc_info>] = ~[];
|
let tis: [option::t<@tydesc_info>] = ~[];
|
||||||
for t: ty::t in tys {
|
for t: ty::t in tys {
|
||||||
// TODO: Doesn't always escape.
|
// TODO: Doesn't always escape.
|
||||||
|
|
||||||
let ti = none[@tydesc_info];
|
let ti = none::<@tydesc_info>;
|
||||||
let td = get_tydesc(bcx, t, true, ti);
|
let td = get_tydesc(bcx, t, true, ti);
|
||||||
tis += ~[ti];
|
tis += ~[ti];
|
||||||
bcx = td.bcx;
|
bcx = td.bcx;
|
||||||
|
@ -3972,7 +3973,7 @@ fn trans_field(cx: &@block_ctxt, sp: &span, v: ValueRef, t0: &ty::t,
|
||||||
ty::ty_fn_ret(tcx, fn_ty), 0u);
|
ty::ty_fn_ret(tcx, fn_ty), 0u);
|
||||||
v = r.bcx.build.PointerCast(v, T_ptr(T_ptr(ll_fn_ty)));
|
v = r.bcx.build.PointerCast(v, T_ptr(T_ptr(ll_fn_ty)));
|
||||||
let lvo = lval_mem(r.bcx, v);
|
let lvo = lval_mem(r.bcx, v);
|
||||||
ret {llobj: some[ValueRef](r.val), method_ty: some[ty::t](fn_ty)
|
ret {llobj: some::<ValueRef>(r.val), method_ty: some::<ty::t>(fn_ty)
|
||||||
with lvo};
|
with lvo};
|
||||||
}
|
}
|
||||||
_ { bcx_ccx(cx).sess.unimpl("field variant in trans_field"); }
|
_ { bcx_ccx(cx).sess.unimpl("field variant in trans_field"); }
|
||||||
|
@ -4027,7 +4028,7 @@ fn trans_index(cx: &@block_ctxt, sp: &span, base: &@ast::expr,
|
||||||
bcx.build.CondBr(bounds_check, next_cx.llbb, fail_cx.llbb);
|
bcx.build.CondBr(bounds_check, next_cx.llbb, fail_cx.llbb);
|
||||||
// fail: bad bounds check.
|
// fail: bad bounds check.
|
||||||
|
|
||||||
trans_fail(fail_cx, some[span](sp), "bounds check");
|
trans_fail(fail_cx, some::<span>(sp), "bounds check");
|
||||||
let body;
|
let body;
|
||||||
alt interior_len_and_data {
|
alt interior_len_and_data {
|
||||||
some(lad) { body = lad.data; }
|
some(lad) { body = lad.data; }
|
||||||
|
@ -4126,7 +4127,7 @@ fn trans_lval(cx: &@block_ctxt, e: &@ast::expr) -> lval_result {
|
||||||
some(gi) {
|
some(gi) {
|
||||||
let t = ty::expr_ty(bcx_tcx(cx), e);
|
let t = ty::expr_ty(bcx_tcx(cx), e);
|
||||||
let n_args = std::vec::len(ty::ty_fn_args(bcx_tcx(cx), t));
|
let n_args = std::vec::len(ty::ty_fn_args(bcx_tcx(cx), t));
|
||||||
let args = std::vec::init_elt(none[@ast::expr], n_args);
|
let args = std::vec::init_elt(none::<@ast::expr>, n_args);
|
||||||
let bound = trans_bind_1(lv.res.bcx, e, lv, args, e.id);
|
let bound = trans_bind_1(lv.res.bcx, e, lv, args, e.id);
|
||||||
ret lval_val(bound.bcx, bound.val);
|
ret lval_val(bound.bcx, bound.val);
|
||||||
}
|
}
|
||||||
|
@ -4768,7 +4769,7 @@ fn trans_vec(cx: &@block_ctxt, args: &[@ast::expr], id: ast::node_id) ->
|
||||||
let unit_sz = size_of(bcx, unit_ty);
|
let unit_sz = size_of(bcx, unit_ty);
|
||||||
bcx = unit_sz.bcx;
|
bcx = unit_sz.bcx;
|
||||||
let data_sz =
|
let data_sz =
|
||||||
bcx.build.Mul(C_uint(std::vec::len[@ast::expr](args)), unit_sz.val);
|
bcx.build.Mul(C_uint(std::vec::len::<@ast::expr>(args)), unit_sz.val);
|
||||||
// FIXME: pass tydesc properly.
|
// FIXME: pass tydesc properly.
|
||||||
|
|
||||||
let vec_val =
|
let vec_val =
|
||||||
|
@ -4781,7 +4782,7 @@ fn trans_vec(cx: &@block_ctxt, args: &[@ast::expr], id: ast::node_id) ->
|
||||||
let body = bcx.build.GEP(vec_val, ~[C_int(0), C_int(abi::vec_elt_data)]);
|
let body = bcx.build.GEP(vec_val, ~[C_int(0), C_int(abi::vec_elt_data)]);
|
||||||
let pseudo_tup_ty =
|
let pseudo_tup_ty =
|
||||||
ty::mk_tup(bcx_tcx(cx),
|
ty::mk_tup(bcx_tcx(cx),
|
||||||
std::vec::init_elt[ty::t](unit_ty,
|
std::vec::init_elt::<ty::t>(unit_ty,
|
||||||
std::vec::len(args)));
|
std::vec::len(args)));
|
||||||
let i: int = 0;
|
let i: int = 0;
|
||||||
for e: @ast::expr in args {
|
for e: @ast::expr in args {
|
||||||
@@ -5100,7 +5101,7 @@ fn trans_expr_out(cx: &@block_ctxt, e: &@ast::expr, output: out_method) ->
}
|
}
|
||||||
ast::expr_bind(f, args) { ret trans_bind(cx, f, args, e.id); }
|
ast::expr_bind(f, args) { ret trans_bind(cx, f, args, e.id); }
|
||||||
ast::expr_call(f, args) {
|
ast::expr_call(f, args) {
|
||||||
ret trans_call(cx, f, none[ValueRef], args, e.id);
|
ret trans_call(cx, f, none::<ValueRef>, args, e.id);
|
||||||
}
|
}
|
||||||
ast::expr_cast(val, _) { ret trans_cast(cx, val, e.id); }
|
ast::expr_cast(val, _) { ret trans_cast(cx, val, e.id); }
|
||||||
ast::expr_vec(args, _, ast::sk_rc.) { ret trans_vec(cx, args, e.id); }
|
ast::expr_vec(args, _, ast::sk_rc.) { ret trans_vec(cx, args, e.id); }
|
||||||
@@ -5237,7 +5238,7 @@ fn trans_log(lvl: int, cx: &@block_ctxt, e: &@ast::expr) -> result {
let e_ty = ty::expr_ty(bcx_tcx(cx), e);
|
let e_ty = ty::expr_ty(bcx_tcx(cx), e);
|
||||||
let log_bcx = sub.bcx;
|
let log_bcx = sub.bcx;
|
||||||
|
|
||||||
let ti = none[@tydesc_info];
|
let ti = none::<@tydesc_info>;
|
||||||
let r = get_tydesc(log_bcx, e_ty, false, ti);
|
let r = get_tydesc(log_bcx, e_ty, false, ti);
|
||||||
log_bcx = r.bcx;
|
log_bcx = r.bcx;
|
||||||
|
|
||||||
@@ -5257,7 +5258,7 @@ fn trans_check_expr(cx: &@block_ctxt, e: &@ast::expr, s: &str) -> result {
let cond_res = trans_expr(cx, e);
|
let cond_res = trans_expr(cx, e);
|
||||||
let expr_str = s + " " + expr_to_str(e) + " failed";
|
let expr_str = s + " " + expr_to_str(e) + " failed";
|
||||||
let fail_cx = new_sub_block_ctxt(cx, "fail");
|
let fail_cx = new_sub_block_ctxt(cx, "fail");
|
||||||
trans_fail(fail_cx, some[span](e.span), expr_str);
|
trans_fail(fail_cx, some::<span>(e.span), expr_str);
|
||||||
let next_cx = new_sub_block_ctxt(cx, "next");
|
let next_cx = new_sub_block_ctxt(cx, "next");
|
||||||
cond_res.bcx.build.CondBr(cond_res.val, next_cx.llbb, fail_cx.llbb);
|
cond_res.bcx.build.CondBr(cond_res.val, next_cx.llbb, fail_cx.llbb);
|
||||||
ret rslt(next_cx, C_nil());
|
ret rslt(next_cx, C_nil());
|
||||||
@@ -5587,9 +5588,9 @@ fn trans_block_cleanups(cx: &@block_ctxt, cleanup_cx: &@block_ctxt) ->
@block_ctxt {
|
@block_ctxt {
|
||||||
let bcx = cx;
|
let bcx = cx;
|
||||||
if cleanup_cx.kind == NON_SCOPE_BLOCK {
|
if cleanup_cx.kind == NON_SCOPE_BLOCK {
|
||||||
assert (std::vec::len[cleanup](cleanup_cx.cleanups) == 0u);
|
assert (std::vec::len::<cleanup>(cleanup_cx.cleanups) == 0u);
|
||||||
}
|
}
|
||||||
let i = std::vec::len[cleanup](cleanup_cx.cleanups);
|
let i = std::vec::len::<cleanup>(cleanup_cx.cleanups);
|
||||||
while i > 0u {
|
while i > 0u {
|
||||||
i -= 1u;
|
i -= 1u;
|
||||||
let c = cleanup_cx.cleanups.(i);
|
let c = cleanup_cx.cleanups.(i);
|
||||||
@@ -5788,13 +5789,15 @@ fn new_fn_ctxt_w_id(cx: @local_ctxt, sp: &span, llfndecl: ValueRef,
let llretptr: ValueRef = llvm::LLVMGetParam(llfndecl, 0u);
|
let llretptr: ValueRef = llvm::LLVMGetParam(llfndecl, 0u);
|
||||||
let lltaskptr: ValueRef = llvm::LLVMGetParam(llfndecl, 1u);
|
let lltaskptr: ValueRef = llvm::LLVMGetParam(llfndecl, 1u);
|
||||||
let llenv: ValueRef = llvm::LLVMGetParam(llfndecl, 2u);
|
let llenv: ValueRef = llvm::LLVMGetParam(llfndecl, 2u);
|
||||||
let llargs: hashmap<ast::node_id, ValueRef> = new_int_hash[ValueRef]();
|
let llargs: hashmap<ast::node_id, ValueRef> = new_int_hash::<ValueRef>();
|
||||||
let llobjfields: hashmap<ast::node_id, ValueRef> =
|
let llobjfields: hashmap<ast::node_id, ValueRef> =
|
||||||
new_int_hash[ValueRef]();
|
new_int_hash::<ValueRef>();
|
||||||
let lllocals: hashmap<ast::node_id, ValueRef> = new_int_hash[ValueRef]();
let lllocals: hashmap<ast::node_id, ValueRef> =
new_int_hash::<ValueRef>();
let llupvars: hashmap<ast::node_id, ValueRef> = new_int_hash[ValueRef]();
let llupvars: hashmap<ast::node_id, ValueRef> =
new_int_hash::<ValueRef>();
|
||||||
let derived_tydescs =
|
let derived_tydescs =
|
||||||
map::mk_hashmap[ty::t, derived_tydesc_info](ty::hash_ty, ty::eq_ty);
|
map::mk_hashmap::<ty::t, derived_tydesc_info>(ty::hash_ty, ty::eq_ty);
|
||||||
let llbbs = mk_standard_basic_blocks(llfndecl);
|
let llbbs = mk_standard_basic_blocks(llfndecl);
|
||||||
ret @{llfn: llfndecl,
|
ret @{llfn: llfndecl,
|
||||||
lltaskptr: lltaskptr,
|
lltaskptr: lltaskptr,
|
||||||
@@ -5805,9 +5808,9 @@ fn new_fn_ctxt_w_id(cx: @local_ctxt, sp: &span, llfndecl: ValueRef,
mutable llderivedtydescs_first: llbbs.dt,
|
mutable llderivedtydescs_first: llbbs.dt,
|
||||||
mutable llderivedtydescs: llbbs.dt,
|
mutable llderivedtydescs: llbbs.dt,
|
||||||
mutable lldynamicallocas: llbbs.da,
|
mutable lldynamicallocas: llbbs.da,
|
||||||
mutable llself: none[val_self_pair],
|
mutable llself: none::<val_self_pair>,
|
||||||
mutable lliterbody: none[ValueRef],
|
mutable lliterbody: none::<ValueRef>,
|
||||||
mutable iterbodyty: none[ty::t],
|
mutable iterbodyty: none::<ty::t>,
|
||||||
llargs: llargs,
|
llargs: llargs,
|
||||||
llobjfields: llobjfields,
|
llobjfields: llobjfields,
|
||||||
lllocals: lllocals,
|
lllocals: lllocals,
|
||||||
@@ -5845,7 +5848,7 @@ fn create_llargs_for_fn_args(cx: &@fn_ctxt, proto: ast::proto,
// way.
|
// way.
|
||||||
let arg_n = 3u;
|
let arg_n = 3u;
|
||||||
alt ty_self {
|
alt ty_self {
|
||||||
some(tt) { cx.llself = some[val_self_pair]({v: cx.llenv, t: tt}); }
|
some(tt) { cx.llself = some::<val_self_pair>({v: cx.llenv, t: tt}); }
|
||||||
none. {
|
none. {
|
||||||
let i = 0u;
|
let i = 0u;
|
||||||
for tp: ast::ty_param in ty_params {
|
for tp: ast::ty_param in ty_params {
|
||||||
@@ -5865,7 +5868,7 @@ fn create_llargs_for_fn_args(cx: &@fn_ctxt, proto: ast::proto,
cx.iterbodyty = some(ty::mk_iter_body_fn(fcx_tcx(cx), ret_ty));
|
cx.iterbodyty = some(ty::mk_iter_body_fn(fcx_tcx(cx), ret_ty));
|
||||||
let llarg = llvm::LLVMGetParam(cx.llfn, arg_n);
|
let llarg = llvm::LLVMGetParam(cx.llfn, arg_n);
|
||||||
assert (llarg as int != 0);
|
assert (llarg as int != 0);
|
||||||
cx.lliterbody = some[ValueRef](llarg);
|
cx.lliterbody = some::<ValueRef>(llarg);
|
||||||
arg_n += 1u;
|
arg_n += 1u;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -5942,7 +5945,7 @@ fn populate_fn_ctxt_from_llself(fcx: @fn_ctxt, llself: val_self_pair) {
// its magic.
|
// its magic.
|
||||||
|
|
||||||
let fields_tup_ty = ty::mk_tup(fcx.lcx.ccx.tcx, field_tys);
|
let fields_tup_ty = ty::mk_tup(fcx.lcx.ccx.tcx, field_tys);
|
||||||
let n_typarams = std::vec::len[ast::ty_param](bcx.fcx.lcx.obj_typarams);
|
let n_typarams = std::vec::len::<ast::ty_param>(bcx.fcx.lcx.obj_typarams);
|
||||||
let llobj_box_ty: TypeRef = T_obj_ptr(*bcx_ccx(bcx), n_typarams);
|
let llobj_box_ty: TypeRef = T_obj_ptr(*bcx_ccx(bcx), n_typarams);
|
||||||
let box_cell =
|
let box_cell =
|
||||||
bcx.build.GEP(llself.v, ~[C_int(0), C_int(abi::obj_field_box)]);
|
bcx.build.GEP(llself.v, ~[C_int(0), C_int(abi::obj_field_box)]);
|
||||||
@@ -6100,7 +6103,7 @@ fn trans_res_ctor(cx: @local_ctxt, sp: &span, dtor: &ast::_fn,
}
|
}
|
||||||
let fcx = new_fn_ctxt(cx, sp, llctor_decl);
|
let fcx = new_fn_ctxt(cx, sp, llctor_decl);
|
||||||
let ret_t = ty::ret_ty_of_fn(cx.ccx.tcx, ctor_id);
|
let ret_t = ty::ret_ty_of_fn(cx.ccx.tcx, ctor_id);
|
||||||
create_llargs_for_fn_args(fcx, ast::proto_fn, none[ty::t], ret_t,
|
create_llargs_for_fn_args(fcx, ast::proto_fn, none::<ty::t>, ret_t,
|
||||||
dtor.decl.inputs, ty_params);
|
dtor.decl.inputs, ty_params);
|
||||||
let bcx = new_top_block_ctxt(fcx);
|
let bcx = new_top_block_ctxt(fcx);
|
||||||
let lltop = bcx.llbb;
|
let lltop = bcx.llbb;
|
||||||
@@ -6132,7 +6135,7 @@ fn trans_res_ctor(cx: @local_ctxt, sp: &span, dtor: &ast::_fn,
fn trans_tag_variant(cx: @local_ctxt, tag_id: ast::node_id,
|
fn trans_tag_variant(cx: @local_ctxt, tag_id: ast::node_id,
|
||||||
variant: &ast::variant, index: int, is_degen: bool,
|
variant: &ast::variant, index: int, is_degen: bool,
|
||||||
ty_params: &[ast::ty_param]) {
|
ty_params: &[ast::ty_param]) {
|
||||||
if std::vec::len[ast::variant_arg](variant.node.args) == 0u {
|
if std::vec::len::<ast::variant_arg>(variant.node.args) == 0u {
|
||||||
ret; // nullary constructors are just constants
|
ret; // nullary constructors are just constants
|
||||||
|
|
||||||
}
|
}
|
||||||
@@ -6157,7 +6160,7 @@ fn trans_tag_variant(cx: @local_ctxt, tag_id: ast::node_id,
}
|
}
|
||||||
}
|
}
|
||||||
let fcx = new_fn_ctxt(cx, variant.span, llfndecl);
|
let fcx = new_fn_ctxt(cx, variant.span, llfndecl);
|
||||||
create_llargs_for_fn_args(fcx, ast::proto_fn, none[ty::t],
|
create_llargs_for_fn_args(fcx, ast::proto_fn, none::<ty::t>,
|
||||||
ty::ret_ty_of_fn(cx.ccx.tcx, variant.node.id),
|
ty::ret_ty_of_fn(cx.ccx.tcx, variant.node.id),
|
||||||
fn_args, ty_params);
|
fn_args, ty_params);
|
||||||
let ty_param_substs: [ty::t] = ~[];
|
let ty_param_substs: [ty::t] = ~[];
|
||||||
@@ -6328,7 +6331,7 @@ fn decl_fn_and_pair_full(ccx: &@crate_ctxt, sp: &span, path: &[str],
ty::ty_fn(proto, inputs, output, _, _) {
|
ty::ty_fn(proto, inputs, output, _, _) {
|
||||||
llfty =
|
llfty =
|
||||||
type_of_fn(ccx, sp, proto, inputs, output,
|
type_of_fn(ccx, sp, proto, inputs, output,
|
||||||
std::vec::len[ast::ty_param](ty_params));
|
std::vec::len::<ast::ty_param>(ty_params));
|
||||||
}
|
}
|
||||||
_ { ccx.sess.bug("decl_fn_and_pair(): fn item doesn't have fn type!"); }
|
_ { ccx.sess.bug("decl_fn_and_pair(): fn item doesn't have fn type!"); }
|
||||||
}
|
}
|
||||||
@@ -6349,7 +6352,7 @@ fn decl_fn_and_pair_full(ccx: &@crate_ctxt, sp: &span, path: &[str],
fn create_main_wrapper(ccx: &@crate_ctxt, sp: &span,
|
fn create_main_wrapper(ccx: &@crate_ctxt, sp: &span,
|
||||||
main_llfn: ValueRef, main_node_type: ty::t) {
|
main_llfn: ValueRef, main_node_type: ty::t) {
|
||||||
|
|
||||||
if ccx.main_fn != none[ValueRef] {
|
if ccx.main_fn != none::<ValueRef> {
|
||||||
ccx.sess.span_fatal(sp, "multiple 'main' functions");
|
ccx.sess.span_fatal(sp, "multiple 'main' functions");
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -6537,7 +6540,7 @@ fn native_fn_ty_param_count(cx: &@crate_ctxt, id: ast::node_id) -> uint {
actually a fn");
|
actually a fn");
|
||||||
}
|
}
|
||||||
ast::native_item_fn(_, _, tps) {
|
ast::native_item_fn(_, _, tps) {
|
||||||
count = std::vec::len[ast::ty_param](tps);
|
count = std::vec::len::<ast::ty_param>(tps);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
ret count;
|
ret count;
|
||||||
@@ -6850,7 +6853,7 @@ fn trans_constant(ccx: @crate_ctxt, it: &@ast::item, pt: &[str],
alt it.node {
|
alt it.node {
|
||||||
ast::item_tag(variants, _) {
|
ast::item_tag(variants, _) {
|
||||||
let i = 0u;
|
let i = 0u;
|
||||||
let n_variants = std::vec::len[ast::variant](variants);
|
let n_variants = std::vec::len::<ast::variant>(variants);
|
||||||
while i < n_variants {
|
while i < n_variants {
|
||||||
let variant = variants.(i);
|
let variant = variants.(i);
|
||||||
let p = new_pt + ~[it.ident, variant.node.name, "discrim"];
|
let p = new_pt + ~[it.ident, variant.node.name, "discrim"];
|
||||||
@@ -6917,7 +6920,7 @@ fn declare_intrinsics(llmod: ModuleRef) -> hashmap<str, ValueRef> {
decl_cdecl_fn(llmod, "llvm.memset.p0i8.i64",
|
decl_cdecl_fn(llmod, "llvm.memset.p0i8.i64",
|
||||||
T_fn(T_memset64_args, T_void()));
|
T_fn(T_memset64_args, T_void()));
|
||||||
let trap = decl_cdecl_fn(llmod, "llvm.trap", T_fn(T_trap_args, T_void()));
|
let trap = decl_cdecl_fn(llmod, "llvm.trap", T_fn(T_trap_args, T_void()));
|
||||||
let intrinsics = new_str_hash[ValueRef]();
|
let intrinsics = new_str_hash::<ValueRef>();
|
||||||
intrinsics.insert("llvm.gcread", gcread);
|
intrinsics.insert("llvm.gcread", gcread);
|
||||||
intrinsics.insert("llvm.memmove.p0i8.p0i8.i32", memmove32);
|
intrinsics.insert("llvm.memmove.p0i8.p0i8.i32", memmove32);
|
||||||
intrinsics.insert("llvm.memmove.p0i8.p0i8.i64", memmove64);
|
intrinsics.insert("llvm.memmove.p0i8.p0i8.i64", memmove64);
|
||||||
@@ -7019,7 +7022,7 @@ fn create_crate_map(ccx: &@crate_ctxt) -> ValueRef {
mapname = ccx.link_meta.name;
|
mapname = ccx.link_meta.name;
|
||||||
} else { mapname = "toplevel"; }
|
} else { mapname = "toplevel"; }
|
||||||
let sym_name = "_rust_crate_map_" + mapname;
|
let sym_name = "_rust_crate_map_" + mapname;
|
||||||
let arrtype = T_array(T_int(), std::vec::len[ValueRef](subcrates));
|
let arrtype = T_array(T_int(), std::vec::len::<ValueRef>(subcrates));
|
||||||
let maptype = T_struct(~[T_int(), arrtype]);
|
let maptype = T_struct(~[T_int(), arrtype]);
|
||||||
let map = llvm::LLVMAddGlobal(ccx.llmod, maptype, str::buf(sym_name));
|
let map = llvm::LLVMAddGlobal(ccx.llmod, maptype, str::buf(sym_name));
|
||||||
llvm::LLVMSetLinkage(map,
|
llvm::LLVMSetLinkage(map,
|
||||||
@@ -7073,32 +7076,32 @@ fn trans_crate(sess: &session::session, crate: &@ast::crate, tcx: &ty::ctxt,
let glues = make_glues(llmod, taskptr_type);
|
let glues = make_glues(llmod, taskptr_type);
|
||||||
let hasher = ty::hash_ty;
|
let hasher = ty::hash_ty;
|
||||||
let eqer = ty::eq_ty;
|
let eqer = ty::eq_ty;
|
||||||
let tag_sizes = map::mk_hashmap[ty::t, uint](hasher, eqer);
|
let tag_sizes = map::mk_hashmap::<ty::t, uint>(hasher, eqer);
|
||||||
let tydescs = map::mk_hashmap[ty::t, @tydesc_info](hasher, eqer);
|
let tydescs = map::mk_hashmap::<ty::t, @tydesc_info>(hasher, eqer);
|
||||||
let lltypes = map::mk_hashmap[ty::t, TypeRef](hasher, eqer);
|
let lltypes = map::mk_hashmap::<ty::t, TypeRef>(hasher, eqer);
|
||||||
let sha1s = map::mk_hashmap[ty::t, str](hasher, eqer);
|
let sha1s = map::mk_hashmap::<ty::t, str>(hasher, eqer);
|
||||||
let short_names = map::mk_hashmap[ty::t, str](hasher, eqer);
|
let short_names = map::mk_hashmap::<ty::t, str>(hasher, eqer);
|
||||||
let sha = std::sha1::mk_sha1();
|
let sha = std::sha1::mk_sha1();
|
||||||
let ccx =
|
let ccx =
|
||||||
@{sess: sess,
|
@{sess: sess,
|
||||||
llmod: llmod,
|
llmod: llmod,
|
||||||
td: td,
|
td: td,
|
||||||
tn: tn,
|
tn: tn,
|
||||||
externs: new_str_hash[ValueRef](),
|
externs: new_str_hash::<ValueRef>(),
|
||||||
intrinsics: intrinsics,
|
intrinsics: intrinsics,
|
||||||
item_ids: new_int_hash[ValueRef](),
|
item_ids: new_int_hash::<ValueRef>(),
|
||||||
ast_map: amap,
|
ast_map: amap,
|
||||||
item_symbols: new_int_hash[str](),
|
item_symbols: new_int_hash::<str>(),
|
||||||
mutable main_fn: none[ValueRef],
|
mutable main_fn: none::<ValueRef>,
|
||||||
link_meta: link::build_link_meta(sess, *crate, output, sha),
|
link_meta: link::build_link_meta(sess, *crate, output, sha),
|
||||||
tag_sizes: tag_sizes,
|
tag_sizes: tag_sizes,
|
||||||
discrims: new_int_hash[ValueRef](),
|
discrims: new_int_hash::<ValueRef>(),
|
||||||
discrim_symbols: new_int_hash[str](),
|
discrim_symbols: new_int_hash::<str>(),
|
||||||
fn_pairs: new_int_hash[ValueRef](),
|
fn_pairs: new_int_hash::<ValueRef>(),
|
||||||
consts: new_int_hash[ValueRef](),
|
consts: new_int_hash::<ValueRef>(),
|
||||||
obj_methods: new_int_hash[()](),
|
obj_methods: new_int_hash::<()>(),
|
||||||
tydescs: tydescs,
|
tydescs: tydescs,
|
||||||
module_data: new_str_hash[ValueRef](),
|
module_data: new_str_hash::<ValueRef>(),
|
||||||
lltypes: lltypes,
|
lltypes: lltypes,
|
||||||
glues: glues,
|
glues: glues,
|
||||||
names: namegen(0),
|
names: namegen(0),
|
||||||
|
|
@@ -250,7 +250,7 @@ type fn_ctxt = {
|
|
||||||
// For convenience, a vector of the incoming tydescs for each of
|
// For convenience, a vector of the incoming tydescs for each of
|
||||||
// this functions type parameters, fetched via llvm::LLVMGetParam.
|
// this functions type parameters, fetched via llvm::LLVMGetParam.
|
||||||
// For example, for a function foo[A, B, C](), lltydescs contains
|
// For example, for a function foo::<A, B, C>(), lltydescs contains
|
||||||
// the ValueRefs for the tydescs for A, B, and C.
|
// the ValueRefs for the tydescs for A, B, and C.
|
||||||
mutable lltydescs: [ValueRef],
|
mutable lltydescs: [ValueRef],
|
||||||
|
|
||||||
@@ -510,7 +510,7 @@ fn T_size_t() -> TypeRef {
|
|
||||||
fn T_fn(inputs: &[TypeRef], output: TypeRef) -> TypeRef {
|
fn T_fn(inputs: &[TypeRef], output: TypeRef) -> TypeRef {
|
||||||
ret llvm::LLVMFunctionType(output, std::vec::to_ptr(inputs),
|
ret llvm::LLVMFunctionType(output, std::vec::to_ptr(inputs),
|
||||||
std::vec::len[TypeRef](inputs), False);
|
std::vec::len::<TypeRef>(inputs), False);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn T_fn_pair(cx: &crate_ctxt, tfn: TypeRef) -> TypeRef {
|
fn T_fn_pair(cx: &crate_ctxt, tfn: TypeRef) -> TypeRef {
|
||||||
@@ -570,9 +570,9 @@ fn T_tydesc_field(cx: &crate_ctxt, field: int) -> TypeRef {
// Bit of a kludge: pick the fn typeref out of the tydesc..
|
// Bit of a kludge: pick the fn typeref out of the tydesc..
|
||||||
|
|
||||||
let tydesc_elts: [TypeRef] =
|
let tydesc_elts: [TypeRef] =
|
||||||
std::vec::init_elt[TypeRef](T_nil(), abi::n_tydesc_fields as uint);
|
std::vec::init_elt::<TypeRef>(T_nil(), abi::n_tydesc_fields as uint);
|
||||||
llvm::LLVMGetStructElementTypes(cx.tydesc_type,
|
llvm::LLVMGetStructElementTypes(cx.tydesc_type,
|
||||||
std::vec::to_ptr[TypeRef](tydesc_elts));
|
std::vec::to_ptr::<TypeRef>(tydesc_elts));
|
||||||
let t = llvm::LLVMGetElementType(tydesc_elts.(field));
|
let t = llvm::LLVMGetElementType(tydesc_elts.(field));
|
||||||
ret t;
|
ret t;
|
||||||
}
|
}
|
||||||
@@ -742,7 +742,7 @@ fn T_opaque_tag_ptr(tn: &type_names) -> TypeRef {
}
|
}
|
||||||
|
|
||||||
fn T_captured_tydescs(cx: &crate_ctxt, n: uint) -> TypeRef {
|
fn T_captured_tydescs(cx: &crate_ctxt, n: uint) -> TypeRef {
|
||||||
ret T_struct(std::vec::init_elt[TypeRef](T_ptr(cx.tydesc_type), n));
|
ret T_struct(std::vec::init_elt::<TypeRef>(T_ptr(cx.tydesc_type), n));
|
||||||
}
|
}
|
||||||
|
|
||||||
fn T_obj_ptr(cx: &crate_ctxt, n_captured_tydescs: uint) -> TypeRef {
|
fn T_obj_ptr(cx: &crate_ctxt, n_captured_tydescs: uint) -> TypeRef {
|
||||||
|
|
@@ -51,7 +51,7 @@ fn trans_obj(cx: @local_ctxt, sp: &span, ob: &ast::_obj,
let fcx = new_fn_ctxt(cx, sp, llctor_decl);
|
let fcx = new_fn_ctxt(cx, sp, llctor_decl);
|
||||||
|
|
||||||
// Both regular arguments and type parameters are handled here.
|
// Both regular arguments and type parameters are handled here.
|
||||||
create_llargs_for_fn_args(fcx, ast::proto_fn, none[ty::t],
|
create_llargs_for_fn_args(fcx, ast::proto_fn, none::<ty::t>,
|
||||||
ty::ret_ty_of_fn(ccx.tcx, ctor_id), fn_args,
|
ty::ret_ty_of_fn(ccx.tcx, ctor_id), fn_args,
|
||||||
ty_params);
|
ty_params);
|
||||||
let arg_tys: [ty::arg] = arg_tys_of_fn(ccx, ctor_id);
|
let arg_tys: [ty::arg] = arg_tys_of_fn(ccx, ctor_id);
|
||||||
@@ -95,8 +95,8 @@ fn trans_obj(cx: @local_ctxt, sp: &span, ob: &ast::_obj,
// typarams, and fields.
|
// typarams, and fields.
|
||||||
let llbox_ty: TypeRef = T_ptr(T_empty_struct());
|
let llbox_ty: TypeRef = T_ptr(T_empty_struct());
|
||||||
|
|
||||||
if std::vec::len[ast::ty_param](ty_params) == 0u &&
|
if std::vec::len::<ast::ty_param>(ty_params) == 0u &&
|
||||||
std::vec::len[ty::arg](arg_tys) == 0u {
|
std::vec::len::<ty::arg>(arg_tys) == 0u {
|
||||||
// If the object we're translating has no fields or type parameters,
|
// If the object we're translating has no fields or type parameters,
|
||||||
// there's not much to do.
|
// there's not much to do.
|
||||||
|
|
||||||
@@ -132,7 +132,7 @@ fn trans_obj(cx: @local_ctxt, sp: &span, ob: &ast::_obj,
GEP_tup_like(bcx, body_ty, body,
|
GEP_tup_like(bcx, body_ty, body,
|
||||||
~[0, abi::obj_body_elt_tydesc]);
|
~[0, abi::obj_body_elt_tydesc]);
|
||||||
bcx = body_tydesc.bcx;
|
bcx = body_tydesc.bcx;
|
||||||
let ti = none[@tydesc_info];
|
let ti = none::<@tydesc_info>;
|
||||||
let body_td = get_tydesc(bcx, body_ty, true, ti);
|
let body_td = get_tydesc(bcx, body_ty, true, ti);
|
||||||
lazily_emit_tydesc_glue(bcx, abi::tydesc_field_drop_glue, ti);
|
lazily_emit_tydesc_glue(bcx, abi::tydesc_field_drop_glue, ti);
|
||||||
lazily_emit_tydesc_glue(bcx, abi::tydesc_field_free_glue, ti);
|
lazily_emit_tydesc_glue(bcx, abi::tydesc_field_free_glue, ti);
|
||||||
@@ -295,7 +295,7 @@ fn trans_anon_obj(bcx: @block_ctxt, sp: &span, anon_obj: &ast::anon_obj,
// typarams, fields, and a pointer to our inner_obj.
|
// typarams, fields, and a pointer to our inner_obj.
|
||||||
let llbox_ty: TypeRef = T_ptr(T_empty_struct());
|
let llbox_ty: TypeRef = T_ptr(T_empty_struct());
|
||||||
|
|
||||||
if std::vec::len[ast::anon_obj_field](additional_fields) == 0u &&
|
if std::vec::len::<ast::anon_obj_field>(additional_fields) == 0u &&
|
||||||
anon_obj.inner_obj == none {
|
anon_obj.inner_obj == none {
|
||||||
|
|
||||||
// If the object we're translating has no fields and no inner_obj,
|
// If the object we're translating has no fields and no inner_obj,
|
||||||
@@ -326,7 +326,7 @@ fn trans_anon_obj(bcx: @block_ctxt, sp: &span, anon_obj: &ast::anon_obj,
GEP_tup_like(bcx, body_ty, body,
|
GEP_tup_like(bcx, body_ty, body,
|
||||||
~[0, abi::obj_body_elt_tydesc]);
|
~[0, abi::obj_body_elt_tydesc]);
|
||||||
bcx = body_tydesc.bcx;
|
bcx = body_tydesc.bcx;
|
||||||
let ti = none[@tydesc_info];
|
let ti = none::<@tydesc_info>;
|
||||||
let body_td = get_tydesc(bcx, body_ty, true, ti);
|
let body_td = get_tydesc(bcx, body_ty, true, ti);
|
||||||
lazily_emit_tydesc_glue(bcx, abi::tydesc_field_drop_glue, ti);
|
lazily_emit_tydesc_glue(bcx, abi::tydesc_field_drop_glue, ti);
|
||||||
lazily_emit_tydesc_glue(bcx, abi::tydesc_field_free_glue, ti);
|
lazily_emit_tydesc_glue(bcx, abi::tydesc_field_free_glue, ti);
|
||||||
@@ -460,7 +460,7 @@ fn create_vtbl(cx: @local_ctxt, sp: &span, outer_obj_ty: ty::t,
|
|
||||||
// Sort and process all the methods.
|
// Sort and process all the methods.
|
||||||
let meths =
|
let meths =
|
||||||
std::sort::merge_sort[@ast::method]
|
std::sort::merge_sort::<@ast::method>
|
||||||
(bind ast_mthd_lteq(_, _), ob.methods);
|
(bind ast_mthd_lteq(_, _), ob.methods);
|
||||||
|
|
||||||
for m: @ast::method in meths {
|
for m: @ast::method in meths {
|
||||||
@@ -496,7 +496,7 @@ fn create_vtbl(cx: @local_ctxt, sp: &span, outer_obj_ty: ty::t,
// Filter out any methods that we don't need forwarding slots for
|
// Filter out any methods that we don't need forwarding slots for
|
||||||
// because they're being overridden.
|
// because they're being overridden.
|
||||||
let f = bind filtering_fn(cx, _, ob.methods);
|
let f = bind filtering_fn(cx, _, ob.methods);
|
||||||
meths = std::vec::filter_map[vtbl_mthd, vtbl_mthd](f, meths);
|
meths = std::vec::filter_map::<vtbl_mthd, vtbl_mthd>(f, meths);
|
||||||
|
|
||||||
// And now add the additional ones, both overriding ones and entirely
|
// And now add the additional ones, both overriding ones and entirely
|
||||||
// new ones. These will just be normal methods.
|
// new ones. These will just be normal methods.
|
||||||
@@ -504,7 +504,7 @@ fn create_vtbl(cx: @local_ctxt, sp: &span, outer_obj_ty: ty::t,
|
|
||||||
// Sort all the methods and process them.
|
// Sort all the methods and process them.
|
||||||
meths =
|
meths =
|
||||||
std::sort::merge_sort[vtbl_mthd]
|
std::sort::merge_sort::<vtbl_mthd>
|
||||||
(bind vtbl_mthd_lteq(_, _), meths);
|
(bind vtbl_mthd_lteq(_, _), meths);
|
||||||
|
|
||||||
// To create forwarding methods, we'll need a "backwarding" vtbl. See
|
// To create forwarding methods, we'll need a "backwarding" vtbl. See
|
||||||
@@ -615,7 +615,7 @@ fn process_bkwding_mthd(cx: @local_ctxt, sp: &span, m: @ty::method,
// Get the backwarding function's type and declare it.
|
// Get the backwarding function's type and declare it.
|
||||||
let llbackwarding_fn_ty: TypeRef =
|
let llbackwarding_fn_ty: TypeRef =
|
||||||
type_of_fn_full(cx.ccx, sp, m.proto, true, m.inputs, m.output,
|
type_of_fn_full(cx.ccx, sp, m.proto, true, m.inputs, m.output,
|
||||||
std::vec::len[ast::ty_param](ty_params));
|
std::vec::len::<ast::ty_param>(ty_params));
|
||||||
let llbackwarding_fn: ValueRef =
|
let llbackwarding_fn: ValueRef =
|
||||||
decl_internal_fastcall_fn(cx.ccx.llmod, s, llbackwarding_fn_ty);
|
decl_internal_fastcall_fn(cx.ccx.llmod, s, llbackwarding_fn_ty);
|
||||||
|
|
||||||
@@ -684,7 +684,7 @@ fn process_bkwding_mthd(cx: @local_ctxt, sp: &span, m: @ty::method,
type_of_fn_full(bcx_ccx(bcx), sp,
|
type_of_fn_full(bcx_ccx(bcx), sp,
|
||||||
ty::ty_fn_proto(bcx_tcx(bcx), outer_mthd_ty), true,
|
ty::ty_fn_proto(bcx_tcx(bcx), outer_mthd_ty), true,
|
||||||
m.inputs, m.output,
|
m.inputs, m.output,
|
||||||
std::vec::len[ast::ty_param](ty_params));
|
std::vec::len::<ast::ty_param>(ty_params));
|
||||||
llouter_mthd =
|
llouter_mthd =
|
||||||
bcx.build.PointerCast(llouter_mthd, T_ptr(T_ptr(llouter_mthd_ty)));
|
bcx.build.PointerCast(llouter_mthd, T_ptr(T_ptr(llouter_mthd_ty)));
|
||||||
llouter_mthd = bcx.build.Load(llouter_mthd);
|
llouter_mthd = bcx.build.Load(llouter_mthd);
|
||||||
@@ -747,7 +747,7 @@ fn process_fwding_mthd(cx: @local_ctxt, sp: &span, m: @ty::method,
// Get the forwarding function's type and declare it.
|
// Get the forwarding function's type and declare it.
|
||||||
let llforwarding_fn_ty: TypeRef =
|
let llforwarding_fn_ty: TypeRef =
|
||||||
type_of_fn_full(cx.ccx, sp, m.proto, true, m.inputs, m.output,
|
type_of_fn_full(cx.ccx, sp, m.proto, true, m.inputs, m.output,
|
||||||
std::vec::len[ast::ty_param](ty_params));
|
std::vec::len::<ast::ty_param>(ty_params));
|
||||||
let llforwarding_fn: ValueRef =
|
let llforwarding_fn: ValueRef =
|
||||||
decl_internal_fastcall_fn(cx.ccx.llmod, s, llforwarding_fn_ty);
|
decl_internal_fastcall_fn(cx.ccx.llmod, s, llforwarding_fn_ty);
|
||||||
|
|
||||||
@@ -844,7 +844,7 @@ fn process_fwding_mthd(cx: @local_ctxt, sp: &span, m: @ty::method,
type_of_fn_full(bcx_ccx(bcx), sp,
|
type_of_fn_full(bcx_ccx(bcx), sp,
|
||||||
ty::ty_fn_proto(bcx_tcx(bcx), orig_mthd_ty), true,
|
ty::ty_fn_proto(bcx_tcx(bcx), orig_mthd_ty), true,
|
||||||
m.inputs, m.output,
|
m.inputs, m.output,
|
||||||
std::vec::len[ast::ty_param](ty_params));
|
std::vec::len::<ast::ty_param>(ty_params));
|
||||||
llorig_mthd =
|
llorig_mthd =
|
||||||
bcx.build.PointerCast(llorig_mthd, T_ptr(T_ptr(llorig_mthd_ty)));
|
bcx.build.PointerCast(llorig_mthd, T_ptr(T_ptr(llorig_mthd_ty)));
|
||||||
llorig_mthd = bcx.build.Load(llorig_mthd);
|
llorig_mthd = bcx.build.Load(llorig_mthd);
|
||||||
@@ -923,7 +923,7 @@ fn process_normal_mthd(cx: @local_ctxt, m: @ast::method, self_ty: ty::t,
ty::ty_fn(proto, inputs, output, _, _) {
|
ty::ty_fn(proto, inputs, output, _, _) {
|
||||||
llfnty =
|
llfnty =
|
||||||
type_of_fn_full(cx.ccx, m.span, proto, true, inputs, output,
|
type_of_fn_full(cx.ccx, m.span, proto, true, inputs, output,
|
||||||
std::vec::len[ast::ty_param](ty_params));
|
std::vec::len::<ast::ty_param>(ty_params));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
let mcx: @local_ctxt =
|
let mcx: @local_ctxt =
|
||||||
|
|
@@ -170,8 +170,8 @@ fn log_states_err(pp: &pre_and_post_state) {
fn print_ident(i: &ident) { log " " + i + " "; }
|
fn print_ident(i: &ident) { log " " + i + " "; }
|
||||||
|
|
||||||
fn print_idents(idents: &mutable [ident]) {
|
fn print_idents(idents: &mutable [ident]) {
|
||||||
if vec::len[ident](idents) == 0u { ret; }
|
if vec::len::<ident>(idents) == 0u { ret; }
|
||||||
log "an ident: " + vec::pop[ident](idents);
|
log "an ident: " + vec::pop::<ident>(idents);
|
||||||
print_idents(idents);
|
print_idents(idents);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -320,8 +320,8 @@ fn add_node(ccx: &crate_ctxt, i: node_id, a: &ts_ann) {
|
|
||||||
fn get_ts_ann(ccx: &crate_ctxt, i: node_id) -> option::t<ts_ann> {
|
fn get_ts_ann(ccx: &crate_ctxt, i: node_id) -> option::t<ts_ann> {
|
||||||
if i as uint < vec::len(*ccx.node_anns) {
|
if i as uint < vec::len(*ccx.node_anns) {
|
||||||
ret some[ts_ann](ccx.node_anns.(i));
|
ret some::<ts_ann>(ccx.node_anns.(i));
|
||||||
} else { ret none[ts_ann]; }
|
} else { ret none::<ts_ann>; }
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@@ -508,7 +508,7 @@ fn num_constraints(m: fn_info) -> uint { ret m.num_constraints; }
|
|
||||||
fn new_crate_ctxt(cx: ty::ctxt) -> crate_ctxt {
|
fn new_crate_ctxt(cx: ty::ctxt) -> crate_ctxt {
|
||||||
let na: [mutable ts_ann] = ~[mutable];
|
let na: [mutable ts_ann] = ~[mutable];
|
||||||
ret {tcx: cx, node_anns: @mutable na, fm: @new_int_hash[fn_info]()};
|
ret {tcx: cx, node_anns: @mutable na, fm: @new_int_hash::<fn_info>()};
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Use e's type to determine whether it returns.
|
/* Use e's type to determine whether it returns.
|
||||||
|
|
@@ -160,7 +160,7 @@ fn relax_precond_block_inner(b: &blk, cx: &relax_ctxt,
|
|
||||||
fn relax_precond_block(fcx: &fn_ctxt, i: node_id, b:&blk) {
|
fn relax_precond_block(fcx: &fn_ctxt, i: node_id, b:&blk) {
|
||||||
let cx = {fcx: fcx, i: i};
|
let cx = {fcx: fcx, i: i};
|
||||||
let visitor = visit::default_visitor[relax_ctxt]();
|
let visitor = visit::default_visitor::<relax_ctxt>();
|
||||||
visitor =
|
visitor =
|
||||||
@{visit_block: relax_precond_block_inner,
|
@{visit_block: relax_precond_block_inner,
|
||||||
visit_expr: relax_precond_expr,
|
visit_expr: relax_precond_expr,
|
||||||
|
|
@@ -131,7 +131,7 @@ fn check_states_against_conditions(fcx: &fn_ctxt, f: &_fn,
because we want the smallest possible erroneous statement
|
because we want the smallest possible erroneous statement
|
||||||
or expression. */
|
or expression. */
|
||||||
|
|
||||||
let visitor = visit::default_visitor[fn_ctxt]();
|
let visitor = visit::default_visitor::<fn_ctxt>();
|
||||||
|
|
||||||
visitor =
|
visitor =
|
||||||
@{visit_stmt: check_states_stmt,
|
@{visit_stmt: check_states_stmt,
|
||||||
@@ -205,7 +205,7 @@ fn check_crate(cx: ty::ctxt, crate: @crate) {
annotate_crate(ccx, *crate);
|
annotate_crate(ccx, *crate);
|
||||||
/* Compute the pre and postcondition for every subexpression */
|
/* Compute the pre and postcondition for every subexpression */
|
||||||
|
|
||||||
let vtor = visit::default_visitor[crate_ctxt]();
|
let vtor = visit::default_visitor::<crate_ctxt>();
|
||||||
vtor = @{visit_fn: fn_pre_post with *vtor};
|
vtor = @{visit_fn: fn_pre_post with *vtor};
|
||||||
visit::visit_crate(*crate, ccx, visit::mk_vt(vtor));
|
visit::visit_crate(*crate, ccx, visit::mk_vt(vtor));
|
||||||
|
|
||||||
|
|
@@ -46,7 +46,7 @@ fn collect_pred(e: &@expr, cx: &ctxt, v: &visit::vt<ctxt>) {
fn find_locals(tcx: &ty::ctxt, f: &_fn, tps: &[ty_param], sp: &span,
|
fn find_locals(tcx: &ty::ctxt, f: &_fn, tps: &[ty_param], sp: &span,
|
||||||
i: &fn_ident, id: node_id) -> ctxt {
|
i: &fn_ident, id: node_id) -> ctxt {
|
||||||
let cx: ctxt = {cs: @mutable ~[], tcx: tcx};
|
let cx: ctxt = {cs: @mutable ~[], tcx: tcx};
|
||||||
let visitor = visit::default_visitor[ctxt]();
|
let visitor = visit::default_visitor::<ctxt>();
|
||||||
|
|
||||||
visitor =
|
visitor =
|
||||||
@{visit_local: collect_local,
|
@{visit_local: collect_local,
|
||||||
@@ -91,7 +91,7 @@ fn add_constraint(tcx: &ty::ctxt, c: sp_constr, next: uint, tbl: constr_map)
fn mk_fn_info(ccx: &crate_ctxt, f: &_fn, tp: &[ty_param], f_sp: &span,
|
fn mk_fn_info(ccx: &crate_ctxt, f: &_fn, tp: &[ty_param], f_sp: &span,
|
||||||
f_name: &fn_ident, id: node_id) {
|
f_name: &fn_ident, id: node_id) {
|
||||||
let name = fn_ident_to_string(id, f_name);
|
let name = fn_ident_to_string(id, f_name);
|
||||||
let res_map = @new_def_hash[constraint]();
|
let res_map = @new_def_hash::<constraint>();
|
||||||
let next: uint = 0u;
|
let next: uint = 0u;
|
||||||
|
|
||||||
let cx: ctxt = find_locals(ccx.tcx, f, tp, f_sp, f_name, id);
|
let cx: ctxt = find_locals(ccx.tcx, f, tp, f_sp, f_name, id);
|
||||||
|
|
@@ -75,7 +75,7 @@ fn find_pre_post_item(ccx: &crate_ctxt, i: &item) {
let v: @mutable [node_id] = @mutable ~[];
|
let v: @mutable [node_id] = @mutable ~[];
|
||||||
let fake_fcx =
|
let fake_fcx =
|
||||||
{enclosing:
|
{enclosing:
|
||||||
{constrs: @new_def_hash[constraint](),
|
{constrs: @new_def_hash::<constraint>(),
|
||||||
num_constraints: 0u,
|
num_constraints: 0u,
|
||||||
cf: return,
|
cf: return,
|
||||||
// just bogus
|
// just bogus
|
||||||
@@ -115,7 +115,7 @@ fn find_pre_post_item(ccx: &crate_ctxt, i: &item) {
the preconditions for <args>, and the postcondition in a to
|
the preconditions for <args>, and the postcondition in a to
|
||||||
be the union of all postconditions for <args> */
|
be the union of all postconditions for <args> */
|
||||||
fn find_pre_post_exprs(fcx: &fn_ctxt, args: &[@expr], id: node_id) {
|
fn find_pre_post_exprs(fcx: &fn_ctxt, args: &[@expr], id: node_id) {
|
||||||
if vec::len[@expr](args) > 0u {
|
if vec::len::<@expr>(args) > 0u {
|
||||||
log "find_pre_post_exprs: oper =";
|
log "find_pre_post_exprs: oper =";
|
||||||
log_expr(*args.(0));
|
log_expr(*args.(0));
|
||||||
}
|
}
|
||||||
@@ -125,7 +125,7 @@ fn find_pre_post_exprs(fcx: &fn_ctxt, args: &[@expr], id: node_id) {
fn get_pp(ccx: crate_ctxt, e: &@expr) -> pre_and_post {
|
fn get_pp(ccx: crate_ctxt, e: &@expr) -> pre_and_post {
|
||||||
ret expr_pp(ccx, e);
|
ret expr_pp(ccx, e);
|
||||||
}
|
}
|
||||||
let pps = vec::map[@expr, pre_and_post](bind get_pp(fcx.ccx, _), args);
|
let pps = vec::map::<@expr, pre_and_post>(bind get_pp(fcx.ccx, _), args);
|
||||||
|
|
||||||
set_pre_and_post(fcx.ccx, id, seq_preconds(fcx, pps),
|
set_pre_and_post(fcx.ccx, id, seq_preconds(fcx, pps),
|
||||||
seq_postconds(fcx, vec::map(get_post, pps)));
|
seq_postconds(fcx, vec::map(get_post, pps)));
|
||||||
@@ -509,7 +509,7 @@ fn find_pre_post_expr(fcx: &fn_ctxt, e: @expr) {
postcondition: false_postcond(num_local_vars)};
|
postcondition: false_postcond(num_local_vars)};
|
||||||
let g = bind combine_pp(antec_pp, fcx, _, _);
|
let g = bind combine_pp(antec_pp, fcx, _, _);
|
||||||
let alts_overall_pp =
|
let alts_overall_pp =
|
||||||
vec::foldl[pre_and_post, pre_and_post](g, e_pp, alt_pps);
|
vec::foldl::<pre_and_post, pre_and_post>(g, e_pp, alt_pps);
|
||||||
set_pre_and_post(fcx.ccx, e.id, alts_overall_pp.precondition,
|
set_pre_and_post(fcx.ccx, e.id, alts_overall_pp.precondition,
|
||||||
alts_overall_pp.postcondition);
|
alts_overall_pp.postcondition);
|
||||||
}
|
}
|
||||||
@@ -680,7 +680,7 @@ fn find_pre_post_block(fcx: &fn_ctxt, b: blk) {
for s: @stmt in b.node.stmts { do_one_(fcx, s); }
|
for s: @stmt in b.node.stmts { do_one_(fcx, s); }
|
||||||
fn do_inner_(fcx: fn_ctxt, e: &@expr) { find_pre_post_expr(fcx, e); }
|
fn do_inner_(fcx: fn_ctxt, e: &@expr) { find_pre_post_expr(fcx, e); }
|
||||||
let do_inner = bind do_inner_(fcx, _);
|
let do_inner = bind do_inner_(fcx, _);
|
||||||
option::map[@expr, ()](do_inner, b.node.expr);
|
option::map::<@expr, ()>(do_inner, b.node.expr);
|
||||||
|
|
||||||
let pps: [pre_and_post] = ~[];
|
let pps: [pre_and_post] = ~[];
|
||||||
for s: @stmt in b.node.stmts { pps += ~[stmt_pp(fcx.ccx, *s)]; }
|
for s: @stmt in b.node.stmts { pps += ~[stmt_pp(fcx.ccx, *s)]; }
|
||||||
|
|
@@ -398,9 +398,9 @@ fn mk_rcache() -> creader_cache {
fn mk_ctxt(s: session::session, dm: resolve::def_map, amap: ast_map::map,
|
fn mk_ctxt(s: session::session, dm: resolve::def_map, amap: ast_map::map,
|
||||||
freevars: freevars::freevar_map) -> ctxt {
|
freevars: freevars::freevar_map) -> ctxt {
|
||||||
let ntt: node_type_table =
|
let ntt: node_type_table =
|
||||||
@smallintmap::mk[ty::ty_param_substs_opt_and_ty]();
|
@smallintmap::mk::<ty::ty_param_substs_opt_and_ty>();
|
||||||
let tcache = new_def_hash[ty::ty_param_kinds_and_ty]();
|
let tcache = new_def_hash::<ty::ty_param_kinds_and_ty>();
|
||||||
let ts = @interner::mk[@raw_t](hash_raw_ty, eq_raw_ty);
|
let ts = @interner::mk::<@raw_t>(hash_raw_ty, eq_raw_ty);
|
||||||
let cx =
|
let cx =
|
||||||
@{ts: ts,
|
@{ts: ts,
|
||||||
sess: s,
|
sess: s,
|
||||||
@@ -428,7 +428,7 @@ fn mk_raw_ty(cx: &ctxt, st: &sty, in_cname: &option::t<str>) -> @raw_t {
let has_vars: bool = false;
|
let has_vars: bool = false;
|
||||||
fn derive_flags_t(cx: &ctxt, has_params: &mutable bool,
|
fn derive_flags_t(cx: &ctxt, has_params: &mutable bool,
|
||||||
has_vars: &mutable bool, tt: &t) {
|
has_vars: &mutable bool, tt: &t) {
|
||||||
let rt = interner::get[@raw_t](*cx.ts, tt);
|
let rt = interner::get::<@raw_t>(*cx.ts, tt);
|
||||||
has_params = has_params || rt.has_params;
|
has_params = has_params || rt.has_params;
|
||||||
has_vars = has_vars || rt.has_vars;
|
has_vars = has_vars || rt.has_vars;
|
||||||
}
|
}
|
||||||
@@ -1148,7 +1148,7 @@ fn type_has_dynamic_size(cx: &ctxt, ty: &t) -> bool {
ty_istr. { ret false; }
|
ty_istr. { ret false; }
|
||||||
ty_tag(_, subtys) {
|
ty_tag(_, subtys) {
|
||||||
let i = 0u;
|
let i = 0u;
|
||||||
while i < vec::len[t](subtys) {
|
while i < vec::len::<t>(subtys) {
|
||||||
if type_has_dynamic_size(cx, subtys.(i)) { ret true; }
|
if type_has_dynamic_size(cx, subtys.(i)) { ret true; }
|
||||||
i += 1u;
|
i += 1u;
|
||||||
}
|
}
|
||||||
@@ -1160,7 +1160,7 @@ fn type_has_dynamic_size(cx: &ctxt, ty: &t) -> bool {
ty_ptr(_) { ret false; }
|
ty_ptr(_) { ret false; }
|
||||||
ty_rec(fields) {
|
ty_rec(fields) {
|
||||||
let i = 0u;
|
let i = 0u;
|
||||||
while i < vec::len[field](fields) {
|
while i < vec::len::<field>(fields) {
|
||||||
if type_has_dynamic_size(cx, fields.(i).mt.ty) { ret true; }
|
if type_has_dynamic_size(cx, fields.(i).mt.ty) { ret true; }
|
||||||
i += 1u;
|
i += 1u;
|
||||||
}
|
}
|
||||||
@@ -1599,8 +1599,8 @@ fn equal_type_structures(a: &sty, b: &sty) -> bool {
fn equal_fn(args_a: &[arg], rty_a: &t, args_b: &[arg], rty_b: &t) ->
|
fn equal_fn(args_a: &[arg], rty_a: &t, args_b: &[arg], rty_b: &t) ->
|
||||||
bool {
|
bool {
|
||||||
if !eq_ty(rty_a, rty_b) { ret false; }
|
if !eq_ty(rty_a, rty_b) { ret false; }
|
||||||
let len = vec::len[arg](args_a);
|
let len = vec::len::<arg>(args_a);
|
||||||
if len != vec::len[arg](args_b) { ret false; }
|
if len != vec::len::<arg>(args_b) { ret false; }
|
||||||
let i = 0u;
|
let i = 0u;
|
||||||
while i < len {
|
while i < len {
|
||||||
let arg_a = args_a.(i);
|
let arg_a = args_a.(i);
|
||||||
@@ -1636,8 +1636,8 @@ fn equal_type_structures(a: &sty, b: &sty) -> bool {
alt b {
|
alt b {
|
||||||
ty_tag(id_b, tys_b) {
|
ty_tag(id_b, tys_b) {
|
||||||
if !equal_def(id_a, id_b) { ret false; }
|
if !equal_def(id_a, id_b) { ret false; }
|
||||||
let len = vec::len[t](tys_a);
|
let len = vec::len::<t>(tys_a);
|
||||||
if len != vec::len[t](tys_b) { ret false; }
|
if len != vec::len::<t>(tys_b) { ret false; }
|
||||||
let i = 0u;
|
let i = 0u;
|
||||||
while i < len {
|
while i < len {
|
||||||
if !eq_ty(tys_a.(i), tys_b.(i)) { ret false; }
|
if !eq_ty(tys_a.(i), tys_b.(i)) { ret false; }
|
||||||
@@ -1663,8 +1663,8 @@ fn equal_type_structures(a: &sty, b: &sty) -> bool {
ty_rec(flds_a) {
|
ty_rec(flds_a) {
|
||||||
alt b {
|
alt b {
|
||||||
ty_rec(flds_b) {
|
ty_rec(flds_b) {
|
||||||
let len = vec::len[field](flds_a);
|
let len = vec::len::<field>(flds_a);
|
||||||
if len != vec::len[field](flds_b) { ret false; }
|
if len != vec::len::<field>(flds_b) { ret false; }
|
||||||
let i = 0u;
|
let i = 0u;
|
||||||
while i < len {
|
while i < len {
|
||||||
let fld_a = flds_a.(i);
|
let fld_a = flds_a.(i);
|
||||||
@@ -1716,8 +1716,8 @@ fn equal_type_structures(a: &sty, b: &sty) -> bool {
ty_obj(methods_a) {
|
ty_obj(methods_a) {
|
||||||
alt b {
|
alt b {
|
||||||
ty_obj(methods_b) {
|
ty_obj(methods_b) {
|
||||||
let len = vec::len[method](methods_a);
|
let len = vec::len::<method>(methods_a);
|
||||||
if len != vec::len[method](methods_b) { ret false; }
|
if len != vec::len::<method>(methods_b) { ret false; }
|
||||||
let i = 0u;
|
let i = 0u;
|
||||||
while i < len {
|
while i < len {
|
||||||
let m_a = methods_a.(i);
|
let m_a = methods_a.(i);
|
||||||
@@ -1874,7 +1874,7 @@ fn count_ty_params(cx: &ctxt, ty: t) -> uint {
let param_indices: @mutable [uint] = @mutable ~[];
|
let param_indices: @mutable [uint] = @mutable ~[];
|
||||||
let f = bind counter(cx, param_indices, _);
|
let f = bind counter(cx, param_indices, _);
|
||||||
walk_ty(cx, f, ty);
|
walk_ty(cx, f, ty);
|
||||||
ret vec::len[uint](*param_indices);
|
ret vec::len::<uint>(*param_indices);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn type_contains_vars(cx: &ctxt, typ: &t) -> bool {
|
fn type_contains_vars(cx: &ctxt, typ: &t) -> bool {
|
||||||
@@ -1995,7 +1995,7 @@ fn sort_methods(meths: &[method]) -> [method] {
fn method_lteq(a: &method, b: &method) -> bool {
|
fn method_lteq(a: &method, b: &method) -> bool {
|
||||||
ret str::lteq(a.ident, b.ident);
|
ret str::lteq(a.ident, b.ident);
|
||||||
}
|
}
|
||||||
ret std::sort::merge_sort[method](bind method_lteq(_, _), meths);
|
ret std::sort::merge_sort::<method>(bind method_lteq(_, _), meths);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn is_lval(expr: &@ast::expr) -> bool {
|
fn is_lval(expr: &@ast::expr) -> bool {
|
||||||
@@ -2071,7 +2071,7 @@ mod unify {
type ctxt = {vb: @var_bindings, tcx: ty_ctxt};
|
type ctxt = {vb: @var_bindings, tcx: ty_ctxt};
|
||||||
|
|
||||||
fn mk_var_bindings() -> @var_bindings {
|
fn mk_var_bindings() -> @var_bindings {
|
||||||
ret @{sets: ufind::make(), types: smallintmap::mk[t]()};
|
ret @{sets: ufind::make(), types: smallintmap::mk::<t>()};
|
||||||
}
|
}
|
||||||
|
|
||||||
// Unifies two sets.
|
// Unifies two sets.
|
||||||
@@ -2084,7 +2084,7 @@ mod unify {
bind fn (cx: &@ctxt, t: t, set_a: uint, set_b: uint) {
|
bind fn (cx: &@ctxt, t: t, set_a: uint, set_b: uint) {
|
||||||
ufind::union(cx.vb.sets, set_a, set_b);
|
ufind::union(cx.vb.sets, set_a, set_b);
|
||||||
let root_c: uint = ufind::find(cx.vb.sets, set_a);
|
let root_c: uint = ufind::find(cx.vb.sets, set_a);
|
||||||
smallintmap::insert[t](cx.vb.types, root_c, t);
|
smallintmap::insert::<t>(cx.vb.types, root_c, t);
|
||||||
}(_, _, set_a, set_b);
|
}(_, _, set_a, set_b);
|
||||||
|
|
||||||
|
|
||||||
@@ -2115,7 +2115,7 @@ mod unify {
ufind::grow(cx.vb.sets, (key as uint) + 1u);
|
ufind::grow(cx.vb.sets, (key as uint) + 1u);
|
||||||
let root = ufind::find(cx.vb.sets, key as uint);
|
let root = ufind::find(cx.vb.sets, key as uint);
|
||||||
let result_type = typ;
|
let result_type = typ;
|
||||||
alt smallintmap::find[t](cx.vb.types, root) {
|
alt smallintmap::find::<t>(cx.vb.types, root) {
|
||||||
some(old_type) {
|
some(old_type) {
|
||||||
alt unify_step(cx, old_type, typ) {
|
alt unify_step(cx, old_type, typ) {
|
||||||
ures_ok(unified_type) { result_type = unified_type; }
|
ures_ok(unified_type) { result_type = unified_type; }
|
||||||
@@ -2124,7 +2124,7 @@ mod unify {
}
|
}
|
||||||
none. {/* fall through */ }
|
none. {/* fall through */ }
|
||||||
}
|
}
|
||||||
smallintmap::insert[t](cx.vb.types, root, result_type);
|
smallintmap::insert::<t>(cx.vb.types, root, result_type);
|
||||||
ret ures_ok(typ);
|
ret ures_ok(typ);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -2212,8 +2212,8 @@ mod unify {
expected_inputs: &[arg], expected_output: &t,
|
expected_inputs: &[arg], expected_output: &t,
|
||||||
actual_inputs: &[arg], actual_output: &t) ->
|
actual_inputs: &[arg], actual_output: &t) ->
|
||||||
fn_common_res {
|
fn_common_res {
|
||||||
let expected_len = vec::len[arg](expected_inputs);
|
let expected_len = vec::len::<arg>(expected_inputs);
|
||||||
let actual_len = vec::len[arg](actual_inputs);
|
let actual_len = vec::len::<arg>(actual_inputs);
|
||||||
if expected_len != actual_len {
|
if expected_len != actual_len {
|
||||||
ret fn_common_res_err(ures_err(terr_arg_count));
|
ret fn_common_res_err(ures_err(terr_arg_count));
|
||||||
}
|
}
|
||||||
@@ -2309,8 +2309,8 @@ mod unify {
result {
|
result {
|
||||||
let result_meths: [method] = ~[];
|
let result_meths: [method] = ~[];
|
||||||
let i: uint = 0u;
|
let i: uint = 0u;
|
||||||
let expected_len: uint = vec::len[method](expected_meths);
|
let expected_len: uint = vec::len::<method>(expected_meths);
|
||||||
let actual_len: uint = vec::len[method](actual_meths);
|
let actual_len: uint = vec::len::<method>(actual_meths);
|
||||||
if expected_len != actual_len { ret ures_err(terr_meth_count); }
|
if expected_len != actual_len { ret ures_err(terr_meth_count); }
|
||||||
while i < expected_len {
|
while i < expected_len {
|
||||||
let e_meth = expected_meths.(i);
|
let e_meth = expected_meths.(i);
|
||||||
@@ -2350,7 +2350,7 @@ mod unify {
ret fix_err(vid);
|
ret fix_err(vid);
|
||||||
}
|
}
|
||||||
let root_id = ufind::find(vb.sets, vid as uint);
|
let root_id = ufind::find(vb.sets, vid as uint);
|
||||||
alt smallintmap::find[t](vb.types, root_id) {
|
alt smallintmap::find::<t>(vb.types, root_id) {
|
||||||
none. { ret fix_err(vid); }
|
none. { ret fix_err(vid); }
|
||||||
some(rt) { ret fix_ok(rt); }
|
some(rt) { ret fix_ok(rt); }
|
||||||
}
|
}
|
||||||
@@ -2451,7 +2451,7 @@ mod unify {
// TODO: factor this cruft out
|
// TODO: factor this cruft out
|
||||||
let result_tps: [t] = ~[];
|
let result_tps: [t] = ~[];
|
||||||
let i = 0u;
|
let i = 0u;
|
||||||
let expected_len = vec::len[t](expected_tps);
|
let expected_len = vec::len::<t>(expected_tps);
|
||||||
while i < expected_len {
|
while i < expected_len {
|
||||||
let expected_tp = expected_tps.(i);
|
let expected_tp = expected_tps.(i);
|
||||||
let actual_tp = actual_tps.(i);
|
let actual_tp = actual_tps.(i);
|
||||||
@@ -2592,8 +2592,8 @@ mod unify {
ty::ty_rec(expected_fields) {
|
ty::ty_rec(expected_fields) {
|
||||||
alt struct(cx.tcx, actual) {
|
alt struct(cx.tcx, actual) {
|
||||||
ty::ty_rec(actual_fields) {
|
ty::ty_rec(actual_fields) {
|
||||||
let expected_len = vec::len[field](expected_fields);
|
let expected_len = vec::len::<field>(expected_fields);
|
||||||
let actual_len = vec::len[field](actual_fields);
|
let actual_len = vec::len::<field>(actual_fields);
|
||||||
if expected_len != actual_len {
|
if expected_len != actual_len {
|
||||||
let err = terr_record_size(expected_len, actual_len);
|
let err = terr_record_size(expected_len, actual_len);
|
||||||
ret ures_err(err);
|
ret ures_err(err);
|
||||||
@@ -2733,17 +2733,17 @@ mod unify {
}
|
}
|
||||||
fn dump_var_bindings(tcx: ty_ctxt, vb: @var_bindings) {
|
fn dump_var_bindings(tcx: ty_ctxt, vb: @var_bindings) {
|
||||||
let i = 0u;
|
let i = 0u;
|
||||||
while i < vec::len[ufind::node](vb.sets.nodes) {
|
while i < vec::len::<ufind::node>(vb.sets.nodes) {
|
||||||
let sets = "";
|
let sets = "";
|
||||||
let j = 0u;
|
let j = 0u;
|
||||||
while j < vec::len[option::t<uint>](vb.sets.nodes) {
|
while j < vec::len::<option::t<uint>>(vb.sets.nodes) {
|
||||||
if ufind::find(vb.sets, j) == i {
|
if ufind::find(vb.sets, j) == i {
|
||||||
sets += #fmt(" %u", j);
|
sets += #fmt(" %u", j);
|
||||||
}
|
}
|
||||||
j += 1u;
|
j += 1u;
|
||||||
}
|
}
|
||||||
let typespec;
|
let typespec;
|
||||||
alt smallintmap::find[t](vb.types, i) {
|
alt smallintmap::find::<t>(vb.types, i) {
|
||||||
none. { typespec = ""; }
|
none. { typespec = ""; }
|
||||||
some(typ) { typespec = " =" + ty_to_str(tcx, typ); }
|
some(typ) { typespec = " =" + ty_to_str(tcx, typ); }
|
||||||
}
|
}
|
||||||
@@ -2767,7 +2767,7 @@ mod unify {
ret ty::mk_var(tcx, vid);
|
ret ty::mk_var(tcx, vid);
|
||||||
}
|
}
|
||||||
let root_id = ufind::find(vb.sets, vid as uint);
|
let root_id = ufind::find(vb.sets, vid as uint);
|
||||||
alt smallintmap::find[t](vb.types, root_id) {
|
alt smallintmap::find::<t>(vb.types, root_id) {
|
||||||
none. { *unresolved = some(vid); ret ty::mk_var(tcx, vid); }
|
none. { *unresolved = some(vid); ret ty::mk_var(tcx, vid); }
|
||||||
some(rt) {
|
some(rt) {
|
||||||
if occurs_check_fails(tcx, sp, vid, rt) {
|
if occurs_check_fails(tcx, sp, vid, rt) {
|
||||||
@@ -2779,7 +2779,7 @@ mod unify {
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
let unresolved = @mutable none[int];
|
let unresolved = @mutable none::<int>;
|
||||||
let rty =
|
let rty =
|
||||||
fold_ty(tcx, fm_var(bind subst_vars(tcx, sp, vb, unresolved, _)),
|
fold_ty(tcx, fm_var(bind subst_vars(tcx, sp, vb, unresolved, _)),
|
||||||
typ);
|
typ);
|
||||||
@@ -2794,7 +2794,7 @@ mod unify {
fixup_result {
|
fixup_result {
|
||||||
if vid as uint >= ufind::set_count(vb.sets) { ret fix_err(vid); }
|
if vid as uint >= ufind::set_count(vb.sets) { ret fix_err(vid); }
|
||||||
let root_id = ufind::find(vb.sets, vid as uint);
|
let root_id = ufind::find(vb.sets, vid as uint);
|
||||||
alt smallintmap::find[t](vb.types, root_id) {
|
alt smallintmap::find::<t>(vb.types, root_id) {
|
||||||
none. { ret fix_err(vid); }
|
none. { ret fix_err(vid); }
|
||||||
some(rt) { ret fixup_vars(tcx, sp, vb, rt); }
|
some(rt) { ret fixup_vars(tcx, sp, vb, rt); }
|
||||||
}
|
}
|
||||||
@@ -2947,7 +2947,7 @@ fn tag_variant_with_id(cx: &ctxt, tag_id: &ast::def_id,
variant_id: &ast::def_id) -> variant_info {
|
variant_id: &ast::def_id) -> variant_info {
|
||||||
let variants = tag_variants(cx, tag_id);
|
let variants = tag_variants(cx, tag_id);
|
||||||
let i = 0u;
|
let i = 0u;
|
||||||
while i < vec::len[variant_info](variants) {
|
while i < vec::len::<variant_info>(variants) {
|
||||||
let variant = variants.(i);
|
let variant = variants.(i);
|
||||||
if def_eq(variant.id, variant_id) { ret variant; }
|
if def_eq(variant.id, variant_id) { ret variant; }
|
||||||
i += 1u;
|
i += 1u;
|
||||||
|
|
@@ -164,7 +164,7 @@ fn instantiate_path(fcx: &@fn_ctxt, pth: &ast::path,
ty_param_count);
|
ty_param_count);
|
||||||
let ty_param_vars = bind_result.ids;
|
let ty_param_vars = bind_result.ids;
|
||||||
let ty_substs_opt;
|
let ty_substs_opt;
|
||||||
let ty_substs_len = vec::len[@ast::ty](pth.node.types);
|
let ty_substs_len = vec::len::<@ast::ty>(pth.node.types);
|
||||||
if ty_substs_len > 0u {
|
if ty_substs_len > 0u {
|
||||||
let param_var_len = vec::len(ty_param_vars);
|
let param_var_len = vec::len(ty_param_vars);
|
||||||
if param_var_len == 0u {
|
if param_var_len == 0u {
|
||||||
@@ -186,7 +186,7 @@ fn instantiate_path(fcx: &@fn_ctxt, pth: &ast::path,
ty_substs += ~[res_ty];
|
ty_substs += ~[res_ty];
|
||||||
i += 1u;
|
i += 1u;
|
||||||
}
|
}
|
||||||
ty_substs_opt = some[[ty::t]](ty_substs);
|
ty_substs_opt = some::<[ty::t]>(ty_substs);
|
||||||
if ty_param_count == 0u {
|
if ty_param_count == 0u {
|
||||||
fcx.ccx.tcx.sess.span_fatal(sp,
|
fcx.ccx.tcx.sess.span_fatal(sp,
|
||||||
"this item does not take type \
|
"this item does not take type \
|
||||||
@@ -200,7 +200,7 @@ fn instantiate_path(fcx: &@fn_ctxt, pth: &ast::path,
ty_substs += ~[ty::mk_var(fcx.ccx.tcx, ty_param_vars.(i))];
|
ty_substs += ~[ty::mk_var(fcx.ccx.tcx, ty_param_vars.(i))];
|
||||||
i += 1u;
|
i += 1u;
|
||||||
}
|
}
|
||||||
ty_substs_opt = some[[ty::t]](ty_substs);
|
ty_substs_opt = some::<[ty::t]>(ty_substs);
|
||||||
}
|
}
|
||||||
ret {substs: ty_substs_opt, ty: tpt.ty};
|
ret {substs: ty_substs_opt, ty: tpt.ty};
|
||||||
}
|
}
|
||||||
@@ -271,7 +271,7 @@ fn ast_ty_to_ty(tcx: &ty::ctxt, getter: &ty_getter, ast_ty: &@ast::ty) ->
none. { }
|
none. { }
|
||||||
} /* go on */
|
} /* go on */
|
||||||
|
|
||||||
tcx.ast_ty_to_ty_cache.insert(ast_ty, none[ty::t]);
|
tcx.ast_ty_to_ty_cache.insert(ast_ty, none::<ty::t>);
|
||||||
fn ast_arg_to_arg(tcx: &ty::ctxt, getter: &ty_getter, arg: &ast::ty_arg)
|
fn ast_arg_to_arg(tcx: &ty::ctxt, getter: &ty_getter, arg: &ast::ty_arg)
|
||||||
-> {mode: ty::mode, ty: ty::t} {
|
-> {mode: ty::mode, ty: ty::t} {
|
||||||
let ty_mode = ast_mode_to_mode(arg.node.mode);
|
let ty_mode = ast_mode_to_mode(arg.node.mode);
|
||||||
@@ -309,7 +309,7 @@ fn ast_ty_to_ty(tcx: &ty::ctxt, getter: &ty_getter, ast_ty: &@ast::ty) ->
ret typ;
|
ret typ;
|
||||||
}
|
}
|
||||||
let typ;
|
let typ;
|
||||||
let cname = none[str];
|
let cname = none::<str>;
|
||||||
alt ast_ty.node {
|
alt ast_ty.node {
|
||||||
ast::ty_nil. { typ = ty::mk_nil(tcx); }
|
ast::ty_nil. { typ = ty::mk_nil(tcx); }
|
||||||
ast::ty_bot. { typ = ty::mk_bot(tcx); }
|
ast::ty_bot. { typ = ty::mk_bot(tcx); }
|
||||||
@@ -476,23 +476,23 @@ mod write {
|
|
||||||
// Writes a type with no type parameters into the node type table.
|
// Writes a type with no type parameters into the node type table.
|
||||||
fn ty_only(tcx: &ty::ctxt, node_id: ast::node_id, typ: ty::t) {
|
fn ty_only(tcx: &ty::ctxt, node_id: ast::node_id, typ: ty::t) {
|
||||||
ty(tcx, node_id, {substs: none[[ty::t]], ty: typ});
|
ty(tcx, node_id, {substs: none::<[ty::t]>, ty: typ});
|
||||||
}
|
}
|
||||||
|
|
||||||
// Writes a type with no type parameters into the node type table. This
|
// Writes a type with no type parameters into the node type table. This
|
||||||
// function allows for the possibility of type variables.
|
// function allows for the possibility of type variables.
|
||||||
fn ty_only_fixup(fcx: @fn_ctxt, node_id: ast::node_id, typ: ty::t) {
|
fn ty_only_fixup(fcx: @fn_ctxt, node_id: ast::node_id, typ: ty::t) {
|
||||||
ret ty_fixup(fcx, node_id, {substs: none[[ty::t]], ty: typ});
|
ret ty_fixup(fcx, node_id, {substs: none::<[ty::t]>, ty: typ});
|
||||||
}
|
}
|
||||||
|
|
||||||
// Writes a nil type into the node type table.
|
// Writes a nil type into the node type table.
|
||||||
fn nil_ty(tcx: &ty::ctxt, node_id: ast::node_id) {
|
fn nil_ty(tcx: &ty::ctxt, node_id: ast::node_id) {
|
||||||
ret ty(tcx, node_id, {substs: none[[ty::t]], ty: ty::mk_nil(tcx)});
|
ret ty(tcx, node_id, {substs: none::<[ty::t]>, ty: ty::mk_nil(tcx)});
|
||||||
}
|
}
|
||||||
|
|
||||||
// Writes the bottom type into the node type table.
|
// Writes the bottom type into the node type table.
|
||||||
fn bot_ty(tcx: &ty::ctxt, node_id: ast::node_id) {
|
fn bot_ty(tcx: &ty::ctxt, node_id: ast::node_id) {
|
||||||
ret ty(tcx, node_id, {substs: none[[ty::t]], ty: ty::mk_bot(tcx)});
|
ret ty(tcx, node_id, {substs: none::<[ty::t]>, ty: ty::mk_bot(tcx)});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -745,7 +745,7 @@ mod collect {
// constructors get turned into functions.
|
// constructors get turned into functions.
|
||||||
|
|
||||||
let result_ty;
|
let result_ty;
|
||||||
if vec::len[ast::variant_arg](variant.node.args) == 0u {
|
if vec::len::<ast::variant_arg>(variant.node.args) == 0u {
|
||||||
result_ty = ty::mk_tag(cx.tcx, tag_id, ty_param_tys);
|
result_ty = ty::mk_tag(cx.tcx, tag_id, ty_param_tys);
|
||||||
} else {
|
} else {
|
||||||
// As above, tell ast_ty_to_ty() that trans_ty_item_to_ty()
|
// As above, tell ast_ty_to_ty() that trans_ty_item_to_ty()
|
||||||
@@ -784,7 +784,7 @@ mod collect {
ast::item_native_mod(native_mod) {
|
ast::item_native_mod(native_mod) {
|
||||||
// Propagate the native ABI down to convert_native() below,
|
// Propagate the native ABI down to convert_native() below,
|
||||||
// but otherwise do nothing, as native modules have no types.
|
// but otherwise do nothing, as native modules have no types.
|
||||||
*abi = some[ast::native_abi](native_mod.abi);
|
*abi = some::<ast::native_abi>(native_mod.abi);
|
||||||
}
|
}
|
||||||
ast::item_tag(variants, ty_params) {
|
ast::item_tag(variants, ty_params) {
|
||||||
let tpt = ty_of_item(cx, it);
|
let tpt = ty_of_item(cx, it);
|
||||||
@@ -806,7 +806,7 @@ mod collect {
// ty_of_obj().)
|
// ty_of_obj().)
|
||||||
let method_types = get_obj_method_types(cx, object);
|
let method_types = get_obj_method_types(cx, object);
|
||||||
let i = 0u;
|
let i = 0u;
|
||||||
while i < vec::len[@ast::method](object.methods) {
|
while i < vec::len::<@ast::method>(object.methods) {
|
||||||
write::ty_only(cx.tcx, object.methods.(i).node.id,
|
write::ty_only(cx.tcx, object.methods.(i).node.id,
|
||||||
ty::method_ty_to_fn_ty(cx.tcx,
|
ty::method_ty_to_fn_ty(cx.tcx,
|
||||||
method_types.(i)));
|
method_types.(i)));
|
||||||
@@ -818,7 +818,7 @@ mod collect {
// an assertion in trans.
|
// an assertion in trans.
|
||||||
let args = ty::ty_fn_args(cx.tcx, tpt.ty);
|
let args = ty::ty_fn_args(cx.tcx, tpt.ty);
|
||||||
i = 0u;
|
i = 0u;
|
||||||
while i < vec::len[ty::arg](args) {
|
while i < vec::len::<ty::arg>(args) {
|
||||||
let fld = object.fields.(i);
|
let fld = object.fields.(i);
|
||||||
write::ty_only(cx.tcx, fld.id, args.(i).ty);
|
write::ty_only(cx.tcx, fld.id, args.(i).ty);
|
||||||
i += 1u;
|
i += 1u;
|
||||||
@@ -856,7 +856,8 @@ mod collect {
// type of the native item. We simply write it into the node type
|
// type of the native item. We simply write it into the node type
|
||||||
// table.
|
// table.
|
||||||
let tpt =
|
let tpt =
|
||||||
ty_of_native_item(cx, i, option::get[ast::native_abi]({ *abi }));
ty_of_native_item(cx, i,
option::get::<ast::native_abi>({ *abi }));
|
||||||
alt i.node {
|
alt i.node {
|
||||||
ast::native_item_ty. {
|
ast::native_item_ty. {
|
||||||
// FIXME: Native types have no annotation. Should they? --pcw
|
// FIXME: Native types have no annotation. Should they? --pcw
|
||||||
|
@ -869,7 +870,7 @@ mod collect {
|
||||||
fn collect_item_types(tcx: &ty::ctxt, crate: &@ast::crate) {
|
fn collect_item_types(tcx: &ty::ctxt, crate: &@ast::crate) {
|
||||||
// We have to propagate the surrounding ABI to the native items
|
// We have to propagate the surrounding ABI to the native items
|
||||||
// contained within the native module.
|
// contained within the native module.
|
||||||
let abi = @mutable none[ast::native_abi];
|
let abi = @mutable none::<ast::native_abi>;
|
||||||
let cx = @{tcx: tcx};
|
let cx = @{tcx: tcx};
|
||||||
let visit = visit::mk_simple_visitor
|
let visit = visit::mk_simple_visitor
|
||||||
(@{visit_item: bind convert(cx, abi, _),
|
(@{visit_item: bind convert(cx, abi, _),
|
||||||
|
@@ -1101,8 +1102,8 @@ mod writeback {
};
let new_substs_opt;
alt tpot.substs {
-none[[ty::t]]. { new_substs_opt = none[[ty::t]]; }
-some[[ty::t]](substs) {
+none::<[ty::t]>. { new_substs_opt = none::<[ty::t]>; }
+some::<[ty::t]>(substs) {
let new_substs: [ty::t] = ~[];
for subst: ty::t in substs {
alt resolve_type_vars_in_type(fcx, sp, subst) {

@@ -1110,7 +1111,7 @@ mod writeback {
none. { wbcx.success = false; ret; }
}
}
-new_substs_opt = some[[ty::t]](new_substs);
+new_substs_opt = some::<[ty::t]>(new_substs);
}
}
write::ty(fcx.ccx.tcx, id, {substs: new_substs_opt, ty: new_ty});

@@ -1207,8 +1208,8 @@ fn gather_locals(ccx: &@crate_ctxt, f: &ast::_fn, id: &ast::node_id,
let {vb, locals, local_names, nvi} = alt old_fcx {
none. {
{ vb: ty::unify::mk_var_bindings(),
-locals: new_int_hash[int](),
-local_names: new_int_hash[ast::ident](),
+locals: new_int_hash::<int>(),
+local_names: new_int_hash::<ast::ident>(),
nvi: @mutable 0 }
}
some(fcx) {

@@ -1357,17 +1358,17 @@ fn check_pat(fcx: &@fn_ctxt, map: &ast::pat_id_map, pat: &@ast::pat,
demand::with_substs(fcx, pat.span, expected, ctor_ty,
expected_tps);
path_tpot =
-{substs: some[[ty::t]](path_tpt.substs), ty: path_tpt.ty};
+{substs: some::<[ty::t]>(path_tpt.substs), ty: path_tpt.ty};

// Get the number of arguments in this tag variant.
let arg_types =
variant_arg_types(fcx.ccx, pat.span, v_def_ids.var,
expected_tps);
-let subpats_len = std::vec::len[@ast::pat](subpats);
-if std::vec::len[ty::t](arg_types) > 0u {
+let subpats_len = std::vec::len::<@ast::pat>(subpats);
+if std::vec::len::<ty::t>(arg_types) > 0u {
// N-ary variant.

-let arg_len = vec::len[ty::t](arg_types);
+let arg_len = vec::len::<ty::t>(arg_types);
if arg_len != subpats_len {
// TODO: note definition of tag variant
// TODO (issue #448): Wrap a #fmt string over multiple
@@ -1586,8 +1587,8 @@ fn check_expr_with_unifier(fcx: &@fn_ctxt, expr: &@ast::expr,
}

// Check that the correct number of arguments were supplied.
-let expected_arg_count = vec::len[ty::arg](arg_tys);
-let supplied_arg_count = vec::len[option::t<@ast::expr>](args);
+let expected_arg_count = vec::len::<ty::arg>(arg_tys);
+let supplied_arg_count = vec::len::<option::t<@ast::expr>>(args);
if expected_arg_count != supplied_arg_count {
fcx.ccx.tcx.sess.span_fatal(sp,
#fmt("this function takes %u \

@@ -1649,7 +1650,7 @@ fn check_expr_with_unifier(fcx: &@fn_ctxt, expr: &@ast::expr,
args: &[@ast::expr], call_kind: call_kind) -> bool {
let args_opt_0: [option::t<@ast::expr>] = ~[];
for arg: @ast::expr in args {
-args_opt_0 += ~[some[@ast::expr](arg)];
+args_opt_0 += ~[some::<@ast::expr>(arg)];
}

// Call the generic checker.

@@ -1870,7 +1871,7 @@ fn check_expr_with_unifier(fcx: &@fn_ctxt, expr: &@ast::expr,
} else {
// The definition doesn't take type parameters. If the programmer
// supplied some, that's an error.
-if vec::len[@ast::ty](pth.node.types) > 0u {
+if vec::len::<@ast::ty>(pth.node.types) > 0u {
tcx.sess.span_fatal(expr.span,
"this kind of value does not \
take type parameters");

@@ -2093,7 +2094,7 @@ fn check_expr_with_unifier(fcx: &@fn_ctxt, expr: &@ast::expr,
// For each blank argument, add the type of that argument
// to the resulting function type.
let i = 0u;
-while i < vec::len[option::t<@ast::expr>](args) {
+while i < vec::len::<option::t<@ast::expr>>(args) {
alt args.(i) {
some(_) {/* no-op */ }
none. { arg_tys_1 += ~[arg_tys.(i)]; }

@@ -2259,7 +2260,7 @@ fn check_expr_with_unifier(fcx: &@fn_ctxt, expr: &@ast::expr,
ty::ty_rec(fields) {
let ix: uint =
ty::field_idx(tcx.sess, expr.span, field, fields);
-if ix >= vec::len[ty::field](fields) {
+if ix >= vec::len::<ty::field>(fields) {
tcx.sess.span_fatal(expr.span, "bad index on record");
}
write::ty_only_fixup(fcx, id, fields.(ix).mt.ty);

@@ -2267,7 +2268,7 @@ fn check_expr_with_unifier(fcx: &@fn_ctxt, expr: &@ast::expr,
ty::ty_obj(methods) {
let ix: uint =
ty::method_idx(tcx.sess, expr.span, field, methods);
-if ix >= vec::len[ty::method](methods) {
+if ix >= vec::len::<ty::method>(methods) {
tcx.sess.span_fatal(expr.span, "bad index on obj");
}
let meth = methods.(ix);

@@ -2420,8 +2421,8 @@ fn check_expr_with_unifier(fcx: &@fn_ctxt, expr: &@ast::expr,

let f = bind filtering_fn(fcx.ccx, _, ao.methods);
inner_obj_methods =
-std::vec::filter_map[ty::method,
-ty::method](f, inner_obj_methods);
+std::vec::filter_map::<ty::method,
+ty::method>(f, inner_obj_methods);

method_types += inner_obj_methods;
}

@@ -2433,7 +2434,7 @@ fn check_expr_with_unifier(fcx: &@fn_ctxt, expr: &@ast::expr,
// Write the methods into the node type table. (This happens in
// collect::convert for regular objects.)
let i = 0u;
-while i < vec::len[@ast::method](ao.methods) {
+while i < vec::len::<@ast::method>(ao.methods) {
write::ty_only(tcx, ao.methods.(i).node.id,
ty::method_ty_to_fn_ty(tcx,
method_types.(i)));

@@ -2446,7 +2447,7 @@ fn check_expr_with_unifier(fcx: &@fn_ctxt, expr: &@ast::expr,
}

// Now remove the info from the stack.
-vec::pop[obj_info](fcx.ccx.obj_infos);
+vec::pop::<obj_info>(fcx.ccx.obj_infos);
}
ast::expr_uniq(x) {
let t = next_ty_var(fcx);

@@ -2474,7 +2475,7 @@ fn next_ty_var(fcx: &@fn_ctxt) -> ty::t {
}

fn get_obj_info(ccx: &@crate_ctxt) -> option::t<obj_info> {
-ret vec::last[obj_info](ccx.obj_infos);
+ret vec::last::<obj_info>(ccx.obj_infos);
}

fn check_decl_initializer(fcx: &@fn_ctxt, nid: ast::node_id,

@@ -2569,8 +2570,8 @@ fn check_const(ccx: &@crate_ctxt, sp: &span, e: &@ast::expr,
purity: ast::pure_fn,
proto: ast::proto_fn,
var_bindings: ty::unify::mk_var_bindings(),
-locals: new_int_hash[int](),
-local_names: new_int_hash[ast::ident](),
+locals: new_int_hash::<int>(),
+local_names: new_int_hash::<ast::ident>(),
next_var_id: @mutable 0,
mutable fixups: fixups,
ccx: ccx};

@@ -2653,7 +2654,7 @@ fn check_item(ccx: @crate_ctxt, it: &@ast::item) {
for method: @ast::method in ob.methods { check_method(ccx, method); }

// Now remove the info from the stack.
-vec::pop[obj_info](ccx.obj_infos);
+vec::pop::<obj_info>(ccx.obj_infos);
}
_ {/* nothing to do */ }
}
@@ -142,7 +142,7 @@ type pat_id_map = std::map::hashmap<str, ast::node_id>;
// This is used because same-named variables in alternative patterns need to
// use the node_id of their namesake in the first pattern.
fn pat_id_map(pat: &@pat) -> pat_id_map {
-let map = std::map::new_str_hash[node_id]();
+let map = std::map::new_str_hash::<node_id>();
for each bound in pat_bindings(pat) {
let name = alt bound.node { pat_bind(n) { n } };
map.insert(name, bound.id);

@@ -661,7 +661,7 @@ fn eq_ty(a: &@ty, b: &@ty) -> bool { ret std::box::ptr_eq(a, b); }
fn hash_ty(t: &@ty) -> uint { ret t.span.lo << 16u + t.span.hi; }

fn block_from_expr(e: @expr) -> blk {
-let blk_ = {stmts: ~[], expr: option::some[@expr](e), id: e.id};
+let blk_ = {stmts: ~[], expr: option::some::<@expr>(e), id: e.id};
ret {node: blk_, span: e.span};
}
@@ -20,7 +20,7 @@ tag syntax_extension {
// A temporary hard-coded map of methods for expanding syntax extension
// AST nodes into full ASTs
fn syntax_expander_table() -> hashmap<str, syntax_extension> {
-let syntax_expanders = new_str_hash[syntax_extension]();
+let syntax_expanders = new_str_hash::<syntax_extension>();
syntax_expanders.insert("fmt", normal(ext::fmt::expand_syntax_ext));
syntax_expanders.insert("env", normal(ext::env::expand_syntax_ext));
syntax_expanders.insert("macro",

@@ -16,7 +16,7 @@ fn expand_syntax_ext(cx: &ext_ctxt, sp: codemap::span, arg: @ast::expr,
ast::expr_vec(elts, _, _) { elts }
_ { cx.span_fatal(sp, "#env requires arguments of the form `[...]`.") }
};
-if vec::len[@ast::expr](args) != 1u {
+if vec::len::<@ast::expr>(args) != 1u {
cx.span_fatal(sp, "malformed #env call");
}
// FIXME: if this was more thorough it would manufacture an

@@ -21,7 +21,7 @@ fn expand_syntax_ext(cx: &ext_ctxt, sp: span, arg: @ast::expr,
ast::expr_vec(elts, _, _) { elts }
_ { cx.span_fatal(sp, "#fmt requires arguments of the form `[...]`.") }
};
-if vec::len[@ast::expr](args) == 0u {
+if vec::len::<@ast::expr>(args) == 0u {
cx.span_fatal(sp, "#fmt requires a format string");
}
let fmt =

@@ -93,7 +93,7 @@ fn pieces_to_expr(cx: &ext_ctxt, sp: span, pieces: &[piece],
{node: {mut: ast::imm, ident: ident, expr: val}, span: sp};
astfields += ~[astfield];
}
-let recexpr = ast::expr_rec(astfields, option::none[@ast::expr]);
+let recexpr = ast::expr_rec(astfields, option::none::<@ast::expr>);
ret @{id: cx.next_id(), node: recexpr, span: sp};
}
fn make_path_vec(cx: &ext_ctxt, ident: str) -> [str] {

@@ -130,7 +130,7 @@ fn pieces_to_expr(cx: &ext_ctxt, sp: span, pieces: &[piece],
// through the rec that these flags are a member of, so
// this is a hack placeholder flag

-if vec::len[@ast::expr](flagexprs) == 0u {
+if vec::len::<@ast::expr>(flagexprs) == 0u {
flagexprs += ~[make_rt_path_expr(cx, sp, "flag_none")];
}
ret make_vec_expr(cx, sp, flagexprs);

@@ -307,7 +307,7 @@ fn pieces_to_expr(cx: &ext_ctxt, sp: span, pieces: &[piece],
let fmt_sp = args.(0).span;
let n = 0u;
let tmp_expr = make_new_str(cx, sp, "");
-let nargs = vec::len[@ast::expr](args);
+let nargs = vec::len::<@ast::expr>(args);
for pc: piece in pieces {
alt pc {
piece_string(s) {

@@ -9,7 +9,7 @@ fn expand_syntax_ext(cx: &ext_ctxt, sp: codemap::span, arg: @ast::expr,
ast::expr_vec(elts, _, _) { elts }
_ { cx.span_fatal(sp, "#ident_to_str requires a vector argument .") }
};
-if vec::len[@ast::expr](args) != 1u {
+if vec::len::<@ast::expr>(args) != 1u {
cx.span_fatal(sp, "malformed #ident_to_str call");
}
@@ -167,7 +167,7 @@ fn acumm_bindings(cx: &ext_ctxt, b_dest: &bindings, b_src: &bindings) { }

fn pattern_to_selectors(cx: &ext_ctxt, e: @expr) -> binders {
let res: binders =
-{real_binders: new_str_hash[selector](),
+{real_binders: new_str_hash::<selector>(),
mutable literal_ast_matchers: ~[]};
//this oughta return binders instead, but macro args are a sequence of
//expressions, rather than a single expression

@@ -183,7 +183,7 @@ bindings. Most of the work is done in p_t_s, which generates the
selectors. */

fn use_selectors_to_bind(b: &binders, e: @expr) -> option::t<bindings> {
-let res = new_str_hash[arb_depth<matchable>]();
+let res = new_str_hash::<arb_depth<matchable>>();
//need to do this first, to check vec lengths.
for sel: selector in b.literal_ast_matchers {
alt sel(match_expr(e)) { none. { ret none; } _ { } }

@@ -262,7 +262,7 @@ fn follow_for_trans(cx: &ext_ctxt, mmaybe: &option::t<arb_depth<matchable>>,

/* helper for transcribe_exprs: what vars from `b` occur in `e`? */
iter free_vars(b: &bindings, e: @expr) -> ident {
-let idents: hashmap<ident, ()> = new_str_hash[()]();
+let idents: hashmap<ident, ()> = new_str_hash::<()>();
fn mark_ident(i: &ident, fld: ast_fold, b: &bindings,
idents: &hashmap<ident, ()>) -> ident {
if b.contains_key(i) { idents.insert(i, ()); }
@@ -488,9 +488,9 @@ fn noop_fold_local(l: &local_, fld: ast_fold) -> local_ {
ret {ty: fld.fold_ty(l.ty),
pat: fld.fold_pat(l.pat),
init: alt l.init {
-option::none[initializer]. { l.init }
-option::some[initializer](init) {
-option::some[initializer]({op: init.op,
+option::none::<initializer>. { l.init }
+option::some::<initializer>(init) {
+option::some::<initializer>({op: init.op,
expr: fld.fold_expr(init.expr)})
}
},
@@ -188,7 +188,7 @@ fn scan_exponent(rdr: &reader) -> option::t<str> {
if str::byte_len(exponent) > 0u {
ret some(rslt + exponent);
} else { rdr.err("scan_exponent: bad fp literal"); fail; }
-} else { ret none[str]; }
+} else { ret none::<str>; }
}

fn scan_dec_digits(rdr: &reader) -> str {

@@ -301,14 +301,14 @@ fn scan_number(c: char, rdr: &reader) -> token::token {

}
} else {
-ret token::LIT_FLOAT(interner::intern[str](*rdr.get_interner(),
+ret token::LIT_FLOAT(interner::intern::<str>(*rdr.get_interner(),
float_str));
}
}
let maybe_exponent = scan_exponent(rdr);
alt maybe_exponent {
some(s) {
-ret token::LIT_FLOAT(interner::intern[str](*rdr.get_interner(),
+ret token::LIT_FLOAT(interner::intern::<str>(*rdr.get_interner(),
dec_str + s));
}
none. { ret token::LIT_INT(accum_int); }

@@ -350,7 +350,7 @@ fn next_token_inner(rdr: &reader) -> token::token {
}
if str::eq(accum_str, "_") { ret token::UNDERSCORE; }
let is_mod_name = c == ':' && rdr.next() == ':';
-ret token::IDENT(interner::intern[str](*rdr.get_interner(),
+ret token::IDENT(interner::intern::<str>(*rdr.get_interner(),
accum_str), is_mod_name);
}
if is_dec_digit(c) { ret scan_number(c, rdr); }

@@ -510,7 +510,7 @@ fn next_token_inner(rdr: &reader) -> token::token {
}
}
rdr.bump();
-ret token::LIT_STR(interner::intern[str](*rdr.get_interner(),
+ret token::LIT_STR(interner::intern::<str>(*rdr.get_interner(),
accum_str));
}
'-' {

@@ -712,7 +712,7 @@ fn gather_comments_and_literals(cm: &codemap::codemap, path: str,
srdr: io::reader) ->
{cmnts: [cmnt], lits: [lit]} {
let src = str::unsafe_from_bytes(srdr.read_whole_stream());
-let itr = @interner::mk[str](str::hash, str::eq);
+let itr = @interner::mk::<str>(str::hash, str::eq);
let rdr = new_reader(cm, src, codemap::new_filemap(path, 0u, 0u), itr);
let comments: [cmnt] = ~[];
let literals: [lit] = ~[];
@@ -672,7 +672,7 @@ fn parse_seq_lt_gt<T>(sep: option::t<token::token>, f: fn(&parser) -> T,
p: &parser) -> spanned<[T]> {
let lo = p.get_lo_pos();
expect(p, token::LT);
-let result = parse_seq_to_before_gt[T](sep, f, p);
+let result = parse_seq_to_before_gt::<T>(sep, f, p);
let hi = p.get_hi_pos();
expect_gt(p);
ret spanned(lo, hi, result);

@@ -705,7 +705,7 @@ fn parse_seq<T>(bra: token::token, ket: token::token,
p: &parser) -> spanned<[T]> {
let lo = p.get_lo_pos();
expect(p, bra);
-let result = parse_seq_to_before_end[T](ket, sep, f, p);
+let result = parse_seq_to_before_end::<T>(ket, sep, f, p);
let hi = p.get_hi_pos();
p.bump();
ret spanned(lo, hi, result);

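The parser hunks above route the `<`-delimited type-argument list through `parse_seq_lt_gt`, which expects a `token::LT` and then `expect_gt`. The reason expression position needs the extra `::` at all is that a bare `<` there already means less-than. A small modern-Rust sketch of the same ambiguity (illustrative only, not code from this commit):

```rust
fn main() {
    let a = 1;
    let b = 2;

    // `<` in expression position is the comparison operator...
    let cmp = a < b;

    // ...so a generic argument list in an expression needs the leading `::`
    // to keep the parser from reading `Vec<i32>` as `Vec < i32`.
    let v = Vec::<i32>::with_capacity(4);

    assert!(cmp);
    assert!(v.capacity() >= 4);
}
```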
@@ -153,12 +153,12 @@ fn to_str(r: lexer::reader, t: token) -> str {
ret int::to_str(i, 10u) + "_" + ty_mach_to_str(tm);
}
LIT_MACH_FLOAT(tm, s) {
-ret interner::get[str](*r.get_interner(), s) + "_" +
+ret interner::get::<str>(*r.get_interner(), s) + "_" +
ty_mach_to_str(tm);
}
-LIT_FLOAT(s) { ret interner::get[str](*r.get_interner(), s); }
+LIT_FLOAT(s) { ret interner::get::<str>(*r.get_interner(), s); }
LIT_STR(s) { // FIXME: escape.
-ret "\"" + interner::get[str](*r.get_interner(), s) + "\"";
+ret "\"" + interner::get::<str>(*r.get_interner(), s) + "\"";
}
LIT_CHAR(c) {
// FIXME: escape.

@@ -171,7 +171,7 @@ fn to_str(r: lexer::reader, t: token) -> str {

/* Name components */
IDENT(s, _) {
-ret interner::get[str](*r.get_interner(), s);
+ret interner::get::<str>(*r.get_interner(), s);
}
IDX(i) { ret "_" + int::to_str(i, 10u); }
UNDERSCORE. { ret "_"; }
@@ -55,9 +55,9 @@ fn end(s: &ps) { vec::pop(s.boxes); pp::end(s.s); }
fn rust_printer(writer: io::writer) -> ps {
let boxes: [pp::breaks] = ~[];
ret @{s: pp::mk_printer(writer, default_columns),
-cm: none[codemap],
-comments: none[[lexer::cmnt]],
-literals: none[[lexer::lit]],
+cm: none::<codemap>,
+comments: none::<[lexer::cmnt]>,
+literals: none::<[lexer::lit]>,
mutable cur_cmnt: 0u,
mutable cur_lit: 0u,
mutable boxes: boxes,

@@ -231,7 +231,7 @@ fn commasep<IN>(s: &ps, b: breaks, elts: &[IN], op: fn(&ps, &IN) ) {
fn commasep_cmnt<IN>(s: &ps, b: breaks, elts: &[IN], op: fn(&ps, &IN) ,
get_span: fn(&IN) -> codemap::span ) {
box(s, 0u, b);
-let len = vec::len[IN](elts);
+let len = vec::len::<IN>(elts);
let i = 0u;
for elt: IN in elts {
maybe_print_comment(s, get_span(elt).hi);

@@ -327,7 +327,7 @@ fn print_type(s: &ps, ty: &@ast::ty) {
pclose(s);
}
ast::ty_fn(proto, inputs, output, cf, constrs) {
-print_ty_fn(s, proto, none[str], inputs, output, cf, constrs);
+print_ty_fn(s, proto, none::<str>, inputs, output, cf, constrs);
}
ast::ty_obj(methods) {
head(s, "obj");

@@ -486,7 +486,7 @@ fn print_item(s: &ps, item: &@ast::item) {
pclose(s);
}
word(s.s, ";");
-maybe_print_trailing_comment(s, v.span, none[uint]);
+maybe_print_trailing_comment(s, v.span, none::<uint>);
}
bclose(s, item.span);
}

@@ -575,7 +575,7 @@ fn print_stmt(s: &ps, st: &ast::stmt) {
ast::stmt_expr(expr, _) { space_if_not_bol(s); print_expr(s, expr); }
}
if parse::parser::stmt_ends_with_semi(st) { word(s.s, ";"); }
-maybe_print_trailing_comment(s, st.span, none[uint]);
+maybe_print_trailing_comment(s, st.span, none::<uint>);
}

fn print_block(s: &ps, blk: &ast::blk) {
@@ -1436,9 +1436,9 @@ fn next_lit(s: &ps) -> option::t<lexer::lit> {
some(lits) {
if s.cur_lit < vec::len(lits) {
ret some(lits.(s.cur_lit));
-} else { ret none[lexer::lit]; }
+} else { ret none::<lexer::lit>; }
}
-_ { ret none[lexer::lit]; }
+_ { ret none::<lexer::lit>; }
}
}

@@ -1528,9 +1528,9 @@ fn next_comment(s: &ps) -> option::t<lexer::cmnt> {
some(cmnts) {
if s.cur_cmnt < vec::len(cmnts) {
ret some(cmnts.(s.cur_cmnt));
-} else { ret none[lexer::cmnt]; }
+} else { ret none::<lexer::cmnt>; }
}
-_ { ret none[lexer::cmnt]; }
+_ { ret none::<lexer::cmnt>; }
}
}

@@ -1542,7 +1542,7 @@ fn constr_args_to_str<T>(f: &fn(&T) -> str ,
let s = "(";
for a: @ast::sp_constr_arg<T> in args {
if comma { s += ", "; } else { comma = true; }
-s += constr_arg_to_str[T](f, a.node);
+s += constr_arg_to_str::<T>(f, a.node);
}
s += ")";
ret s;

@@ -1611,7 +1611,7 @@ fn proto_to_str(p: &ast::proto) -> str {

fn ty_constr_to_str(c: &@ast::ty_constr) -> str {
ret path_to_str(c.node.path) +
-constr_args_to_str[ast::path](path_to_str, c.node.args);
+constr_args_to_str::<ast::path>(path_to_str, c.node.args);
}
@@ -17,7 +17,7 @@ type interner<T> =
eqer: eqfn<T>};

fn mk<@T>(hasher: hashfn<T>, eqer: eqfn<T>) -> interner<T> {
-let m = map::mk_hashmap[T, uint](hasher, eqer);
+let m = map::mk_hashmap::<T, uint>(hasher, eqer);
ret {map: m, mutable vect: ~[], hasher: hasher, eqer: eqer};
}

@@ -25,7 +25,7 @@ fn intern<@T>(itr: &interner<T>, val: &T) -> uint {
alt itr.map.find(val) {
some(idx) { ret idx; }
none. {
-let new_idx = vec::len[T](itr.vect);
+let new_idx = vec::len::<T>(itr.vect);
itr.map.insert(val, new_idx);
itr.vect += ~[val];
ret new_idx;
@@ -36,20 +36,20 @@ type visitor<E> =
fn(&_fn, &[ty_param], &span, &fn_ident, node_id, &E, &vt<E>) };

fn default_visitor<E>() -> visitor<E> {
-ret @{visit_mod: bind visit_mod[E](_, _, _, _),
-visit_view_item: bind visit_view_item[E](_, _, _),
-visit_native_item: bind visit_native_item[E](_, _, _),
-visit_item: bind visit_item[E](_, _, _),
-visit_local: bind visit_local[E](_, _, _),
-visit_block: bind visit_block[E](_, _, _),
-visit_stmt: bind visit_stmt[E](_, _, _),
-visit_arm: bind visit_arm[E](_, _, _),
-visit_pat: bind visit_pat[E](_, _, _),
-visit_decl: bind visit_decl[E](_, _, _),
-visit_expr: bind visit_expr[E](_, _, _),
-visit_ty: bind visit_ty[E](_, _, _),
-visit_constr: bind visit_constr[E](_, _, _, _, _),
-visit_fn: bind visit_fn[E](_, _, _, _, _, _, _)};
+ret @{visit_mod: bind visit_mod::<E>(_, _, _, _),
+visit_view_item: bind visit_view_item::<E>(_, _, _),
+visit_native_item: bind visit_native_item::<E>(_, _, _),
+visit_item: bind visit_item::<E>(_, _, _),
+visit_local: bind visit_local::<E>(_, _, _),
+visit_block: bind visit_block::<E>(_, _, _),
+visit_stmt: bind visit_stmt::<E>(_, _, _),
+visit_arm: bind visit_arm::<E>(_, _, _),
+visit_pat: bind visit_pat::<E>(_, _, _),
+visit_decl: bind visit_decl::<E>(_, _, _),
+visit_expr: bind visit_expr::<E>(_, _, _),
+visit_ty: bind visit_ty::<E>(_, _, _),
+visit_constr: bind visit_constr::<E>(_, _, _, _, _),
+visit_fn: bind visit_fn::<E>(_, _, _, _, _, _, _)};
}

fn visit_crate<E>(c: &crate, e: &E, v: &vt<E>) {

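In the hunk above, `default_visitor` builds its table by partially applying each generic `visit_*` function at the environment type `E` with `bind`; the `::<E>` now spells out that instantiation at the call site. A loose modern-Rust analogue (hypothetical names, for illustration only, not this commit's API) does the same with closures over an explicitly instantiated generic function:

```rust
// Hypothetical stand-ins for the visitor pieces, for illustration only.
struct Visitor<E> {
    visit_item: Box<dyn Fn(&str, &E)>,
}

fn visit_item_default<E: std::fmt::Debug>(name: &str, env: &E) {
    println!("visiting item {} with env {:?}", name, env);
}

fn default_visitor<E: std::fmt::Debug + 'static>() -> Visitor<E> {
    Visitor {
        // The closure plays the role of `bind visit_item::<E>(_, _)`:
        // it fixes the generic instantiation and leaves the arguments open.
        visit_item: Box::new(|name, env| visit_item_default::<E>(name, env)),
    }
}

fn main() {
    let v = default_visitor::<u32>();
    (v.visit_item)("main", &7);
}
```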
@@ -44,7 +44,7 @@ fn hash_def(d: &ast::def_id) -> uint {
fn new_def_hash<@V>() -> std::map::hashmap<ast::def_id, V> {
let hasher: std::map::hashfn<ast::def_id> = hash_def;
let eqer: std::map::eqfn<ast::def_id> = def_eq;
-ret std::map::mk_hashmap[ast::def_id, V](hasher, eqer);
+ret std::map::mk_hashmap::<ast::def_id, V>(hasher, eqer);
}

fn field_expr(f: &ast::field) -> @ast::expr { ret f.node.expr; }

@@ -61,7 +61,7 @@ fn ty_to_str(cx: &ctxt, typ: &t) -> str {
ret s;
}
fn method_to_str(cx: &ctxt, m: &method) -> str {
-ret fn_to_str(cx, m.proto, some[ast::ident](m.ident), m.inputs,
+ret fn_to_str(cx, m.proto, some::<ast::ident>(m.ident), m.inputs,
m.output, m.cf, m.constrs) + ";";
}
fn field_to_str(cx: &ctxt, f: &field) -> str {

@@ -109,7 +109,7 @@ fn ty_to_str(cx: &ctxt, typ: &t) -> str {
// The user should never see this if the cname is set properly!

s += "<tag#" + int::str(id.crate) + ":" + int::str(id.node) + ">";
-if vec::len[t](tps) > 0u {
+if vec::len::<t>(tps) > 0u {
let strs: [str] = ~[];
for typ: t in tps { strs += ~[ty_to_str(cx, typ)]; }
s += "[" + str::connect(strs, ",") + "]";

@@ -165,7 +165,7 @@ fn constrs_str(constrs: &[@constr]) -> str {
fn ty_constr_to_str<Q>(c: &@ast::spanned<ast::constr_general_<ast::path, Q>>)
-> str {
ret path_to_str(c.node.path) +
-constr_args_to_str[ast::path](path_to_str, c.node.args);
+constr_args_to_str::<ast::path>(path_to_str, c.node.args);
}

// Local Variables: