Refactored ast_map and friends, mainly to have Paths without storing them.
parent 22c34f3c4c
commit a02b10a062
92 changed files with 1987 additions and 2573 deletions
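The core of the refactor, as the mangling hunks below show, is that symbol mangling no longer takes a Session plus an owned ast_map::Path; it accepts any iterator over PathElems, so callers can walk the ast_map without building and cloning path vectors. The sketch below only restates the two signatures visible in this diff (bodies elided), it is not new code:

    // Before: an owned path vector had to be built (and often cloned) per call.
    pub fn mangle(sess: Session, ss: ast_map::Path,
                  hash: Option<&str>, vers: Option<&str>) -> ~str

    // After: any iterator of path elements is accepted, and names are read
    // from the token interner rather than through the Session.
    pub fn mangle<PI: Iterator<PathElem>>(mut path: PI,
                                          hash: Option<&str>,
                                          vers: Option<&str>) -> ~str

Callers now chain extra elements onto the iterator instead of pushing onto a mutable path vector, e.g. mangle(path.chain(Some(gensym_name(flav)).move_iter()), None, None) in mangle_internal_name_by_path_and_seq.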
@@ -74,7 +74,7 @@ pub fn expand_syntax_ext(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) ->
let little = match endian {
None => false,
Some(Ident{ident, span}) => match token::get_ident(ident.name).get() {
Some(Ident{ident, span}) => match token::get_ident(ident).get() {
"little" => true,
"big" => false,
"target" => target_endian_little(cx, sp),
@@ -38,11 +38,12 @@ use serialize::hex::ToHex;
use extra::tempfile::TempDir;
use syntax::abi;
use syntax::ast;
use syntax::ast_map::{PathMod, PathName, PathPrettyName};
use syntax::ast_map::{PathElem, PathElems, PathName};
use syntax::ast_map;
use syntax::attr;
use syntax::attr::AttrMetaMethods;
use syntax::crateid::CrateId;
use syntax::parse::token;

#[deriving(Clone, Eq, TotalOrd, TotalEq)]
pub enum OutputType {
@@ -531,11 +532,8 @@ fn truncated_hash_result(symbol_hasher: &mut Sha256) -> ~str {

// This calculates STH for a symbol, as defined above
pub fn symbol_hash(tcx: ty::ctxt,
symbol_hasher: &mut Sha256,
t: ty::t,
link_meta: &LinkMeta)
-> ~str {
fn symbol_hash(tcx: ty::ctxt, symbol_hasher: &mut Sha256,
t: ty::t, link_meta: &LinkMeta) -> ~str {
// NB: do *not* use abbrevs here as we want the symbol names
// to be independent of one another in the crate.
@@ -551,13 +549,10 @@ pub fn symbol_hash(tcx: ty::ctxt,
hash
}

pub fn get_symbol_hash(ccx: &CrateContext, t: ty::t) -> ~str {
{
let type_hashcodes = ccx.type_hashcodes.borrow();
match type_hashcodes.get().find(&t) {
Some(h) => return h.to_str(),
None => {}
}
fn get_symbol_hash(ccx: &CrateContext, t: ty::t) -> ~str {
match ccx.type_hashcodes.borrow().get().find(&t) {
Some(h) => return h.to_str(),
None => {}
}

let mut type_hashcodes = ccx.type_hashcodes.borrow_mut();
@@ -615,8 +610,9 @@ pub fn sanitize(s: &str) -> ~str {
return result;
}

pub fn mangle(sess: Session, ss: ast_map::Path,
hash: Option<&str>, vers: Option<&str>) -> ~str {
pub fn mangle<PI: Iterator<PathElem>>(mut path: PI,
hash: Option<&str>,
vers: Option<&str>) -> ~str {
// Follow C++ namespace-mangling style, see
// http://en.wikipedia.org/wiki/Name_mangling for more info.
//
@@ -625,7 +621,7 @@ pub fn mangle(sess: Session, ss: ast_map::Path,
// when using unix's linker. Perhaps one day when we just use a linker from LLVM
// we won't need to do this name mangling. The problem with name mangling is
// that it seriously limits the available characters. For example we can't
// have things like @T or ~[T] in symbol names when one would theoretically
// have things like &T or ~[T] in symbol names when one would theoretically
// want them for things like impls of traits on that type.
//
// To be able to work on all platforms and get *some* reasonable output, we
@@ -633,41 +629,19 @@ pub fn mangle(sess: Session, ss: ast_map::Path,
let mut n = ~"_ZN"; // _Z == Begin name-sequence, N == nested

let push = |n: &mut ~str, s: &str| {
fn push(n: &mut ~str, s: &str) {
let sani = sanitize(s);
n.push_str(format!("{}{}", sani.len(), sani));
};
}

// First, connect each component with <len, name> pairs.
for s in ss.iter() {
match *s {
PathName(s) | PathMod(s) | PathPrettyName(s, _) => {
push(&mut n, sess.str_of(s))
}
}
for e in path {
push(&mut n, token::get_name(e.name()).get().as_slice())
}

// next, if any identifiers are "pretty" and need extra information tacked
// on, then use the hash to generate two unique characters. For now
// hopefully 2 characters is enough to avoid collisions.
static EXTRA_CHARS: &'static str =
"abcdefghijklmnopqrstuvwxyz\
ABCDEFGHIJKLMNOPQRSTUVWXYZ\
0123456789";
let mut hash = match hash { Some(s) => s.to_owned(), None => ~"" };
for s in ss.iter() {
match *s {
PathPrettyName(_, extra) => {
let hi = (extra >> 32) as u32 as uint;
let lo = extra as u32 as uint;
hash.push_char(EXTRA_CHARS[hi % EXTRA_CHARS.len()] as char);
hash.push_char(EXTRA_CHARS[lo % EXTRA_CHARS.len()] as char);
}
_ => {}
}
}
if hash.len() > 0 {
push(&mut n, hash);
match hash {
Some(s) => push(&mut n, s),
None => {}
}
match vers {
Some(s) => push(&mut n, s),
@@ -678,10 +652,7 @@ pub fn mangle(sess: Session, ss: ast_map::Path,
n
}

pub fn exported_name(sess: Session,
path: ast_map::Path,
hash: &str,
vers: &str) -> ~str {
pub fn exported_name(path: PathElems, hash: &str, vers: &str) -> ~str {
// The version will get mangled to have a leading '_', but it makes more
// sense to lead with a 'v' b/c this is a version...
let vers = if vers.len() > 0 && !char::is_XID_start(vers.char_at(0)) {
@ -690,53 +661,56 @@ pub fn exported_name(sess: Session,
|
|||
vers.to_owned()
|
||||
};
|
||||
|
||||
mangle(sess, path, Some(hash), Some(vers.as_slice()))
|
||||
mangle(path, Some(hash), Some(vers.as_slice()))
|
||||
}
|
||||
|
||||
pub fn mangle_exported_name(ccx: &CrateContext,
|
||||
path: ast_map::Path,
|
||||
t: ty::t) -> ~str {
|
||||
let hash = get_symbol_hash(ccx, t);
|
||||
return exported_name(ccx.sess, path,
|
||||
hash,
|
||||
ccx.link_meta.crateid.version_or_default());
|
||||
pub fn mangle_exported_name(ccx: &CrateContext, path: PathElems,
|
||||
t: ty::t, id: ast::NodeId) -> ~str {
|
||||
let mut hash = get_symbol_hash(ccx, t);
|
||||
|
||||
// Paths can be completely identical for different nodes,
|
||||
// e.g. `fn foo() { { fn a() {} } { fn a() {} } }`, so we
|
||||
// generate unique characters from the node id. For now
|
||||
// hopefully 3 characters is enough to avoid collisions.
|
||||
static EXTRA_CHARS: &'static str =
|
||||
"abcdefghijklmnopqrstuvwxyz\
|
||||
ABCDEFGHIJKLMNOPQRSTUVWXYZ\
|
||||
0123456789";
|
||||
let id = id as uint;
|
||||
let extra1 = id % EXTRA_CHARS.len();
|
||||
let id = id / EXTRA_CHARS.len();
|
||||
let extra2 = id % EXTRA_CHARS.len();
|
||||
let id = id / EXTRA_CHARS.len();
|
||||
let extra3 = id % EXTRA_CHARS.len();
|
||||
hash.push_char(EXTRA_CHARS[extra1] as char);
|
||||
hash.push_char(EXTRA_CHARS[extra2] as char);
|
||||
hash.push_char(EXTRA_CHARS[extra3] as char);
|
||||
|
||||
exported_name(path, hash, ccx.link_meta.crateid.version_or_default())
|
||||
}
|
||||
|
||||
pub fn mangle_internal_name_by_type_only(ccx: &CrateContext,
|
||||
t: ty::t,
|
||||
name: &str) -> ~str {
|
||||
let s = ppaux::ty_to_short_str(ccx.tcx, t);
|
||||
let path = [PathName(token::intern(name)),
|
||||
PathName(token::intern(s))];
|
||||
let hash = get_symbol_hash(ccx, t);
|
||||
return mangle(ccx.sess,
|
||||
~[PathName(ccx.sess.ident_of(name)),
|
||||
PathName(ccx.sess.ident_of(s))],
|
||||
Some(hash.as_slice()),
|
||||
None);
|
||||
mangle(ast_map::Values(path.iter()), Some(hash.as_slice()), None)
|
||||
}
|
||||
|
||||
pub fn mangle_internal_name_by_type_and_seq(ccx: &CrateContext,
|
||||
t: ty::t,
|
||||
name: &str) -> ~str {
|
||||
let s = ppaux::ty_to_str(ccx.tcx, t);
|
||||
let path = [PathName(token::intern(s)),
|
||||
gensym_name(name)];
|
||||
let hash = get_symbol_hash(ccx, t);
|
||||
let (_, name) = gensym_name(name);
|
||||
return mangle(ccx.sess,
|
||||
~[PathName(ccx.sess.ident_of(s)), name],
|
||||
Some(hash.as_slice()),
|
||||
None);
|
||||
mangle(ast_map::Values(path.iter()), Some(hash.as_slice()), None)
|
||||
}
|
||||
|
||||
pub fn mangle_internal_name_by_path_and_seq(ccx: &CrateContext,
|
||||
mut path: ast_map::Path,
|
||||
flav: &str) -> ~str {
|
||||
let (_, name) = gensym_name(flav);
|
||||
path.push(name);
|
||||
mangle(ccx.sess, path, None, None)
|
||||
}
|
||||
|
||||
pub fn mangle_internal_name_by_path(ccx: &CrateContext,
|
||||
path: ast_map::Path) -> ~str {
|
||||
mangle(ccx.sess, path, None, None)
|
||||
pub fn mangle_internal_name_by_path_and_seq(path: PathElems, flav: &str) -> ~str {
|
||||
mangle(path.chain(Some(gensym_name(flav)).move_iter()), None, None)
|
||||
}
|
||||
|
||||
pub fn output_lib_filename(lm: &LinkMeta) -> ~str {
|
||||
|
|
|
@ -264,7 +264,7 @@ pub fn phase_3_run_analysis_passes(sess: Session,
|
|||
|_| middle::resolve_lifetime::krate(sess, krate));
|
||||
|
||||
time(time_passes, "looking for entry point", (),
|
||||
|_| middle::entry::find_entry_point(sess, krate, ast_map));
|
||||
|_| middle::entry::find_entry_point(sess, krate, &ast_map));
|
||||
|
||||
sess.macro_registrar_fn.with_mut(|r| *r =
|
||||
time(time_passes, "looking for macro registrar", (), |_|
|
||||
|
@ -288,7 +288,7 @@ pub fn phase_3_run_analysis_passes(sess: Session,
|
|||
middle::const_eval::process_crate(krate, ty_cx));
|
||||
|
||||
time(time_passes, "const checking", (), |_|
|
||||
middle::check_const::check_crate(sess, krate, ast_map, def_map,
|
||||
middle::check_const::check_crate(sess, krate, def_map,
|
||||
method_map, ty_cx));
|
||||
|
||||
let maps = (external_exports, last_private_map);
|
||||
|
@ -638,7 +638,6 @@ pub fn pretty_print_input(sess: Session,
|
|||
let mut rdr = MemReader::new(src.as_bytes().to_owned());
|
||||
let stdout = io::stdout();
|
||||
pprust::print_crate(sess.codemap,
|
||||
token::get_ident_interner(),
|
||||
sess.span_diagnostic,
|
||||
&krate,
|
||||
source_name(input),
|
||||
|
@ -1135,7 +1134,6 @@ pub fn early_error(msg: &str) -> ! {
|
|||
pub fn list_metadata(sess: Session, path: &Path,
|
||||
out: &mut io::Writer) -> io::IoResult<()> {
|
||||
metadata::loader::list_file_metadata(
|
||||
token::get_ident_interner(),
|
||||
session::sess_os_to_meta_os(sess.targ_cfg.os), path, out)
|
||||
}
|
||||
|
||||
|
|
|
@ -22,9 +22,7 @@ use syntax::ast::{IntTy, UintTy};
|
|||
use syntax::codemap::Span;
|
||||
use syntax::diagnostic;
|
||||
use syntax::parse::ParseSess;
|
||||
use syntax::{ast, codemap};
|
||||
use syntax::abi;
|
||||
use syntax::parse::token;
|
||||
use syntax::{abi, ast, codemap};
|
||||
use syntax;
|
||||
|
||||
use std::cell::{Cell, RefCell};
|
||||
|
@ -301,23 +299,6 @@ impl Session_ {
|
|||
pub fn show_span(&self) -> bool {
|
||||
self.debugging_opt(SHOW_SPAN)
|
||||
}
|
||||
|
||||
// DEPRECATED. This function results in a lot of allocations when they
|
||||
// are not necessary.
|
||||
pub fn str_of(&self, id: ast::Ident) -> ~str {
|
||||
let string = token::get_ident(id.name);
|
||||
string.get().to_str()
|
||||
}
|
||||
|
||||
// pointless function, now...
|
||||
pub fn ident_of(&self, st: &str) -> ast::Ident {
|
||||
token::str_to_ident(st)
|
||||
}
|
||||
|
||||
// pointless function, now...
|
||||
pub fn intr(&self) -> @syntax::parse::token::IdentInterner {
|
||||
token::get_ident_interner()
|
||||
}
|
||||
}
|
||||
|
||||
/// Some reasonable defaults
|
||||
|
|
|
@ -25,5 +25,5 @@ impl ast_map::FoldOps for NodeIdAssigner {
|
|||
}
|
||||
|
||||
pub fn assign_node_ids_and_map(sess: Session, krate: ast::Crate) -> (ast::Crate, ast_map::Map) {
|
||||
ast_map::map_crate(sess.diagnostic(), krate, NodeIdAssigner { sess: sess })
|
||||
ast_map::map_crate(krate, NodeIdAssigner { sess: sess })
|
||||
}
|
||||
|
|
|
@ -99,10 +99,7 @@ impl Context {
|
|||
|
||||
impl Visitor<()> for Context {
|
||||
fn visit_ident(&mut self, sp: Span, id: ast::Ident, _: ()) {
|
||||
let string = token::get_ident(id.name);
|
||||
let s = string.get();
|
||||
|
||||
if !s.is_ascii() {
|
||||
if !token::get_ident(id).get().is_ascii() {
|
||||
self.gate_feature("non_ascii_idents", sp,
|
||||
"non-ascii idents are not fully supported.");
|
||||
}
|
||||
|
@ -196,29 +193,29 @@ impl Visitor<()> for Context {
|
|||
let msg = " is not stable enough for use and are subject to change";
|
||||
|
||||
|
||||
if id == self.sess.ident_of("macro_rules") {
|
||||
if id == token::str_to_ident("macro_rules") {
|
||||
self.gate_feature("macro_rules", path.span, "macro definitions are \
|
||||
not stable enough for use and are subject to change");
|
||||
}
|
||||
|
||||
else if id == self.sess.ident_of("asm") {
|
||||
else if id == token::str_to_ident("asm") {
|
||||
self.gate_feature("asm", path.span, "inline assembly is not \
|
||||
stable enough for use and is subject to change");
|
||||
}
|
||||
|
||||
else if id == self.sess.ident_of("log_syntax") {
|
||||
else if id == token::str_to_ident("log_syntax") {
|
||||
self.gate_feature("log_syntax", path.span, "`log_syntax!` is not \
|
||||
stable enough for use and is subject to change");
|
||||
}
|
||||
|
||||
else if id == self.sess.ident_of("trace_macros") {
|
||||
else if id == token::str_to_ident("trace_macros") {
|
||||
self.gate_feature("trace_macros", path.span, "`trace_macros` is not \
|
||||
stable enough for use and is subject to change");
|
||||
}
|
||||
|
||||
else {
|
||||
for "e in quotes.iter() {
|
||||
if id == self.sess.ident_of(quote) {
|
||||
if id == token::str_to_ident(quote) {
|
||||
self.gate_feature("quote", path.span, quote + msg);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -73,7 +73,7 @@ pub fn with_version(krate: &str) -> Option<(InternedString, ast::StrStyle)> {
|
|||
impl fold::Folder for StandardLibraryInjector {
|
||||
fn fold_crate(&mut self, krate: ast::Crate) -> ast::Crate {
|
||||
let mut vis = ~[ast::ViewItem {
|
||||
node: ast::ViewItemExternMod(self.sess.ident_of("std"),
|
||||
node: ast::ViewItemExternMod(token::str_to_ident("std"),
|
||||
with_version("std"),
|
||||
ast::DUMMY_NODE_ID),
|
||||
attrs: ~[
|
||||
|
@ -90,7 +90,7 @@ impl fold::Folder for StandardLibraryInjector {
|
|||
|
||||
if use_uv(&krate) && !self.sess.building_library.get() {
|
||||
vis.push(ast::ViewItem {
|
||||
node: ast::ViewItemExternMod(self.sess.ident_of("green"),
|
||||
node: ast::ViewItemExternMod(token::str_to_ident("green"),
|
||||
with_version("green"),
|
||||
ast::DUMMY_NODE_ID),
|
||||
attrs: ~[],
|
||||
|
@ -98,7 +98,7 @@ impl fold::Folder for StandardLibraryInjector {
|
|||
span: DUMMY_SP
|
||||
});
|
||||
vis.push(ast::ViewItem {
|
||||
node: ast::ViewItemExternMod(self.sess.ident_of("rustuv"),
|
||||
node: ast::ViewItemExternMod(token::str_to_ident("rustuv"),
|
||||
with_version("rustuv"),
|
||||
ast::DUMMY_NODE_ID),
|
||||
attrs: ~[],
|
||||
|
@ -163,12 +163,12 @@ impl fold::Folder for PreludeInjector {
|
|||
global: false,
|
||||
segments: ~[
|
||||
ast::PathSegment {
|
||||
identifier: self.sess.ident_of("std"),
|
||||
identifier: token::str_to_ident("std"),
|
||||
lifetimes: opt_vec::Empty,
|
||||
types: opt_vec::Empty,
|
||||
},
|
||||
ast::PathSegment {
|
||||
identifier: self.sess.ident_of("prelude"),
|
||||
identifier: token::str_to_ident("prelude"),
|
||||
lifetimes: opt_vec::Empty,
|
||||
types: opt_vec::Empty,
|
||||
},
|
||||
|
|
|
@ -289,7 +289,7 @@ mod __test {
|
|||
*/
|
||||
|
||||
fn mk_std(cx: &TestCtxt) -> ast::ViewItem {
|
||||
let id_extra = cx.sess.ident_of("extra");
|
||||
let id_extra = token::str_to_ident("extra");
|
||||
let vi = if cx.is_extra {
|
||||
ast::ViewItemUse(
|
||||
~[@nospan(ast::ViewPathSimple(id_extra,
|
||||
|
@ -337,7 +337,7 @@ fn mk_test_module(cx: &TestCtxt) -> @ast::Item {
|
|||
attr::mk_attr(attr::mk_word_item(resolve_unexported_str));
|
||||
|
||||
let item = ast::Item {
|
||||
ident: cx.sess.ident_of("__test"),
|
||||
ident: token::str_to_ident("__test"),
|
||||
attrs: ~[resolve_unexported_attr],
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
node: item_,
|
||||
|
@ -345,8 +345,7 @@ fn mk_test_module(cx: &TestCtxt) -> @ast::Item {
|
|||
span: DUMMY_SP,
|
||||
};
|
||||
|
||||
debug!("Synthetic test module:\n{}\n",
|
||||
pprust::item_to_str(&item, cx.sess.intr()));
|
||||
debug!("Synthetic test module:\n{}\n", pprust::item_to_str(&item));
|
||||
|
||||
return @item;
|
||||
}
|
||||
|
|
|
@ -13,88 +13,88 @@ use syntax::crateid::CrateId;
|
|||
|
||||
// EBML enum definitions and utils shared by the encoder and decoder
|
||||
|
||||
pub static tag_items: uint = 0x02u;
|
||||
pub static tag_items: uint = 0x00;
|
||||
|
||||
pub static tag_paths_data_name: uint = 0x04u;
|
||||
pub static tag_paths_data_name: uint = 0x01;
|
||||
|
||||
pub static tag_def_id: uint = 0x07u;
|
||||
pub static tag_def_id: uint = 0x02;
|
||||
|
||||
pub static tag_items_data: uint = 0x08u;
|
||||
pub static tag_items_data: uint = 0x03;
|
||||
|
||||
pub static tag_items_data_item: uint = 0x09u;
|
||||
pub static tag_items_data_item: uint = 0x04;
|
||||
|
||||
pub static tag_items_data_item_family: uint = 0x0au;
|
||||
pub static tag_items_data_item_family: uint = 0x05;
|
||||
|
||||
pub static tag_items_data_item_ty_param_bounds: uint = 0x0bu;
|
||||
pub static tag_items_data_item_ty_param_bounds: uint = 0x06;
|
||||
|
||||
pub static tag_items_data_item_type: uint = 0x0cu;
|
||||
pub static tag_items_data_item_type: uint = 0x07;
|
||||
|
||||
pub static tag_items_data_item_symbol: uint = 0x0du;
|
||||
pub static tag_items_data_item_symbol: uint = 0x08;
|
||||
|
||||
pub static tag_items_data_item_variant: uint = 0x0eu;
|
||||
pub static tag_items_data_item_variant: uint = 0x09;
|
||||
|
||||
pub static tag_items_data_parent_item: uint = 0x0fu;
|
||||
pub static tag_items_data_parent_item: uint = 0x0a;
|
||||
|
||||
pub static tag_items_data_item_is_tuple_struct_ctor: uint = 0x10u;
|
||||
pub static tag_items_data_item_is_tuple_struct_ctor: uint = 0x0b;
|
||||
|
||||
pub static tag_index: uint = 0x11u;
|
||||
pub static tag_index: uint = 0x0c;
|
||||
|
||||
pub static tag_index_buckets: uint = 0x12u;
|
||||
pub static tag_index_buckets: uint = 0x0d;
|
||||
|
||||
pub static tag_index_buckets_bucket: uint = 0x13u;
|
||||
pub static tag_index_buckets_bucket: uint = 0x0e;
|
||||
|
||||
pub static tag_index_buckets_bucket_elt: uint = 0x14u;
|
||||
pub static tag_index_buckets_bucket_elt: uint = 0x0f;
|
||||
|
||||
pub static tag_index_table: uint = 0x15u;
|
||||
pub static tag_index_table: uint = 0x10;
|
||||
|
||||
pub static tag_meta_item_name_value: uint = 0x18u;
|
||||
pub static tag_meta_item_name_value: uint = 0x11;
|
||||
|
||||
pub static tag_meta_item_name: uint = 0x19u;
|
||||
pub static tag_meta_item_name: uint = 0x12;
|
||||
|
||||
pub static tag_meta_item_value: uint = 0x20u;
|
||||
pub static tag_meta_item_value: uint = 0x13;
|
||||
|
||||
pub static tag_attributes: uint = 0x21u;
|
||||
pub static tag_attributes: uint = 0x14;
|
||||
|
||||
pub static tag_attribute: uint = 0x22u;
|
||||
pub static tag_attribute: uint = 0x15;
|
||||
|
||||
pub static tag_meta_item_word: uint = 0x23u;
|
||||
pub static tag_meta_item_word: uint = 0x16;
|
||||
|
||||
pub static tag_meta_item_list: uint = 0x24u;
|
||||
pub static tag_meta_item_list: uint = 0x17;
|
||||
|
||||
// The list of crates that this crate depends on
|
||||
pub static tag_crate_deps: uint = 0x25u;
|
||||
pub static tag_crate_deps: uint = 0x18;
|
||||
|
||||
// A single crate dependency
|
||||
pub static tag_crate_dep: uint = 0x26u;
|
||||
pub static tag_crate_dep: uint = 0x19;
|
||||
|
||||
pub static tag_crate_hash: uint = 0x28u;
|
||||
pub static tag_crate_hash: uint = 0x1a;
|
||||
|
||||
pub static tag_parent_item: uint = 0x29u;
|
||||
pub static tag_parent_item: uint = 0x1b;
|
||||
|
||||
pub static tag_crate_dep_name: uint = 0x2au;
|
||||
pub static tag_crate_dep_hash: uint = 0x2bu;
|
||||
pub static tag_crate_dep_vers: uint = 0x2cu;
|
||||
pub static tag_crate_dep_name: uint = 0x1c;
|
||||
pub static tag_crate_dep_hash: uint = 0x1d;
|
||||
pub static tag_crate_dep_vers: uint = 0x1e;
|
||||
|
||||
pub static tag_mod_impl: uint = 0x30u;
|
||||
pub static tag_mod_impl: uint = 0x1f;
|
||||
|
||||
pub static tag_item_trait_method: uint = 0x31u;
|
||||
pub static tag_item_trait_method: uint = 0x20;
|
||||
|
||||
pub static tag_item_trait_ref: uint = 0x32u;
|
||||
pub static tag_item_super_trait_ref: uint = 0x33u;
|
||||
pub static tag_item_trait_ref: uint = 0x21;
|
||||
pub static tag_item_super_trait_ref: uint = 0x22;
|
||||
|
||||
// discriminator value for variants
|
||||
pub static tag_disr_val: uint = 0x34u;
|
||||
pub static tag_disr_val: uint = 0x23;
|
||||
|
||||
// used to encode ast_map::Path and ast_map::PathElem
|
||||
pub static tag_path: uint = 0x40u;
|
||||
pub static tag_path_len: uint = 0x41u;
|
||||
pub static tag_path_elem_mod: uint = 0x42u;
|
||||
pub static tag_path_elem_name: uint = 0x43u;
|
||||
pub static tag_item_field: uint = 0x44u;
|
||||
pub static tag_struct_mut: uint = 0x45u;
|
||||
// used to encode ast_map::PathElem
|
||||
pub static tag_path: uint = 0x24;
|
||||
pub static tag_path_len: uint = 0x25;
|
||||
pub static tag_path_elem_mod: uint = 0x26;
|
||||
pub static tag_path_elem_name: uint = 0x27;
|
||||
pub static tag_item_field: uint = 0x28;
|
||||
pub static tag_struct_mut: uint = 0x29;
|
||||
|
||||
pub static tag_item_variances: uint = 0x46;
|
||||
pub static tag_mod_impl_trait: uint = 0x47u;
|
||||
pub static tag_item_variances: uint = 0x2a;
|
||||
pub static tag_mod_impl_trait: uint = 0x2b;
|
||||
/*
|
||||
trait items contain tag_item_trait_method elements,
|
||||
impl items contain tag_item_impl_method elements, and classes
|
||||
|
@ -103,47 +103,47 @@ pub static tag_mod_impl_trait: uint = 0x47u;
|
|||
both, tag_item_trait_method and tag_item_impl_method have to be two
|
||||
different tags.
|
||||
*/
|
||||
pub static tag_item_impl_method: uint = 0x48u;
|
||||
pub static tag_item_trait_method_explicit_self: uint = 0x4b;
|
||||
pub static tag_item_trait_method_self_ty_region: uint = 0x4c;
|
||||
pub static tag_item_impl_method: uint = 0x2c;
|
||||
pub static tag_item_trait_method_explicit_self: uint = 0x2d;
|
||||
pub static tag_item_trait_method_self_ty_region: uint = 0x2e;
|
||||
|
||||
|
||||
// Reexports are found within module tags. Each reexport contains def_ids
|
||||
// and names.
|
||||
pub static tag_items_data_item_reexport: uint = 0x4d;
|
||||
pub static tag_items_data_item_reexport_def_id: uint = 0x4e;
|
||||
pub static tag_items_data_item_reexport_name: uint = 0x4f;
|
||||
pub static tag_items_data_item_reexport: uint = 0x2f;
|
||||
pub static tag_items_data_item_reexport_def_id: uint = 0x30;
|
||||
pub static tag_items_data_item_reexport_name: uint = 0x31;
|
||||
|
||||
// used to encode crate_ctxt side tables
|
||||
#[deriving(Eq)]
|
||||
#[repr(uint)]
|
||||
pub enum astencode_tag { // Reserves 0x50 -- 0x6f
|
||||
tag_ast = 0x50,
|
||||
pub enum astencode_tag { // Reserves 0x32 -- 0x45
|
||||
tag_ast = 0x32,
|
||||
|
||||
tag_tree = 0x51,
|
||||
tag_tree = 0x33,
|
||||
|
||||
tag_id_range = 0x52,
|
||||
tag_id_range = 0x34,
|
||||
|
||||
tag_table = 0x53,
|
||||
tag_table_id = 0x54,
|
||||
tag_table_val = 0x55,
|
||||
tag_table_def = 0x56,
|
||||
tag_table_node_type = 0x57,
|
||||
tag_table_node_type_subst = 0x58,
|
||||
tag_table_freevars = 0x59,
|
||||
tag_table_tcache = 0x5a,
|
||||
tag_table_param_defs = 0x5b,
|
||||
tag_table_mutbl = 0x5d,
|
||||
tag_table_last_use = 0x5e,
|
||||
tag_table_spill = 0x5f,
|
||||
tag_table_method_map = 0x60,
|
||||
tag_table_vtable_map = 0x61,
|
||||
tag_table_adjustments = 0x62,
|
||||
tag_table_moves_map = 0x63,
|
||||
tag_table_capture_map = 0x64
|
||||
tag_table = 0x35,
|
||||
tag_table_id = 0x36,
|
||||
tag_table_val = 0x37,
|
||||
tag_table_def = 0x38,
|
||||
tag_table_node_type = 0x39,
|
||||
tag_table_node_type_subst = 0x3a,
|
||||
tag_table_freevars = 0x3b,
|
||||
tag_table_tcache = 0x3c,
|
||||
tag_table_param_defs = 0x3d,
|
||||
tag_table_mutbl = 0x3e,
|
||||
tag_table_last_use = 0x3f,
|
||||
tag_table_spill = 0x40,
|
||||
tag_table_method_map = 0x41,
|
||||
tag_table_vtable_map = 0x42,
|
||||
tag_table_adjustments = 0x43,
|
||||
tag_table_moves_map = 0x44,
|
||||
tag_table_capture_map = 0x45
|
||||
}
|
||||
static first_astencode_tag : uint = tag_ast as uint;
|
||||
static last_astencode_tag : uint = tag_table_capture_map as uint;
|
||||
static first_astencode_tag: uint = tag_ast as uint;
|
||||
static last_astencode_tag: uint = tag_table_capture_map as uint;
|
||||
impl astencode_tag {
|
||||
pub fn from_uint(value : uint) -> Option<astencode_tag> {
|
||||
let is_a_tag = first_astencode_tag <= value && value <= last_astencode_tag;
|
||||
|
@ -153,9 +153,9 @@ impl astencode_tag {
|
|||
}
|
||||
}
|
||||
|
||||
pub static tag_item_trait_method_sort: uint = 0x70;
|
||||
pub static tag_item_trait_method_sort: uint = 0x46;
|
||||
|
||||
pub static tag_item_impl_type_basename: uint = 0x71;
|
||||
pub static tag_item_impl_type_basename: uint = 0x47;
|
||||
|
||||
// Language items are a top-level directory (for speed). Hierarchy:
|
||||
//
|
||||
|
@ -164,50 +164,46 @@ pub static tag_item_impl_type_basename: uint = 0x71;
|
|||
// - tag_lang_items_item_id: u32
|
||||
// - tag_lang_items_item_node_id: u32
|
||||
|
||||
pub static tag_lang_items: uint = 0x72;
|
||||
pub static tag_lang_items_item: uint = 0x73;
|
||||
pub static tag_lang_items_item_id: uint = 0x74;
|
||||
pub static tag_lang_items_item_node_id: uint = 0x75;
|
||||
pub static tag_lang_items: uint = 0x48;
|
||||
pub static tag_lang_items_item: uint = 0x49;
|
||||
pub static tag_lang_items_item_id: uint = 0x4a;
|
||||
pub static tag_lang_items_item_node_id: uint = 0x4b;
|
||||
|
||||
pub static tag_item_unnamed_field: uint = 0x76;
|
||||
pub static tag_items_data_item_struct_ctor: uint = 0x77;
|
||||
pub static tag_items_data_item_visibility: uint = 0x78;
|
||||
pub static tag_item_unnamed_field: uint = 0x4c;
|
||||
pub static tag_items_data_item_struct_ctor: uint = 0x4d;
|
||||
pub static tag_items_data_item_visibility: uint = 0x4e;
|
||||
|
||||
pub static tag_link_args: uint = 0x79;
|
||||
pub static tag_link_args_arg: uint = 0x7a;
|
||||
pub static tag_link_args: uint = 0x4f;
|
||||
pub static tag_link_args_arg: uint = 0x50;
|
||||
|
||||
pub static tag_item_method_tps: uint = 0x7b;
|
||||
pub static tag_item_method_fty: uint = 0x7c;
|
||||
pub static tag_item_method_tps: uint = 0x51;
|
||||
pub static tag_item_method_fty: uint = 0x52;
|
||||
|
||||
pub static tag_mod_child: uint = 0x7d;
|
||||
pub static tag_misc_info: uint = 0x7e;
|
||||
pub static tag_misc_info_crate_items: uint = 0x7f;
|
||||
pub static tag_mod_child: uint = 0x53;
|
||||
pub static tag_misc_info: uint = 0x54;
|
||||
pub static tag_misc_info_crate_items: uint = 0x55;
|
||||
|
||||
pub static tag_item_method_provided_source: uint = 0x80;
|
||||
pub static tag_item_impl_vtables: uint = 0x81;
|
||||
pub static tag_item_method_provided_source: uint = 0x56;
|
||||
pub static tag_item_impl_vtables: uint = 0x57;
|
||||
|
||||
pub static tag_impls: uint = 0x82;
|
||||
pub static tag_impls_impl: uint = 0x83;
|
||||
pub static tag_impls: uint = 0x58;
|
||||
pub static tag_impls_impl: uint = 0x59;
|
||||
|
||||
pub static tag_items_data_item_inherent_impl: uint = 0x84;
|
||||
pub static tag_items_data_item_extension_impl: uint = 0x85;
|
||||
pub static tag_items_data_item_inherent_impl: uint = 0x5a;
|
||||
pub static tag_items_data_item_extension_impl: uint = 0x5b;
|
||||
|
||||
pub static tag_path_elem_pretty_name: uint = 0x86;
|
||||
pub static tag_path_elem_pretty_name_ident: uint = 0x87;
|
||||
pub static tag_path_elem_pretty_name_extra: uint = 0x88;
|
||||
pub static tag_region_param_def: uint = 0x5c;
|
||||
pub static tag_region_param_def_ident: uint = 0x5d;
|
||||
pub static tag_region_param_def_def_id: uint = 0x5e;
|
||||
|
||||
pub static tag_region_param_def: uint = 0x100;
|
||||
pub static tag_region_param_def_ident: uint = 0x101;
|
||||
pub static tag_region_param_def_def_id: uint = 0x102;
|
||||
pub static tag_native_libraries: uint = 0x5f;
|
||||
pub static tag_native_libraries_lib: uint = 0x60;
|
||||
pub static tag_native_libraries_name: uint = 0x61;
|
||||
pub static tag_native_libraries_kind: uint = 0x62;
|
||||
|
||||
pub static tag_native_libraries: uint = 0x103;
|
||||
pub static tag_native_libraries_lib: uint = 0x104;
|
||||
pub static tag_native_libraries_name: uint = 0x105;
|
||||
pub static tag_native_libraries_kind: uint = 0x106;
|
||||
|
||||
pub static tag_macro_registrar_fn: uint = 0x110;
|
||||
pub static tag_exported_macros: uint = 0x111;
|
||||
pub static tag_macro_def: uint = 0x112;
|
||||
pub static tag_macro_registrar_fn: uint = 0x63;
|
||||
pub static tag_exported_macros: uint = 0x64;
|
||||
pub static tag_macro_def: uint = 0x65;
|
||||
|
||||
#[deriving(Clone)]
|
||||
pub struct LinkMeta {
|
||||
|
|
|
@ -167,10 +167,10 @@ struct CrateInfo {
|
|||
|
||||
fn extract_crate_info(i: &ast::ViewItem) -> Option<CrateInfo> {
|
||||
match i.node {
|
||||
ast::ViewItemExternMod(ref ident, ref path_opt, id) => {
|
||||
let ident = token::get_ident(ident.name);
|
||||
ast::ViewItemExternMod(ident, ref path_opt, id) => {
|
||||
let ident = token::get_ident(ident);
|
||||
debug!("resolving extern mod stmt. ident: {:?} path_opt: {:?}",
|
||||
ident.get(), path_opt);
|
||||
ident, path_opt);
|
||||
let (name, version) = match *path_opt {
|
||||
Some((ref path_str, _)) => {
|
||||
let crateid: Option<CrateId> = from_str(path_str.get());
|
||||
|
@ -282,16 +282,16 @@ fn visit_item(e: &Env, i: &ast::Item) {
|
|||
}
|
||||
}
|
||||
|
||||
fn existing_match(e: &Env, name: ~str, version: ~str, hash: &str) -> Option<ast::CrateNum> {
|
||||
fn existing_match(e: &Env, name: &str, version: &str, hash: &str) -> Option<ast::CrateNum> {
|
||||
let crate_cache = e.crate_cache.borrow();
|
||||
for c in crate_cache.get().iter() {
|
||||
let crateid_version = match c.crateid.version {
|
||||
None => ~"0.0",
|
||||
Some(ref ver) => ver.to_str(),
|
||||
};
|
||||
if (name.is_empty() || c.crateid.name == name) &&
|
||||
(version.is_empty() || crateid_version == version) &&
|
||||
(hash.is_empty() || c.hash.as_slice() == hash) {
|
||||
if (name.is_empty() || name == c.crateid.name) &&
|
||||
(version.is_empty() || version == crateid_version) &&
|
||||
(hash.is_empty() || hash == c.hash) {
|
||||
return Some(c.cnum);
|
||||
}
|
||||
}
|
||||
|
@ -305,13 +305,13 @@ fn resolve_crate(e: &mut Env,
|
|||
hash: ~str,
|
||||
span: Span)
|
||||
-> ast::CrateNum {
|
||||
match existing_match(e, name.clone(), version.clone(), hash.clone()) {
|
||||
match existing_match(e, name, version, hash) {
|
||||
None => {
|
||||
let load_ctxt = loader::Context {
|
||||
sess: e.sess,
|
||||
span: span,
|
||||
ident: ident,
|
||||
name: name.clone(),
|
||||
name: name,
|
||||
version: version,
|
||||
hash: hash,
|
||||
os: e.os,
|
||||
|
@ -342,7 +342,7 @@ fn resolve_crate(e: &mut Env,
|
|||
let cnum_map = resolve_crate_deps(e, metadata.as_slice());
|
||||
|
||||
let cmeta = @cstore::crate_metadata {
|
||||
name: name,
|
||||
name: load_ctxt.name,
|
||||
data: metadata,
|
||||
cnum_map: cnum_map,
|
||||
cnum: cnum
|
||||
|
@ -372,13 +372,13 @@ fn resolve_crate_deps(e: &mut Env, cdata: &[u8]) -> cstore::cnum_map {
|
|||
let r = decoder::get_crate_deps(cdata);
|
||||
for dep in r.iter() {
|
||||
let extrn_cnum = dep.cnum;
|
||||
let cname_str = token::get_ident(dep.name.name);
|
||||
let cname_str = token::get_ident(dep.name);
|
||||
debug!("resolving dep crate {} ver: {} hash: {}",
|
||||
cname_str, dep.vers, dep.hash);
|
||||
match existing_match(e,
|
||||
cname_str.get().to_str(),
|
||||
dep.vers.clone(),
|
||||
dep.hash.clone()) {
|
||||
cname_str.get(),
|
||||
dep.vers,
|
||||
dep.hash) {
|
||||
Some(local_cnum) => {
|
||||
debug!("already have it");
|
||||
// We've already seen this crate
|
||||
|
|
|
@ -23,6 +23,7 @@ use std::rc::Rc;
|
|||
use syntax::ast;
|
||||
use syntax::ast_map;
|
||||
use syntax::diagnostic::expect;
|
||||
use syntax::parse::token;
|
||||
|
||||
pub struct StaticMethodInfo {
|
||||
ident: ast::Ident,
|
||||
|
@ -84,15 +85,14 @@ pub fn each_top_level_item_of_crate(cstore: @cstore::CStore,
|
|||
callback)
|
||||
}
|
||||
|
||||
pub fn get_item_path(tcx: ty::ctxt, def: ast::DefId) -> ast_map::Path {
|
||||
pub fn get_item_path(tcx: ty::ctxt, def: ast::DefId) -> ~[ast_map::PathElem] {
|
||||
let cstore = tcx.cstore;
|
||||
let cdata = cstore.get_crate_data(def.krate);
|
||||
let path = decoder::get_item_path(cdata, def.node);
|
||||
|
||||
// FIXME #1920: This path is not always correct if the crate is not linked
|
||||
// into the root namespace.
|
||||
vec::append(~[ast_map::PathMod(tcx.sess.ident_of(
|
||||
cdata.name))], path)
|
||||
vec::append(~[ast_map::PathMod(token::intern(cdata.name))], path)
|
||||
}
|
||||
|
||||
pub enum found_ast {
|
||||
|
@ -105,12 +105,11 @@ pub enum found_ast {
|
|||
// not marked for inlining, then the AST will not be present and hence none
|
||||
// will be returned.
|
||||
pub fn maybe_get_item_ast(tcx: ty::ctxt, def: ast::DefId,
|
||||
decode_inlined_item: decoder::decode_inlined_item)
|
||||
decode_inlined_item: decoder::DecodeInlinedItem)
|
||||
-> found_ast {
|
||||
let cstore = tcx.cstore;
|
||||
let cdata = cstore.get_crate_data(def.krate);
|
||||
decoder::maybe_get_item_ast(cdata, tcx, def.node,
|
||||
decode_inlined_item)
|
||||
decoder::maybe_get_item_ast(cdata, tcx, def.node, decode_inlined_item)
|
||||
}
|
||||
|
||||
pub fn get_enum_variants(tcx: ty::ctxt, def: ast::DefId)
|
||||
|
|
|
@ -37,10 +37,10 @@ use serialize::Decodable;
|
|||
use syntax::ast_map;
|
||||
use syntax::attr;
|
||||
use syntax::parse::token::{IdentInterner, special_idents};
|
||||
use syntax::parse::token;
|
||||
use syntax::print::pprust;
|
||||
use syntax::ast;
|
||||
use syntax::codemap;
|
||||
use syntax::parse::token;
|
||||
|
||||
type Cmd = @crate_metadata;
|
||||
|
||||
|
@ -96,7 +96,7 @@ fn find_item<'a>(item_id: ast::NodeId, items: ebml::Doc<'a>) -> ebml::Doc<'a> {
|
|||
|
||||
// Looks up an item in the given metadata and returns an ebml doc pointing
|
||||
// to the item data.
|
||||
pub fn lookup_item<'a>(item_id: ast::NodeId, data: &'a [u8]) -> ebml::Doc<'a> {
|
||||
fn lookup_item<'a>(item_id: ast::NodeId, data: &'a [u8]) -> ebml::Doc<'a> {
|
||||
let items = reader::get_doc(reader::Doc(data), tag_items);
|
||||
find_item(item_id, items)
|
||||
}
|
||||
|
@ -258,15 +258,13 @@ fn item_ty_param_defs(item: ebml::Doc,
|
|||
Rc::new(bounds)
|
||||
}
|
||||
|
||||
fn item_region_param_defs(item_doc: ebml::Doc,
|
||||
tcx: ty::ctxt,
|
||||
cdata: Cmd)
|
||||
fn item_region_param_defs(item_doc: ebml::Doc, cdata: Cmd)
|
||||
-> Rc<~[ty::RegionParameterDef]> {
|
||||
let mut v = ~[];
|
||||
reader::tagged_docs(item_doc, tag_region_param_def, |rp_doc| {
|
||||
let ident_str_doc = reader::get_doc(rp_doc,
|
||||
tag_region_param_def_ident);
|
||||
let ident = item_name(tcx.sess.intr(), ident_str_doc);
|
||||
let ident = item_name(token::get_ident_interner(), ident_str_doc);
|
||||
let def_id_doc = reader::get_doc(rp_doc,
|
||||
tag_region_param_def_def_id);
|
||||
let def_id = reader::with_doc_data(def_id_doc, parse_def_id);
|
||||
|
@ -296,7 +294,7 @@ fn enum_variant_ids(item: ebml::Doc, cdata: Cmd) -> ~[ast::DefId] {
|
|||
return ids;
|
||||
}
|
||||
|
||||
pub fn item_path(item_doc: ebml::Doc) -> ast_map::Path {
|
||||
fn item_path(item_doc: ebml::Doc) -> ~[ast_map::PathElem] {
|
||||
let path_doc = reader::get_doc(item_doc, tag_path);
|
||||
|
||||
let len_doc = reader::get_doc(path_doc, tag_path_len);
|
||||
|
@ -305,30 +303,21 @@ pub fn item_path(item_doc: ebml::Doc) -> ast_map::Path {
|
|||
let mut result = vec::with_capacity(len);
|
||||
reader::docs(path_doc, |tag, elt_doc| {
|
||||
if tag == tag_path_elem_mod {
|
||||
let str = elt_doc.as_str_slice();
|
||||
result.push(ast_map::PathMod(token::str_to_ident(str)));
|
||||
let s = elt_doc.as_str_slice();
|
||||
result.push(ast_map::PathMod(token::intern(s)));
|
||||
} else if tag == tag_path_elem_name {
|
||||
let str = elt_doc.as_str_slice();
|
||||
result.push(ast_map::PathName(token::str_to_ident(str)));
|
||||
} else if tag == tag_path_elem_pretty_name {
|
||||
let name_doc = reader::get_doc(elt_doc,
|
||||
tag_path_elem_pretty_name_ident);
|
||||
let extra_doc = reader::get_doc(elt_doc,
|
||||
tag_path_elem_pretty_name_extra);
|
||||
let str = name_doc.as_str_slice();
|
||||
let extra = reader::doc_as_u64(extra_doc);
|
||||
result.push(ast_map::PathPrettyName(token::str_to_ident(str),
|
||||
extra));
|
||||
let s = elt_doc.as_str_slice();
|
||||
result.push(ast_map::PathName(token::intern(s)));
|
||||
} else {
|
||||
// ignore tag_path_len element
|
||||
}
|
||||
true
|
||||
});
|
||||
|
||||
return result;
|
||||
result
|
||||
}
|
||||
|
||||
fn item_name(intr: @IdentInterner, item: ebml::Doc) -> ast::Ident {
|
||||
fn item_name(intr: &IdentInterner, item: ebml::Doc) -> ast::Ident {
|
||||
let name = reader::get_doc(item, tag_paths_data_name);
|
||||
let string = name.as_str_slice();
|
||||
match intr.find_equiv(&string) {
|
||||
|
@ -337,7 +326,7 @@ fn item_name(intr: @IdentInterner, item: ebml::Doc) -> ast::Ident {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn item_to_def_like(item: ebml::Doc, did: ast::DefId, cnum: ast::CrateNum)
|
||||
fn item_to_def_like(item: ebml::Doc, did: ast::DefId, cnum: ast::CrateNum)
|
||||
-> DefLike {
|
||||
let fam = item_family(item);
|
||||
match fam {
|
||||
|
@ -383,14 +372,6 @@ pub fn item_to_def_like(item: ebml::Doc, did: ast::DefId, cnum: ast::CrateNum)
|
|||
}
|
||||
}
|
||||
|
||||
pub fn lookup_def(cnum: ast::CrateNum, data: &[u8], did_: ast::DefId) ->
|
||||
ast::Def {
|
||||
let item = lookup_item(did_.node, data);
|
||||
let did = ast::DefId { krate: cnum, node: did_.node };
|
||||
// We treat references to enums as references to types.
|
||||
return def_like_to_def(item_to_def_like(item, did, cnum));
|
||||
}
|
||||
|
||||
pub fn get_trait_def(cdata: Cmd,
|
||||
item_id: ast::NodeId,
|
||||
tcx: ty::ctxt) -> ty::TraitDef
|
||||
|
@ -398,7 +379,7 @@ pub fn get_trait_def(cdata: Cmd,
|
|||
let item_doc = lookup_item(item_id, cdata.data());
|
||||
let tp_defs = item_ty_param_defs(item_doc, tcx, cdata,
|
||||
tag_items_data_item_ty_param_bounds);
|
||||
let rp_defs = item_region_param_defs(item_doc, tcx, cdata);
|
||||
let rp_defs = item_region_param_defs(item_doc, cdata);
|
||||
let mut bounds = ty::EmptyBuiltinBounds();
|
||||
// Collect the builtin bounds from the encoded supertraits.
|
||||
// FIXME(#8559): They should be encoded directly.
|
||||
|
@ -427,7 +408,7 @@ pub fn get_type(cdata: Cmd, id: ast::NodeId, tcx: ty::ctxt)
|
|||
cdata);
|
||||
|
||||
let tp_defs = item_ty_param_defs(item, tcx, cdata, tag_items_data_item_ty_param_bounds);
|
||||
let rp_defs = item_region_param_defs(item, tcx, cdata);
|
||||
let rp_defs = item_region_param_defs(item, cdata);
|
||||
|
||||
ty::ty_param_bounds_and_ty {
|
||||
generics: ty::Generics {type_param_defs: tp_defs,
|
||||
|
@ -682,41 +663,37 @@ pub fn each_top_level_item_of_crate(intr: @IdentInterner,
|
|||
callback)
|
||||
}
|
||||
|
||||
pub fn get_item_path(cdata: Cmd, id: ast::NodeId) -> ast_map::Path {
|
||||
pub fn get_item_path(cdata: Cmd, id: ast::NodeId) -> ~[ast_map::PathElem] {
|
||||
item_path(lookup_item(id, cdata.data()))
|
||||
}
|
||||
|
||||
pub type decode_inlined_item<'a> = 'a |cdata: @cstore::crate_metadata,
|
||||
tcx: ty::ctxt,
|
||||
path: ast_map::Path,
|
||||
par_doc: ebml::Doc|
|
||||
-> Option<ast::InlinedItem>;
|
||||
pub type DecodeInlinedItem<'a> = 'a |cdata: @cstore::crate_metadata,
|
||||
tcx: ty::ctxt,
|
||||
path: ~[ast_map::PathElem],
|
||||
par_doc: ebml::Doc|
|
||||
-> Result<ast::InlinedItem, ~[ast_map::PathElem]>;
|
||||
|
||||
pub fn maybe_get_item_ast(cdata: Cmd, tcx: ty::ctxt,
|
||||
id: ast::NodeId,
|
||||
decode_inlined_item: decode_inlined_item)
|
||||
-> csearch::found_ast {
|
||||
pub fn maybe_get_item_ast(cdata: Cmd, tcx: ty::ctxt, id: ast::NodeId,
|
||||
decode_inlined_item: DecodeInlinedItem)
|
||||
-> csearch::found_ast {
|
||||
debug!("Looking up item: {}", id);
|
||||
let item_doc = lookup_item(id, cdata.data());
|
||||
let path = {
|
||||
let item_path = item_path(item_doc);
|
||||
item_path.init().to_owned()
|
||||
};
|
||||
match decode_inlined_item(cdata, tcx, /*bad*/path.clone(), item_doc) {
|
||||
Some(ref ii) => csearch::found(*ii),
|
||||
None => {
|
||||
match item_parent_item(item_doc) {
|
||||
Some(did) => {
|
||||
let did = translate_def_id(cdata, did);
|
||||
let parent_item = lookup_item(did.node, cdata.data());
|
||||
match decode_inlined_item(cdata, tcx, path, parent_item) {
|
||||
Some(ref ii) => csearch::found_parent(did, *ii),
|
||||
None => csearch::not_found
|
||||
let path = item_path(item_doc).init().to_owned();
|
||||
match decode_inlined_item(cdata, tcx, path, item_doc) {
|
||||
Ok(ref ii) => csearch::found(*ii),
|
||||
Err(path) => {
|
||||
match item_parent_item(item_doc) {
|
||||
Some(did) => {
|
||||
let did = translate_def_id(cdata, did);
|
||||
let parent_item = lookup_item(did.node, cdata.data());
|
||||
match decode_inlined_item(cdata, tcx, path, parent_item) {
|
||||
Ok(ref ii) => csearch::found_parent(did, *ii),
|
||||
Err(_) => csearch::not_found
|
||||
}
|
||||
}
|
||||
None => csearch::not_found
|
||||
}
|
||||
}
|
||||
None => csearch::not_found
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -835,7 +812,7 @@ pub fn get_method(intr: @IdentInterner, cdata: Cmd, id: ast::NodeId,
|
|||
let name = item_name(intr, method_doc);
|
||||
let type_param_defs = item_ty_param_defs(method_doc, tcx, cdata,
|
||||
tag_item_method_tps);
|
||||
let rp_defs = item_region_param_defs(method_doc, tcx, cdata);
|
||||
let rp_defs = item_region_param_defs(method_doc, cdata);
|
||||
let fty = doc_method_fty(method_doc, tcx, cdata);
|
||||
let vis = item_visibility(method_doc);
|
||||
let explicit_self = get_explicit_self(method_doc);
|
||||
|
@ -1109,13 +1086,13 @@ fn get_attributes(md: ebml::Doc) -> ~[ast::Attribute] {
|
|||
return attrs;
|
||||
}
|
||||
|
||||
fn list_crate_attributes(intr: @IdentInterner, md: ebml::Doc, hash: &str,
|
||||
fn list_crate_attributes(md: ebml::Doc, hash: &str,
|
||||
out: &mut io::Writer) -> io::IoResult<()> {
|
||||
if_ok!(write!(out, "=Crate Attributes ({})=\n", hash));
|
||||
|
||||
let r = get_attributes(md);
|
||||
for attr in r.iter() {
|
||||
if_ok!(write!(out, "{}\n", pprust::attribute_to_str(attr, intr)));
|
||||
if_ok!(write!(out, "{}\n", pprust::attribute_to_str(attr)));
|
||||
}
|
||||
|
||||
write!(out, "\n\n")
|
||||
|
@ -1158,11 +1135,10 @@ fn list_crate_deps(data: &[u8], out: &mut io::Writer) -> io::IoResult<()> {
|
|||
|
||||
let r = get_crate_deps(data);
|
||||
for dep in r.iter() {
|
||||
let string = token::get_ident(dep.name.name);
|
||||
if_ok!(write!(out,
|
||||
"{} {}-{}-{}\n",
|
||||
dep.cnum,
|
||||
string.get(),
|
||||
token::get_ident(dep.name),
|
||||
dep.hash,
|
||||
dep.vers));
|
||||
}
|
||||
|
@ -1185,11 +1161,10 @@ pub fn get_crate_vers(data: &[u8]) -> ~str {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn list_crate_metadata(intr: @IdentInterner, bytes: &[u8],
|
||||
out: &mut io::Writer) -> io::IoResult<()> {
|
||||
pub fn list_crate_metadata(bytes: &[u8], out: &mut io::Writer) -> io::IoResult<()> {
|
||||
let hash = get_crate_hash(bytes);
|
||||
let md = reader::Doc(bytes);
|
||||
if_ok!(list_crate_attributes(intr, md, hash, out));
|
||||
if_ok!(list_crate_attributes(md, hash, out));
|
||||
list_crate_deps(bytes, out)
|
||||
}
|
||||
|
||||
|
|
|
@ -28,10 +28,10 @@ use std::cell::{Cell, RefCell};
|
|||
use std::hashmap::{HashMap, HashSet};
|
||||
use std::io::MemWriter;
|
||||
use std::str;
|
||||
use std::vec;
|
||||
use syntax::abi::AbiSet;
|
||||
use syntax::ast::*;
|
||||
use syntax::ast;
|
||||
use syntax::ast_map::{PathElem, PathElems};
|
||||
use syntax::ast_map;
|
||||
use syntax::ast_util::*;
|
||||
use syntax::ast_util;
|
||||
|
@ -57,10 +57,9 @@ pub enum InlinedItemRef<'a> {
|
|||
IIForeignRef(&'a ast::ForeignItem)
|
||||
}
|
||||
|
||||
pub type encode_inlined_item<'a> = 'a |ecx: &EncodeContext,
|
||||
ebml_w: &mut writer::Encoder,
|
||||
path: &[ast_map::PathElem],
|
||||
ii: InlinedItemRef|;
|
||||
pub type EncodeInlinedItem<'a> = 'a |ecx: &EncodeContext,
|
||||
ebml_w: &mut writer::Encoder,
|
||||
ii: InlinedItemRef|;
|
||||
|
||||
pub struct EncodeParams<'a> {
|
||||
diag: @SpanHandler,
|
||||
|
@ -70,7 +69,7 @@ pub struct EncodeParams<'a> {
|
|||
non_inlineable_statics: &'a RefCell<HashSet<ast::NodeId>>,
|
||||
link_meta: &'a LinkMeta,
|
||||
cstore: @cstore::CStore,
|
||||
encode_inlined_item: encode_inlined_item<'a>,
|
||||
encode_inlined_item: EncodeInlinedItem<'a>,
|
||||
reachable: @RefCell<HashSet<ast::NodeId>>,
|
||||
codemap: @codemap::CodeMap,
|
||||
}
|
||||
|
@ -100,7 +99,7 @@ pub struct EncodeContext<'a> {
|
|||
non_inlineable_statics: &'a RefCell<HashSet<ast::NodeId>>,
|
||||
link_meta: &'a LinkMeta,
|
||||
cstore: &'a cstore::CStore,
|
||||
encode_inlined_item: encode_inlined_item<'a>,
|
||||
encode_inlined_item: EncodeInlinedItem<'a>,
|
||||
type_abbrevs: abbrev_map,
|
||||
reachable: @RefCell<HashSet<ast::NodeId>>,
|
||||
codemap: @codemap::CodeMap,
|
||||
|
@ -111,17 +110,12 @@ pub fn reachable(ecx: &EncodeContext, id: NodeId) -> bool {
|
|||
reachable.get().contains(&id)
|
||||
}
|
||||
|
||||
fn encode_name(ecx: &EncodeContext,
|
||||
ebml_w: &mut writer::Encoder,
|
||||
name: Ident) {
|
||||
ebml_w.wr_tagged_str(tag_paths_data_name, ecx.tcx.sess.str_of(name));
|
||||
fn encode_name(ebml_w: &mut writer::Encoder, name: Ident) {
|
||||
ebml_w.wr_tagged_str(tag_paths_data_name, token::get_ident(name).get());
|
||||
}
|
||||
|
||||
fn encode_impl_type_basename(ecx: &EncodeContext,
|
||||
ebml_w: &mut writer::Encoder,
|
||||
name: Ident) {
|
||||
ebml_w.wr_tagged_str(tag_item_impl_type_basename,
|
||||
ecx.tcx.sess.str_of(name));
|
||||
fn encode_impl_type_basename(ebml_w: &mut writer::Encoder, name: Ident) {
|
||||
ebml_w.wr_tagged_str(tag_item_impl_type_basename, token::get_ident(name).get());
|
||||
}
|
||||
|
||||
pub fn encode_def_id(ebml_w: &mut writer::Encoder, id: DefId) {
|
||||
|
@ -188,13 +182,12 @@ fn encode_ty_type_param_defs(ebml_w: &mut writer::Encoder,
|
|||
}
|
||||
|
||||
fn encode_region_param_defs(ebml_w: &mut writer::Encoder,
|
||||
ecx: &EncodeContext,
|
||||
params: &[ty::RegionParameterDef]) {
|
||||
for param in params.iter() {
|
||||
ebml_w.start_tag(tag_region_param_def);
|
||||
|
||||
ebml_w.start_tag(tag_region_param_def_ident);
|
||||
encode_name(ecx, ebml_w, param.ident);
|
||||
encode_name(ebml_w, param.ident);
|
||||
ebml_w.end_tag();
|
||||
|
||||
ebml_w.wr_tagged_str(tag_region_param_def_def_id,
|
||||
|
@ -218,7 +211,7 @@ fn encode_bounds_and_type(ebml_w: &mut writer::Encoder,
|
|||
tpt: &ty::ty_param_bounds_and_ty) {
|
||||
encode_ty_type_param_defs(ebml_w, ecx, tpt.generics.type_param_defs(),
|
||||
tag_items_data_item_ty_param_bounds);
|
||||
encode_region_param_defs(ebml_w, ecx, tpt.generics.region_param_defs());
|
||||
encode_region_param_defs(ebml_w, tpt.generics.region_param_defs());
|
||||
encode_type(ecx, ebml_w, tpt.ty);
|
||||
}
|
||||
|
||||
|
@ -311,15 +304,14 @@ fn encode_parent_item(ebml_w: &mut writer::Encoder, id: DefId) {
|
|||
ebml_w.end_tag();
|
||||
}
|
||||
|
||||
fn encode_struct_fields(ecx: &EncodeContext,
|
||||
ebml_w: &mut writer::Encoder,
|
||||
def: @StructDef) {
|
||||
fn encode_struct_fields(ebml_w: &mut writer::Encoder,
|
||||
def: @StructDef) {
|
||||
for f in def.fields.iter() {
|
||||
match f.node.kind {
|
||||
NamedField(ident, vis) => {
|
||||
ebml_w.start_tag(tag_item_field);
|
||||
encode_struct_field_family(ebml_w, vis);
|
||||
encode_name(ecx, ebml_w, ident);
|
||||
encode_name(ebml_w, ident);
|
||||
encode_def_id(ebml_w, local_def(f.node.id));
|
||||
ebml_w.end_tag();
|
||||
}
|
||||
|
@ -336,7 +328,6 @@ fn encode_enum_variant_info(ecx: &EncodeContext,
|
|||
ebml_w: &mut writer::Encoder,
|
||||
id: NodeId,
|
||||
variants: &[P<Variant>],
|
||||
path: &[ast_map::PathElem],
|
||||
index: @RefCell<~[entry<i64>]>,
|
||||
generics: &ast::Generics) {
|
||||
debug!("encode_enum_variant_info(id={:?})", id);
|
||||
|
@ -360,7 +351,7 @@ fn encode_enum_variant_info(ecx: &EncodeContext,
|
|||
ast::TupleVariantKind(_) => encode_family(ebml_w, 'v'),
|
||||
ast::StructVariantKind(_) => encode_family(ebml_w, 'V')
|
||||
}
|
||||
encode_name(ecx, ebml_w, variant.node.name);
|
||||
encode_name(ebml_w, variant.node.name);
|
||||
encode_parent_item(ebml_w, local_def(id));
|
||||
encode_visibility(ebml_w, variant.node.vis);
|
||||
encode_attributes(ebml_w, variant.node.attrs);
|
||||
|
@ -371,9 +362,8 @@ fn encode_enum_variant_info(ecx: &EncodeContext,
|
|||
}
|
||||
ast::TupleVariantKind(_) => {},
|
||||
ast::StructVariantKind(def) => {
|
||||
let idx = encode_info_for_struct(ecx, ebml_w, path,
|
||||
def.fields, index);
|
||||
encode_struct_fields(ecx, ebml_w, def);
|
||||
let idx = encode_info_for_struct(ecx, ebml_w, def.fields, index);
|
||||
encode_struct_fields(ebml_w, def);
|
||||
let bkts = create_index(idx);
|
||||
encode_index(ebml_w, bkts, write_i64);
|
||||
}
|
||||
|
@ -384,60 +374,40 @@ fn encode_enum_variant_info(ecx: &EncodeContext,
|
|||
}
|
||||
encode_bounds_and_type(ebml_w, ecx,
|
||||
&lookup_item_type(ecx.tcx, def_id));
|
||||
encode_path(ecx, ebml_w, path,
|
||||
ast_map::PathName(variant.node.name));
|
||||
|
||||
ecx.tcx.map.with_path(variant.node.id, |path| encode_path(ebml_w, path));
|
||||
ebml_w.end_tag();
|
||||
disr_val += 1;
|
||||
i += 1;
|
||||
}
|
||||
}
|
||||
|
||||
fn encode_path(ecx: &EncodeContext,
|
||||
ebml_w: &mut writer::Encoder,
|
||||
path: &[ast_map::PathElem],
|
||||
name: ast_map::PathElem) {
|
||||
fn encode_path_elem(ecx: &EncodeContext,
|
||||
ebml_w: &mut writer::Encoder,
|
||||
elt: ast_map::PathElem) {
|
||||
match elt {
|
||||
ast_map::PathMod(n) => {
|
||||
ebml_w.wr_tagged_str(tag_path_elem_mod, ecx.tcx.sess.str_of(n));
|
||||
}
|
||||
ast_map::PathName(n) => {
|
||||
ebml_w.wr_tagged_str(tag_path_elem_name, ecx.tcx.sess.str_of(n));
|
||||
}
|
||||
ast_map::PathPrettyName(n, extra) => {
|
||||
ebml_w.start_tag(tag_path_elem_pretty_name);
|
||||
ebml_w.wr_tagged_str(tag_path_elem_pretty_name_ident,
|
||||
ecx.tcx.sess.str_of(n));
|
||||
ebml_w.wr_tagged_u64(tag_path_elem_pretty_name_extra, extra);
|
||||
ebml_w.end_tag();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn encode_path<PI: Iterator<PathElem> + Clone>(ebml_w: &mut writer::Encoder,
|
||||
mut path: PI) {
|
||||
ebml_w.start_tag(tag_path);
|
||||
ebml_w.wr_tagged_u32(tag_path_len, (path.len() + 1) as u32);
|
||||
for pe in path.iter() {
|
||||
encode_path_elem(ecx, ebml_w, *pe);
|
||||
ebml_w.wr_tagged_u32(tag_path_len, path.clone().len() as u32);
|
||||
for pe in path {
|
||||
let tag = match pe {
|
||||
ast_map::PathMod(_) => tag_path_elem_mod,
|
||||
ast_map::PathName(_) => tag_path_elem_name
|
||||
};
|
||||
ebml_w.wr_tagged_str(tag, token::get_name(pe.name()).get());
|
||||
}
|
||||
encode_path_elem(ecx, ebml_w, name);
|
||||
ebml_w.end_tag();
|
||||
}
|
||||
|
||||
fn encode_reexported_static_method(ecx: &EncodeContext,
|
||||
ebml_w: &mut writer::Encoder,
|
||||
fn encode_reexported_static_method(ebml_w: &mut writer::Encoder,
|
||||
exp: &middle::resolve::Export2,
|
||||
method_def_id: DefId,
|
||||
method_ident: Ident) {
|
||||
debug!("(encode reexported static method) {}::{}",
|
||||
exp.name, ecx.tcx.sess.str_of(method_ident));
|
||||
exp.name, token::get_ident(method_ident));
|
||||
ebml_w.start_tag(tag_items_data_item_reexport);
|
||||
ebml_w.start_tag(tag_items_data_item_reexport_def_id);
|
||||
ebml_w.wr_str(def_to_str(method_def_id));
|
||||
ebml_w.end_tag();
|
||||
ebml_w.start_tag(tag_items_data_item_reexport_name);
|
||||
ebml_w.wr_str(format!("{}::{}", exp.name, ecx.tcx.sess.str_of(method_ident)));
|
||||
ebml_w.wr_str(format!("{}::{}", exp.name, token::get_ident(method_ident)));
|
||||
ebml_w.end_tag();
|
||||
ebml_w.end_tag();
|
||||
}
|
||||
|
@ -453,8 +423,7 @@ fn encode_reexported_static_base_methods(ecx: &EncodeContext,
|
|||
for &base_impl in implementations.get().iter() {
|
||||
for &m in base_impl.methods.iter() {
|
||||
if m.explicit_self == ast::SelfStatic {
|
||||
encode_reexported_static_method(ecx, ebml_w, exp,
|
||||
m.def_id, m.ident);
|
||||
encode_reexported_static_method(ebml_w, exp, m.def_id, m.ident);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -474,8 +443,7 @@ fn encode_reexported_static_trait_methods(ecx: &EncodeContext,
|
|||
Some(methods) => {
|
||||
for &m in methods.iter() {
|
||||
if m.explicit_self == ast::SelfStatic {
|
||||
encode_reexported_static_method(ecx, ebml_w, exp,
|
||||
m.def_id, m.ident);
|
||||
encode_reexported_static_method(ebml_w, exp, m.def_id, m.ident);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -487,11 +455,22 @@ fn encode_reexported_static_trait_methods(ecx: &EncodeContext,
|
|||
|
||||
fn encode_reexported_static_methods(ecx: &EncodeContext,
|
||||
ebml_w: &mut writer::Encoder,
|
||||
mod_path: &[ast_map::PathElem],
|
||||
mod_path: PathElems,
|
||||
exp: &middle::resolve::Export2) {
|
||||
match ecx.tcx.items.find(exp.def_id.node) {
|
||||
Some(ast_map::NodeItem(item, path)) => {
|
||||
let original_name = token::get_ident(item.ident.name);
|
||||
match ecx.tcx.map.find(exp.def_id.node) {
|
||||
Some(ast_map::NodeItem(item)) => {
|
||||
let original_name = token::get_ident(item.ident);
|
||||
|
||||
let path_differs = ecx.tcx.map.with_path(exp.def_id.node, |path| {
|
||||
let (mut a, mut b) = (path, mod_path.clone());
|
||||
loop {
|
||||
match (a.next(), b.next()) {
|
||||
(None, None) => return true,
|
||||
(None, _) | (_, None) => return false,
|
||||
(Some(x), Some(y)) => if x != y { return false },
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
//
|
||||
// We don't need to reexport static methods on items
|
||||
|
@ -503,7 +482,7 @@ fn encode_reexported_static_methods(ecx: &EncodeContext,
|
|||
// encoded metadata for static methods relative to Bar,
|
||||
// but not yet for Foo.
|
||||
//
|
||||
if mod_path != *path || original_name.get() != exp.name {
|
||||
if path_differs || original_name.get() != exp.name {
|
||||
if !encode_reexported_static_base_methods(ecx, ebml_w, exp) {
|
||||
if encode_reexported_static_trait_methods(ecx, ebml_w, exp) {
|
||||
debug!("(encode reexported static methods) {} \
|
||||
|
@ -558,7 +537,7 @@ fn each_auxiliary_node_id(item: @Item, callback: |NodeId| -> bool) -> bool {
|
|||
fn encode_reexports(ecx: &EncodeContext,
|
||||
ebml_w: &mut writer::Encoder,
|
||||
id: NodeId,
|
||||
path: &[ast_map::PathElem]) {
|
||||
path: PathElems) {
|
||||
debug!("(encoding info for module) encoding reexports for {}", id);
|
||||
let reexports2 = ecx.reexports2.borrow();
|
||||
match reexports2.get().find(&id) {
|
||||
|
@ -579,7 +558,7 @@ fn encode_reexports(ecx: &EncodeContext,
|
|||
ebml_w.wr_str(exp.name);
|
||||
ebml_w.end_tag();
|
||||
ebml_w.end_tag();
|
||||
encode_reexported_static_methods(ecx, ebml_w, path, exp);
|
||||
encode_reexported_static_methods(ecx, ebml_w, path.clone(), exp);
|
||||
}
|
||||
}
|
||||
None => {
|
||||
|
@ -593,13 +572,13 @@ fn encode_info_for_mod(ecx: &EncodeContext,
|
|||
ebml_w: &mut writer::Encoder,
|
||||
md: &Mod,
|
||||
id: NodeId,
|
||||
path: &[ast_map::PathElem],
|
||||
path: PathElems,
|
||||
name: Ident,
|
||||
vis: Visibility) {
|
||||
ebml_w.start_tag(tag_items_data_item);
|
||||
encode_def_id(ebml_w, local_def(id));
|
||||
encode_family(ebml_w, 'm');
|
||||
encode_name(ecx, ebml_w, name);
|
||||
encode_name(ebml_w, name);
|
||||
debug!("(encoding info for module) encoding info for module ID {}", id);
|
||||
|
||||
// Encode info about all the module children.
|
||||
|
@ -620,9 +599,8 @@ fn encode_info_for_mod(ecx: &EncodeContext,
|
|||
let (ident, did) = (item.ident, item.id);
|
||||
debug!("(encoding info for module) ... encoding impl {} \
|
||||
({:?}/{:?})",
|
||||
ecx.tcx.sess.str_of(ident),
|
||||
did,
|
||||
ast_map::node_id_to_str(ecx.tcx.items, did, token::get_ident_interner()));
|
||||
token::get_ident(ident),
|
||||
did, ecx.tcx.map.node_to_str(did));
|
||||
|
||||
ebml_w.start_tag(tag_mod_impl);
|
||||
ebml_w.wr_str(def_to_str(local_def(did)));
|
||||
|
@ -632,7 +610,7 @@ fn encode_info_for_mod(ecx: &EncodeContext,
|
|||
}
|
||||
}
|
||||
|
||||
encode_path(ecx, ebml_w, path, ast_map::PathMod(name));
|
||||
encode_path(ebml_w, path.clone());
|
||||
encode_visibility(ebml_w, vis);
|
||||
|
||||
// Encode the reexports of this module, if this module is public.
|
||||
|
@@ -709,7 +687,6 @@ fn encode_provided_source(ebml_w: &mut writer::Encoder,
/* Returns an index of items in this class */
fn encode_info_for_struct(ecx: &EncodeContext,
                          ebml_w: &mut writer::Encoder,
                          path: &[ast_map::PathElem],
                          fields: &[StructField],
                          global_index: @RefCell<~[entry<i64>]>)
                          -> ~[entry<i64>] {
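// Note (sketch, assumption, not part of this patch): with the `path`
// parameter dropped from encode_info_for_struct, a printable location for a
// field or item is recomputed from the map on demand when it is needed, in
// the style used elsewhere in this commit:
//
//     let loc = ecx.tcx.map.path_to_str(id);
//     debug!("encoding struct field at {}", loc);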
@@ -736,10 +713,9 @@ fn encode_info_for_struct(ecx: &EncodeContext,
        }
        ebml_w.start_tag(tag_items_data_item);
        debug!("encode_info_for_struct: doing {} {}",
               tcx.sess.str_of(nm), id);
               token::get_ident(nm), id);
        encode_struct_field_family(ebml_w, vis);
        encode_name(ecx, ebml_w, nm);
        encode_path(ecx, ebml_w, path, ast_map::PathName(nm));
        encode_name(ebml_w, nm);
        encode_type(ecx, ebml_w, node_id_to_type(tcx, id));
        encode_def_id(ebml_w, local_def(id));
        ebml_w.end_tag();
@@ -749,7 +725,6 @@ fn encode_info_for_struct(ecx: &EncodeContext,

fn encode_info_for_struct_ctor(ecx: &EncodeContext,
                               ebml_w: &mut writer::Encoder,
                               path: &[ast_map::PathElem],
                               name: ast::Ident,
                               ctor_id: NodeId,
                               index: @RefCell<~[entry<i64>]>,
@@ -767,9 +742,9 @@ fn encode_info_for_struct_ctor(ecx: &EncodeContext,
    encode_family(ebml_w, 'f');
    encode_bounds_and_type(ebml_w, ecx,
                           &lookup_item_type(ecx.tcx, local_def(ctor_id)));
    encode_name(ecx, ebml_w, name);
    encode_name(ebml_w, name);
    encode_type(ecx, ebml_w, node_id_to_type(ecx.tcx, ctor_id));
    encode_path(ecx, ebml_w, path, ast_map::PathName(name));
    ecx.tcx.map.with_path(ctor_id, |path| encode_path(ebml_w, path));
    encode_parent_item(ebml_w, local_def(struct_id));

    let item_symbols = ecx.item_symbols.borrow();
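// Note (sketch, not part of this patch): the ctor case above shows the
// general pattern this commit moves to -- the path is borrowed from the
// ast_map only for the duration of the closure instead of being stored in
// the node and threaded through every encoder function:
//
//     ecx.tcx.map.with_path(ctor_id, |path| encode_path(ebml_w, path));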
@ -790,7 +765,7 @@ fn encode_method_ty_fields(ecx: &EncodeContext,
|
|||
ebml_w: &mut writer::Encoder,
|
||||
method_ty: &ty::Method) {
|
||||
encode_def_id(ebml_w, method_ty.def_id);
|
||||
encode_name(ecx, ebml_w, method_ty.ident);
|
||||
encode_name(ebml_w, method_ty.ident);
|
||||
encode_ty_type_param_defs(ebml_w, ecx,
|
||||
method_ty.generics.type_param_defs(),
|
||||
tag_item_method_tps);
|
||||
|
@ -810,13 +785,13 @@ fn encode_method_ty_fields(ecx: &EncodeContext,
|
|||
fn encode_info_for_method(ecx: &EncodeContext,
|
||||
ebml_w: &mut writer::Encoder,
|
||||
m: &ty::Method,
|
||||
impl_path: &[ast_map::PathElem],
|
||||
impl_path: PathElems,
|
||||
is_default_impl: bool,
|
||||
parent_id: NodeId,
|
||||
ast_method_opt: Option<@Method>) {
|
||||
|
||||
debug!("encode_info_for_method: {:?} {}", m.def_id,
|
||||
ecx.tcx.sess.str_of(m.ident));
|
||||
token::get_ident(m.ident));
|
||||
ebml_w.start_tag(tag_items_data_item);
|
||||
|
||||
encode_method_ty_fields(ecx, ebml_w, m);
|
||||
|
@ -826,7 +801,8 @@ fn encode_info_for_method(ecx: &EncodeContext,
|
|||
let tpt = lookup_item_type(ecx.tcx, m.def_id);
|
||||
encode_bounds_and_type(ebml_w, ecx, &tpt);
|
||||
|
||||
encode_path(ecx, ebml_w, impl_path, ast_map::PathName(m.ident));
|
||||
let elem = ast_map::PathName(m.ident.name);
|
||||
encode_path(ebml_w, impl_path.chain(Some(elem).move_iter()));
|
||||
match ast_method_opt {
|
||||
Some(ast_method) => encode_attributes(ebml_w, ast_method.attrs),
|
||||
None => ()
|
||||
|
@ -834,11 +810,9 @@ fn encode_info_for_method(ecx: &EncodeContext,
|
|||
|
||||
for &ast_method in ast_method_opt.iter() {
|
||||
let num_params = tpt.generics.type_param_defs().len();
|
||||
if num_params > 0u || is_default_impl
|
||||
|| should_inline(ast_method.attrs) {
|
||||
if num_params > 0u || is_default_impl || should_inline(ast_method.attrs) {
|
||||
(ecx.encode_inlined_item)(
|
||||
ecx, ebml_w, impl_path,
|
||||
IIMethodRef(local_def(parent_id), false, ast_method));
|
||||
ecx, ebml_w, IIMethodRef(local_def(parent_id), false, ast_method));
|
||||
} else {
|
||||
encode_symbol(ecx, ebml_w, m.def_id.node);
|
||||
}
|
||||
|
@ -912,7 +886,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
|
|||
ebml_w: &mut writer::Encoder,
|
||||
item: &Item,
|
||||
index: @RefCell<~[entry<i64>]>,
|
||||
path: &[ast_map::PathElem],
|
||||
path: PathElems,
|
||||
vis: ast::Visibility) {
|
||||
let tcx = ecx.tcx;
|
||||
|
||||
|
@ -941,18 +915,13 @@ fn encode_info_for_item(ecx: &EncodeContext,
|
|||
}
|
||||
encode_type(ecx, ebml_w, node_id_to_type(tcx, item.id));
|
||||
encode_symbol(ecx, ebml_w, item.id);
|
||||
encode_name(ecx, ebml_w, item.ident);
|
||||
let elt = ast_map::PathPrettyName(item.ident, item.id as u64);
|
||||
encode_path(ecx, ebml_w, path, elt);
|
||||
encode_name(ebml_w, item.ident);
|
||||
encode_path(ebml_w, path);
|
||||
|
||||
let non_inlineable;
|
||||
{
|
||||
let non_inlineable_statics = ecx.non_inlineable_statics.borrow();
|
||||
non_inlineable = non_inlineable_statics.get().contains(&item.id);
|
||||
}
|
||||
let inlineable = !ecx.non_inlineable_statics.borrow().get().contains(&item.id);
|
||||
|
||||
if !non_inlineable {
|
||||
(ecx.encode_inlined_item)(ecx, ebml_w, path, IIItemRef(item));
|
||||
if inlineable {
|
||||
(ecx.encode_inlined_item)(ecx, ebml_w, IIItemRef(item));
|
||||
}
|
||||
encode_visibility(ebml_w, vis);
|
||||
ebml_w.end_tag();
|
||||
|
@ -964,11 +933,11 @@ fn encode_info_for_item(ecx: &EncodeContext,
|
|||
encode_family(ebml_w, purity_fn_family(purity));
|
||||
let tps_len = generics.ty_params.len();
|
||||
encode_bounds_and_type(ebml_w, ecx, &lookup_item_type(tcx, def_id));
|
||||
encode_name(ecx, ebml_w, item.ident);
|
||||
encode_path(ecx, ebml_w, path, ast_map::PathName(item.ident));
|
||||
encode_name(ebml_w, item.ident);
|
||||
encode_path(ebml_w, path);
|
||||
encode_attributes(ebml_w, item.attrs);
|
||||
if tps_len > 0u || should_inline(item.attrs) {
|
||||
(ecx.encode_inlined_item)(ecx, ebml_w, path, IIItemRef(item));
|
||||
(ecx.encode_inlined_item)(ecx, ebml_w, IIItemRef(item));
|
||||
} else {
|
||||
encode_symbol(ecx, ebml_w, item.id);
|
||||
}
|
||||
|
@ -990,8 +959,8 @@ fn encode_info_for_item(ecx: &EncodeContext,
|
|||
ebml_w.start_tag(tag_items_data_item);
|
||||
encode_def_id(ebml_w, def_id);
|
||||
encode_family(ebml_w, 'n');
|
||||
encode_name(ecx, ebml_w, item.ident);
|
||||
encode_path(ecx, ebml_w, path, ast_map::PathName(item.ident));
|
||||
encode_name(ebml_w, item.ident);
|
||||
encode_path(ebml_w, path);
|
||||
|
||||
// Encode all the items in this module.
|
||||
for foreign_item in fm.items.iter() {
|
||||
|
@ -1008,8 +977,8 @@ fn encode_info_for_item(ecx: &EncodeContext,
|
|||
encode_def_id(ebml_w, def_id);
|
||||
encode_family(ebml_w, 'y');
|
||||
encode_bounds_and_type(ebml_w, ecx, &lookup_item_type(tcx, def_id));
|
||||
encode_name(ecx, ebml_w, item.ident);
|
||||
encode_path(ecx, ebml_w, path, ast_map::PathName(item.ident));
|
||||
encode_name(ebml_w, item.ident);
|
||||
encode_path(ebml_w, path);
|
||||
encode_visibility(ebml_w, vis);
|
||||
ebml_w.end_tag();
|
||||
}
|
||||
|
@ -1021,13 +990,13 @@ fn encode_info_for_item(ecx: &EncodeContext,
|
|||
encode_family(ebml_w, 't');
|
||||
encode_item_variances(ebml_w, ecx, item.id);
|
||||
encode_bounds_and_type(ebml_w, ecx, &lookup_item_type(tcx, def_id));
|
||||
encode_name(ecx, ebml_w, item.ident);
|
||||
encode_name(ebml_w, item.ident);
|
||||
encode_attributes(ebml_w, item.attrs);
|
||||
for v in (*enum_definition).variants.iter() {
|
||||
encode_variant_id(ebml_w, local_def(v.node.id));
|
||||
}
|
||||
(ecx.encode_inlined_item)(ecx, ebml_w, path, IIItemRef(item));
|
||||
encode_path(ecx, ebml_w, path, ast_map::PathName(item.ident));
|
||||
(ecx.encode_inlined_item)(ecx, ebml_w, IIItemRef(item));
|
||||
encode_path(ebml_w, path);
|
||||
|
||||
// Encode inherent implementations for this enumeration.
|
||||
encode_inherent_implementations(ecx, ebml_w, def_id);
|
||||
|
@ -1039,7 +1008,6 @@ fn encode_info_for_item(ecx: &EncodeContext,
|
|||
ebml_w,
|
||||
item.id,
|
||||
(*enum_definition).variants,
|
||||
path,
|
||||
index,
|
||||
generics);
|
||||
}
|
||||
|
@ -1048,7 +1016,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
|
|||
These come first because we need to write them to make
|
||||
the index, and the index needs to be in the item for the
|
||||
class itself */
|
||||
let idx = encode_info_for_struct(ecx, ebml_w, path,
|
||||
let idx = encode_info_for_struct(ecx, ebml_w,
|
||||
struct_def.fields, index);
|
||||
|
||||
/* Index the class*/
|
||||
|
@ -1061,17 +1029,17 @@ fn encode_info_for_item(ecx: &EncodeContext,
|
|||
encode_bounds_and_type(ebml_w, ecx, &lookup_item_type(tcx, def_id));
|
||||
|
||||
encode_item_variances(ebml_w, ecx, item.id);
|
||||
encode_name(ecx, ebml_w, item.ident);
|
||||
encode_name(ebml_w, item.ident);
|
||||
encode_attributes(ebml_w, item.attrs);
|
||||
encode_path(ecx, ebml_w, path, ast_map::PathName(item.ident));
|
||||
encode_path(ebml_w, path.clone());
|
||||
encode_visibility(ebml_w, vis);
|
||||
|
||||
/* Encode def_ids for each field and method
|
||||
for methods, write all the stuff get_trait_method
|
||||
needs to know*/
|
||||
encode_struct_fields(ecx, ebml_w, struct_def);
|
||||
encode_struct_fields(ebml_w, struct_def);
|
||||
|
||||
(ecx.encode_inlined_item)(ecx, ebml_w, path, IIItemRef(item));
|
||||
(ecx.encode_inlined_item)(ecx, ebml_w, IIItemRef(item));
|
||||
|
||||
// Encode inherent implementations for this structure.
|
||||
encode_inherent_implementations(ecx, ebml_w, def_id);
|
||||
|
@ -1081,22 +1049,13 @@ fn encode_info_for_item(ecx: &EncodeContext,
|
|||
encode_index(ebml_w, bkts, write_i64);
|
||||
ebml_w.end_tag();
|
||||
|
||||
// If this is a tuple- or enum-like struct, encode the type of the
|
||||
// constructor.
|
||||
if struct_def.fields.len() > 0 &&
|
||||
struct_def.fields[0].node.kind == ast::UnnamedField {
|
||||
let ctor_id = match struct_def.ctor_id {
|
||||
Some(ctor_id) => ctor_id,
|
||||
None => ecx.tcx.sess.bug("struct def didn't have ctor id"),
|
||||
};
|
||||
|
||||
encode_info_for_struct_ctor(ecx,
|
||||
ebml_w,
|
||||
path,
|
||||
item.ident,
|
||||
ctor_id,
|
||||
index,
|
||||
def_id.node);
|
||||
// If this is a tuple-like struct, encode the type of the constructor.
|
||||
match struct_def.ctor_id {
|
||||
Some(ctor_id) => {
|
||||
encode_info_for_struct_ctor(ecx, ebml_w, item.ident,
|
||||
ctor_id, index, def_id.node);
|
||||
}
|
||||
None => {}
|
||||
}
|
||||
}
|
||||
ItemImpl(_, ref opt_trait, ty, ref ast_methods) => {
|
||||
|
@ -1110,14 +1069,13 @@ fn encode_info_for_item(ecx: &EncodeContext,
|
|||
encode_def_id(ebml_w, def_id);
|
||||
encode_family(ebml_w, 'i');
|
||||
encode_bounds_and_type(ebml_w, ecx, &lookup_item_type(tcx, def_id));
|
||||
encode_name(ecx, ebml_w, item.ident);
|
||||
encode_name(ebml_w, item.ident);
|
||||
encode_attributes(ebml_w, item.attrs);
|
||||
match ty.node {
|
||||
ast::TyPath(ref path, ref bounds, _) if path.segments
|
||||
.len() == 1 => {
|
||||
assert!(bounds.is_none());
|
||||
encode_impl_type_basename(ecx, ebml_w,
|
||||
ast_util::path_to_ident(path));
|
||||
encode_impl_type_basename(ebml_w, ast_util::path_to_ident(path));
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
@ -1134,14 +1092,9 @@ fn encode_info_for_item(ecx: &EncodeContext,
|
|||
let impl_vtables = ty::lookup_impl_vtables(tcx, def_id);
|
||||
encode_impl_vtables(ebml_w, ecx, &impl_vtables);
|
||||
}
|
||||
let elt = ast_map::impl_pretty_name(opt_trait, ty);
|
||||
encode_path(ecx, ebml_w, path, elt);
|
||||
encode_path(ebml_w, path.clone());
|
||||
ebml_w.end_tag();
|
||||
|
||||
// >:-<
|
||||
let mut impl_path = vec::append(~[], path);
|
||||
impl_path.push(elt);
|
||||
|
||||
// Iterate down the methods, emitting them. We rely on the
|
||||
// assumption that all of the actually implemented methods
|
||||
// appear first in the impl structure, in the same order they do
|
||||
|
@ -1162,7 +1115,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
|
|||
encode_info_for_method(ecx,
|
||||
ebml_w,
|
||||
*m,
|
||||
impl_path,
|
||||
path.clone(),
|
||||
false,
|
||||
item.id,
|
||||
ast_method)
|
||||
|
@ -1178,10 +1131,9 @@ fn encode_info_for_item(ecx: &EncodeContext,
|
|||
encode_ty_type_param_defs(ebml_w, ecx,
|
||||
trait_def.generics.type_param_defs(),
|
||||
tag_items_data_item_ty_param_bounds);
|
||||
encode_region_param_defs(ebml_w, ecx,
|
||||
trait_def.generics.region_param_defs());
|
||||
encode_region_param_defs(ebml_w, trait_def.generics.region_param_defs());
|
||||
encode_trait_ref(ebml_w, ecx, trait_def.trait_ref, tag_item_trait_ref);
|
||||
encode_name(ecx, ebml_w, item.ident);
|
||||
encode_name(ebml_w, item.ident);
|
||||
encode_attributes(ebml_w, item.attrs);
|
||||
encode_visibility(ebml_w, vis);
|
||||
for &method_def_id in ty::trait_method_def_ids(tcx, def_id).iter() {
|
||||
|
@ -1193,7 +1145,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
|
|||
ebml_w.wr_str(def_to_str(method_def_id));
|
||||
ebml_w.end_tag();
|
||||
}
|
||||
encode_path(ecx, ebml_w, path, ast_map::PathName(item.ident));
|
||||
encode_path(ebml_w, path.clone());
|
||||
// FIXME(#8559): This should use the tcx's supertrait cache instead of
|
||||
// reading the AST's list, because the former has already filtered out
|
||||
// the builtin-kinds-as-supertraits. See corresponding fixme in decoder.
|
||||
|
@ -1228,9 +1180,8 @@ fn encode_info_for_item(ecx: &EncodeContext,
|
|||
|
||||
encode_parent_item(ebml_w, def_id);
|
||||
|
||||
let mut trait_path = vec::append(~[], path);
|
||||
trait_path.push(ast_map::PathName(item.ident));
|
||||
encode_path(ecx, ebml_w, trait_path, ast_map::PathName(method_ty.ident));
|
||||
let elem = ast_map::PathName(method_ty.ident.name);
|
||||
encode_path(ebml_w, path.clone().chain(Some(elem).move_iter()));
|
||||
|
||||
match method_ty.explicit_self {
|
||||
SelfStatic => {
|
||||
|
@ -1266,8 +1217,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
|
|||
}
|
||||
encode_method_sort(ebml_w, 'p');
|
||||
(ecx.encode_inlined_item)(
|
||||
ecx, ebml_w, path,
|
||||
IIMethodRef(def_id, true, m));
|
||||
ecx, ebml_w, IIMethodRef(def_id, true, m));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1287,7 +1237,7 @@ fn encode_info_for_foreign_item(ecx: &EncodeContext,
|
|||
ebml_w: &mut writer::Encoder,
|
||||
nitem: &ForeignItem,
|
||||
index: @RefCell<~[entry<i64>]>,
|
||||
path: &ast_map::Path,
|
||||
path: PathElems,
|
||||
abi: AbiSet) {
|
||||
{
|
||||
let mut index = index.borrow_mut();
|
||||
|
@ -1298,22 +1248,20 @@ fn encode_info_for_foreign_item(ecx: &EncodeContext,
|
|||
}
|
||||
|
||||
ebml_w.start_tag(tag_items_data_item);
|
||||
encode_def_id(ebml_w, local_def(nitem.id));
|
||||
match nitem.node {
|
||||
ForeignItemFn(..) => {
|
||||
encode_def_id(ebml_w, local_def(nitem.id));
|
||||
encode_family(ebml_w, purity_fn_family(ImpureFn));
|
||||
encode_bounds_and_type(ebml_w, ecx,
|
||||
&lookup_item_type(ecx.tcx,local_def(nitem.id)));
|
||||
encode_name(ecx, ebml_w, nitem.ident);
|
||||
encode_name(ebml_w, nitem.ident);
|
||||
if abi.is_intrinsic() {
|
||||
(ecx.encode_inlined_item)(ecx, ebml_w, *path, IIForeignRef(nitem));
|
||||
(ecx.encode_inlined_item)(ecx, ebml_w, IIForeignRef(nitem));
|
||||
} else {
|
||||
encode_symbol(ecx, ebml_w, nitem.id);
|
||||
}
|
||||
encode_path(ecx, ebml_w, *path, ast_map::PathName(nitem.ident));
|
||||
}
|
||||
ForeignItemStatic(_, mutbl) => {
|
||||
encode_def_id(ebml_w, local_def(nitem.id));
|
||||
if mutbl {
|
||||
encode_family(ebml_w, 'b');
|
||||
} else {
|
||||
|
@ -1321,66 +1269,51 @@ fn encode_info_for_foreign_item(ecx: &EncodeContext,
|
|||
}
|
||||
encode_type(ecx, ebml_w, node_id_to_type(ecx.tcx, nitem.id));
|
||||
encode_symbol(ecx, ebml_w, nitem.id);
|
||||
encode_name(ecx, ebml_w, nitem.ident);
|
||||
encode_path(ecx, ebml_w, *path, ast_map::PathName(nitem.ident));
|
||||
encode_name(ebml_w, nitem.ident);
|
||||
}
|
||||
}
|
||||
encode_path(ebml_w, path);
|
||||
ebml_w.end_tag();
|
||||
}
|
||||
|
||||
fn my_visit_expr(_e: &Expr) { }
|
||||
|
||||
fn my_visit_item(i: &Item,
|
||||
items: ast_map::Map,
|
||||
ebml_w: &mut writer::Encoder,
|
||||
ecx_ptr: *int,
|
||||
index: @RefCell<~[entry<i64>]>) {
|
||||
match items.get(i.id) {
|
||||
ast_map::NodeItem(_, pt) => {
|
||||
let mut ebml_w = unsafe {
|
||||
ebml_w.unsafe_clone()
|
||||
};
|
||||
// See above
|
||||
let ecx : &EncodeContext = unsafe { cast::transmute(ecx_ptr) };
|
||||
encode_info_for_item(ecx, &mut ebml_w, i, index, *pt, i.vis);
|
||||
}
|
||||
_ => fail!("bad item")
|
||||
}
|
||||
let mut ebml_w = unsafe { ebml_w.unsafe_clone() };
|
||||
// See above
|
||||
let ecx: &EncodeContext = unsafe { cast::transmute(ecx_ptr) };
|
||||
ecx.tcx.map.with_path(i.id, |path| {
|
||||
encode_info_for_item(ecx, &mut ebml_w, i, index, path, i.vis);
|
||||
});
|
||||
}
|
||||
|
||||
fn my_visit_foreign_item(ni: &ForeignItem,
|
||||
items: ast_map::Map,
|
||||
ebml_w: &mut writer::Encoder,
|
||||
ecx_ptr:*int,
|
||||
index: @RefCell<~[entry<i64>]>) {
|
||||
match items.get(ni.id) {
|
||||
ast_map::NodeForeignItem(_, abi, _, pt) => {
|
||||
let string = token::get_ident(ni.ident.name);
|
||||
debug!("writing foreign item {}::{}",
|
||||
ast_map::path_to_str(*pt, token::get_ident_interner()),
|
||||
string.get());
|
||||
// See above
|
||||
let ecx: &EncodeContext = unsafe { cast::transmute(ecx_ptr) };
|
||||
debug!("writing foreign item {}::{}",
|
||||
ecx.tcx.map.path_to_str(ni.id),
|
||||
token::get_ident(ni.ident));
|
||||
|
||||
let mut ebml_w = unsafe {
|
||||
ebml_w.unsafe_clone()
|
||||
};
|
||||
// See above
|
||||
let ecx: &EncodeContext = unsafe { cast::transmute(ecx_ptr) };
|
||||
encode_info_for_foreign_item(ecx,
|
||||
&mut ebml_w,
|
||||
ni,
|
||||
index,
|
||||
pt,
|
||||
abi);
|
||||
}
|
||||
// case for separate item and foreign-item tables
|
||||
_ => fail!("bad foreign item")
|
||||
}
|
||||
let mut ebml_w = unsafe {
|
||||
ebml_w.unsafe_clone()
|
||||
};
|
||||
let abis = ecx.tcx.map.get_foreign_abis(ni.id);
|
||||
ecx.tcx.map.with_path(ni.id, |path| {
|
||||
encode_info_for_foreign_item(ecx, &mut ebml_w,
|
||||
ni, index,
|
||||
path, abis);
|
||||
});
|
||||
}
|
||||
|
||||
struct EncodeVisitor<'a,'b> {
|
||||
ebml_w_for_visit_item: &'a mut writer::Encoder<'b>,
|
||||
ecx_ptr:*int,
|
||||
items: ast_map::Map,
|
||||
index: @RefCell<~[entry<i64>]>,
|
||||
}
|
||||
|
||||
|
@ -1392,7 +1325,6 @@ impl<'a,'b> visit::Visitor<()> for EncodeVisitor<'a,'b> {
|
|||
fn visit_item(&mut self, i: &Item, _: ()) {
|
||||
visit::walk_item(self, i, ());
|
||||
my_visit_item(i,
|
||||
self.items,
|
||||
self.ebml_w_for_visit_item,
|
||||
self.ecx_ptr,
|
||||
self.index);
|
||||
|
@ -1400,7 +1332,6 @@ impl<'a,'b> visit::Visitor<()> for EncodeVisitor<'a,'b> {
|
|||
fn visit_foreign_item(&mut self, ni: &ForeignItem, _: ()) {
|
||||
visit::walk_foreign_item(self, ni, ());
|
||||
my_visit_foreign_item(ni,
|
||||
self.items,
|
||||
self.ebml_w_for_visit_item,
|
||||
self.ecx_ptr,
|
||||
self.index);
|
||||
|
@ -1424,17 +1355,15 @@ fn encode_info_for_items(ecx: &EncodeContext,
|
|||
ebml_w,
|
||||
&krate.module,
|
||||
CRATE_NODE_ID,
|
||||
[],
|
||||
ast_map::Values([].iter()).chain(None),
|
||||
syntax::parse::token::special_idents::invalid,
|
||||
Public);
|
||||
let items = ecx.tcx.items;
|
||||
|
||||
// See comment in `encode_side_tables_for_ii` in astencode
|
||||
let ecx_ptr : *int = unsafe { cast::transmute(ecx) };
|
||||
let ecx_ptr: *int = unsafe { cast::transmute(ecx) };
|
||||
{
|
||||
let mut visitor = EncodeVisitor {
|
||||
index: index,
|
||||
items: items,
|
||||
ecx_ptr: ecx_ptr,
|
||||
ebml_w_for_visit_item: &mut *ebml_w,
|
||||
};
|
||||
|
@ -1581,20 +1510,17 @@ fn synthesize_crate_attrs(ecx: &EncodeContext,
|
|||
attrs
|
||||
}
|
||||
|
||||
fn encode_crate_deps(ecx: &EncodeContext,
|
||||
ebml_w: &mut writer::Encoder,
|
||||
cstore: &cstore::CStore) {
|
||||
fn get_ordered_deps(ecx: &EncodeContext, cstore: &cstore::CStore)
|
||||
-> ~[decoder::CrateDep] {
|
||||
type numdep = decoder::CrateDep;
|
||||
|
||||
fn encode_crate_deps(ebml_w: &mut writer::Encoder, cstore: &cstore::CStore) {
|
||||
fn get_ordered_deps(cstore: &cstore::CStore) -> ~[decoder::CrateDep] {
|
||||
// Pull the cnums and name,vers,hash out of cstore
|
||||
let mut deps = ~[];
|
||||
cstore.iter_crate_data(|key, val| {
|
||||
let dep = decoder::CrateDep {cnum: key,
|
||||
name: ecx.tcx.sess.ident_of(val.name),
|
||||
vers: decoder::get_crate_vers(val.data()),
|
||||
hash: decoder::get_crate_hash(val.data())};
|
||||
let dep = decoder::CrateDep {
|
||||
cnum: key,
|
||||
name: token::str_to_ident(val.name),
|
||||
vers: decoder::get_crate_vers(val.data()),
|
||||
hash: decoder::get_crate_hash(val.data())
|
||||
};
|
||||
deps.push(dep);
|
||||
});
|
||||
|
||||
|
@ -1616,9 +1542,9 @@ fn encode_crate_deps(ecx: &EncodeContext,
|
|||
// FIXME (#2166): This is not nearly enough to support correct versioning
|
||||
// but is enough to get transitive crate dependencies working.
|
||||
ebml_w.start_tag(tag_crate_deps);
|
||||
let r = get_ordered_deps(ecx, cstore);
|
||||
let r = get_ordered_deps(cstore);
|
||||
for dep in r.iter() {
|
||||
encode_crate_dep(ecx, ebml_w, (*dep).clone());
|
||||
encode_crate_dep(ebml_w, (*dep).clone());
|
||||
}
|
||||
ebml_w.end_tag();
|
||||
}
|
||||
|
@ -1801,19 +1727,18 @@ fn encode_misc_info(ecx: &EncodeContext,
|
|||
}
|
||||
|
||||
// Encode reexports for the root module.
|
||||
encode_reexports(ecx, ebml_w, 0, []);
|
||||
encode_reexports(ecx, ebml_w, 0, ast_map::Values([].iter()).chain(None));
|
||||
|
||||
ebml_w.end_tag();
|
||||
ebml_w.end_tag();
|
||||
}
|
||||
|
||||
fn encode_crate_dep(ecx: &EncodeContext,
|
||||
ebml_w: &mut writer::Encoder,
|
||||
fn encode_crate_dep(ebml_w: &mut writer::Encoder,
|
||||
dep: decoder::CrateDep) {
|
||||
ebml_w.start_tag(tag_crate_dep);
|
||||
ebml_w.start_tag(tag_crate_dep_name);
|
||||
let s = ecx.tcx.sess.str_of(dep.name);
|
||||
ebml_w.writer.write(s.as_bytes());
|
||||
let s = token::get_ident(dep.name);
|
||||
ebml_w.writer.write(s.get().as_bytes());
|
||||
ebml_w.end_tag();
|
||||
ebml_w.start_tag(tag_crate_dep_vers);
|
||||
ebml_w.writer.write(dep.vers.as_bytes());
|
||||
|
@ -1900,7 +1825,7 @@ fn encode_metadata_inner(wr: &mut MemWriter, parms: EncodeParams, krate: &Crate)
|
|||
ecx.stats.attr_bytes.set(ebml_w.writer.tell().unwrap() - i);
|
||||
|
||||
i = ebml_w.writer.tell().unwrap();
|
||||
encode_crate_deps(&ecx, &mut ebml_w, ecx.cstore);
|
||||
encode_crate_deps(&mut ebml_w, ecx.cstore);
|
||||
ecx.stats.dep_bytes.set(ebml_w.writer.tell().unwrap() - i);
|
||||
|
||||
// Encode the language items.
|
||||
|
|
|
@ -28,7 +28,6 @@ use std::c_str::ToCStr;
|
|||
use std::cast;
|
||||
use std::cmp;
|
||||
use std::io;
|
||||
use std::option;
|
||||
use std::os::consts::{macos, freebsd, linux, android, win32};
|
||||
use std::str;
|
||||
use std::vec;
|
||||
|
@ -377,16 +376,10 @@ pub fn read_meta_section_name(os: Os) -> &'static str {
|
|||
}
|
||||
|
||||
// A diagnostic function for dumping crate metadata to an output stream
|
||||
pub fn list_file_metadata(intr: @IdentInterner,
|
||||
os: Os,
|
||||
path: &Path,
|
||||
pub fn list_file_metadata(os: Os, path: &Path,
|
||||
out: &mut io::Writer) -> io::IoResult<()> {
|
||||
match get_metadata_section(os, path) {
|
||||
option::Some(bytes) => decoder::list_crate_metadata(intr,
|
||||
bytes.as_slice(),
|
||||
out),
|
||||
option::None => {
|
||||
write!(out, "could not find metadata in {}.\n", path.display())
|
||||
}
|
||||
Some(bytes) => decoder::list_crate_metadata(bytes.as_slice(), out),
|
||||
None => write!(out, "could not find metadata in {}.\n", path.display())
|
||||
}
|
||||
}
|
||||
|
|
|
@ -24,6 +24,7 @@ use syntax::abi;
|
|||
use syntax::ast;
|
||||
use syntax::ast::*;
|
||||
use syntax::opt_vec;
|
||||
use syntax::parse::token;
|
||||
|
||||
// Compact string representation for ty::t values. API ty_str &
|
||||
// parse_from_str. Extra parameters are for converting to/from def_ids in the
|
||||
|
@ -96,10 +97,9 @@ pub fn parse_ident(st: &mut PState, last: char) -> ast::Ident {
|
|||
}
|
||||
|
||||
fn parse_ident_(st: &mut PState, is_last: |char| -> bool) -> ast::Ident {
|
||||
let tcx = st.tcx;
|
||||
scan(st, is_last, |bytes| {
|
||||
tcx.sess.ident_of(str::from_utf8(bytes).unwrap())
|
||||
})
|
||||
token::str_to_ident(str::from_utf8(bytes).unwrap())
|
||||
})
|
||||
}
|
||||
|
||||
pub fn parse_state_from_data<'a>(data: &'a [u8], crate_num: ast::CrateNum,
|
||||
|
@ -212,7 +212,7 @@ fn parse_bound_region(st: &mut PState, conv: conv_did) -> ty::BoundRegion {
|
|||
}
|
||||
'[' => {
|
||||
let def = parse_def(st, RegionParameter, |x,y| conv(x,y));
|
||||
let ident = st.tcx.sess.ident_of(parse_str(st, ']'));
|
||||
let ident = token::str_to_ident(parse_str(st, ']'));
|
||||
ty::BrNamed(def, ident)
|
||||
}
|
||||
'f' => {
|
||||
|
@ -240,7 +240,7 @@ fn parse_region(st: &mut PState, conv: conv_did) -> ty::Region {
|
|||
assert_eq!(next(st), '|');
|
||||
let index = parse_uint(st);
|
||||
assert_eq!(next(st), '|');
|
||||
let nm = st.tcx.sess.ident_of(parse_str(st, ']'));
|
||||
let nm = token::str_to_ident(parse_str(st, ']'));
|
||||
ty::ReEarlyBound(node_id, index, nm)
|
||||
}
|
||||
'f' => {
|
||||
|
|
|
@ -26,6 +26,7 @@ use syntax::abi::AbiSet;
|
|||
use syntax::ast;
|
||||
use syntax::ast::*;
|
||||
use syntax::diagnostic::SpanHandler;
|
||||
use syntax::parse::token;
|
||||
use syntax::print::pprust::*;
|
||||
|
||||
macro_rules! mywrite( ($wr:expr, $($arg:tt)*) => (
|
||||
|
@ -177,7 +178,7 @@ fn enc_region(w: &mut MemWriter, cx: @ctxt, r: ty::Region) {
|
|||
mywrite!(w, "B[{}|{}|{}]",
|
||||
node_id,
|
||||
index,
|
||||
cx.tcx.sess.str_of(ident));
|
||||
token::get_ident(ident));
|
||||
}
|
||||
ty::ReFree(ref fr) => {
|
||||
mywrite!(w, "f[{}|", fr.scope_id);
|
||||
|
@ -208,7 +209,7 @@ fn enc_bound_region(w: &mut MemWriter, cx: @ctxt, br: ty::BoundRegion) {
|
|||
ty::BrNamed(d, s) => {
|
||||
mywrite!(w, "[{}|{}]",
|
||||
(cx.ds)(d),
|
||||
cx.tcx.sess.str_of(s));
|
||||
token::get_ident(s));
|
||||
}
|
||||
ty::BrFresh(id) => {
|
||||
mywrite!(w, "f{}|", id);
|
||||
|
@ -420,7 +421,7 @@ fn enc_bounds(w: &mut MemWriter, cx: @ctxt, bs: &ty::ParamBounds) {
|
|||
}
|
||||
|
||||
pub fn enc_type_param_def(w: &mut MemWriter, cx: @ctxt, v: &ty::TypeParameterDef) {
|
||||
mywrite!(w, "{}:{}|", cx.tcx.sess.str_of(v.ident), (cx.ds)(v.def_id));
|
||||
mywrite!(w, "{}:{}|", token::get_ident(v.ident), (cx.ds)(v.def_id));
|
||||
enc_bounds(w, cx, v.bounds);
|
||||
enc_opt(w, v.default, |w, t| enc_ty(w, cx, t));
|
||||
}
|
||||
|
|
|
@ -27,7 +27,6 @@ use util::ppaux::ty_to_str;
|
|||
|
||||
use syntax::{ast, ast_map, ast_util, codemap, fold};
|
||||
use syntax::codemap::Span;
|
||||
use syntax::diagnostic::SpanHandler;
|
||||
use syntax::fold::Folder;
|
||||
use syntax::parse::token;
|
||||
use syntax;
|
||||
|
@ -80,17 +79,15 @@ trait tr_intern {
|
|||
|
||||
pub fn encode_inlined_item(ecx: &e::EncodeContext,
|
||||
ebml_w: &mut writer::Encoder,
|
||||
path: &[ast_map::PathElem],
|
||||
ii: e::InlinedItemRef,
|
||||
maps: Maps) {
|
||||
let ident = match ii {
|
||||
e::IIItemRef(i) => i.ident,
|
||||
e::IIForeignRef(i) => i.ident,
|
||||
e::IIMethodRef(_, _, m) => m.ident,
|
||||
let id = match ii {
|
||||
e::IIItemRef(i) => i.id,
|
||||
e::IIForeignRef(i) => i.id,
|
||||
e::IIMethodRef(_, _, m) => m.id,
|
||||
};
|
||||
debug!("> Encoding inlined item: {}::{} ({})",
|
||||
ast_map::path_to_str(path, token::get_ident_interner()),
|
||||
ecx.tcx.sess.str_of(ident),
|
||||
debug!("> Encoding inlined item: {} ({})",
|
||||
ecx.tcx.map.path_to_str(id),
|
||||
ebml_w.writer.tell());
|
||||
|
||||
let ii = simplify_ast(ii);
|
||||
|
@ -102,9 +99,8 @@ pub fn encode_inlined_item(ecx: &e::EncodeContext,
|
|||
encode_side_tables_for_ii(ecx, maps, ebml_w, &ii);
|
||||
ebml_w.end_tag();
|
||||
|
||||
debug!("< Encoded inlined fn: {}::{} ({})",
|
||||
ast_map::path_to_str(path, token::get_ident_interner()),
|
||||
ecx.tcx.sess.str_of(ident),
|
||||
debug!("< Encoded inlined fn: {} ({})",
|
||||
ecx.tcx.map.path_to_str(id),
|
||||
ebml_w.writer.tell());
|
||||
}
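// Note (sketch, assumption, not part of this patch): inlined items are now
// identified by their NodeId alone, and a readable path is recomputed from
// the map only when a log message actually needs it, e.g.:
//
//     debug!("encoding inlined item {}", ecx.tcx.map.path_to_str(id));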
|
||||
|
||||
|
@ -118,19 +114,25 @@ pub fn encode_exported_macro(ebml_w: &mut writer::Encoder, i: &ast::Item) {
|
|||
pub fn decode_inlined_item(cdata: @cstore::crate_metadata,
|
||||
tcx: ty::ctxt,
|
||||
maps: Maps,
|
||||
path: &[ast_map::PathElem],
|
||||
path: ~[ast_map::PathElem],
|
||||
par_doc: ebml::Doc)
|
||||
-> Option<ast::InlinedItem> {
|
||||
-> Result<ast::InlinedItem, ~[ast_map::PathElem]> {
|
||||
let dcx = @DecodeContext {
|
||||
cdata: cdata,
|
||||
tcx: tcx,
|
||||
maps: maps
|
||||
};
|
||||
match par_doc.opt_child(c::tag_ast) {
|
||||
None => None,
|
||||
None => Err(path),
|
||||
Some(ast_doc) => {
|
||||
let mut path_as_str = None;
|
||||
debug!("> Decoding inlined fn: {}::?",
|
||||
ast_map::path_to_str(path, token::get_ident_interner()));
|
||||
{
|
||||
// Do an Option dance to use the path after it is moved below.
|
||||
let s = ast_map::path_to_str(ast_map::Values(path.iter()));
|
||||
path_as_str = Some(s);
|
||||
path_as_str.as_ref().map(|x| x.as_slice())
|
||||
});
|
||||
let mut ast_dsr = reader::Decoder(ast_doc);
|
||||
let from_id_range = Decodable::decode(&mut ast_dsr);
|
||||
let to_id_range = reserve_id_range(dcx.tcx.sess, from_id_range);
|
||||
|
@ -140,30 +142,26 @@ pub fn decode_inlined_item(cdata: @cstore::crate_metadata,
|
|||
to_id_range: to_id_range
|
||||
};
|
||||
let raw_ii = decode_ast(ast_doc);
|
||||
let ii = renumber_and_map_ast(xcx,
|
||||
tcx.sess.diagnostic(),
|
||||
dcx.tcx.items,
|
||||
path.to_owned(),
|
||||
raw_ii);
|
||||
let ii = renumber_and_map_ast(xcx, &dcx.tcx.map, path, raw_ii);
|
||||
let ident = match ii {
|
||||
ast::IIItem(i) => i.ident,
|
||||
ast::IIForeign(i) => i.ident,
|
||||
ast::IIMethod(_, _, m) => m.ident,
|
||||
};
|
||||
debug!("Fn named: {}", tcx.sess.str_of(ident));
|
||||
debug!("Fn named: {}", token::get_ident(ident));
|
||||
debug!("< Decoded inlined fn: {}::{}",
|
||||
ast_map::path_to_str(path, token::get_ident_interner()),
|
||||
tcx.sess.str_of(ident));
|
||||
path_as_str.unwrap(),
|
||||
token::get_ident(ident));
|
||||
region::resolve_inlined_item(tcx.sess, &tcx.region_maps, &ii);
|
||||
decode_side_tables(xcx, ast_doc);
|
||||
match ii {
|
||||
ast::IIItem(i) => {
|
||||
debug!(">>> DECODED ITEM >>>\n{}\n<<< DECODED ITEM <<<",
|
||||
syntax::print::pprust::item_to_str(i, tcx.sess.intr()));
|
||||
syntax::print::pprust::item_to_str(i));
|
||||
}
|
||||
_ => { }
|
||||
}
|
||||
Some(ii)
|
||||
Ok(ii)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -381,7 +379,12 @@ struct AstRenumberer {
|
|||
|
||||
impl ast_map::FoldOps for AstRenumberer {
|
||||
fn new_id(&self, id: ast::NodeId) -> ast::NodeId {
|
||||
self.xcx.tr_id(id)
|
||||
if id == ast::DUMMY_NODE_ID {
|
||||
// Used by ast_map to map the NodeInlinedParent.
|
||||
self.xcx.dcx.tcx.sess.next_node_id()
|
||||
} else {
|
||||
self.xcx.tr_id(id)
|
||||
}
|
||||
}
|
||||
fn new_span(&self, span: Span) -> Span {
|
||||
self.xcx.tr_span(span)
|
||||
|
@ -389,11 +392,10 @@ impl ast_map::FoldOps for AstRenumberer {
|
|||
}
|
||||
|
||||
fn renumber_and_map_ast(xcx: @ExtendedDecodeContext,
|
||||
diag: @SpanHandler,
|
||||
map: ast_map::Map,
|
||||
path: ast_map::Path,
|
||||
map: &ast_map::Map,
|
||||
path: ~[ast_map::PathElem],
|
||||
ii: ast::InlinedItem) -> ast::InlinedItem {
|
||||
ast_map::map_decoded_item(diag, map, path, AstRenumberer { xcx: xcx }, |fld| {
|
||||
ast_map::map_decoded_item(map, path, AstRenumberer { xcx: xcx }, |fld| {
|
||||
match ii {
|
||||
ast::IIItem(i) => {
|
||||
ast::IIItem(fld.fold_item(i).expect_one("expected one item"))
|
||||
|
@ -1508,10 +1510,7 @@ fn test_simplification() {
|
|||
).unwrap());
|
||||
match (item_out, item_exp) {
|
||||
(ast::IIItem(item_out), ast::IIItem(item_exp)) => {
|
||||
assert!(pprust::item_to_str(item_out,
|
||||
token::get_ident_interner())
|
||||
== pprust::item_to_str(item_exp,
|
||||
token::get_ident_interner()));
|
||||
assert!(pprust::item_to_str(item_out) == pprust::item_to_str(item_exp));
|
||||
}
|
||||
_ => fail!()
|
||||
}
|
||||
|
|
|
@ -23,10 +23,8 @@ use middle::borrowck::*;
|
|||
use middle::moves;
|
||||
use middle::ty;
|
||||
use syntax::ast;
|
||||
use syntax::ast_map;
|
||||
use syntax::ast_util;
|
||||
use syntax::codemap::Span;
|
||||
use syntax::parse::token;
|
||||
use syntax::visit::Visitor;
|
||||
use syntax::visit;
|
||||
use util::ppaux::Repr;
|
||||
|
@ -336,8 +334,7 @@ impl<'a> CheckLoanCtxt<'a> {
|
|||
old_loan.span,
|
||||
format!("{}; {}", borrow_summary, rule_summary));
|
||||
|
||||
let old_loan_span = ast_map::node_span(self.tcx().items,
|
||||
old_loan.kill_scope);
|
||||
let old_loan_span = self.tcx().map.span(old_loan.kill_scope);
|
||||
self.bccx.span_end_note(old_loan_span,
|
||||
"previous borrow ends here");
|
||||
|
||||
|
@ -759,9 +756,7 @@ impl<'a> CheckLoanCtxt<'a> {
|
|||
mut move_path: @LoanPath)
|
||||
-> MoveError {
|
||||
debug!("analyze_move_out_from(expr_id={:?}, move_path={})",
|
||||
ast_map::node_id_to_str(self.tcx().items,
|
||||
expr_id,
|
||||
token::get_ident_interner()),
|
||||
self.tcx().map.node_to_str(expr_id),
|
||||
move_path.repr(self.tcx()));
|
||||
|
||||
// We must check every element of a move path. See
|
||||
|
|
|
@ -185,7 +185,7 @@ fn gather_loans_in_expr(this: &mut GatherLoanCtxt,
|
|||
let tcx = bccx.tcx;
|
||||
|
||||
debug!("gather_loans_in_expr(expr={:?}/{})",
|
||||
ex.id, pprust::expr_to_str(ex, tcx.sess.intr()));
|
||||
ex.id, pprust::expr_to_str(ex));
|
||||
|
||||
this.id_range.add(ex.id);
|
||||
|
||||
|
|
|
@ -552,7 +552,7 @@ impl BorrowckCtxt {
|
|||
move_data::Declared => {}
|
||||
|
||||
move_data::MoveExpr => {
|
||||
let (expr_ty, expr_span) = match self.tcx.items.find(move.id) {
|
||||
let (expr_ty, expr_span) = match self.tcx.map.find(move.id) {
|
||||
Some(ast_map::NodeExpr(expr)) => {
|
||||
(ty::expr_ty_adjusted(self.tcx, expr), expr.span)
|
||||
}
|
||||
|
@ -570,8 +570,7 @@ impl BorrowckCtxt {
|
|||
|
||||
move_data::MovePat => {
|
||||
let pat_ty = ty::node_id_to_type(self.tcx, move.id);
|
||||
self.tcx.sess.span_note(
|
||||
ast_map::node_span(self.tcx.items, move.id),
|
||||
self.tcx.sess.span_note(self.tcx.map.span(move.id),
|
||||
format!("`{}` moved here because it has type `{}`, \
|
||||
which is moved by default (use `ref` to override)",
|
||||
self.loan_path_to_str(moved_lp),
|
||||
|
@ -579,7 +578,7 @@ impl BorrowckCtxt {
|
|||
}
|
||||
|
||||
move_data::Captured => {
|
||||
let (expr_ty, expr_span) = match self.tcx.items.find(move.id) {
|
||||
let (expr_ty, expr_span) = match self.tcx.map.find(move.id) {
|
||||
Some(ast_map::NodeExpr(expr)) => {
|
||||
(ty::expr_ty_adjusted(self.tcx, expr), expr.span)
|
||||
}
|
||||
|
@ -793,10 +792,9 @@ impl BorrowckCtxt {
|
|||
LpExtend(lp_base, _, LpInterior(mc::InteriorField(fname))) => {
|
||||
self.append_autoderefd_loan_path_to_str(lp_base, out);
|
||||
match fname {
|
||||
mc::NamedField(ref fname) => {
|
||||
let string = token::get_ident(*fname);
|
||||
mc::NamedField(fname) => {
|
||||
out.push_char('.');
|
||||
out.push_str(string.get());
|
||||
out.push_str(token::get_name(fname).get());
|
||||
}
|
||||
mc::PositionalField(idx) => {
|
||||
out.push_char('#'); // invent a notation here
|
||||
|
@ -892,10 +890,7 @@ impl Repr for LoanPath {
|
|||
fn repr(&self, tcx: ty::ctxt) -> ~str {
|
||||
match self {
|
||||
&LpVar(id) => {
|
||||
format!("$({})",
|
||||
ast_map::node_id_to_str(tcx.items,
|
||||
id,
|
||||
token::get_ident_interner()))
|
||||
format!("$({})", tcx.map.node_to_str(id))
|
||||
}
|
||||
|
||||
&LpExtend(lp, _, LpDeref(_)) => {
|
||||
|
|
|
@ -22,7 +22,6 @@ use syntax::visit;
|
|||
|
||||
struct CheckCrateVisitor {
|
||||
sess: Session,
|
||||
ast_map: ast_map::Map,
|
||||
def_map: resolve::DefMap,
|
||||
method_map: typeck::method_map,
|
||||
tcx: ty::ctxt,
|
||||
|
@ -30,7 +29,7 @@ struct CheckCrateVisitor {
|
|||
|
||||
impl Visitor<bool> for CheckCrateVisitor {
|
||||
fn visit_item(&mut self, i: &Item, env: bool) {
|
||||
check_item(self, self.sess, self.ast_map, self.def_map, i, env);
|
||||
check_item(self, self.sess, self.def_map, i, env);
|
||||
}
|
||||
fn visit_pat(&mut self, p: &Pat, env: bool) {
|
||||
check_pat(self, p, env);
|
||||
|
@ -43,13 +42,11 @@ impl Visitor<bool> for CheckCrateVisitor {
|
|||
|
||||
pub fn check_crate(sess: Session,
|
||||
krate: &Crate,
|
||||
ast_map: ast_map::Map,
|
||||
def_map: resolve::DefMap,
|
||||
method_map: typeck::method_map,
|
||||
tcx: ty::ctxt) {
|
||||
let mut v = CheckCrateVisitor {
|
||||
sess: sess,
|
||||
ast_map: ast_map,
|
||||
def_map: def_map,
|
||||
method_map: method_map,
|
||||
tcx: tcx,
|
||||
|
@ -60,14 +57,13 @@ pub fn check_crate(sess: Session,
|
|||
|
||||
pub fn check_item(v: &mut CheckCrateVisitor,
|
||||
sess: Session,
|
||||
ast_map: ast_map::Map,
|
||||
def_map: resolve::DefMap,
|
||||
it: &Item,
|
||||
_is_const: bool) {
|
||||
match it.node {
|
||||
ItemStatic(_, _, ex) => {
|
||||
v.visit_expr(ex, true);
|
||||
check_item_recursion(sess, ast_map, def_map, it);
|
||||
check_item_recursion(sess, &v.tcx.map, def_map, it);
|
||||
}
|
||||
ItemEnum(ref enum_definition, _) => {
|
||||
for var in (*enum_definition).variants.iter() {
|
||||
|
@ -209,17 +205,17 @@ pub fn check_expr(v: &mut CheckCrateVisitor,
|
|||
struct CheckItemRecursionVisitor<'a> {
|
||||
root_it: &'a Item,
|
||||
sess: Session,
|
||||
ast_map: ast_map::Map,
|
||||
ast_map: &'a ast_map::Map,
|
||||
def_map: resolve::DefMap,
|
||||
idstack: ~[NodeId]
|
||||
}
|
||||
|
||||
// Make sure a const item doesn't recursively refer to itself
|
||||
// FIXME: Should use the dependency graph when it's available (#1356)
|
||||
pub fn check_item_recursion(sess: Session,
|
||||
ast_map: ast_map::Map,
|
||||
def_map: resolve::DefMap,
|
||||
it: &Item) {
|
||||
pub fn check_item_recursion<'a>(sess: Session,
|
||||
ast_map: &'a ast_map::Map,
|
||||
def_map: resolve::DefMap,
|
||||
it: &'a Item) {
|
||||
|
||||
let mut visitor = CheckItemRecursionVisitor {
|
||||
root_it: it,
|
||||
|
@ -248,12 +244,7 @@ impl<'a> Visitor<()> for CheckItemRecursionVisitor<'a> {
|
|||
match def_map.get().find(&e.id) {
|
||||
Some(&DefStatic(def_id, _)) if
|
||||
ast_util::is_local(def_id) => {
|
||||
match self.ast_map.get(def_id.node) {
|
||||
ast_map::NodeItem(it, _) => {
|
||||
self.visit_item(it, ());
|
||||
}
|
||||
_ => fail!("const not bound to an item")
|
||||
}
|
||||
self.visit_item(self.ast_map.expect_item(def_id.node), ());
|
||||
}
|
||||
_ => ()
|
||||
}
|
||||
|
|
|
@ -24,6 +24,7 @@ use std::vec;
|
|||
use syntax::ast::*;
|
||||
use syntax::ast_util::{unguarded_pat, walk_pat};
|
||||
use syntax::codemap::{DUMMY_SP, Span};
|
||||
use syntax::parse::token;
|
||||
use syntax::visit;
|
||||
use syntax::visit::{Visitor, FnKind};
|
||||
|
||||
|
@ -189,7 +190,7 @@ fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, pats: ~[@Pat]) {
|
|||
let variants = ty::enum_variants(cx.tcx, id);
|
||||
|
||||
match variants.iter().find(|v| v.id == vid) {
|
||||
Some(v) => Some(cx.tcx.sess.str_of(v.name)),
|
||||
Some(v) => Some(token::get_ident(v.name).get().to_str()),
|
||||
None => {
|
||||
fail!("check_exhaustive: bad variant in ctor")
|
||||
}
|
||||
|
|
|
@ -112,9 +112,9 @@ pub fn lookup_variant_by_id(tcx: ty::ctxt,
|
|||
|
||||
if ast_util::is_local(enum_def) {
|
||||
{
|
||||
match tcx.items.find(enum_def.node) {
|
||||
match tcx.map.find(enum_def.node) {
|
||||
None => None,
|
||||
Some(ast_map::NodeItem(it, _)) => match it.node {
|
||||
Some(ast_map::NodeItem(it)) => match it.node {
|
||||
ItemEnum(ast::EnumDef { variants: ref variants }, _) => {
|
||||
variant_expr(*variants, variant_def.node)
|
||||
}
|
||||
|
@ -138,11 +138,9 @@ pub fn lookup_variant_by_id(tcx: ty::ctxt,
|
|||
capture_map: @RefCell::new(HashMap::new())
|
||||
};
|
||||
let e = match csearch::maybe_get_item_ast(tcx, enum_def,
|
||||
|a, b, c, d| astencode::decode_inlined_item(a,
|
||||
b,
|
||||
|a, b, c, d| astencode::decode_inlined_item(a, b,
|
||||
maps,
|
||||
/*bad*/ c.clone(),
|
||||
d)) {
|
||||
c, d)) {
|
||||
csearch::found(ast::IIItem(item)) => match item.node {
|
||||
ItemEnum(ast::EnumDef { variants: ref variants }, _) => {
|
||||
variant_expr(*variants, variant_def.node)
|
||||
|
@ -164,9 +162,9 @@ pub fn lookup_const_by_id(tcx: ty::ctxt, def_id: ast::DefId)
|
|||
-> Option<@Expr> {
|
||||
if ast_util::is_local(def_id) {
|
||||
{
|
||||
match tcx.items.find(def_id.node) {
|
||||
match tcx.map.find(def_id.node) {
|
||||
None => None,
|
||||
Some(ast_map::NodeItem(it, _)) => match it.node {
|
||||
Some(ast_map::NodeItem(it)) => match it.node {
|
||||
ItemStatic(_, ast::MutImmutable, const_expr) => {
|
||||
Some(const_expr)
|
||||
}
|
||||
|
|
|
@ -350,8 +350,7 @@ impl<O:DataFlowOperator+Clone+'static> DataFlowContext<O> {
|
|||
|
||||
fn pretty_print_to(&self, wr: ~io::Writer,
|
||||
blk: &ast::Block) -> io::IoResult<()> {
|
||||
let mut ps = pprust::rust_printer_annotated(wr, self.tcx.sess.intr(),
|
||||
self as &pprust::PpAnn);
|
||||
let mut ps = pprust::rust_printer_annotated(wr, self);
|
||||
if_ok!(pprust::cbox(&mut ps, pprust::indent_unit));
|
||||
if_ok!(pprust::ibox(&mut ps, 0u));
|
||||
if_ok!(pprust::print_block(&mut ps, blk));
|
||||
|
|
|
@ -38,7 +38,7 @@ fn should_explore(tcx: ty::ctxt, def_id: ast::DefId) -> bool {
|
|||
return false;
|
||||
}
|
||||
|
||||
match tcx.items.find(def_id.node) {
|
||||
match tcx.map.find(def_id.node) {
|
||||
Some(ast_map::NodeItem(..))
|
||||
| Some(ast_map::NodeMethod(..))
|
||||
| Some(ast_map::NodeForeignItem(..))
|
||||
|
@ -135,7 +135,7 @@ impl MarkSymbolVisitor {
|
|||
}
|
||||
scanned.insert(id);
|
||||
|
||||
match self.tcx.items.find(id) {
|
||||
match self.tcx.map.find(id) {
|
||||
Some(ref node) => {
|
||||
self.live_symbols.insert(id);
|
||||
self.visit_node(node);
|
||||
|
@ -147,7 +147,7 @@ impl MarkSymbolVisitor {
|
|||
|
||||
fn visit_node(&mut self, node: &ast_map::Node) {
|
||||
match *node {
|
||||
ast_map::NodeItem(item, _) => {
|
||||
ast_map::NodeItem(item) => {
|
||||
match item.node {
|
||||
ast::ItemFn(..)
|
||||
| ast::ItemTy(..)
|
||||
|
@ -159,13 +159,13 @@ impl MarkSymbolVisitor {
|
|||
_ => ()
|
||||
}
|
||||
}
|
||||
ast_map::NodeTraitMethod(trait_method, _, _) => {
|
||||
ast_map::NodeTraitMethod(trait_method) => {
|
||||
visit::walk_trait_method(self, trait_method, ());
|
||||
}
|
||||
ast_map::NodeMethod(method, _, _) => {
|
||||
ast_map::NodeMethod(method) => {
|
||||
visit::walk_block(self, method.body, ());
|
||||
}
|
||||
ast_map::NodeForeignItem(foreign_item, _, _, _) => {
|
||||
ast_map::NodeForeignItem(foreign_item) => {
|
||||
visit::walk_foreign_item(self, foreign_item, ());
|
||||
}
|
||||
_ => ()
|
||||
|
@ -359,11 +359,10 @@ impl DeadVisitor {
|
|||
}
|
||||
|
||||
fn warn_dead_code(&mut self, id: ast::NodeId,
|
||||
span: codemap::Span, ident: &ast::Ident) {
|
||||
let string = token::get_ident(ident.name);
|
||||
span: codemap::Span, ident: ast::Ident) {
|
||||
self.tcx.sess.add_lint(DeadCode, id, span,
|
||||
format!("code is never used: `{}`",
|
||||
string.get()));
|
||||
token::get_ident(ident)));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -371,14 +370,14 @@ impl Visitor<()> for DeadVisitor {
|
|||
fn visit_item(&mut self, item: &ast::Item, _: ()) {
|
||||
let ctor_id = get_struct_ctor_id(item);
|
||||
if !self.symbol_is_live(item.id, ctor_id) && should_warn(item) {
|
||||
self.warn_dead_code(item.id, item.span, &item.ident);
|
||||
self.warn_dead_code(item.id, item.span, item.ident);
|
||||
}
|
||||
visit::walk_item(self, item, ());
|
||||
}
|
||||
|
||||
fn visit_foreign_item(&mut self, fi: &ast::ForeignItem, _: ()) {
|
||||
if !self.symbol_is_live(fi.id, None) {
|
||||
self.warn_dead_code(fi.id, fi.span, &fi.ident);
|
||||
self.warn_dead_code(fi.id, fi.span, fi.ident);
|
||||
}
|
||||
visit::walk_foreign_item(self, fi, ());
|
||||
}
|
||||
|
@ -391,7 +390,7 @@ impl Visitor<()> for DeadVisitor {
|
|||
visit::FkMethod(..) => {
|
||||
let ident = visit::name_of_fn(fk);
|
||||
if !self.symbol_is_live(id, None) {
|
||||
self.warn_dead_code(id, span, &ident);
|
||||
self.warn_dead_code(id, span, ident);
|
||||
}
|
||||
}
|
||||
_ => ()
|
||||
|
|
|
@ -11,18 +11,21 @@
|
|||
|
||||
use driver::session;
|
||||
use driver::session::Session;
|
||||
use syntax::ast::{Crate, NodeId, Item, ItemFn};
|
||||
use syntax::ast::{Crate, Name, NodeId, Item, ItemFn};
|
||||
use syntax::ast_map;
|
||||
use syntax::attr;
|
||||
use syntax::codemap::Span;
|
||||
use syntax::parse::token::special_idents;
|
||||
use syntax::parse::token;
|
||||
use syntax::visit;
|
||||
use syntax::visit::Visitor;
|
||||
|
||||
struct EntryContext {
|
||||
struct EntryContext<'a> {
|
||||
session: Session,
|
||||
|
||||
ast_map: ast_map::Map,
|
||||
ast_map: &'a ast_map::Map,
|
||||
|
||||
// The interned Name for "main".
|
||||
main_name: Name,
|
||||
|
||||
// The top-level function called 'main'
|
||||
main_fn: Option<(NodeId, Span)>,
|
||||
|
@ -38,13 +41,13 @@ struct EntryContext {
|
|||
non_main_fns: ~[(NodeId, Span)],
|
||||
}
|
||||
|
||||
impl Visitor<()> for EntryContext {
|
||||
impl<'a> Visitor<()> for EntryContext<'a> {
|
||||
fn visit_item(&mut self, item: &Item, _:()) {
|
||||
find_item(item, self);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn find_entry_point(session: Session, krate: &Crate, ast_map: ast_map::Map) {
|
||||
pub fn find_entry_point(session: Session, krate: &Crate, ast_map: &ast_map::Map) {
|
||||
if session.building_library.get() {
|
||||
// No need to find a main function
|
||||
return;
|
||||
|
@ -58,6 +61,7 @@ pub fn find_entry_point(session: Session, krate: &Crate, ast_map: ast_map::Map)
|
|||
|
||||
let mut ctxt = EntryContext {
|
||||
session: session,
|
||||
main_name: token::intern("main"),
|
||||
ast_map: ast_map,
|
||||
main_fn: None,
|
||||
attr_main_fn: None,
|
||||
|
@ -73,27 +77,22 @@ pub fn find_entry_point(session: Session, krate: &Crate, ast_map: ast_map::Map)
|
|||
fn find_item(item: &Item, ctxt: &mut EntryContext) {
|
||||
match item.node {
|
||||
ItemFn(..) => {
|
||||
if item.ident.name == special_idents::main.name {
|
||||
{
|
||||
match ctxt.ast_map.find(item.id) {
|
||||
Some(ast_map::NodeItem(_, path)) => {
|
||||
if path.len() == 0 {
|
||||
// This is a top-level function so can be 'main'
|
||||
if ctxt.main_fn.is_none() {
|
||||
ctxt.main_fn = Some((item.id, item.span));
|
||||
} else {
|
||||
ctxt.session.span_err(
|
||||
item.span,
|
||||
"multiple 'main' functions");
|
||||
}
|
||||
if item.ident.name == ctxt.main_name {
|
||||
ctxt.ast_map.with_path(item.id, |mut path| {
|
||||
if path.len() == 1 {
|
||||
// This is a top-level function so can be 'main'
|
||||
if ctxt.main_fn.is_none() {
|
||||
ctxt.main_fn = Some((item.id, item.span));
|
||||
} else {
|
||||
// This isn't main
|
||||
ctxt.non_main_fns.push((item.id, item.span));
|
||||
ctxt.session.span_err(
|
||||
item.span,
|
||||
"multiple 'main' functions");
|
||||
}
|
||||
} else {
|
||||
// This isn't main
|
||||
ctxt.non_main_fns.push((item.id, item.span));
|
||||
}
|
||||
_ => unreachable!()
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if attr::contains_name(item.attrs, "main") {
|
||||
|
|
|
@ -261,7 +261,7 @@ fn check_fn(
|
|||
}
|
||||
|
||||
pub fn check_expr(cx: &mut Context, e: &Expr) {
|
||||
debug!("kind::check_expr({})", expr_to_str(e, cx.tcx.sess.intr()));
|
||||
debug!("kind::check_expr({})", expr_to_str(e));
|
||||
|
||||
// Handle any kind bounds on type parameters
|
||||
let type_parameter_id = match e.get_callee_id() {
|
||||
|
|
|
@ -1067,8 +1067,8 @@ fn check_unused_result(cx: &Context, s: &ast::Stmt) {
|
|||
ty::ty_struct(did, _) |
|
||||
ty::ty_enum(did, _) => {
|
||||
if ast_util::is_local(did) {
|
||||
match cx.tcx.items.get(did.node) {
|
||||
ast_map::NodeItem(it, _) => {
|
||||
match cx.tcx.map.get(did.node) {
|
||||
ast_map::NodeItem(it) => {
|
||||
if attr::contains_name(it.attrs, "must_use") {
|
||||
cx.span_lint(UnusedMustUse, s.span,
|
||||
"unused result which must be used");
|
||||
|
@@ -1095,23 +1095,22 @@ fn check_unused_result(cx: &Context, s: &ast::Stmt) {
}

fn check_item_non_camel_case_types(cx: &Context, it: &ast::Item) {
    fn is_camel_case(cx: ty::ctxt, ident: ast::Ident) -> bool {
        let ident = cx.sess.str_of(ident);
        assert!(!ident.is_empty());
        let ident = ident.trim_chars(&'_');
    fn is_camel_case(ident: ast::Ident) -> bool {
        let ident = token::get_ident(ident);
        assert!(!ident.get().is_empty());
        let ident = ident.get().trim_chars(&'_');

        // start with a non-lowercase letter rather than non-uppercase
        // ones (some scripts don't have a concept of upper/lowercase)
        !ident.char_at(0).is_lowercase() &&
            !ident.contains_char('_')
        !ident.char_at(0).is_lowercase() && !ident.contains_char('_')
    }

    fn check_case(cx: &Context, sort: &str, ident: ast::Ident, span: Span) {
        if !is_camel_case(cx.tcx, ident) {
        if !is_camel_case(ident) {
            cx.span_lint(
                NonCamelCaseTypes, span,
                format!("{} `{}` should have a camel case identifier",
                        sort, cx.tcx.sess.str_of(ident)));
                        sort, token::get_ident(ident)));
        }
    }
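// Note (sketch, not part of this patch): the recurring replacement this
// commit makes when reading an identifier's text -- token::get_ident returns
// an interned string, and .get() borrows the underlying &str:
//
//     let s = token::get_ident(ident);
//     if s.get().chars().any(|c| c.is_lowercase()) { /* ... */ }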
|
||||
|
||||
|
@ -1136,11 +1135,11 @@ fn check_item_non_uppercase_statics(cx: &Context, it: &ast::Item) {
|
|||
match it.node {
|
||||
// only check static constants
|
||||
ast::ItemStatic(_, ast::MutImmutable, _) => {
|
||||
let s = cx.tcx.sess.str_of(it.ident);
|
||||
let s = token::get_ident(it.ident);
|
||||
// check for lowercase letters rather than non-uppercase
|
||||
// ones (some scripts don't have a concept of
|
||||
// upper/lowercase)
|
||||
if s.chars().any(|c| c.is_lowercase()) {
|
||||
if s.get().chars().any(|c| c.is_lowercase()) {
|
||||
cx.span_lint(NonUppercaseStatics, it.span,
|
||||
"static constant should have an uppercase identifier");
|
||||
}
|
||||
|
@ -1156,8 +1155,8 @@ fn check_pat_non_uppercase_statics(cx: &Context, p: &ast::Pat) {
|
|||
(&ast::PatIdent(_, ref path, _), Some(&ast::DefStatic(_, false))) => {
|
||||
// last identifier alone is right choice for this lint.
|
||||
let ident = path.segments.last().unwrap().identifier;
|
||||
let s = cx.tcx.sess.str_of(ident);
|
||||
if s.chars().any(|c| c.is_lowercase()) {
|
||||
let s = token::get_ident(ident);
|
||||
if s.get().chars().any(|c| c.is_lowercase()) {
|
||||
cx.span_lint(NonUppercasePatternStatics, path.span,
|
||||
"static constant in pattern should be all caps");
|
||||
}
|
||||
|
@ -1216,7 +1215,7 @@ fn check_unused_mut_pat(cx: &Context, p: &ast::Pat) {
|
|||
// `let mut _a = 1;` doesn't need a warning.
|
||||
let initial_underscore = match path.segments {
|
||||
[ast::PathSegment { identifier: id, .. }] => {
|
||||
cx.tcx.sess.str_of(id).starts_with("_")
|
||||
token::get_ident(id).get().starts_with("_")
|
||||
}
|
||||
_ => {
|
||||
cx.tcx.sess.span_bug(p.span,
|
||||
|
@ -1433,24 +1432,18 @@ fn check_stability(cx: &Context, e: &ast::Expr) {
|
|||
|
||||
let stability = if ast_util::is_local(id) {
|
||||
// this crate
|
||||
match cx.tcx.items.find(id.node) {
|
||||
Some(ast_node) => {
|
||||
let s = ast_node.with_attrs(|attrs| {
|
||||
attrs.map(|a| {
|
||||
attr::find_stability(a.iter().map(|a| a.meta()))
|
||||
})
|
||||
});
|
||||
match s {
|
||||
Some(s) => s,
|
||||
let s = cx.tcx.map.with_attrs(id.node, |attrs| {
|
||||
attrs.map(|a| {
|
||||
attr::find_stability(a.iter().map(|a| a.meta()))
|
||||
})
|
||||
});
|
||||
match s {
|
||||
Some(s) => s,
|
||||
|
||||
// no possibility of having attributes
|
||||
// (e.g. it's a local variable), so just
|
||||
// ignore it.
|
||||
None => return
|
||||
}
|
||||
}
|
||||
_ => cx.tcx.sess.span_bug(e.span,
|
||||
format!("handle_def: {:?} not found", id))
|
||||
// no possibility of having attributes
|
||||
// (e.g. it's a local variable), so just
|
||||
// ignore it.
|
||||
None => return
|
||||
}
|
||||
} else {
|
||||
// cross-crate
|
||||
|
@ -1685,11 +1678,7 @@ pub fn check_crate(tcx: ty::ctxt,
|
|||
for (id, v) in lints.get().iter() {
|
||||
for &(lint, span, ref msg) in v.iter() {
|
||||
tcx.sess.span_bug(span, format!("unprocessed lint {:?} at {}: {}",
|
||||
lint,
|
||||
ast_map::node_id_to_str(tcx.items,
|
||||
*id,
|
||||
token::get_ident_interner()),
|
||||
*msg))
|
||||
lint, tcx.map.node_to_str(*id), *msg))
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -337,8 +337,7 @@ impl IrMaps {
|
|||
let var_kinds = self.var_kinds.borrow();
|
||||
match var_kinds.get()[var.get()] {
|
||||
Local(LocalInfo { ident: nm, .. }) | Arg(_, nm) => {
|
||||
let string = token::get_ident(nm.name);
|
||||
string.get().to_str()
|
||||
token::get_ident(nm).get().to_str()
|
||||
},
|
||||
ImplicitRet => ~"<implicit-ret>"
|
||||
}
|
||||
|
@ -929,8 +928,7 @@ impl Liveness {
|
|||
// effectively a return---this only occurs in `for` loops,
|
||||
// where the body is really a closure.
|
||||
|
||||
debug!("compute: using id for block, {}", block_to_str(body,
|
||||
self.tcx.sess.intr()));
|
||||
debug!("compute: using id for block, {}", block_to_str(body));
|
||||
|
||||
let entry_ln: LiveNode =
|
||||
self.with_loop_nodes(body.id, self.s.exit_ln, self.s.exit_ln,
|
||||
|
@ -1034,8 +1032,7 @@ impl Liveness {
|
|||
|
||||
pub fn propagate_through_expr(&self, expr: @Expr, succ: LiveNode)
|
||||
-> LiveNode {
|
||||
debug!("propagate_through_expr: {}",
|
||||
expr_to_str(expr, self.tcx.sess.intr()));
|
||||
debug!("propagate_through_expr: {}", expr_to_str(expr));
|
||||
|
||||
match expr.node {
|
||||
// Interesting cases with control flow or which gen/kill
|
||||
|
@ -1049,8 +1046,7 @@ impl Liveness {
|
|||
}
|
||||
|
||||
ExprFnBlock(_, blk) | ExprProc(_, blk) => {
|
||||
debug!("{} is an ExprFnBlock or ExprProc",
|
||||
expr_to_str(expr, self.tcx.sess.intr()));
|
||||
debug!("{} is an ExprFnBlock or ExprProc", expr_to_str(expr));
|
||||
|
||||
/*
|
||||
The next-node for a break is the successor of the entire
|
||||
|
@ -1412,7 +1408,7 @@ impl Liveness {
|
|||
first_merge = false;
|
||||
}
|
||||
debug!("propagate_through_loop: using id for loop body {} {}",
|
||||
expr.id, block_to_str(body, self.tcx.sess.intr()));
|
||||
expr.id, block_to_str(body));
|
||||
|
||||
let cond_ln = self.propagate_through_opt_expr(cond, ln);
|
||||
let body_ln = self.with_loop_nodes(expr.id, succ, ln, || {
|
||||
|
|
|
@ -433,9 +433,7 @@ impl<TYPER:Typer> MemCategorizationContext<TYPER> {
|
|||
}
|
||||
|
||||
pub fn cat_expr_unadjusted(&mut self, expr: &ast::Expr) -> McResult<cmt> {
|
||||
debug!("cat_expr: id={} expr={}",
|
||||
expr.id,
|
||||
expr.repr(self.tcx()));
|
||||
debug!("cat_expr: id={} expr={}", expr.id, expr.repr(self.tcx()));
|
||||
|
||||
let expr_ty = if_ok!(self.expr_ty(expr));
|
||||
match expr.node {
|
||||
|
@ -1004,7 +1002,7 @@ impl<TYPER:Typer> MemCategorizationContext<TYPER> {
|
|||
|
||||
let tcx = self.tcx();
|
||||
debug!("cat_pattern: id={} pat={} cmt={}",
|
||||
pat.id, pprust::pat_to_str(pat, tcx.sess.intr()),
|
||||
pat.id, pprust::pat_to_str(pat),
|
||||
cmt.repr(tcx));
|
||||
|
||||
op(self, cmt, pat);
|
||||
|
@ -1374,8 +1372,7 @@ impl Repr for InteriorKind {
|
|||
fn repr(&self, _tcx: ty::ctxt) -> ~str {
|
||||
match *self {
|
||||
InteriorField(NamedField(fld)) => {
|
||||
let string = token::get_ident(fld);
|
||||
string.get().to_owned()
|
||||
token::get_name(fld).get().to_str()
|
||||
}
|
||||
InteriorField(PositionalField(i)) => format!("\\#{:?}", i),
|
||||
InteriorElement(_) => ~"[]",
|
||||
|
|
|
@ -357,7 +357,7 @@ enum PrivacyResult {
|
|||
impl<'a> PrivacyVisitor<'a> {
|
||||
// used when debugging
|
||||
fn nodestr(&self, id: ast::NodeId) -> ~str {
|
||||
ast_map::node_id_to_str(self.tcx.items, id, token::get_ident_interner())
|
||||
self.tcx.map.node_to_str(id)
|
||||
}
|
||||
|
||||
// Determines whether the given definition is public from the point of view
|
||||
|
@ -417,7 +417,7 @@ impl<'a> PrivacyVisitor<'a> {
|
|||
let mut closest_private_id = did.node;
|
||||
loop {
|
||||
debug!("privacy - examining {}", self.nodestr(closest_private_id));
|
||||
let vis = match self.tcx.items.find(closest_private_id) {
|
||||
let vis = match self.tcx.map.find(closest_private_id) {
|
||||
// If this item is a method, then we know for sure that it's an
|
||||
// actual method and not a static method. The reason for this is
|
||||
// that these cases are only hit in the ExprMethodCall
|
||||
|
@@ -434,22 +434,25 @@ impl<'a> PrivacyVisitor<'a> {
 // invocation.
 // FIXME(#10573) is this the right behavior? Why not consider
 // where the method was defined?
-Some(ast_map::NodeMethod(ref m, imp, _)) => {
+Some(ast_map::NodeMethod(ref m)) => {
+let imp = self.tcx.map.get_parent_did(closest_private_id);
 match ty::impl_trait_ref(self.tcx, imp) {
 Some(..) => return Allowable,
 _ if m.vis == ast::Public => return Allowable,
 _ => m.vis
 }
 }
-Some(ast_map::NodeTraitMethod(..)) => {
+Some(ast_map::NodeTraitMethod(_)) => {
 return Allowable;
 }

 // This is not a method call, extract the visibility as one
 // would normally look at it
-Some(ast_map::NodeItem(it, _)) => it.vis,
-Some(ast_map::NodeForeignItem(_, _, v, _)) => v,
-Some(ast_map::NodeVariant(ref v, _, _)) => {
+Some(ast_map::NodeItem(it)) => it.vis,
+Some(ast_map::NodeForeignItem(_)) => {
+self.tcx.map.get_foreign_vis(closest_private_id)
+}
+Some(ast_map::NodeVariant(ref v)) => {
 // sadly enum variants still inherit visibility, so only
 // break out of this is explicitly private
 if v.node.vis == ast::Private { break }

@@ -523,17 +526,16 @@ impl<'a> PrivacyVisitor<'a> {
 self.tcx.sess.span_err(span, format!("{} is inaccessible",
 msg));
 }
-match self.tcx.items.find(id) {
-Some(ast_map::NodeItem(item, _)) => {
+match self.tcx.map.find(id) {
+Some(ast_map::NodeItem(item)) => {
 let desc = match item.node {
 ast::ItemMod(..) => "module",
 ast::ItemTrait(..) => "trait",
 _ => return false,
 };
-let string = token::get_ident(item.ident.name);
 let msg = format!("{} `{}` is private",
 desc,
-string.get());
+token::get_ident(item.ident));
 self.tcx.sess.span_note(span, msg);
 }
 Some(..) | None => {}

@@ -550,10 +552,15 @@ impl<'a> PrivacyVisitor<'a> {
 enum_id: Option<ast::DefId>) {
 let fields = ty::lookup_struct_fields(self.tcx, id);
 let struct_vis = if is_local(id) {
-match self.tcx.items.get(id.node) {
-ast_map::NodeItem(ref it, _) => it.vis,
-ast_map::NodeVariant(ref v, ref it, _) => {
-if v.node.vis == ast::Inherited {it.vis} else {v.node.vis}
+match self.tcx.map.get(id.node) {
+ast_map::NodeItem(ref it) => it.vis,
+ast_map::NodeVariant(ref v) => {
+if v.node.vis == ast::Inherited {
+let parent = self.tcx.map.get_parent(id.node);
+self.tcx.map.expect_item(parent).vis
+} else {
+v.node.vis
+}
 }
 _ => {
 self.tcx.sess.span_bug(span,

@@ -590,10 +597,9 @@ impl<'a> PrivacyVisitor<'a> {
 if struct_vis != ast::Public && field.vis == ast::Public { break }
 if !is_local(field.id) ||
 !self.private_accessible(field.id.node) {
-let string = token::get_ident(ident.name);
 self.tcx.sess.span_err(span,
 format!("field `{}` is private",
-string.get()))
+token::get_ident(ident)))
 }
 break;
 }

@@ -601,17 +607,16 @@ impl<'a> PrivacyVisitor<'a> {

 // Given the ID of a method, checks to ensure it's in scope.
 fn check_static_method(&mut self, span: Span, method_id: ast::DefId,
-name: &ast::Ident) {
+name: ast::Ident) {
 // If the method is a default method, we need to use the def_id of
 // the default implementation.
 let method_id = ty::method(self.tcx, method_id).provided_source
 .unwrap_or(method_id);

-let string = token::get_ident(name.name);
 self.ensure_public(span,
 method_id,
 None,
-format!("method `{}`", string.get()));
+format!("method `{}`", token::get_ident(name)));
 }

 // Checks that a path is in scope.

@@ -627,14 +632,12 @@ impl<'a> PrivacyVisitor<'a> {
 let name = token::get_ident(path.segments
 .last()
 .unwrap()
-.identifier
-.name);
+.identifier);
 self.ensure_public(span,
 def,
 Some(origdid),
 format!("{} `{}`",
-tyname,
-name.get()));
+tyname, name));
 }
 }
 };

@@ -659,7 +662,7 @@ impl<'a> PrivacyVisitor<'a> {
 ident: ast::Ident) {
 match *origin {
 method_static(method_id) => {
-self.check_static_method(span, method_id, &ident)
+self.check_static_method(span, method_id, ident)
 }
 // Trait methods are always all public. The only controlling factor
 // is whether the trait itself is accessible or not.

@@ -25,7 +25,6 @@ use syntax::ast;
 use syntax::ast_map;
 use syntax::ast_util::{def_id_of_def, is_local};
 use syntax::attr;
-use syntax::parse::token;
 use syntax::visit::Visitor;
 use syntax::visit;


@@ -66,8 +65,8 @@ fn method_might_be_inlined(tcx: ty::ctxt, method: &ast::Method,
 }
 if is_local(impl_src) {
 {
-match tcx.items.find(impl_src.node) {
-Some(ast_map::NodeItem(item, _)) => {
+match tcx.map.find(impl_src.node) {
+Some(ast_map::NodeItem(item)) => {
 item_might_be_inlined(item)
 }
 Some(..) | None => {

@@ -212,47 +211,33 @@ impl ReachableContext {
 }

 let node_id = def_id.node;
-match tcx.items.find(node_id) {
-Some(ast_map::NodeItem(item, _)) => {
+match tcx.map.find(node_id) {
+Some(ast_map::NodeItem(item)) => {
 match item.node {
 ast::ItemFn(..) => item_might_be_inlined(item),
 _ => false,
 }
 }
-Some(ast_map::NodeTraitMethod(trait_method, _, _)) => {
+Some(ast_map::NodeTraitMethod(trait_method)) => {
 match *trait_method {
 ast::Required(_) => false,
 ast::Provided(_) => true,
 }
 }
-Some(ast_map::NodeMethod(method, impl_did, _)) => {
+Some(ast_map::NodeMethod(method)) => {
 if generics_require_inlining(&method.generics) ||
 attributes_specify_inlining(method.attrs) {
 true
 } else {
+let impl_did = tcx.map.get_parent_did(node_id);
 // Check the impl. If the generics on the self type of the
 // impl require inlining, this method does too.
 assert!(impl_did.krate == ast::LOCAL_CRATE);
-match tcx.items.find(impl_did.node) {
-Some(ast_map::NodeItem(item, _)) => {
-match item.node {
-ast::ItemImpl(ref generics, _, _, _) => {
-generics_require_inlining(generics)
-}
-_ => false
-}
-}
-Some(_) => {
-tcx.sess.span_bug(method.span,
-"method is not inside an \
-impl?!")
-}
-None => {
-tcx.sess.span_bug(method.span,
-"the impl that this method is \
-supposedly inside of doesn't \
-exist in the AST map?!")
+match tcx.map.expect_item(impl_did.node).node {
+ast::ItemImpl(ref generics, _, _, _) => {
+generics_require_inlining(generics)
+}
+_ => false
+}
 }
 }

@@ -292,7 +277,7 @@ impl ReachableContext {
 };

 scanned.insert(search_item);
-match self.tcx.items.find(search_item) {
+match self.tcx.map.find(search_item) {
 Some(ref item) => self.propagate_node(item, search_item,
 &mut visitor),
 None if search_item == ast::CRATE_NODE_ID => {}

@@ -315,7 +300,7 @@ impl ReachableContext {
 // but all other rust-only interfaces can be private (they will not
 // participate in linkage after this product is produced)
 match *node {
-ast_map::NodeItem(item, _) => {
+ast_map::NodeItem(item) => {
 match item.node {
 ast::ItemFn(_, ast::ExternFn, _, _, _) => {
 let mut reachable_symbols =

@@ -337,7 +322,7 @@ impl ReachableContext {
 }

 match *node {
-ast_map::NodeItem(item, _) => {
+ast_map::NodeItem(item) => {
 match item.node {
 ast::ItemFn(_, _, _, _, search_block) => {
 if item_might_be_inlined(item) {

@@ -371,7 +356,7 @@ impl ReachableContext {
 }
 }
 }
-ast_map::NodeTraitMethod(trait_method, _, _) => {
+ast_map::NodeTraitMethod(trait_method) => {
 match *trait_method {
 ast::Required(..) => {
 // Keep going, nothing to get exported

@@ -381,23 +366,20 @@ impl ReachableContext {
 }
 }
 }
-ast_map::NodeMethod(method, did, _) => {
+ast_map::NodeMethod(method) => {
+let did = self.tcx.map.get_parent_did(search_item);
 if method_might_be_inlined(self.tcx, method, did) {
 visit::walk_block(visitor, method.body, ())
 }
 }
 // Nothing to recurse on for these
-ast_map::NodeForeignItem(..) |
-ast_map::NodeVariant(..) |
-ast_map::NodeStructCtor(..) => {}
+ast_map::NodeForeignItem(_) |
+ast_map::NodeVariant(_) |
+ast_map::NodeStructCtor(_) => {}
 _ => {
-let ident_interner = token::get_ident_interner();
-let desc = ast_map::node_id_to_str(self.tcx.items,
-search_item,
-ident_interner);
 self.tcx.sess.bug(format!("found unexpected thingy in \
 worklist: {}",
-desc))
+self.tcx.map.node_to_str(search_item)))
 }
 }
 }

@@ -20,7 +20,7 @@ use syntax::ast::*;
 use syntax::ast;
 use syntax::ast_util::{def_id_of_def, local_def, mtwt_resolve};
 use syntax::ast_util::{path_to_ident, walk_pat, trait_method_to_ty_method};
-use syntax::parse::token::{IdentInterner, special_idents};
+use syntax::parse::token::special_idents;
 use syntax::parse::token;
 use syntax::print::pprust::path_to_str;
 use syntax::codemap::{Span, DUMMY_SP, Pos};

@ -813,7 +813,6 @@ fn Resolver(session: Session,
|
|||
last_private: HashMap::new(),
|
||||
|
||||
emit_errors: true,
|
||||
intr: session.intr()
|
||||
};
|
||||
|
||||
this
|
||||
|
@ -824,8 +823,6 @@ struct Resolver {
|
|||
session: @Session,
|
||||
lang_items: @LanguageItems,
|
||||
|
||||
intr: @IdentInterner,
|
||||
|
||||
graph_root: @NameBindings,
|
||||
|
||||
method_map: @RefCell<HashMap<Name, HashSet<DefId>>>,
|
||||
|
@ -1066,14 +1063,14 @@ impl Resolver {
|
|||
self.resolve_error(sp,
|
||||
format!("duplicate definition of {} `{}`",
|
||||
namespace_error_to_str(duplicate_type),
|
||||
self.session.str_of(name)));
|
||||
token::get_ident(name)));
|
||||
{
|
||||
let r = child.span_for_namespace(ns);
|
||||
for sp in r.iter() {
|
||||
self.session.span_note(*sp,
|
||||
format!("first definition of {} `{}` here",
|
||||
namespace_error_to_str(duplicate_type),
|
||||
self.session.str_of(name)));
|
||||
token::get_ident(name)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1679,7 +1676,7 @@ impl Resolver {
|
|||
debug!("(building reduced graph for \
|
||||
external crate) ... adding \
|
||||
trait method '{}'",
|
||||
self.session.str_of(method_name));
|
||||
token::get_ident(method_name));
|
||||
|
||||
// Add it to the trait info if not static.
|
||||
if explicit_self != SelfStatic {
|
||||
|
@ -1776,7 +1773,7 @@ impl Resolver {
|
|||
self.handle_external_def(def,
|
||||
visibility,
|
||||
child_name_bindings,
|
||||
self.session.str_of(ident),
|
||||
token::get_ident(ident).get(),
|
||||
ident,
|
||||
new_parent);
|
||||
}
|
||||
|
@ -1795,8 +1792,7 @@ impl Resolver {
|
|||
debug!("(building reduced graph for \
|
||||
external crate) processing \
|
||||
static methods for type name {}",
|
||||
self.session.str_of(
|
||||
final_ident));
|
||||
token::get_ident(final_ident));
|
||||
|
||||
let (child_name_bindings, new_parent) =
|
||||
self.add_child(
|
||||
|
@ -1847,7 +1843,7 @@ impl Resolver {
|
|||
debug!("(building reduced graph for \
|
||||
external crate) creating \
|
||||
static method '{}'",
|
||||
self.session.str_of(ident));
|
||||
token::get_ident(ident));
|
||||
|
||||
let (method_name_bindings, _) =
|
||||
self.add_child(ident,
|
||||
|
@ -1893,9 +1889,8 @@ impl Resolver {
|
|||
csearch::each_child_of_item(self.session.cstore,
|
||||
def_id,
|
||||
|def_like, child_ident, visibility| {
|
||||
let child_ident_string = token::get_ident(child_ident.name);
|
||||
debug!("(populating external module) ... found ident: {}",
|
||||
child_ident_string.get());
|
||||
token::get_ident(child_ident));
|
||||
self.build_reduced_graph_for_external_crate_def(module,
|
||||
def_like,
|
||||
child_ident,
|
||||
|
@ -1955,7 +1950,7 @@ impl Resolver {
|
|||
debug!("(building import directive) building import \
|
||||
directive: {}::{}",
|
||||
self.idents_to_str(directive.module_path),
|
||||
self.session.str_of(target));
|
||||
token::get_ident(target));
|
||||
|
||||
let mut import_resolutions = module_.import_resolutions
|
||||
.borrow_mut();
|
||||
|
@ -2100,7 +2095,7 @@ impl Resolver {
|
|||
} else {
|
||||
result.push_str("::")
|
||||
}
|
||||
result.push_str(self.session.str_of(*ident));
|
||||
result.push_str(token::get_ident(*ident).get());
|
||||
};
|
||||
return result;
|
||||
}
|
||||
|
@ -2117,8 +2112,8 @@ impl Resolver {
|
|||
subclass: ImportDirectiveSubclass)
|
||||
-> ~str {
|
||||
match subclass {
|
||||
SingleImport(_target, source) => {
|
||||
self.session.str_of(source).to_str()
|
||||
SingleImport(_, source) => {
|
||||
token::get_ident(source).get().to_str()
|
||||
}
|
||||
GlobImport => ~"*"
|
||||
}
|
||||
|
@ -2255,9 +2250,9 @@ impl Resolver {
|
|||
-> ResolveResult<()> {
|
||||
debug!("(resolving single import) resolving `{}` = `{}::{}` from \
|
||||
`{}` id {}, last private {:?}",
|
||||
self.session.str_of(target),
|
||||
token::get_ident(target),
|
||||
self.module_to_str(containing_module),
|
||||
self.session.str_of(source),
|
||||
token::get_ident(source),
|
||||
self.module_to_str(module_),
|
||||
directive.id,
|
||||
lp);
|
||||
|
@ -2443,7 +2438,7 @@ impl Resolver {
|
|||
import_resolution.type_target.get().is_none() {
|
||||
let msg = format!("unresolved import: there is no \
|
||||
`{}` in `{}`",
|
||||
self.session.str_of(source),
|
||||
token::get_ident(source),
|
||||
self.module_to_str(containing_module));
|
||||
self.resolve_error(directive.span, msg);
|
||||
return Failed;
|
||||
|
@ -2630,7 +2625,7 @@ impl Resolver {
|
|||
|
||||
debug!("(resolving glob import) writing resolution `{}` in `{}` \
|
||||
to `{}`",
|
||||
token::get_ident(name).get().to_str(),
|
||||
token::get_name(name).get().to_str(),
|
||||
self.module_to_str(containing_module),
|
||||
self.module_to_str(module_));
|
||||
|
||||
|
@ -2674,12 +2669,12 @@ impl Resolver {
|
|||
TypeNS,
|
||||
name_search_type) {
|
||||
Failed => {
|
||||
let segment_name = self.session.str_of(name);
|
||||
let segment_name = token::get_ident(name);
|
||||
let module_name = self.module_to_str(search_module);
|
||||
if "???" == module_name {
|
||||
let span = Span {
|
||||
lo: span.lo,
|
||||
hi: span.lo + Pos::from_uint(segment_name.len()),
|
||||
hi: span.lo + Pos::from_uint(segment_name.get().len()),
|
||||
expn_info: span.expn_info,
|
||||
};
|
||||
self.resolve_error(span,
|
||||
|
@ -2696,7 +2691,7 @@ impl Resolver {
|
|||
Indeterminate => {
|
||||
debug!("(resolving module path for import) module \
|
||||
resolution is indeterminate: {}",
|
||||
self.session.str_of(name));
|
||||
token::get_ident(name));
|
||||
return Indeterminate;
|
||||
}
|
||||
Success((target, used_proxy)) => {
|
||||
|
@ -2707,12 +2702,8 @@ impl Resolver {
|
|||
match type_def.module_def {
|
||||
None => {
|
||||
// Not a module.
|
||||
self.resolve_error(span,
|
||||
format!("not a \
|
||||
module `{}`",
|
||||
self.session.
|
||||
str_of(
|
||||
name)));
|
||||
self.resolve_error(span, format!("not a module `{}`",
|
||||
token::get_ident(name)));
|
||||
return Failed;
|
||||
}
|
||||
Some(module_def) => {
|
||||
|
@ -2755,8 +2746,7 @@ impl Resolver {
|
|||
// There are no type bindings at all.
|
||||
self.resolve_error(span,
|
||||
format!("not a module `{}`",
|
||||
self.session.str_of(
|
||||
name)));
|
||||
token::get_ident(name)));
|
||||
return Failed;
|
||||
}
|
||||
}
|
||||
|
@ -2883,7 +2873,7 @@ impl Resolver {
|
|||
-> ResolveResult<(Target, bool)> {
|
||||
debug!("(resolving item in lexical scope) resolving `{}` in \
|
||||
namespace {:?} in `{}`",
|
||||
self.session.str_of(name),
|
||||
token::get_ident(name),
|
||||
namespace,
|
||||
self.module_to_str(module_));
|
||||
|
||||
|
@ -3112,7 +3102,7 @@ impl Resolver {
|
|||
// top of the crate otherwise.
|
||||
let mut containing_module;
|
||||
let mut i;
|
||||
let first_module_path_string = token::get_ident(module_path[0].name);
|
||||
let first_module_path_string = token::get_ident(module_path[0]);
|
||||
if "self" == first_module_path_string.get() {
|
||||
containing_module =
|
||||
self.get_nearest_normal_module_parent_or_self(module_);
|
||||
|
@ -3127,7 +3117,7 @@ impl Resolver {
|
|||
|
||||
// Now loop through all the `super`s we find.
|
||||
while i < module_path.len() {
|
||||
let string = token::get_ident(module_path[i].name);
|
||||
let string = token::get_ident(module_path[i]);
|
||||
if "super" != string.get() {
|
||||
break
|
||||
}
|
||||
|
@ -3161,7 +3151,7 @@ impl Resolver {
|
|||
name_search_type: NameSearchType)
|
||||
-> ResolveResult<(Target, bool)> {
|
||||
debug!("(resolving name in module) resolving `{}` in `{}`",
|
||||
self.session.str_of(name),
|
||||
token::get_ident(name),
|
||||
self.module_to_str(module_));
|
||||
|
||||
// First, check the direct children of the module.
|
||||
|
@ -3236,7 +3226,7 @@ impl Resolver {
|
|||
|
||||
// We're out of luck.
|
||||
debug!("(resolving name in module) failed to resolve `{}`",
|
||||
self.session.str_of(name));
|
||||
token::get_ident(name));
|
||||
return Failed;
|
||||
}
|
||||
|
||||
|
@ -3368,11 +3358,11 @@ impl Resolver {
|
|||
ns: Namespace) {
|
||||
match namebindings.def_for_namespace(ns) {
|
||||
Some(d) => {
|
||||
let name = token::get_name(name);
|
||||
debug!("(computing exports) YES: export '{}' => {:?}",
|
||||
token::get_ident(name).get().to_str(),
|
||||
def_id_of_def(d));
|
||||
name, def_id_of_def(d));
|
||||
exports2.push(Export2 {
|
||||
name: token::get_ident(name).get().to_str(),
|
||||
name: name.get().to_str(),
|
||||
def_id: def_id_of_def(d)
|
||||
});
|
||||
}
|
||||
|
@ -3395,7 +3385,7 @@ impl Resolver {
|
|||
match importresolution.target_for_namespace(ns) {
|
||||
Some(target) => {
|
||||
debug!("(computing exports) maybe export '{}'",
|
||||
token::get_ident(*name).get().to_str());
|
||||
token::get_name(*name));
|
||||
self.add_exports_of_namebindings(exports2,
|
||||
*name,
|
||||
target.bindings,
|
||||
|
@ -3440,7 +3430,7 @@ impl Resolver {
|
|||
match children.get().find(&name.name) {
|
||||
None => {
|
||||
debug!("!!! (with scope) didn't find `{}` in `{}`",
|
||||
self.session.str_of(name),
|
||||
token::get_ident(name),
|
||||
self.module_to_str(orig_module));
|
||||
}
|
||||
Some(name_bindings) => {
|
||||
|
@ -3448,7 +3438,7 @@ impl Resolver {
|
|||
None => {
|
||||
debug!("!!! (with scope) didn't find module \
|
||||
for `{}` in `{}`",
|
||||
self.session.str_of(name),
|
||||
token::get_ident(name),
|
||||
self.module_to_str(orig_module));
|
||||
}
|
||||
Some(module_) => {
|
||||
|
@ -3618,7 +3608,7 @@ impl Resolver {
|
|||
|
||||
fn resolve_item(&mut self, item: &Item) {
|
||||
debug!("(resolving item) resolving {}",
|
||||
self.session.str_of(item.ident));
|
||||
token::get_ident(item.ident));
|
||||
|
||||
match item.node {
|
||||
|
||||
|
@ -3997,7 +3987,7 @@ impl Resolver {
|
|||
NamedField(ident, _) => {
|
||||
match ident_map.find(&ident) {
|
||||
Some(&prev_field) => {
|
||||
let ident_str = self.session.str_of(ident);
|
||||
let ident_str = token::get_ident(ident);
|
||||
self.resolve_error(field.span,
|
||||
format!("field `{}` is already declared", ident_str));
|
||||
self.session.span_note(prev_field.span,
|
||||
|
@ -4170,22 +4160,20 @@ impl Resolver {
|
|||
for (&key, &binding_0) in map_0.iter() {
|
||||
match map_i.find(&key) {
|
||||
None => {
|
||||
let string = token::get_ident(key);
|
||||
self.resolve_error(
|
||||
p.span,
|
||||
format!("variable `{}` from pattern \\#1 is \
|
||||
not bound in pattern \\#{}",
|
||||
string.get(),
|
||||
token::get_name(key),
|
||||
i + 1));
|
||||
}
|
||||
Some(binding_i) => {
|
||||
if binding_0.binding_mode != binding_i.binding_mode {
|
||||
let string = token::get_ident(key);
|
||||
self.resolve_error(
|
||||
binding_i.span,
|
||||
format!("variable `{}` is bound with different \
|
||||
mode in pattern \\#{} than in pattern \\#1",
|
||||
string.get(),
|
||||
token::get_name(key),
|
||||
i + 1));
|
||||
}
|
||||
}
|
||||
|
@ -4194,12 +4182,11 @@ impl Resolver {
|
|||
|
||||
for (&key, &binding) in map_i.iter() {
|
||||
if !map_0.contains_key(&key) {
|
||||
let string = token::get_ident(key);
|
||||
self.resolve_error(
|
||||
binding.span,
|
||||
format!("variable `{}` from pattern \\#{} is \
|
||||
not bound in pattern \\#1",
|
||||
string.get(),
|
||||
token::get_name(key),
|
||||
i + 1));
|
||||
}
|
||||
}
|
||||
|
@ -4312,9 +4299,9 @@ impl Resolver {
|
|||
Some(def) => {
|
||||
debug!("(resolving type) resolved `{}` to \
|
||||
type {:?}",
|
||||
self.session.str_of(path.segments
|
||||
.last().unwrap()
|
||||
.identifier),
|
||||
token::get_ident(path.segments
|
||||
.last().unwrap()
|
||||
.identifier),
|
||||
def);
|
||||
result_def = Some(def);
|
||||
}
|
||||
|
@ -4392,10 +4379,9 @@ impl Resolver {
|
|||
match self.resolve_bare_identifier_pattern(ident) {
|
||||
FoundStructOrEnumVariant(def, lp)
|
||||
if mode == RefutableMode => {
|
||||
let string = token::get_ident(renamed);
|
||||
debug!("(resolving pattern) resolving `{}` to \
|
||||
struct or enum variant",
|
||||
string.get());
|
||||
token::get_name(renamed));
|
||||
|
||||
self.enforce_default_binding_mode(
|
||||
pattern,
|
||||
|
@ -4404,19 +4390,17 @@ impl Resolver {
|
|||
self.record_def(pattern.id, (def, lp));
|
||||
}
|
||||
FoundStructOrEnumVariant(..) => {
|
||||
let string = token::get_ident(renamed);
|
||||
self.resolve_error(pattern.span,
|
||||
format!("declaration of `{}` \
|
||||
shadows an enum \
|
||||
variant or unit-like \
|
||||
struct in scope",
|
||||
string.get()));
|
||||
token::get_name(renamed)));
|
||||
}
|
||||
FoundConst(def, lp) if mode == RefutableMode => {
|
||||
let string = token::get_ident(renamed);
|
||||
debug!("(resolving pattern) resolving `{}` to \
|
||||
constant",
|
||||
string.get());
|
||||
token::get_name(renamed));
|
||||
|
||||
self.enforce_default_binding_mode(
|
||||
pattern,
|
||||
|
@ -4430,9 +4414,8 @@ impl Resolver {
|
|||
allowed here");
|
||||
}
|
||||
BareIdentifierPatternUnresolved => {
|
||||
let string = token::get_ident(renamed);
|
||||
debug!("(resolving pattern) binding `{}`",
|
||||
string.get());
|
||||
token::get_name(renamed));
|
||||
|
||||
let def = match mode {
|
||||
RefutableMode => {
|
||||
|
@ -4487,8 +4470,7 @@ impl Resolver {
|
|||
self.resolve_error(pattern.span,
|
||||
format!("identifier `{}` is bound more \
|
||||
than once in the same pattern",
|
||||
path_to_str(path, self.session
|
||||
.intr())));
|
||||
path_to_str(path)));
|
||||
}
|
||||
// Not bound in the same pattern: do nothing
|
||||
}
|
||||
|
@ -4535,7 +4517,7 @@ impl Resolver {
|
|||
self.resolve_error(
|
||||
path.span,
|
||||
format!("`{}` is not an enum variant or constant",
|
||||
self.session.str_of(
|
||||
token::get_ident(
|
||||
path.segments.last().unwrap().identifier)))
|
||||
}
|
||||
None => {
|
||||
|
@ -4562,22 +4544,18 @@ impl Resolver {
|
|||
self.record_def(pattern.id, def);
|
||||
}
|
||||
Some(_) => {
|
||||
self.resolve_error(
|
||||
path.span,
|
||||
self.resolve_error(path.span,
|
||||
format!("`{}` is not an enum variant, struct or const",
|
||||
self.session
|
||||
.str_of(path.segments
|
||||
.last().unwrap()
|
||||
.identifier)));
|
||||
token::get_ident(path.segments
|
||||
.last().unwrap()
|
||||
.identifier)));
|
||||
}
|
||||
None => {
|
||||
self.resolve_error(path.span,
|
||||
format!("unresolved enum variant, \
|
||||
struct or const `{}`",
|
||||
self.session
|
||||
.str_of(path.segments
|
||||
.last().unwrap()
|
||||
.identifier)));
|
||||
format!("unresolved enum variant, struct or const `{}`",
|
||||
token::get_ident(path.segments
|
||||
.last().unwrap()
|
||||
.identifier)));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -4641,7 +4619,7 @@ impl Resolver {
|
|||
Success((target, _)) => {
|
||||
debug!("(resolve bare identifier pattern) succeeded in \
|
||||
finding {} at {:?}",
|
||||
self.session.str_of(name),
|
||||
token::get_ident(name),
|
||||
target.bindings.value_def.get());
|
||||
match target.bindings.value_def.get() {
|
||||
None => {
|
||||
|
@ -4673,7 +4651,7 @@ impl Resolver {
|
|||
|
||||
Failed => {
|
||||
debug!("(resolve bare identifier pattern) failed to find {}",
|
||||
self.session.str_of(name));
|
||||
token::get_ident(name));
|
||||
return BareIdentifierPatternUnresolved;
|
||||
}
|
||||
}
|
||||
|
@ -4969,7 +4947,7 @@ impl Resolver {
|
|||
Some(DlDef(def)) => {
|
||||
debug!("(resolving path in local ribs) resolved `{}` to \
|
||||
local: {:?}",
|
||||
self.session.str_of(ident),
|
||||
token::get_ident(ident),
|
||||
def);
|
||||
return Some(def);
|
||||
}
|
||||
|
@ -4995,13 +4973,13 @@ impl Resolver {
|
|||
// found a module instead. Modules don't have defs.
|
||||
debug!("(resolving item path by identifier in lexical \
|
||||
scope) failed to resolve {} after success...",
|
||||
self.session.str_of(ident));
|
||||
token::get_ident(ident));
|
||||
return None;
|
||||
}
|
||||
Some(def) => {
|
||||
debug!("(resolving item path in lexical scope) \
|
||||
resolved `{}` to item",
|
||||
self.session.str_of(ident));
|
||||
token::get_ident(ident));
|
||||
// This lookup is "all public" because it only searched
|
||||
// for one identifier in the current module (couldn't
|
||||
// have passed through reexports or anything like that.
|
||||
|
@ -5014,7 +4992,7 @@ impl Resolver {
|
|||
}
|
||||
Failed => {
|
||||
debug!("(resolving item path by identifier in lexical scope) \
|
||||
failed to resolve {}", self.session.str_of(ident));
|
||||
failed to resolve {}", token::get_ident(ident));
|
||||
return None;
|
||||
}
|
||||
}
|
||||
|
@ -5037,7 +5015,7 @@ impl Resolver {
|
|||
-> Option<~str> {
|
||||
let this = &mut *self;
|
||||
|
||||
let mut maybes: ~[~str] = ~[];
|
||||
let mut maybes: ~[token::InternedString] = ~[];
|
||||
let mut values: ~[uint] = ~[];
|
||||
|
||||
let mut j = {
|
||||
|
@ -5049,15 +5027,14 @@ impl Resolver {
|
|||
let value_ribs = this.value_ribs.borrow();
|
||||
let bindings = value_ribs.get()[j].bindings.borrow();
|
||||
for (&k, _) in bindings.get().iter() {
|
||||
let string = token::get_ident(k);
|
||||
maybes.push(string.get().to_str());
|
||||
maybes.push(token::get_name(k));
|
||||
values.push(uint::MAX);
|
||||
}
|
||||
}
|
||||
|
||||
let mut smallest = 0;
|
||||
for (i, other) in maybes.iter().enumerate() {
|
||||
values[i] = name.lev_distance(*other);
|
||||
values[i] = name.lev_distance(other.get());
|
||||
|
||||
if values[i] <= values[smallest] {
|
||||
smallest = i;
|
||||
|
@ -5068,9 +5045,9 @@ impl Resolver {
|
|||
values[smallest] != uint::MAX &&
|
||||
values[smallest] < name.len() + 2 &&
|
||||
values[smallest] <= max_distance &&
|
||||
name != maybes[smallest] {
|
||||
name != maybes[smallest].get() {
|
||||
|
||||
Some(maybes.swap_remove(smallest))
|
||||
Some(maybes[smallest].get().to_str())
|
||||
|
||||
} else {
|
||||
None
|
||||
|
@ -5214,11 +5191,8 @@ impl Resolver {
|
|||
match self.search_ribs(label_ribs.get(), label, expr.span) {
|
||||
None =>
|
||||
self.resolve_error(expr.span,
|
||||
format!("use of undeclared label \
|
||||
`{}`",
|
||||
token::get_ident(label)
|
||||
.get()
|
||||
.to_str())),
|
||||
format!("use of undeclared label `{}`",
|
||||
token::get_name(label))),
|
||||
Some(DlDef(def @ DefLabel(_))) => {
|
||||
// FIXME: is AllPublic correct?
|
||||
self.record_def(expr.id, (def, AllPublic))
|
||||
|
@ -5323,7 +5297,7 @@ impl Resolver {
|
|||
|
||||
fn search_for_traits_containing_method(&mut self, name: Ident) -> ~[DefId] {
|
||||
debug!("(searching for traits containing method) looking for '{}'",
|
||||
self.session.str_of(name));
|
||||
token::get_ident(name));
|
||||
|
||||
let mut found_traits = ~[];
|
||||
let mut search_module = self.current_module;
|
||||
|
@ -5403,7 +5377,7 @@ impl Resolver {
|
|||
debug!("(adding trait info) found trait {}:{} for method '{}'",
|
||||
trait_def_id.krate,
|
||||
trait_def_id.node,
|
||||
self.session.str_of(name));
|
||||
token::get_ident(name));
|
||||
found_traits.push(trait_def_id);
|
||||
}
|
||||
|
||||
|
@ -5538,12 +5512,12 @@ impl Resolver {
|
|||
self.populate_module_if_necessary(module_);
|
||||
let children = module_.children.borrow();
|
||||
for (&name, _) in children.get().iter() {
|
||||
debug!("* {}", token::get_ident(name).get().to_str());
|
||||
debug!("* {}", token::get_name(name));
|
||||
}
|
||||
|
||||
debug!("Import resolutions:");
|
||||
let import_resolutions = module_.import_resolutions.borrow();
|
||||
for (name, import_resolution) in import_resolutions.get().iter() {
|
||||
for (&name, import_resolution) in import_resolutions.get().iter() {
|
||||
let value_repr;
|
||||
match import_resolution.target_for_namespace(ValueNS) {
|
||||
None => { value_repr = ~""; }
|
||||
|
@ -5562,8 +5536,7 @@ impl Resolver {
|
|||
}
|
||||
}
|
||||
|
||||
debug!("* {}:{}{}", token::get_ident(*name).get().to_str(),
|
||||
value_repr, type_repr);
|
||||
debug!("* {}:{}{}", token::get_name(name), value_repr, type_repr);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -24,6 +24,7 @@ use syntax::ast;
 use syntax::codemap::Span;
 use syntax::opt_vec::OptVec;
 use syntax::parse::token::special_idents;
+use syntax::parse::token;
 use syntax::print::pprust::{lifetime_to_str};
 use syntax::visit;
 use syntax::visit::Visitor;

@@ -261,7 +262,7 @@ impl LifetimeContext {
 self.sess.span_err(
 lifetime_ref.span,
 format!("use of undeclared lifetime name `'{}`",
-self.sess.str_of(lifetime_ref.ident)));
+token::get_ident(lifetime_ref.ident)));
 }

 fn check_lifetime_names(&self, lifetimes: &OptVec<ast::Lifetime>) {

@@ -274,7 +275,7 @@ impl LifetimeContext {
 self.sess.span_err(
 lifetime.span,
 format!("illegal lifetime parameter name: `{}`",
-self.sess.str_of(lifetime.ident)));
+token::get_ident(lifetime.ident)));
 }
 }

@@ -286,7 +287,7 @@ impl LifetimeContext {
 lifetime_j.span,
 format!("lifetime name `'{}` declared twice in \
 the same scope",
-self.sess.str_of(lifetime_j.ident)));
+token::get_ident(lifetime_j.ident)));
 }
 }
 }

@@ -302,8 +303,7 @@ impl LifetimeContext {
 }

 debug!("lifetime_ref={} id={} resolved to {:?}",
-lifetime_to_str(lifetime_ref,
-self.sess.intr()),
+lifetime_to_str(lifetime_ref),
 lifetime_ref.id,
 def);
 let mut named_region_map = self.named_region_map.borrow_mut();

@@ -75,10 +75,9 @@ use std::c_str::ToCStr;
 use std::cell::{Cell, RefCell};
 use std::hashmap::HashMap;
 use std::libc::c_uint;
 use std::vec;
 use std::local_data;
 use syntax::abi::{X86, X86_64, Arm, Mips, Rust, RustIntrinsic, OsWin32};
-use syntax::ast_map::{PathName, PathPrettyName, path_elem_to_str};
+use syntax::ast_map::PathName;
 use syntax::ast_util::{local_def, is_local};
 use syntax::attr::AttrMetaMethods;
 use syntax::attr;

@@ -131,16 +130,15 @@ pub fn push_ctxt(s: &'static str) -> _InsnCtxt {
 _InsnCtxt { _x: () }
 }

-pub struct StatRecorder<'a> {
+pub struct StatRecorder {
 ccx: @CrateContext,
-name: &'a str,
+name: Option<~str>,
 start: u64,
 istart: uint,
 }

-impl<'a> StatRecorder<'a> {
-pub fn new(ccx: @CrateContext,
-name: &'a str) -> StatRecorder<'a> {
+impl StatRecorder {
+pub fn new(ccx: @CrateContext, name: ~str) -> StatRecorder {
 let start = if ccx.sess.trans_stats() {
 time::precise_time_ns()
 } else {

@@ -149,7 +147,7 @@ impl<'a> StatRecorder<'a> {
 let istart = ccx.stats.n_llvm_insns.get();
 StatRecorder {
 ccx: ccx,
-name: name,
+name: Some(name),
 start: start,
 istart: istart,
 }

@@ -157,7 +155,7 @@ impl<'a> StatRecorder<'a> {
 }

 #[unsafe_destructor]
-impl<'a> Drop for StatRecorder<'a> {
+impl Drop for StatRecorder {
 fn drop(&mut self) {
 if self.ccx.sess.trans_stats() {
 let end = time::precise_time_ns();

@@ -165,7 +163,7 @@ impl<'a> Drop for StatRecorder<'a> {
 let iend = self.ccx.stats.n_llvm_insns.get();
 {
 let mut fn_stats = self.ccx.stats.fn_stats.borrow_mut();
-fn_stats.get().push((self.name.to_owned(),
+fn_stats.get().push((self.name.take_unwrap(),
 elapsed,
 iend - self.istart));
 }

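Aside (not part of the diff): the `Option<~str>` field above is the usual "take it in Drop" pattern — the owned name sits in an Option so the destructor can move it out of `&mut self` instead of copying it. A minimal standalone sketch of the same pattern in present-day Rust, with illustrative names only (String and Instant stand in for ~str and the compiler's timing/stat plumbing):

// Sketch of the Option-in-Drop pattern; not the compiler's actual types.
struct StatRecorder {
    name: Option<String>,
    start: std::time::Instant,
}

impl StatRecorder {
    fn new(name: String) -> StatRecorder {
        StatRecorder { name: Some(name), start: std::time::Instant::now() }
    }
}

impl Drop for StatRecorder {
    fn drop(&mut self) {
        // take() moves the String out through &mut self, leaving None behind,
        // so no clone is needed when the stat is recorded.
        let name = self.name.take().unwrap();
        println!("{} took {:?}", name, self.start.elapsed());
    }
}

fn main() {
    let _s = StatRecorder::new("trans_fn".to_string());
}
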
@@ -589,15 +587,14 @@ pub fn compare_scalar_types<'a>(
 t: ty::t,
 op: ast::BinOp)
 -> Result<'a> {
-let f = |a| compare_scalar_values(cx, lhs, rhs, a, op);
+let f = |a| rslt(cx, compare_scalar_values(cx, lhs, rhs, a, op));

 match ty::get(t).sty {
-ty::ty_nil => rslt(cx, f(nil_type)),
-ty::ty_bool | ty::ty_ptr(_) => rslt(cx, f(unsigned_int)),
-ty::ty_char => rslt(cx, f(unsigned_int)),
-ty::ty_int(_) => rslt(cx, f(signed_int)),
-ty::ty_uint(_) => rslt(cx, f(unsigned_int)),
-ty::ty_float(_) => rslt(cx, f(floating_point)),
+ty::ty_nil => f(nil_type),
+ty::ty_bool | ty::ty_ptr(_) |
+ty::ty_uint(_) | ty::ty_char => f(unsigned_int),
+ty::ty_int(_) => f(signed_int),
+ty::ty_float(_) => f(floating_point),
 // Should never get here, because t is scalar.
 _ => cx.sess().bug("non-scalar type passed to compare_scalar_types")
 }

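Aside (not part of the diff): the hunk above folds the repeated `rslt(cx, ...)` wrapper into the closure `f`, so every match arm returns through the same path. A tiny self-contained illustration of that refactor shape in plain Rust, with made-up names:

// Before: the closure only classified, and each arm wrapped the call in Ok(..).
// After: the wrapper lives inside the closure and the arms stay uniform.
fn scalar_kind(tag: u32) -> Result<&'static str, String> {
    let f = |kind| Ok(kind);
    match tag {
        0 => f("nil"),
        1 | 2 => f("unsigned"),
        3 => f("signed"),
        4 => f("float"),
        _ => Err("non-scalar type".to_string()),
    }
}

fn main() {
    assert_eq!(scalar_kind(3), Ok("signed"));
}
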
@ -914,10 +911,7 @@ pub fn invoke<'a>(
|
|||
debug!("invoke at ???");
|
||||
}
|
||||
Some(id) => {
|
||||
debug!("invoke at {}",
|
||||
ast_map::node_id_to_str(bcx.tcx().items,
|
||||
id,
|
||||
token::get_ident_interner()));
|
||||
debug!("invoke at {}", bcx.tcx().map.node_to_str(id));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1219,7 +1213,6 @@ pub fn make_return_pointer(fcx: &FunctionContext, output_type: ty::t)
|
|||
// Be warned! You must call `init_function` before doing anything with the
|
||||
// returned function context.
|
||||
pub fn new_fn_ctxt<'a>(ccx: @CrateContext,
|
||||
path: ast_map::Path,
|
||||
llfndecl: ValueRef,
|
||||
id: ast::NodeId,
|
||||
has_env: bool,
|
||||
|
@ -1230,12 +1223,9 @@ pub fn new_fn_ctxt<'a>(ccx: @CrateContext,
|
|||
-> FunctionContext<'a> {
|
||||
for p in param_substs.iter() { p.validate(); }
|
||||
|
||||
debug!("new_fn_ctxt(path={},
|
||||
id={:?}, \
|
||||
param_substs={})",
|
||||
path_str(ccx.sess, path),
|
||||
id,
|
||||
param_substs.repr(ccx.tcx));
|
||||
debug!("new_fn_ctxt(path={}, id={}, param_substs={})",
|
||||
if id == -1 { ~"" } else { ccx.tcx.map.path_to_str(id) },
|
||||
id, param_substs.repr(ccx.tcx));
|
||||
|
||||
let substd_output_type = match param_substs {
|
||||
None => output_type,
|
||||
|
@ -1261,7 +1251,6 @@ pub fn new_fn_ctxt<'a>(ccx: @CrateContext,
|
|||
id: id,
|
||||
param_substs: param_substs,
|
||||
span: sp,
|
||||
path: path,
|
||||
block_arena: block_arena,
|
||||
ccx: ccx,
|
||||
debug_context: debug_context,
|
||||
|
@ -1439,7 +1428,6 @@ pub fn build_return_block(fcx: &FunctionContext, ret_cx: &Block) {
|
|||
// If the function closes over its environment a closure will be
|
||||
// returned.
|
||||
pub fn trans_closure<'a>(ccx: @CrateContext,
|
||||
path: ast_map::Path,
|
||||
decl: &ast::FnDecl,
|
||||
body: &ast::Block,
|
||||
llfndecl: ValueRef,
|
||||
|
@ -1463,7 +1451,6 @@ pub fn trans_closure<'a>(ccx: @CrateContext,
|
|||
|
||||
let arena = TypedArena::new();
|
||||
let fcx = new_fn_ctxt(ccx,
|
||||
path,
|
||||
llfndecl,
|
||||
id,
|
||||
has_env,
|
||||
|
@ -1537,19 +1524,17 @@ pub fn trans_closure<'a>(ccx: @CrateContext,
|
|||
// trans_fn: creates an LLVM function corresponding to a source language
|
||||
// function.
|
||||
pub fn trans_fn(ccx: @CrateContext,
|
||||
path: ast_map::Path,
|
||||
decl: &ast::FnDecl,
|
||||
body: &ast::Block,
|
||||
llfndecl: ValueRef,
|
||||
param_substs: Option<@param_substs>,
|
||||
id: ast::NodeId,
|
||||
attrs: &[ast::Attribute]) {
|
||||
let the_path_str = path_str(ccx.sess, path);
|
||||
let _s = StatRecorder::new(ccx, the_path_str);
|
||||
let _s = StatRecorder::new(ccx, ccx.tcx.map.path_to_str(id));
|
||||
debug!("trans_fn(param_substs={})", param_substs.repr(ccx.tcx));
|
||||
let _icx = push_ctxt("trans_fn");
|
||||
let output_type = ty::ty_fn_ret(ty::node_id_to_type(ccx.tcx, id));
|
||||
trans_closure(ccx, path.clone(), decl, body, llfndecl,
|
||||
trans_closure(ccx, decl, body, llfndecl,
|
||||
param_substs, id, attrs, output_type, |bcx| bcx);
|
||||
}
|
||||
|
||||
|
@ -1616,15 +1601,8 @@ fn trans_enum_variant_or_tuple_like_struct(ccx: @CrateContext,
|
|||
};
|
||||
|
||||
let arena = TypedArena::new();
|
||||
let fcx = new_fn_ctxt(ccx,
|
||||
~[],
|
||||
llfndecl,
|
||||
ctor_id,
|
||||
false,
|
||||
result_ty,
|
||||
param_substs,
|
||||
None,
|
||||
&arena);
|
||||
let fcx = new_fn_ctxt(ccx, llfndecl, ctor_id, false, result_ty,
|
||||
param_substs, None, &arena);
|
||||
init_function(&fcx, false, result_ty, param_substs);
|
||||
|
||||
let arg_tys = ty::ty_fn_args(ctor_ty);
|
||||
|
@ -1684,29 +1662,15 @@ impl Visitor<()> for TransItemVisitor {
|
|||
|
||||
pub fn trans_item(ccx: @CrateContext, item: &ast::Item) {
|
||||
let _icx = push_ctxt("trans_item");
|
||||
let path = {
|
||||
match ccx.tcx.items.get(item.id) {
|
||||
ast_map::NodeItem(_, p) => p,
|
||||
// tjc: ?
|
||||
_ => fail!("trans_item"),
|
||||
}
|
||||
};
|
||||
match item.node {
|
||||
ast::ItemFn(decl, purity, _abis, ref generics, body) => {
|
||||
if purity == ast::ExternFn {
|
||||
let llfndecl = get_item_val(ccx, item.id);
|
||||
foreign::trans_rust_fn_with_foreign_abi(
|
||||
ccx,
|
||||
&vec::append_one((*path).clone(), PathName(item.ident)),
|
||||
decl,
|
||||
body,
|
||||
item.attrs,
|
||||
llfndecl,
|
||||
item.id);
|
||||
ccx, decl, body, item.attrs, llfndecl, item.id);
|
||||
} else if !generics.is_type_parameterized() {
|
||||
let path = vec::append_one((*path).clone(), PathName(item.ident));
|
||||
let llfn = get_item_val(ccx, item.id);
|
||||
trans_fn(ccx, path, decl, body, llfn, None, item.id, item.attrs);
|
||||
trans_fn(ccx, decl, body, llfn, None, item.id, item.attrs);
|
||||
} else {
|
||||
// Be sure to travel more than just one layer deep to catch nested
|
||||
// items in blocks and such.
|
||||
|
@ -1715,12 +1679,7 @@ pub fn trans_item(ccx: @CrateContext, item: &ast::Item) {
|
|||
}
|
||||
}
|
||||
ast::ItemImpl(ref generics, _, _, ref ms) => {
|
||||
meth::trans_impl(ccx,
|
||||
(*path).clone(),
|
||||
item.ident,
|
||||
*ms,
|
||||
generics,
|
||||
item.id);
|
||||
meth::trans_impl(ccx, item.ident, *ms, generics, item.id);
|
||||
}
|
||||
ast::ItemMod(ref m) => {
|
||||
trans_mod(ccx, m);
|
||||
|
@ -1844,9 +1803,7 @@ pub fn register_fn_llvmty(ccx: @CrateContext,
|
|||
cc: lib::llvm::CallConv,
|
||||
fn_ty: Type,
|
||||
output: ty::t) -> ValueRef {
|
||||
debug!("register_fn_fuller creating fn for item {} with path {}",
|
||||
node_id,
|
||||
ast_map::path_to_str(item_path(ccx, &node_id), token::get_ident_interner()));
|
||||
debug!("register_fn_llvmty id={} sym={}", node_id, sym);
|
||||
|
||||
let llfn = decl_fn(ccx.llmod, sym, cc, fn_ty, output);
|
||||
finish_register_fn(ccx, sp, sym, node_id, llfn);
|
||||
|
@ -1934,22 +1891,21 @@ pub fn create_entry_wrapper(ccx: @CrateContext,
|
|||
}
|
||||
}
|
||||
|
||||
pub fn item_path(ccx: &CrateContext, id: &ast::NodeId) -> ast_map::Path {
|
||||
ty::item_path(ccx.tcx, ast_util::local_def(*id))
|
||||
}
|
||||
|
||||
fn exported_name(ccx: &CrateContext, path: ast_map::Path,
|
||||
fn exported_name(ccx: &CrateContext, id: ast::NodeId,
|
||||
ty: ty::t, attrs: &[ast::Attribute]) -> ~str {
|
||||
match attr::first_attr_value_str_by_name(attrs, "export_name") {
|
||||
// Use provided name
|
||||
Some(name) => name.get().to_owned(),
|
||||
|
||||
// Don't mangle
|
||||
_ if attr::contains_name(attrs, "no_mangle")
|
||||
=> path_elem_to_str(*path.last().unwrap(), token::get_ident_interner()),
|
||||
|
||||
// Usual name mangling
|
||||
_ => mangle_exported_name(ccx, path, ty)
|
||||
_ => ccx.tcx.map.with_path(id, |mut path| {
|
||||
if attr::contains_name(attrs, "no_mangle") {
|
||||
// Don't mangle
|
||||
path.last().unwrap().to_str()
|
||||
} else {
|
||||
// Usual name mangling
|
||||
mangle_exported_name(ccx, path, ty, id)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1965,14 +1921,11 @@ pub fn get_item_val(ccx: @CrateContext, id: ast::NodeId) -> ValueRef {
|
|||
Some(v) => v,
|
||||
None => {
|
||||
let mut foreign = false;
|
||||
let item = ccx.tcx.items.get(id);
|
||||
let item = ccx.tcx.map.get(id);
|
||||
let val = match item {
|
||||
ast_map::NodeItem(i, pth) => {
|
||||
|
||||
let elt = PathPrettyName(i.ident, id as u64);
|
||||
let my_path = vec::append_one((*pth).clone(), elt);
|
||||
ast_map::NodeItem(i) => {
|
||||
let ty = ty::node_id_to_type(ccx.tcx, i.id);
|
||||
let sym = exported_name(ccx, my_path, ty, i.attrs);
|
||||
let sym = exported_name(ccx, id, ty, i.attrs);
|
||||
|
||||
let v = match i.node {
|
||||
ast::ItemStatic(_, _, expr) => {
|
||||
|
@ -2100,7 +2053,7 @@ pub fn get_item_val(ccx: @CrateContext, id: ast::NodeId) -> ValueRef {
|
|||
v
|
||||
}
|
||||
|
||||
ast_map::NodeTraitMethod(trait_method, _, pth) => {
|
||||
ast_map::NodeTraitMethod(trait_method) => {
|
||||
debug!("get_item_val(): processing a NodeTraitMethod");
|
||||
match *trait_method {
|
||||
ast::Required(_) => {
|
||||
|
@ -2108,23 +2061,23 @@ pub fn get_item_val(ccx: @CrateContext, id: ast::NodeId) -> ValueRef {
|
|||
get_item_val()");
|
||||
}
|
||||
ast::Provided(m) => {
|
||||
register_method(ccx, id, pth, m)
|
||||
register_method(ccx, id, m)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ast_map::NodeMethod(m, _, pth) => {
|
||||
register_method(ccx, id, pth, m)
|
||||
ast_map::NodeMethod(m) => {
|
||||
register_method(ccx, id, m)
|
||||
}
|
||||
|
||||
ast_map::NodeForeignItem(ni, abis, _, pth) => {
|
||||
ast_map::NodeForeignItem(ni) => {
|
||||
let ty = ty::node_id_to_type(ccx.tcx, ni.id);
|
||||
foreign = true;
|
||||
|
||||
match ni.node {
|
||||
ast::ForeignItemFn(..) => {
|
||||
let path = vec::append_one((*pth).clone(), PathName(ni.ident));
|
||||
foreign::register_foreign_item_fn(ccx, abis, &path, ni)
|
||||
let abis = ccx.tcx.map.get_foreign_abis(id);
|
||||
foreign::register_foreign_item_fn(ccx, abis, ni)
|
||||
}
|
||||
ast::ForeignItemStatic(..) => {
|
||||
// Treat the crate map static specially in order to
|
||||
|
@ -2165,16 +2118,15 @@ pub fn get_item_val(ccx: @CrateContext, id: ast::NodeId) -> ValueRef {
|
|||
}
|
||||
}
|
||||
|
||||
ast_map::NodeVariant(ref v, enm, pth) => {
|
||||
ast_map::NodeVariant(ref v) => {
|
||||
let llfn;
|
||||
match v.node.kind {
|
||||
ast::TupleVariantKind(ref args) => {
|
||||
assert!(args.len() != 0u);
|
||||
let pth = vec::append((*pth).clone(),
|
||||
[PathName(enm.ident),
|
||||
PathName((*v).node.name)]);
|
||||
let ty = ty::node_id_to_type(ccx.tcx, id);
|
||||
let sym = exported_name(ccx, pth, ty, enm.attrs);
|
||||
let parent = ccx.tcx.map.get_parent(id);
|
||||
let enm = ccx.tcx.map.expect_item(parent);
|
||||
let sym = exported_name(ccx, id, ty, enm.attrs);
|
||||
|
||||
llfn = match enm.node {
|
||||
ast::ItemEnum(_, _) => {
|
||||
|
@ -2191,7 +2143,7 @@ pub fn get_item_val(ccx: @CrateContext, id: ast::NodeId) -> ValueRef {
|
|||
llfn
|
||||
}
|
||||
|
||||
ast_map::NodeStructCtor(struct_def, struct_item, struct_path) => {
|
||||
ast_map::NodeStructCtor(struct_def) => {
|
||||
// Only register the constructor if this is a tuple-like struct.
|
||||
match struct_def.ctor_id {
|
||||
None => {
|
||||
|
@ -2199,9 +2151,10 @@ pub fn get_item_val(ccx: @CrateContext, id: ast::NodeId) -> ValueRef {
|
|||
a non-tuple-like struct")
|
||||
}
|
||||
Some(ctor_id) => {
|
||||
let parent = ccx.tcx.map.get_parent(id);
|
||||
let struct_item = ccx.tcx.map.expect_item(parent);
|
||||
let ty = ty::node_id_to_type(ccx.tcx, ctor_id);
|
||||
let sym = exported_name(ccx, (*struct_path).clone(), ty,
|
||||
struct_item.attrs);
|
||||
let sym = exported_name(ccx, id, ty, struct_item.attrs);
|
||||
let llfn = register_fn(ccx, struct_item.span,
|
||||
sym, ctor_id, ty);
|
||||
set_inline_hint(llfn);
|
||||
|
@ -2233,16 +2186,11 @@ pub fn get_item_val(ccx: @CrateContext, id: ast::NodeId) -> ValueRef {
|
|||
}
|
||||
}
|
||||
|
||||
fn register_method(ccx: @CrateContext,
|
||||
id: ast::NodeId,
|
||||
path: @ast_map::Path,
|
||||
fn register_method(ccx: @CrateContext, id: ast::NodeId,
|
||||
m: &ast::Method) -> ValueRef {
|
||||
let mty = ty::node_id_to_type(ccx.tcx, id);
|
||||
|
||||
let mut path = (*path).clone();
|
||||
path.push(PathPrettyName(m.ident, token::gensym("meth") as u64));
|
||||
|
||||
let sym = exported_name(ccx, path, mty, m.attrs);
|
||||
let sym = exported_name(ccx, id, mty, m.attrs);
|
||||
|
||||
let llfn = register_fn(ccx, m.span, sym, id, mty);
|
||||
set_llvm_fn_attrs(m.attrs, llfn);
|
||||
|
@ -2489,10 +2437,9 @@ pub fn create_module_map(ccx: &CrateContext) -> (ValueRef, uint) {
|
|||
return (map, keys.len())
|
||||
}
|
||||
|
||||
pub fn symname(sess: session::Session, name: &str,
|
||||
hash: &str, vers: &str) -> ~str {
|
||||
let elt = PathName(sess.ident_of(name));
|
||||
link::exported_name(sess, ~[elt], hash, vers)
|
||||
pub fn symname(name: &str, hash: &str, vers: &str) -> ~str {
|
||||
let path = [PathName(token::intern(name))];
|
||||
link::exported_name(ast_map::Values(path.iter()).chain(None), hash, vers)
|
||||
}
|
||||
|
||||
pub fn decl_crate_map(sess: session::Session, mapmeta: LinkMeta,
|
||||
|
@ -2506,7 +2453,7 @@ pub fn decl_crate_map(sess: session::Session, mapmeta: LinkMeta,
|
|||
let sym_name = if is_top {
|
||||
~"_rust_crate_map_toplevel"
|
||||
} else {
|
||||
symname(sess, "_rust_crate_map_" + mapmeta.crateid.name, mapmeta.crate_hash,
|
||||
symname("_rust_crate_map_" + mapmeta.crateid.name, mapmeta.crate_hash,
|
||||
mapmeta.crateid.version_or_default())
|
||||
};
|
||||
|
||||
|
@ -2539,7 +2486,7 @@ pub fn fill_crate_map(ccx: @CrateContext, map: ValueRef) {
|
|||
let cstore = ccx.sess.cstore;
|
||||
while cstore.have_crate_data(i) {
|
||||
let cdata = cstore.get_crate_data(i);
|
||||
let nm = symname(ccx.sess, format!("_rust_crate_map_{}", cdata.name),
|
||||
let nm = symname(format!("_rust_crate_map_{}", cdata.name),
|
||||
cstore.get_crate_hash(i),
|
||||
cstore.get_crate_vers(i));
|
||||
let cr = nm.with_c_str(|buf| {
|
||||
|
@ -2590,7 +2537,7 @@ pub fn fill_crate_map(ccx: @CrateContext, map: ValueRef) {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn crate_ctxt_to_encode_parms<'r>(cx: &'r CrateContext, ie: encoder::encode_inlined_item<'r>)
|
||||
pub fn crate_ctxt_to_encode_parms<'r>(cx: &'r CrateContext, ie: encoder::EncodeInlinedItem<'r>)
|
||||
-> encoder::EncodeParams<'r> {
|
||||
|
||||
let diag = cx.sess.diagnostic();
|
||||
|
@ -2617,9 +2564,8 @@ pub fn write_metadata(cx: &CrateContext, krate: &ast::Crate) -> ~[u8] {
|
|||
return ~[]
|
||||
}
|
||||
|
||||
let encode_inlined_item: encoder::encode_inlined_item =
|
||||
|ecx, ebml_w, path, ii|
|
||||
astencode::encode_inlined_item(ecx, ebml_w, path, ii, cx.maps);
|
||||
let encode_inlined_item: encoder::EncodeInlinedItem =
|
||||
|ecx, ebml_w, ii| astencode::encode_inlined_item(ecx, ebml_w, ii, cx.maps);
|
||||
|
||||
let encode_parms = crate_ctxt_to_encode_parms(cx, encode_inlined_item);
|
||||
let metadata = encoder::encode_metadata(encode_parms, krate);
|
||||
|
|
|
@@ -357,28 +357,23 @@ pub fn trans_fn_ref_with_vtables(
 // intrinsic, or is a default method. In particular, if we see an
 // intrinsic that is inlined from a different crate, we want to reemit the
 // intrinsic instead of trying to call it in the other crate.
-let must_monomorphise;
-if type_params.len() > 0 || is_default {
-must_monomorphise = true;
+let must_monomorphise = if type_params.len() > 0 || is_default {
+true
 } else if def_id.krate == ast::LOCAL_CRATE {
-{
-let map_node = session::expect(
-ccx.sess,
-ccx.tcx.items.find(def_id.node),
-|| format!("local item should be in ast map"));
+let map_node = session::expect(
+ccx.sess,
+ccx.tcx.map.find(def_id.node),
+|| format!("local item should be in ast map"));

-match map_node {
-ast_map::NodeForeignItem(_, abis, _, _) => {
-must_monomorphise = abis.is_intrinsic()
-}
-_ => {
-must_monomorphise = false;
-}
+match map_node {
+ast_map::NodeForeignItem(_) => {
+ccx.tcx.map.get_foreign_abis(def_id.node).is_intrinsic()
+}
+_ => false
 }
 } else {
-must_monomorphise = false;
-}
+false
+};

 // Create a monomorphic verison of generic functions
 if must_monomorphise {

@@ -24,8 +24,6 @@ use middle::trans::glue;
 use middle::trans::type_::Type;
 use middle::ty;
 use syntax::ast;
-use syntax::ast_map;
-use syntax::parse::token;
 use syntax::opt_vec;
 use syntax::opt_vec::OptVec;
 use util::ppaux::Repr;

@@ -89,8 +87,7 @@ impl<'a> CleanupMethods<'a> for FunctionContext<'a> {
 */

 debug!("push_ast_cleanup_scope({})",
-ast_map::node_id_to_str(self.ccx.tcx.items, id,
-token::get_ident_interner()));
+self.ccx.tcx.map.node_to_str(id));

 // FIXME(#2202) -- currently closure bodies have a parent
 // region, which messes up the assertion below, since there

@@ -114,8 +111,7 @@ impl<'a> CleanupMethods<'a> for FunctionContext<'a> {
 id: ast::NodeId,
 exits: [&'a Block<'a>, ..EXIT_MAX]) {
 debug!("push_loop_cleanup_scope({})",
-ast_map::node_id_to_str(self.ccx.tcx.items, id,
-token::get_ident_interner()));
+self.ccx.tcx.map.node_to_str(id));
 assert_eq!(Some(id), self.top_ast_scope());

 self.push_scope(CleanupScope::new(LoopScopeKind(id, exits)));

@@ -139,8 +135,7 @@ impl<'a> CleanupMethods<'a> for FunctionContext<'a> {
 */

 debug!("pop_and_trans_ast_cleanup_scope({})",
-ast_map::node_id_to_str(self.ccx.tcx.items, cleanup_scope,
-token::get_ident_interner()));
+self.ccx.tcx.map.node_to_str(cleanup_scope));

 assert!(self.top_scope(|s| s.kind.is_ast_with_id(cleanup_scope)));


@@ -159,8 +154,7 @@ impl<'a> CleanupMethods<'a> for FunctionContext<'a> {
 */

 debug!("pop_loop_cleanup_scope({})",
-ast_map::node_id_to_str(self.ccx.tcx.items, cleanup_scope,
-token::get_ident_interner()));
+self.ccx.tcx.map.node_to_str(cleanup_scope));

 assert!(self.top_scope(|s| s.kind.is_loop_with_id(cleanup_scope)));


@@ -338,8 +332,7 @@ impl<'a> CleanupMethods<'a> for FunctionContext<'a> {

 self.ccx.tcx.sess.bug(
 format!("no cleanup scope {} found",
-ast_map::node_id_to_str(self.ccx.tcx.items, cleanup_scope,
-token::get_ident_interner())));
+self.ccx.tcx.map.node_to_str(cleanup_scope)));
 }

 fn schedule_clean_in_custom_scope(&self,

@ -26,11 +26,8 @@ use util::ppaux::Repr;
|
|||
use util::ppaux::ty_to_str;
|
||||
|
||||
use arena::TypedArena;
|
||||
use std::vec;
|
||||
use syntax::ast;
|
||||
use syntax::ast_map::PathName;
|
||||
use syntax::ast_util;
|
||||
use syntax::parse::token::special_idents;
|
||||
|
||||
// ___Good to know (tm)__________________________________________________
|
||||
//
|
||||
|
@ -353,8 +350,7 @@ pub fn trans_expr_fn<'a>(
|
|||
sigil: ast::Sigil,
|
||||
decl: &ast::FnDecl,
|
||||
body: &ast::Block,
|
||||
outer_id: ast::NodeId,
|
||||
user_id: ast::NodeId,
|
||||
id: ast::NodeId,
|
||||
dest: expr::Dest)
|
||||
-> &'a Block<'a> {
|
||||
/*!
|
||||
|
@ -364,13 +360,7 @@ pub fn trans_expr_fn<'a>(
|
|||
* - `sigil`
|
||||
* - `decl`
|
||||
* - `body`
|
||||
* - `outer_id`: The id of the closure expression with the correct
|
||||
* type. This is usually the same as `user_id`, but in the
|
||||
* case of a `for` loop, the `outer_id` will have the return
|
||||
* type of boolean, and the `user_id` will have the return type
|
||||
* of `nil`.
|
||||
* - `user_id`: The id of the closure as the user expressed it.
|
||||
Generally the same as `outer_id`
|
||||
* - `id`: The id of the closure expression.
|
||||
* - `cap_clause`: information about captured variables, if any.
|
||||
* - `dest`: where to write the closure value, which must be a
|
||||
(fn ptr, env) pair
|
||||
|
@ -386,18 +376,16 @@ pub fn trans_expr_fn<'a>(
|
|||
};
|
||||
|
||||
let ccx = bcx.ccx();
|
||||
let fty = node_id_type(bcx, outer_id);
|
||||
let fty = node_id_type(bcx, id);
|
||||
let f = match ty::get(fty).sty {
|
||||
ty::ty_closure(ref f) => f,
|
||||
_ => fail!("expected closure")
|
||||
};
|
||||
|
||||
let sub_path = vec::append_one(bcx.fcx.path.clone(),
|
||||
PathName(special_idents::anon));
|
||||
// FIXME: Bad copy.
|
||||
let s = mangle_internal_name_by_path_and_seq(ccx,
|
||||
sub_path.clone(),
|
||||
"expr_fn");
|
||||
let tcx = bcx.tcx();
|
||||
let s = tcx.map.with_path(id, |path| {
|
||||
mangle_internal_name_by_path_and_seq(path, "closure")
|
||||
});
|
||||
let llfn = decl_internal_rust_fn(ccx, true, f.sig.inputs, f.sig.output, s);
|
||||
|
||||
// set an inline hint for all closures
|
||||
|
@ -405,11 +393,11 @@ pub fn trans_expr_fn<'a>(
|
|||
|
||||
let cap_vars = {
|
||||
let capture_map = ccx.maps.capture_map.borrow();
|
||||
capture_map.get().get_copy(&user_id)
|
||||
capture_map.get().get_copy(&id)
|
||||
};
|
||||
let ClosureResult {llbox, cdata_ty, bcx} = build_closure(bcx, *cap_vars.borrow(), sigil);
|
||||
trans_closure(ccx, sub_path, decl, body, llfn,
|
||||
bcx.fcx.param_substs, user_id,
|
||||
trans_closure(ccx, decl, body, llfn,
|
||||
bcx.fcx.param_substs, id,
|
||||
[], ty::ty_fn_ret(fty),
|
||||
|bcx| load_environment(bcx, cdata_ty, *cap_vars.borrow(), sigil));
|
||||
fill_fn_pair(bcx, dest_addr, llfn, llbox);
|
||||
|
@ -454,8 +442,9 @@ pub fn get_wrapper_for_bare_fn(ccx: @CrateContext,
|
|||
}
|
||||
};
|
||||
|
||||
let path = ty::item_path(tcx, def_id);
|
||||
let name = mangle_internal_name_by_path_and_seq(ccx, path, "as_closure");
|
||||
let name = ty::with_path(tcx, def_id, |path| {
|
||||
mangle_internal_name_by_path_and_seq(path, "as_closure")
|
||||
});
|
||||
let llfn = if is_local {
|
||||
decl_internal_rust_fn(ccx, true, f.sig.inputs, f.sig.output, name)
|
||||
} else {
|
||||
|
@ -476,8 +465,7 @@ pub fn get_wrapper_for_bare_fn(ccx: @CrateContext,
|
|||
let _icx = push_ctxt("closure::get_wrapper_for_bare_fn");
|
||||
|
||||
let arena = TypedArena::new();
|
||||
let fcx = new_fn_ctxt(ccx, ~[], llfn, -1, true, f.sig.output, None, None,
|
||||
&arena);
|
||||
let fcx = new_fn_ctxt(ccx, llfn, -1, true, f.sig.output, None, None, &arena);
|
||||
init_function(&fcx, true, f.sig.output, None);
|
||||
let bcx = fcx.entry_bcx.get().unwrap();
|
||||
|
||||
|
|
|
@ -10,8 +10,6 @@
|
|||
|
||||
//! Code that is useful in various trans modules.
|
||||
|
||||
|
||||
use driver::session;
|
||||
use driver::session::Session;
|
||||
use lib::llvm::{ValueRef, BasicBlockRef, BuilderRef};
|
||||
use lib::llvm::{True, False, Bool};
|
||||
|
@ -37,12 +35,12 @@ use std::cast;
|
|||
use std::cell::{Cell, RefCell};
|
||||
use std::hashmap::HashMap;
|
||||
use std::libc::{c_uint, c_longlong, c_ulonglong, c_char};
|
||||
use syntax::ast::{Ident};
|
||||
use syntax::ast_map::{Path, PathElem, PathPrettyName};
|
||||
use syntax::ast::Ident;
|
||||
use syntax::ast;
|
||||
use syntax::ast_map::{PathElem, PathName};
|
||||
use syntax::codemap::Span;
|
||||
use syntax::parse::token::InternedString;
|
||||
use syntax::parse::token;
|
||||
use syntax::{ast, ast_map};
|
||||
|
||||
pub use middle::trans::context::CrateContext;
|
||||
|
||||
|
@ -102,10 +100,8 @@ pub fn return_type_is_void(ccx: &CrateContext, ty: ty::t) -> bool {
|
|||
ty::type_is_nil(ty) || ty::type_is_bot(ty) || ty::type_is_empty(ccx.tcx, ty)
|
||||
}
|
||||
|
||||
pub fn gensym_name(name: &str) -> (Ident, PathElem) {
|
||||
let name = token::gensym(name);
|
||||
let ident = Ident::new(name);
|
||||
(ident, PathPrettyName(ident, name as u64))
|
||||
pub fn gensym_name(name: &str) -> PathElem {
|
||||
PathName(token::gensym(name))
|
||||
}
|
||||
|
||||
pub struct tydesc_info {
|
||||
|
@ -277,7 +273,6 @@ pub struct FunctionContext<'a> {
|
|||
// The source span and nesting context where this function comes from, for
|
||||
// error reporting and symbol generation.
|
||||
span: Option<Span>,
|
||||
path: Path,
|
||||
|
||||
// The arena that blocks are allocated from.
|
||||
block_arena: &'a TypedArena<Block<'a>>,
|
||||
|
@ -446,12 +441,11 @@ impl<'a> Block<'a> {
|
|||
pub fn sess(&self) -> Session { self.fcx.ccx.sess }
|
||||
|
||||
pub fn ident(&self, ident: Ident) -> ~str {
|
||||
let string = token::get_ident(ident.name);
|
||||
string.get().to_str()
|
||||
token::get_ident(ident).get().to_str()
|
||||
}
|
||||
|
||||
pub fn node_id_to_str(&self, id: ast::NodeId) -> ~str {
|
||||
ast_map::node_id_to_str(self.tcx().items, id, self.sess().intr())
|
||||
self.tcx().map.node_to_str(id)
|
||||
}
|
||||
|
||||
pub fn expr_to_str(&self, e: &ast::Expr) -> ~str {
|
||||
|
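
The hunk above replaces `token::get_ident(ident.name)` with `token::get_ident(ident)`; either way, the call resolves an interned identifier back to its string so it can be printed. A minimal, self-contained model of that kind of interner, in current Rust syntax (the `Interner` type and its methods are illustrative stand-ins, not the real `token` module API):

use std::collections::HashMap;

/// A toy string interner: maps each distinct string to a small integer
/// (a "Name") and can resolve that integer back to the string.
struct Interner {
    names: HashMap<String, u32>,
    strings: Vec<String>,
}

impl Interner {
    fn new() -> Interner {
        Interner { names: HashMap::new(), strings: Vec::new() }
    }

    /// Intern a string, returning its Name (an index into `strings`).
    fn intern(&mut self, s: &str) -> u32 {
        if let Some(&n) = self.names.get(s) {
            return n;
        }
        let n = self.strings.len() as u32;
        self.strings.push(s.to_string());
        self.names.insert(s.to_string(), n);
        n
    }

    /// Resolve a Name back to its string, like token::get_ident/get_name.
    fn get(&self, name: u32) -> &str {
        &self.strings[name as usize]
    }
}

fn main() {
    let mut interner = Interner::new();
    let a = interner.intern("foo");
    let b = interner.intern("foo");
    assert_eq!(a, b);                 // same string, same Name
    assert_eq!(interner.get(a), "foo");
}

Interning lets identifiers be compared and hashed as small integers; the string is only materialized at the few places, like `Block::ident` above, that need to print it.
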
@ -785,25 +779,6 @@ pub fn align_to(cx: &Block, off: ValueRef, align: ValueRef) -> ValueRef {
|
|||
return build::And(cx, bumped, build::Not(cx, mask));
|
||||
}
|
||||
|
||||
pub fn path_str(sess: session::Session, p: &[PathElem]) -> ~str {
|
||||
let mut r = ~"";
|
||||
let mut first = true;
|
||||
for e in p.iter() {
|
||||
match *e {
|
||||
ast_map::PathName(s) | ast_map::PathMod(s) |
|
||||
ast_map::PathPrettyName(s, _) => {
|
||||
if first {
|
||||
first = false
|
||||
} else {
|
||||
r.push_str("::")
|
||||
}
|
||||
r.push_str(sess.str_of(s));
|
||||
}
|
||||
}
|
||||
}
|
||||
r
|
||||
}
|
||||
|
||||
pub fn monomorphize_type(bcx: &Block, t: ty::t) -> ty::t {
|
||||
match bcx.fcx.param_substs {
|
||||
Some(substs) => {
|
||||
|
|
|
@ -34,7 +34,7 @@ use middle::trans::type_::Type;
|
|||
use std::c_str::ToCStr;
|
||||
use std::libc::c_uint;
|
||||
use std::vec;
|
||||
use syntax::{ast, ast_util, ast_map};
|
||||
use syntax::{ast, ast_util};
|
||||
|
||||
pub fn const_lit(cx: &CrateContext, e: &ast::Expr, lit: ast::Lit)
|
||||
-> ValueRef {
|
||||
|
@ -170,18 +170,11 @@ pub fn get_const_val(cx: @CrateContext,
|
|||
def_id = inline::maybe_instantiate_inline(cx, def_id);
|
||||
}
|
||||
|
||||
let opt_item = cx.tcx.items.get(def_id.node);
|
||||
|
||||
match opt_item {
|
||||
ast_map::NodeItem(item, _) => {
|
||||
match item.node {
|
||||
ast::ItemStatic(_, ast::MutImmutable, _) => {
|
||||
trans_const(cx, ast::MutImmutable, def_id.node);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
match cx.tcx.map.expect_item(def_id.node).node {
|
||||
ast::ItemStatic(_, ast::MutImmutable, _) => {
|
||||
trans_const(cx, ast::MutImmutable, def_id.node);
|
||||
}
|
||||
_ => cx.tcx.sess.bug("expected a const to be an item")
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
|
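
The rewritten lookup leans on an `expect_item`-style accessor: fetch the node for an id and fail loudly if it is not an item, instead of matching on an `Option` at every call site. A hedged sketch of that pattern over a plain map (the `Node` and `AstMap` types below are invented stand-ins, not the real ast_map):

use std::collections::HashMap;

// Stand-ins for AST nodes: only "is this an item?" matters here.
#[derive(Debug)]
enum Node {
    Item(String),
    Expr(String),
}

struct AstMap {
    nodes: HashMap<u32, Node>,
}

impl AstMap {
    /// Return the item stored under `id`, or panic with context if the
    /// node is missing or is not an item (mirroring the map's expect_item).
    fn expect_item(&self, id: u32) -> &str {
        match self.nodes.get(&id) {
            Some(Node::Item(item)) => item,
            other => panic!("expected item for node {}, found {:?}", id, other),
        }
    }
}

fn main() {
    let mut nodes = HashMap::new();
    nodes.insert(7, Node::Item("static N: int = 3".to_string()));
    nodes.insert(8, Node::Expr("n + 1".to_string()));
    let map = AstMap { nodes };
    assert_eq!(map.expect_item(7), "static N: int = 3");
}
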
|
|
@ -176,7 +176,7 @@ pub struct CrateDebugContext {
|
|||
priv current_debug_location: Cell<DebugLocation>,
|
||||
priv created_files: RefCell<HashMap<~str, DIFile>>,
|
||||
priv created_types: RefCell<HashMap<uint, DIType>>,
|
||||
priv namespace_map: RefCell<HashMap<~[ast::Ident], @NamespaceTreeNode>>,
|
||||
priv namespace_map: RefCell<HashMap<~[ast::Name], @NamespaceTreeNode>>,
|
||||
// This collection is used to assert that composite types (structs, enums, ...) have their
|
||||
// members only set once:
|
||||
priv composite_types_completed: RefCell<HashSet<DIType>>,
|
||||
|
@ -332,7 +332,7 @@ pub fn create_captured_var_metadata(bcx: &Block,
|
|||
|
||||
let cx = bcx.ccx();
|
||||
|
||||
let ast_item = cx.tcx.items.find(node_id);
|
||||
let ast_item = cx.tcx.map.find(node_id);
|
||||
|
||||
let variable_ident = match ast_item {
|
||||
None => {
|
||||
|
@ -540,10 +540,10 @@ pub fn create_function_debug_context(cx: &CrateContext,
|
|||
|
||||
let empty_generics = ast::Generics { lifetimes: opt_vec::Empty, ty_params: opt_vec::Empty };
|
||||
|
||||
let fnitem = cx.tcx.items.get(fn_ast_id);
|
||||
let fnitem = cx.tcx.map.get(fn_ast_id);
|
||||
|
||||
let (ident, fn_decl, generics, top_level_block, span, has_path) = match fnitem {
|
||||
ast_map::NodeItem(ref item, _) => {
|
||||
ast_map::NodeItem(ref item) => {
|
||||
match item.node {
|
||||
ast::ItemFn(fn_decl, _, _, ref generics, top_level_block) => {
|
||||
(item.ident, fn_decl, generics, top_level_block, item.span, true)
|
||||
|
@ -554,7 +554,7 @@ pub fn create_function_debug_context(cx: &CrateContext,
|
|||
}
|
||||
}
|
||||
}
|
||||
ast_map::NodeMethod(method, _, _) => {
|
||||
ast_map::NodeMethod(method) => {
|
||||
(method.ident,
|
||||
method.decl,
|
||||
&method.generics,
|
||||
|
@ -581,7 +581,7 @@ pub fn create_function_debug_context(cx: &CrateContext,
|
|||
"create_function_debug_context: expected an expr_fn_block here")
|
||||
}
|
||||
}
|
||||
ast_map::NodeTraitMethod(trait_method, _, _) => {
|
||||
ast_map::NodeTraitMethod(trait_method) => {
|
||||
match *trait_method {
|
||||
ast::Provided(method) => {
|
||||
(method.ident,
|
||||
|
@ -622,8 +622,7 @@ pub fn create_function_debug_context(cx: &CrateContext,
|
|||
};
|
||||
|
||||
// get_template_parameters() will append a `<...>` clause to the function name if necessary.
|
||||
let function_name_string = token::get_ident(ident.name);
|
||||
let mut function_name = function_name_string.get().to_owned();
|
||||
let mut function_name = token::get_ident(ident).get().to_str();
|
||||
let template_parameters = get_template_parameters(cx,
|
||||
generics,
|
||||
param_substs,
|
||||
|
@ -634,7 +633,7 @@ pub fn create_function_debug_context(cx: &CrateContext,
|
|||
// into a namespace. In the future this could be improved somehow (storing a path in the
|
||||
// ast_map, or constructing a path using the enclosing function).
|
||||
let (linkage_name, containing_scope) = if has_path {
|
||||
let namespace_node = namespace_for_item(cx, ast_util::local_def(fn_ast_id), span);
|
||||
let namespace_node = namespace_for_item(cx, ast_util::local_def(fn_ast_id));
|
||||
let linkage_name = namespace_node.mangled_name_of_contained_item(function_name);
|
||||
let containing_scope = namespace_node.scope;
|
||||
(linkage_name, containing_scope)
|
||||
|
@ -792,9 +791,8 @@ pub fn create_function_debug_context(cx: &CrateContext,
|
|||
|
||||
let ident = special_idents::type_self;
|
||||
|
||||
let param_metadata_string = token::get_ident(ident.name);
|
||||
let param_metadata = param_metadata_string.get()
|
||||
.with_c_str(|name| {
|
||||
let param_metadata = token::get_ident(ident).get()
|
||||
.with_c_str(|name| {
|
||||
unsafe {
|
||||
llvm::LLVMDIBuilderCreateTemplateTypeParameter(
|
||||
DIB(cx),
|
||||
|
@ -832,9 +830,8 @@ pub fn create_function_debug_context(cx: &CrateContext,
|
|||
// Again, only create type information if debuginfo is enabled
|
||||
if cx.sess.opts.debuginfo {
|
||||
let actual_type_metadata = type_metadata(cx, actual_type, codemap::DUMMY_SP);
|
||||
let param_metadata_string = token::get_ident(ident.name);
|
||||
let param_metadata = param_metadata_string.get()
|
||||
.with_c_str(|name| {
|
||||
let param_metadata = token::get_ident(ident).get()
|
||||
.with_c_str(|name| {
|
||||
unsafe {
|
||||
llvm::LLVMDIBuilderCreateTemplateTypeParameter(
|
||||
DIB(cx),
|
||||
|
@ -939,8 +936,7 @@ fn declare_local(bcx: &Block,
|
|||
let filename = span_start(cx, span).file.name.clone();
|
||||
let file_metadata = file_metadata(cx, filename);
|
||||
|
||||
let variable_ident_string = token::get_ident(variable_ident.name);
|
||||
let name: &str = variable_ident_string.get();
|
||||
let name = token::get_ident(variable_ident);
|
||||
let loc = span_start(cx, span);
|
||||
let type_metadata = type_metadata(cx, variable_type, span);
|
||||
|
||||
|
@ -950,7 +946,7 @@ fn declare_local(bcx: &Block,
|
|||
CapturedVariable => (0, DW_TAG_auto_variable)
|
||||
};
|
||||
|
||||
let (var_alloca, var_metadata) = name.with_c_str(|name| {
|
||||
let (var_alloca, var_metadata) = name.get().with_c_str(|name| {
|
||||
match variable_access {
|
||||
DirectVariable { alloca } => (
|
||||
alloca,
|
||||
|
@ -1056,7 +1052,7 @@ fn scope_metadata(fcx: &FunctionContext,
|
|||
match scope_map.get().find_copy(&node_id) {
|
||||
Some(scope_metadata) => scope_metadata,
|
||||
None => {
|
||||
let node = fcx.ccx.tcx.items.get(node_id);
|
||||
let node = fcx.ccx.tcx.map.get(node_id);
|
||||
|
||||
fcx.ccx.sess.span_bug(span,
|
||||
format!("debuginfo: Could not find scope info for node {:?}", node));
|
||||
|
@ -1169,8 +1165,7 @@ impl StructMemberDescriptionFactory {
|
|||
let name = if field.ident.name == special_idents::unnamed_field.name {
|
||||
~""
|
||||
} else {
|
||||
let string = token::get_ident(field.ident.name);
|
||||
string.get().to_str()
|
||||
token::get_ident(field.ident).get().to_str()
|
||||
};
|
||||
|
||||
MemberDescription {
|
||||
|
@ -1192,7 +1187,7 @@ fn prepare_struct_metadata(cx: &CrateContext,
|
|||
let struct_name = ppaux::ty_to_str(cx.tcx, struct_type);
|
||||
let struct_llvm_type = type_of::type_of(cx, struct_type);
|
||||
|
||||
let (containing_scope, definition_span) = get_namespace_and_span_for_item(cx, def_id, span);
|
||||
let (containing_scope, definition_span) = get_namespace_and_span_for_item(cx, def_id);
|
||||
|
||||
let file_name = span_start(cx, definition_span).file.name.clone();
|
||||
let file_metadata = file_metadata(cx, file_name);
|
||||
|
@ -1391,26 +1386,13 @@ fn describe_enum_variant(cx: &CrateContext,
|
|||
file_metadata: DIFile,
|
||||
span: Span)
|
||||
-> (DICompositeType, Type, MemberDescriptionFactory) {
|
||||
let variant_info_string = token::get_ident(variant_info.name.name);
|
||||
let variant_name = variant_info_string.get();
|
||||
let variant_llvm_type = Type::struct_(struct_def.fields.map(|&t| type_of::type_of(cx, t)),
|
||||
struct_def.packed);
|
||||
// Could do some consistency checks here: size, align, field count, discr type
|
||||
|
||||
// Find the source code location of the variant's definition
|
||||
let variant_definition_span = if variant_info.id.krate == ast::LOCAL_CRATE {
|
||||
{
|
||||
match cx.tcx.items.find(variant_info.id.node) {
|
||||
Some(ast_map::NodeVariant(ref variant, _, _)) => variant.span,
|
||||
ref node => {
|
||||
cx.sess.span_warn(span,
|
||||
format!("debuginfo::enum_metadata()::\
|
||||
adt_struct_metadata() - Unexpected node \
|
||||
type: {:?}. This is a bug.", node));
|
||||
codemap::DUMMY_SP
|
||||
}
|
||||
}
|
||||
}
|
||||
cx.tcx.map.span(variant_info.id.node)
|
||||
} else {
|
||||
// For definitions from other crates we have no location information available.
|
||||
codemap::DUMMY_SP
|
||||
|
@ -1418,7 +1400,7 @@ fn describe_enum_variant(cx: &CrateContext,
|
|||
|
||||
let metadata_stub = create_struct_stub(cx,
|
||||
variant_llvm_type,
|
||||
variant_name,
|
||||
token::get_ident(variant_info.name).get(),
|
||||
containing_scope,
|
||||
file_metadata,
|
||||
variant_definition_span);
|
||||
|
@ -1426,10 +1408,7 @@ fn describe_enum_variant(cx: &CrateContext,
|
|||
// Get the argument names from the enum variant info
|
||||
let mut arg_names = match variant_info.arg_names {
|
||||
Some(ref names) => {
|
||||
names.map(|ident| {
|
||||
let string = token::get_ident(ident.name);
|
||||
string.get().to_str()
|
||||
})
|
||||
names.map(|ident| token::get_ident(*ident).get().to_str())
|
||||
}
|
||||
None => variant_info.args.map(|_| ~"")
|
||||
};
|
||||
|
@ -1462,9 +1441,7 @@ fn prepare_enum_metadata(cx: &CrateContext,
|
|||
-> RecursiveTypeDescription {
|
||||
let enum_name = ppaux::ty_to_str(cx.tcx, enum_type);
|
||||
|
||||
let (containing_scope, definition_span) = get_namespace_and_span_for_item(cx,
|
||||
enum_def_id,
|
||||
span);
|
||||
let (containing_scope, definition_span) = get_namespace_and_span_for_item(cx, enum_def_id);
|
||||
let loc = span_start(cx, definition_span);
|
||||
let file_metadata = file_metadata(cx, loc.file.name);
|
||||
|
||||
|
@ -1487,16 +1464,12 @@ fn prepare_enum_metadata(cx: &CrateContext,
|
|||
let enumerators_metadata: ~[DIDescriptor] = variants
|
||||
.iter()
|
||||
.map(|v| {
|
||||
let string = token::get_ident(v.name.name);
|
||||
let name: &str = string.get();
|
||||
let discriminant_value = v.disr_val as c_ulonglong;
|
||||
|
||||
name.with_c_str(|name| {
|
||||
token::get_ident(v.name).get().with_c_str(|name| {
|
||||
unsafe {
|
||||
llvm::LLVMDIBuilderCreateEnumerator(
|
||||
DIB(cx),
|
||||
name,
|
||||
discriminant_value)
|
||||
v.disr_val as c_ulonglong)
|
||||
}
|
||||
})
|
||||
})
|
||||
|
@ -2007,15 +1980,13 @@ fn trait_metadata(cx: &CrateContext,
|
|||
substs: &ty::substs,
|
||||
trait_store: ty::TraitStore,
|
||||
mutability: ast::Mutability,
|
||||
_: &ty::BuiltinBounds,
|
||||
usage_site_span: Span)
|
||||
_: &ty::BuiltinBounds)
|
||||
-> DIType {
|
||||
// The implementation provided here is a stub. It makes sure that the trait type is
|
||||
// assigned the correct name, size, namespace, and source location. But it does not describe
|
||||
// the trait's methods.
|
||||
let path = ty::item_path(cx.tcx, def_id);
|
||||
let ident = path.last().unwrap().ident();
|
||||
let ident_string = token::get_ident(ident.name);
|
||||
let last = ty::with_path(cx.tcx, def_id, |mut path| path.last().unwrap());
|
||||
let ident_string = token::get_name(last.name());
|
||||
let name = ppaux::trait_store_to_str(cx.tcx, trait_store) +
|
||||
ppaux::mutability_to_str(mutability) +
|
||||
ident_string.get();
|
||||
|
@ -2023,8 +1994,7 @@ fn trait_metadata(cx: &CrateContext,
|
|||
let name = ppaux::parameterized(cx.tcx, name, &substs.regions,
|
||||
substs.tps, def_id, true);
|
||||
|
||||
let (containing_scope, definition_span) =
|
||||
get_namespace_and_span_for_item(cx, def_id, usage_site_span);
|
||||
let (containing_scope, definition_span) = get_namespace_and_span_for_item(cx, def_id);
|
||||
|
||||
let file_name = span_start(cx, definition_span).file.name.clone();
|
||||
let file_metadata = file_metadata(cx, file_name);
|
||||
|
@ -2138,7 +2108,7 @@ fn type_metadata(cx: &CrateContext,
|
|||
subroutine_type_metadata(cx, &closurety.sig, usage_site_span)
|
||||
},
|
||||
ty::ty_trait(def_id, ref substs, trait_store, mutability, ref bounds) => {
|
||||
trait_metadata(cx, def_id, t, substs, trait_store, mutability, bounds, usage_site_span)
|
||||
trait_metadata(cx, def_id, t, substs, trait_store, mutability, bounds)
|
||||
},
|
||||
ty::ty_struct(def_id, ref substs) => {
|
||||
prepare_struct_metadata(cx, t, def_id, substs, usage_site_span).finalize(cx)
|
||||
|
@ -2256,25 +2226,11 @@ fn assert_type_for_node_id(cx: &CrateContext, node_id: ast::NodeId, error_span:
|
|||
}
|
||||
}
|
||||
|
||||
fn get_namespace_and_span_for_item(cx: &CrateContext,
|
||||
def_id: ast::DefId,
|
||||
warning_span: Span)
|
||||
-> (DIScope, Span) {
|
||||
let containing_scope = namespace_for_item(cx, def_id, warning_span).scope;
|
||||
fn get_namespace_and_span_for_item(cx: &CrateContext, def_id: ast::DefId)
|
||||
-> (DIScope, Span) {
|
||||
let containing_scope = namespace_for_item(cx, def_id).scope;
|
||||
let definition_span = if def_id.krate == ast::LOCAL_CRATE {
|
||||
{
|
||||
let definition_span = match cx.tcx.items.find(def_id.node) {
|
||||
Some(ast_map::NodeItem(item, _)) => item.span,
|
||||
ref node => {
|
||||
cx.sess.span_warn(warning_span,
|
||||
format!("debuginfo::\
|
||||
get_namespace_and_span_for_item() \
|
||||
- Unexpected node type: {:?}", *node));
|
||||
codemap::DUMMY_SP
|
||||
}
|
||||
};
|
||||
definition_span
|
||||
}
|
||||
cx.tcx.map.span(def_id.node)
|
||||
} else {
|
||||
// For external items there is no span information
|
||||
codemap::DUMMY_SP
|
||||
|
@ -2745,122 +2701,112 @@ fn populate_scope_map(cx: &CrateContext,
|
|||
//=-------------------------------------------------------------------------------------------------
|
||||
|
||||
struct NamespaceTreeNode {
|
||||
ident: ast::Ident,
|
||||
name: ast::Name,
|
||||
scope: DIScope,
|
||||
parent: Option<@NamespaceTreeNode>,
|
||||
}
|
||||
|
||||
impl NamespaceTreeNode {
|
||||
fn mangled_name_of_contained_item(&self, item_name: &str) -> ~str {
|
||||
let mut name = ~"_ZN";
|
||||
fill_nested(self, &mut name);
|
||||
|
||||
name.push_str(format!("{}{}", item_name.len(), item_name));
|
||||
name.push_char('E');
|
||||
|
||||
return name;
|
||||
|
||||
fn fill_nested(node: &NamespaceTreeNode, output: &mut ~str) {
|
||||
match node.parent {
|
||||
Some(parent) => {
|
||||
fill_nested(parent, output);
|
||||
}
|
||||
Some(parent) => fill_nested(parent, output),
|
||||
None => {}
|
||||
}
|
||||
let string = token::get_ident(node.ident.name);
|
||||
output.push_str(format!("{}{}",
|
||||
string.get().len(),
|
||||
string.get()));
|
||||
let string = token::get_name(node.name);
|
||||
output.push_str(format!("{}", string.get().len()));
|
||||
output.push_str(string.get());
|
||||
}
|
||||
|
||||
let mut name = ~"_ZN";
|
||||
fill_nested(self, &mut name);
|
||||
name.push_str(format!("{}", item_name.len()));
|
||||
name.push_str(item_name);
|
||||
name.push_char('E');
|
||||
name
|
||||
}
|
||||
}
|
||||
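
`mangled_name_of_contained_item` walks the namespace chain from the root outward and emits each component as `<length><bytes>`, bracketed by `_ZN` and `E`. The encoding on its own, as a standalone sketch:

/// Encode a path such as ["std", "vec", "reserve"] the way the namespace
/// tree does: _ZN3std3vec7reserveE.
fn mangle_nested_name(components: &[&str]) -> String {
    let mut out = String::from("_ZN");
    for c in components {
        out.push_str(&c.len().to_string());
        out.push_str(c);
    }
    out.push('E');
    out
}

fn main() {
    assert_eq!(mangle_nested_name(&["std", "vec", "reserve"]),
               "_ZN3std3vec7reserveE");
}

The length prefix is what lets a demangler split the components back apart without needing any separator character.
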
|
||||
fn namespace_for_item(cx: &CrateContext,
|
||||
def_id: ast::DefId,
|
||||
warning_span: Span)
|
||||
-> @NamespaceTreeNode {
|
||||
let namespace_path = {
|
||||
let mut item_path = ty::item_path(cx.tcx, def_id);
|
||||
|
||||
if (def_id.krate == ast::LOCAL_CRATE && item_path.len() < 1) ||
|
||||
(def_id.krate != ast::LOCAL_CRATE && item_path.len() < 2) {
|
||||
cx.sess.bug(format!("debuginfo::namespace_for_item() - Item path too short: {}",
|
||||
ast_map::path_to_str(item_path, token::get_ident_interner())));
|
||||
}
|
||||
|
||||
// remove the name of the item
|
||||
item_path.pop();
|
||||
|
||||
if def_id.krate == ast::LOCAL_CRATE {
|
||||
// prepend crate name if not already present
|
||||
fn namespace_for_item(cx: &CrateContext, def_id: ast::DefId) -> @NamespaceTreeNode {
|
||||
ty::with_path(cx.tcx, def_id, |path| {
|
||||
// prepend crate name if not already present
|
||||
let krate = if def_id.krate == ast::LOCAL_CRATE {
|
||||
let crate_namespace_ident = token::str_to_ident(cx.link_meta.crateid.name);
|
||||
item_path.insert(0, ast_map::PathMod(crate_namespace_ident));
|
||||
}
|
||||
|
||||
item_path
|
||||
};
|
||||
|
||||
let mut current_key = vec::with_capacity(namespace_path.len());
|
||||
let mut parent_node: Option<@NamespaceTreeNode> = None;
|
||||
let last_index = namespace_path.len() - 1;
|
||||
|
||||
// Create/Lookup namespace for each element of the path.
|
||||
for (i, &path_element) in namespace_path.iter().enumerate() {
|
||||
let ident = path_element.ident();
|
||||
current_key.push(ident);
|
||||
|
||||
let existing_node = {
|
||||
let namespace_map = debug_context(cx).namespace_map.borrow();
|
||||
namespace_map.get().find_copy(¤t_key)
|
||||
};
|
||||
let current_node = match existing_node {
|
||||
Some(existing_node) => existing_node,
|
||||
None => {
|
||||
// create and insert
|
||||
let parent_scope = match parent_node {
|
||||
Some(node) => node.scope,
|
||||
None => ptr::null()
|
||||
};
|
||||
let namespace_name_string = token::get_ident(ident.name);
|
||||
let namespace_name = namespace_name_string.get();
|
||||
|
||||
let namespace_metadata = unsafe {
|
||||
namespace_name.with_c_str(|namespace_name| {
|
||||
llvm::LLVMDIBuilderCreateNameSpace(
|
||||
DIB(cx),
|
||||
parent_scope,
|
||||
namespace_name,
|
||||
ptr::null(), // cannot reconstruct file ...
|
||||
0) // ... or line information, but that's not so important.
|
||||
})
|
||||
};
|
||||
|
||||
let node = @NamespaceTreeNode {
|
||||
ident: ident,
|
||||
scope: namespace_metadata,
|
||||
parent: parent_node,
|
||||
};
|
||||
|
||||
{
|
||||
let mut namespace_map = debug_context(cx).namespace_map
|
||||
.borrow_mut();
|
||||
namespace_map.get().insert(current_key.clone(), node);
|
||||
}
|
||||
|
||||
node
|
||||
}
|
||||
};
|
||||
|
||||
if i == last_index {
|
||||
return current_node;
|
||||
Some(ast_map::PathMod(crate_namespace_ident.name))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let mut path = krate.move_iter().chain(path).peekable();
|
||||
|
||||
let mut current_key = ~[];
|
||||
let mut parent_node: Option<@NamespaceTreeNode> = None;
|
||||
|
||||
// Create/Lookup namespace for each element of the path.
|
||||
loop {
|
||||
// Emulate a for loop so we can use peek below.
|
||||
let path_element = match path.next() {
|
||||
Some(e) => e,
|
||||
None => break
|
||||
};
|
||||
// Ignore the name of the item (the last path element).
|
||||
if path.peek().is_none() {
|
||||
break;
|
||||
}
|
||||
|
||||
let name = path_element.name();
|
||||
current_key.push(name);
|
||||
|
||||
let existing_node = {
|
||||
let namespace_map = debug_context(cx).namespace_map.borrow();
|
||||
namespace_map.get().find_copy(¤t_key)
|
||||
};
|
||||
let current_node = match existing_node {
|
||||
Some(existing_node) => existing_node,
|
||||
None => {
|
||||
// create and insert
|
||||
let parent_scope = match parent_node {
|
||||
Some(node) => node.scope,
|
||||
None => ptr::null()
|
||||
};
|
||||
let namespace_name = token::get_name(name);
|
||||
let scope = namespace_name.get().with_c_str(|namespace_name| {
|
||||
unsafe {
|
||||
llvm::LLVMDIBuilderCreateNameSpace(
|
||||
DIB(cx),
|
||||
parent_scope,
|
||||
namespace_name,
|
||||
// cannot reconstruct file ...
|
||||
ptr::null(),
|
||||
// ... or line information, but that's not so important.
|
||||
0)
|
||||
}
|
||||
});
|
||||
|
||||
let node = @NamespaceTreeNode {
|
||||
name: name,
|
||||
scope: scope,
|
||||
parent: parent_node,
|
||||
};
|
||||
|
||||
{
|
||||
let mut namespace_map = debug_context(cx).namespace_map
|
||||
.borrow_mut();
|
||||
namespace_map.get().insert(current_key.clone(), node);
|
||||
}
|
||||
|
||||
node
|
||||
}
|
||||
};
|
||||
|
||||
parent_node = Some(current_node);
|
||||
}
|
||||
}
|
||||
|
||||
// Should be unreachable:
|
||||
let error_message = format!("debuginfo::namespace_for_item() - Code path should be \
|
||||
unreachable. namespace_path was {}",
|
||||
ast_map::path_to_str(namespace_path, token::get_ident_interner()));
|
||||
cx.sess.span_bug(warning_span, error_message);
|
||||
match parent_node {
|
||||
Some(node) => node,
|
||||
None => {
|
||||
cx.sess.bug(format!("debuginfo::namespace_for_item(): \
|
||||
path too short for {:?}", def_id));
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
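
The namespace loop above is written as a `loop` over a peekable iterator so it can stop one element early: the final path element is the item's own name and must not become a namespace. The same skip-the-last-element pattern in isolation:

/// Yield every element of `path` except the last one, with the same
/// peek-before-use trick the namespace loop uses.
fn module_components<'a>(path: &[&'a str]) -> Vec<&'a str> {
    let mut elems = path.iter().peekable();
    let mut out = Vec::new();
    loop {
        let elem = match elems.next() {
            Some(e) => *e,
            None => break,
        };
        // The final element is the item's own name; do not emit it.
        if elems.peek().is_none() {
            break;
        }
        out.push(elem);
    }
    out
}

fn main() {
    assert_eq!(module_components(&["mycrate", "io", "File"]),
               vec!["mycrate", "io"]);
}
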
|
|
|
@ -70,10 +70,11 @@ use middle::trans::type_::Type;
|
|||
|
||||
use std::hashmap::HashMap;
|
||||
use std::vec;
|
||||
use syntax::print::pprust::{expr_to_str};
|
||||
use syntax::ast;
|
||||
use syntax::ast_map::PathMod;
|
||||
use syntax::ast_map;
|
||||
use syntax::codemap;
|
||||
use syntax::parse::token;
|
||||
use syntax::print::pprust::{expr_to_str};
|
||||
|
||||
// Destinations
|
||||
|
||||
|
@ -773,10 +774,8 @@ fn trans_rvalue_dps_unadjusted<'a>(bcx: &'a Block<'a>,
|
|||
let expr_ty = expr_ty(bcx, expr);
|
||||
let sigil = ty::ty_closure_sigil(expr_ty);
|
||||
debug!("translating block function {} with type {}",
|
||||
expr_to_str(expr, tcx.sess.intr()),
|
||||
expr_ty.repr(tcx));
|
||||
closure::trans_expr_fn(bcx, sigil, decl, body,
|
||||
expr.id, expr.id, dest)
|
||||
expr_to_str(expr), expr_ty.repr(tcx));
|
||||
closure::trans_expr_fn(bcx, sigil, decl, body, expr.id, dest)
|
||||
}
|
||||
ast::ExprCall(f, ref args, _) => {
|
||||
callee::trans_call(bcx, expr, f,
|
||||
|
@ -1699,31 +1698,32 @@ fn trans_assign_op<'a>(
|
|||
return result_datum.store_to(bcx, dst_datum.val);
|
||||
}
|
||||
|
||||
fn trans_log_level<'a>(bcx: &'a Block<'a>)
|
||||
-> DatumBlock<'a, Expr> {
|
||||
fn trans_log_level<'a>(bcx: &'a Block<'a>) -> DatumBlock<'a, Expr> {
|
||||
let _icx = push_ctxt("trans_log_level");
|
||||
let ccx = bcx.ccx();
|
||||
|
||||
let (modpath, modname) = {
|
||||
let srccrate;
|
||||
{
|
||||
let srccrate = {
|
||||
let external_srcs = ccx.external_srcs.borrow();
|
||||
srccrate = match external_srcs.get().find(&bcx.fcx.id) {
|
||||
match external_srcs.get().find(&bcx.fcx.id) {
|
||||
Some(&src) => {
|
||||
ccx.sess.cstore.get_crate_data(src.krate).name.clone()
|
||||
}
|
||||
None => ccx.link_meta.crateid.name.to_str(),
|
||||
};
|
||||
};
|
||||
let mut modpath = ~[PathMod(ccx.sess.ident_of(srccrate))];
|
||||
for e in bcx.fcx.path.iter() {
|
||||
match *e {
|
||||
PathMod(_) => { modpath.push(*e) }
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
let modname = path_str(ccx.sess, modpath);
|
||||
(modpath, modname)
|
||||
};
|
||||
bcx.tcx().map.with_path(bcx.fcx.id, |path| {
|
||||
let first = ast_map::PathMod(token::intern(srccrate));
|
||||
let mut path = Some(first).move_iter().chain(path).filter(|e| {
|
||||
match *e {
|
||||
ast_map::PathMod(_) => true,
|
||||
_ => false
|
||||
}
|
||||
});
|
||||
let modpath: ~[ast_map::PathElem] = path.collect();
|
||||
let modname = ast_map::path_to_str(ast_map::Values(modpath.iter()));
|
||||
(modpath, modname)
|
||||
})
|
||||
};
|
||||
|
||||
let module_data_exists;
|
||||
|
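
`trans_log_level` now derives the logging module name by prepending the source crate's name, keeping only the module-level (`PathMod`) elements of the item's path, and joining them with `::` (what the removed `path_str` helper used to do by hand). A simplified version of that chain-filter-join, with a plain enum standing in for `ast_map::PathElem`:

#[derive(Clone, Copy)]
enum Elem<'a> {
    Mod(&'a str),   // stands in for ast_map::PathMod
    Name(&'a str),  // stands in for ast_map::PathName
}

/// Build "srccrate::a::b" from a crate name plus an item path,
/// keeping only the module components.
fn log_module_name<'a>(krate: &'a str, path: &[Elem<'a>]) -> String {
    let mods: Vec<&str> = Some(Elem::Mod(krate))
        .into_iter()
        .chain(path.iter().copied())
        .filter_map(|e| match e {
            Elem::Mod(m) => Some(m),
            Elem::Name(_) => None,
        })
        .collect();
    mods.join("::")
}

fn main() {
    let path = [Elem::Mod("io"), Elem::Mod("net"), Elem::Name("connect")];
    assert_eq!(log_module_name("mycrate", &path), "mycrate::io::net");
}
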
@ -1737,7 +1737,7 @@ fn trans_log_level<'a>(bcx: &'a Block<'a>)
|
|||
module_data.get().get_copy(&modname)
|
||||
} else {
|
||||
let s = link::mangle_internal_name_by_path_and_seq(
|
||||
ccx, modpath, "loglevel");
|
||||
ast_map::Values(modpath.iter()).chain(None), "loglevel");
|
||||
let global;
|
||||
unsafe {
|
||||
global = s.with_c_str(|buf| {
|
||||
|
|
|
@ -27,7 +27,6 @@ use middle::ty::FnSig;
|
|||
use middle::ty;
|
||||
use std::cmp;
|
||||
use std::libc::c_uint;
|
||||
use std::vec;
|
||||
use syntax::abi::{Cdecl, Aapcs, C, AbiSet, Win64};
|
||||
use syntax::abi::{RustIntrinsic, Rust, Stdcall, Fastcall, System};
|
||||
use syntax::codemap::Span;
|
||||
|
@ -106,9 +105,7 @@ pub fn llvm_calling_convention(ccx: &CrateContext,
|
|||
}
|
||||
|
||||
|
||||
pub fn register_foreign_item_fn(ccx: @CrateContext,
|
||||
abis: AbiSet,
|
||||
path: &ast_map::Path,
|
||||
pub fn register_foreign_item_fn(ccx: @CrateContext, abis: AbiSet,
|
||||
foreign_item: @ast::ForeignItem) -> ValueRef {
|
||||
/*!
|
||||
* Registers a foreign function found in a library.
|
||||
|
@ -117,21 +114,18 @@ pub fn register_foreign_item_fn(ccx: @CrateContext,
|
|||
|
||||
debug!("register_foreign_item_fn(abis={}, \
|
||||
path={}, \
|
||||
foreign_item.id={:?})",
|
||||
foreign_item.id={})",
|
||||
abis.repr(ccx.tcx),
|
||||
path.repr(ccx.tcx),
|
||||
ccx.tcx.map.path_to_str(foreign_item.id),
|
||||
foreign_item.id);
|
||||
|
||||
let cc = match llvm_calling_convention(ccx, abis) {
|
||||
Some(cc) => cc,
|
||||
None => {
|
||||
ccx.sess.span_fatal(foreign_item.span,
|
||||
format!("ABI `{}` has no suitable ABI \
|
||||
for target architecture \
|
||||
in module {}",
|
||||
abis.user_string(ccx.tcx),
|
||||
ast_map::path_to_str(*path,
|
||||
ccx.sess.intr())));
|
||||
format!("ABI `{}` has no suitable calling convention \
|
||||
for target architecture",
|
||||
abis.user_string(ccx.tcx)));
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -160,7 +154,7 @@ pub fn register_foreign_item_fn(ccx: @CrateContext,
|
|||
};
|
||||
add_argument_attributes(&tys, llfn);
|
||||
|
||||
return llfn;
|
||||
llfn
|
||||
}
|
||||
|
||||
pub fn trans_native_call<'a>(
|
||||
|
@ -353,28 +347,17 @@ pub fn trans_native_call<'a>(
|
|||
return bcx;
|
||||
}
|
||||
|
||||
pub fn trans_foreign_mod(ccx: @CrateContext,
|
||||
foreign_mod: &ast::ForeignMod) {
|
||||
pub fn trans_foreign_mod(ccx: @CrateContext, foreign_mod: &ast::ForeignMod) {
|
||||
let _icx = push_ctxt("foreign::trans_foreign_mod");
|
||||
for &foreign_item in foreign_mod.items.iter() {
|
||||
match foreign_item.node {
|
||||
ast::ForeignItemFn(..) => {
|
||||
let (abis, mut path) =
|
||||
match ccx.tcx.items.get(foreign_item.id) {
|
||||
ast_map::NodeForeignItem(_, abis, _, path) => {
|
||||
(abis, (*path).clone())
|
||||
}
|
||||
_ => {
|
||||
fail!("unable to find foreign item in tcx.items \
|
||||
table.")
|
||||
}
|
||||
};
|
||||
let abis = foreign_mod.abis;
|
||||
if !(abis.is_rust() || abis.is_intrinsic()) {
|
||||
path.push(ast_map::PathName(foreign_item.ident));
|
||||
register_foreign_item_fn(ccx, abis, &path, foreign_item);
|
||||
register_foreign_item_fn(ccx, abis, foreign_item);
|
||||
}
|
||||
}
|
||||
_ => ()
|
||||
_ => {}
|
||||
}
|
||||
|
||||
let lname = link_name(foreign_item);
|
||||
|
@ -433,7 +416,6 @@ pub fn register_rust_fn_with_foreign_abi(ccx: @CrateContext,
|
|||
}
|
||||
|
||||
pub fn trans_rust_fn_with_foreign_abi(ccx: @CrateContext,
|
||||
path: &ast_map::Path,
|
||||
decl: &ast::FnDecl,
|
||||
body: &ast::Block,
|
||||
attrs: &[ast::Attribute],
|
||||
|
@ -444,14 +426,13 @@ pub fn trans_rust_fn_with_foreign_abi(ccx: @CrateContext,
|
|||
|
||||
unsafe { // unsafe because we call LLVM operations
|
||||
// Build up the Rust function (`foo0` above).
|
||||
let llrustfn = build_rust_fn(ccx, path, decl, body, attrs, id);
|
||||
let llrustfn = build_rust_fn(ccx, decl, body, attrs, id);
|
||||
|
||||
// Build up the foreign wrapper (`foo` above).
|
||||
return build_wrap_fn(ccx, llrustfn, llwrapfn, &tys);
|
||||
}
|
||||
|
||||
fn build_rust_fn(ccx: @CrateContext,
|
||||
path: &ast_map::Path,
|
||||
decl: &ast::FnDecl,
|
||||
body: &ast::Block,
|
||||
attrs: &[ast::Attribute],
|
||||
|
@ -460,10 +441,11 @@ pub fn trans_rust_fn_with_foreign_abi(ccx: @CrateContext,
|
|||
let _icx = push_ctxt("foreign::foreign::build_rust_fn");
|
||||
let tcx = ccx.tcx;
|
||||
let t = ty::node_id_to_type(tcx, id);
|
||||
let ps = link::mangle_internal_name_by_path(
|
||||
ccx, vec::append_one((*path).clone(), ast_map::PathName(
|
||||
special_idents::clownshoe_abi
|
||||
)));
|
||||
|
||||
let ps = ccx.tcx.map.with_path(id, |path| {
|
||||
let abi = Some(ast_map::PathName(special_idents::clownshoe_abi.name));
|
||||
link::mangle(path.chain(abi.move_iter()), None, None)
|
||||
});
|
||||
|
||||
// Compute the type that the function would have if it were just a
|
||||
// normal Rust function. This will be the type of the wrappee fn.
|
||||
|
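
Rather than cloning an owned `ast_map::Path` and appending `clownshoe_abi` to it, the new `build_rust_fn` asks the map for a borrowed iterator over the stored path and chains the synthetic trailing element on the fly before handing everything to `link::mangle`, which is generic over any iterator of path elements. The shape of that, with a trivial stand-in for the mangler (the real one length-prefixes each piece instead of joining with `..`):

/// Stand-in for the mangler: anything that can consume an iterator of
/// path segments will do; link::mangle is generic in the same way.
fn symbol_for<'a>(path: impl Iterator<Item = &'a str>) -> String {
    path.collect::<Vec<_>>().join("..")
}

fn main() {
    // The path as the map stores it, borrowed rather than cloned...
    let stored = ["mycrate", "ffi"];
    // ...plus one synthetic trailing element, chained on for this symbol only.
    let sym = symbol_for(stored.iter().copied().chain(Some("__rust_abi")));
    assert_eq!(sym, "mycrate..ffi..__rust_abi");
}
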
@ -475,19 +457,18 @@ pub fn trans_rust_fn_with_foreign_abi(ccx: @CrateContext,
|
|||
_ => {
|
||||
ccx.sess.bug(format!("build_rust_fn: extern fn {} has ty {}, \
|
||||
expected a bare fn ty",
|
||||
path.repr(tcx),
|
||||
ccx.tcx.map.path_to_str(id),
|
||||
t.repr(tcx)));
|
||||
}
|
||||
};
|
||||
|
||||
debug!("build_rust_fn: path={} id={:?} t={}",
|
||||
path.repr(tcx),
|
||||
id,
|
||||
t.repr(tcx));
|
||||
debug!("build_rust_fn: path={} id={} t={}",
|
||||
ccx.tcx.map.path_to_str(id),
|
||||
id, t.repr(tcx));
|
||||
|
||||
let llfn = base::decl_internal_rust_fn(ccx, false, f.sig.inputs, f.sig.output, ps);
|
||||
base::set_llvm_fn_attrs(attrs, llfn);
|
||||
base::trans_fn(ccx, (*path).clone(), decl, body, llfn, None, id, []);
|
||||
base::trans_fn(ccx, decl, body, llfn, None, id, []);
|
||||
llfn
|
||||
}
|
||||
|
||||
|
@ -733,7 +714,7 @@ pub fn trans_rust_fn_with_foreign_abi(ccx: @CrateContext,
|
|||
|
||||
pub fn link_name(i: @ast::ForeignItem) -> InternedString {
|
||||
match attr::first_attr_value_str_by_name(i.attrs, "link_name") {
|
||||
None => token::get_ident(i.ident.name),
|
||||
None => token::get_ident(i.ident),
|
||||
Some(ln) => ln.clone(),
|
||||
}
|
||||
}
|
||||
|
|
|
@ -456,8 +456,7 @@ fn make_generic_glue(ccx: @CrateContext,
|
|||
let _s = StatRecorder::new(ccx, glue_name);
|
||||
|
||||
let arena = TypedArena::new();
|
||||
let fcx = new_fn_ctxt(ccx, ~[], llfn, -1, false, ty::mk_nil(), None, None,
|
||||
&arena);
|
||||
let fcx = new_fn_ctxt(ccx, llfn, -1, false, ty::mk_nil(), None, None, &arena);
|
||||
|
||||
init_function(&fcx, false, ty::mk_nil(), None);
|
||||
|
||||
|
|
|
@ -15,9 +15,7 @@ use middle::trans::base::{push_ctxt, trans_item, get_item_val, trans_fn};
|
|||
use middle::trans::common::*;
|
||||
use middle::ty;
|
||||
|
||||
use std::vec;
|
||||
use syntax::ast;
|
||||
use syntax::ast_map::PathName;
|
||||
use syntax::ast_util::local_def;
|
||||
use syntax::attr;
|
||||
|
||||
|
@ -45,9 +43,7 @@ pub fn maybe_instantiate_inline(ccx: @CrateContext, fn_id: ast::DefId)
|
|||
let csearch_result =
|
||||
csearch::maybe_get_item_ast(
|
||||
ccx.tcx, fn_id,
|
||||
|a,b,c,d| {
|
||||
astencode::decode_inlined_item(a, b, ccx.maps, c.clone(), d)
|
||||
});
|
||||
|a,b,c,d| astencode::decode_inlined_item(a, b, ccx.maps, c, d));
|
||||
return match csearch_result {
|
||||
csearch::not_found => {
|
||||
let mut external = ccx.external.borrow_mut();
|
||||
|
@ -157,9 +153,7 @@ pub fn maybe_instantiate_inline(ccx: @CrateContext, fn_id: ast::DefId)
|
|||
|
||||
if num_type_params == 0 {
|
||||
let llfn = get_item_val(ccx, mth.id);
|
||||
let path = vec::append_one(
|
||||
ty::item_path(ccx.tcx, impl_did), PathName(mth.ident));
|
||||
trans_fn(ccx, path, mth.decl, mth.body, llfn, None, mth.id, []);
|
||||
trans_fn(ccx, mth.decl, mth.body, llfn, None, mth.id, []);
|
||||
}
|
||||
local_def(mth.id)
|
||||
}
|
||||
|
|
|
@ -18,78 +18,76 @@ use middle::trans::base::*;
|
|||
use middle::trans::build::*;
|
||||
use middle::trans::common::*;
|
||||
use middle::trans::datum::*;
|
||||
use middle::trans::glue;
|
||||
use middle::trans::type_of::*;
|
||||
use middle::trans::type_of;
|
||||
use middle::trans::machine;
|
||||
use middle::trans::glue;
|
||||
use middle::trans::machine::llsize_of;
|
||||
use middle::trans::type_::Type;
|
||||
use middle::ty;
|
||||
use syntax::ast;
|
||||
use syntax::ast_map;
|
||||
use syntax::parse::token;
|
||||
use util::ppaux::ty_to_str;
|
||||
use middle::trans::machine::llsize_of;
|
||||
use middle::trans::type_::Type;
|
||||
|
||||
pub fn get_simple_intrinsic(ccx: @CrateContext, item: &ast::ForeignItem) -> Option<ValueRef> {
|
||||
let nm = ccx.sess.str_of(item.ident);
|
||||
let name = nm.as_slice();
|
||||
|
||||
match name {
|
||||
"sqrtf32" => Some(ccx.intrinsics.get_copy(&("llvm.sqrt.f32"))),
|
||||
"sqrtf64" => Some(ccx.intrinsics.get_copy(&("llvm.sqrt.f64"))),
|
||||
"powif32" => Some(ccx.intrinsics.get_copy(&("llvm.powi.f32"))),
|
||||
"powif64" => Some(ccx.intrinsics.get_copy(&("llvm.powi.f64"))),
|
||||
"sinf32" => Some(ccx.intrinsics.get_copy(&("llvm.sin.f32"))),
|
||||
"sinf64" => Some(ccx.intrinsics.get_copy(&("llvm.sin.f64"))),
|
||||
"cosf32" => Some(ccx.intrinsics.get_copy(&("llvm.cos.f32"))),
|
||||
"cosf64" => Some(ccx.intrinsics.get_copy(&("llvm.cos.f64"))),
|
||||
"powf32" => Some(ccx.intrinsics.get_copy(&("llvm.pow.f32"))),
|
||||
"powf64" => Some(ccx.intrinsics.get_copy(&("llvm.pow.f64"))),
|
||||
"expf32" => Some(ccx.intrinsics.get_copy(&("llvm.exp.f32"))),
|
||||
"expf64" => Some(ccx.intrinsics.get_copy(&("llvm.exp.f64"))),
|
||||
"exp2f32" => Some(ccx.intrinsics.get_copy(&("llvm.exp2.f32"))),
|
||||
"exp2f64" => Some(ccx.intrinsics.get_copy(&("llvm.exp2.f64"))),
|
||||
"logf32" => Some(ccx.intrinsics.get_copy(&("llvm.log.f32"))),
|
||||
"logf64" => Some(ccx.intrinsics.get_copy(&("llvm.log.f64"))),
|
||||
"log10f32" => Some(ccx.intrinsics.get_copy(&("llvm.log10.f32"))),
|
||||
"log10f64" => Some(ccx.intrinsics.get_copy(&("llvm.log10.f64"))),
|
||||
"log2f32" => Some(ccx.intrinsics.get_copy(&("llvm.log2.f32"))),
|
||||
"log2f64" => Some(ccx.intrinsics.get_copy(&("llvm.log2.f64"))),
|
||||
"fmaf32" => Some(ccx.intrinsics.get_copy(&("llvm.fma.f32"))),
|
||||
"fmaf64" => Some(ccx.intrinsics.get_copy(&("llvm.fma.f64"))),
|
||||
"fabsf32" => Some(ccx.intrinsics.get_copy(&("llvm.fabs.f32"))),
|
||||
"fabsf64" => Some(ccx.intrinsics.get_copy(&("llvm.fabs.f64"))),
|
||||
"copysignf32" => Some(ccx.intrinsics.get_copy(&("llvm.copysign.f32"))),
|
||||
"copysignf64" => Some(ccx.intrinsics.get_copy(&("llvm.copysign.f64"))),
|
||||
"floorf32" => Some(ccx.intrinsics.get_copy(&("llvm.floor.f32"))),
|
||||
"floorf64" => Some(ccx.intrinsics.get_copy(&("llvm.floor.f64"))),
|
||||
"ceilf32" => Some(ccx.intrinsics.get_copy(&("llvm.ceil.f32"))),
|
||||
"ceilf64" => Some(ccx.intrinsics.get_copy(&("llvm.ceil.f64"))),
|
||||
"truncf32" => Some(ccx.intrinsics.get_copy(&("llvm.trunc.f32"))),
|
||||
"truncf64" => Some(ccx.intrinsics.get_copy(&("llvm.trunc.f64"))),
|
||||
"rintf32" => Some(ccx.intrinsics.get_copy(&("llvm.rint.f32"))),
|
||||
"rintf64" => Some(ccx.intrinsics.get_copy(&("llvm.rint.f64"))),
|
||||
"nearbyintf32" => Some(ccx.intrinsics.get_copy(&("llvm.nearbyint.f32"))),
|
||||
"nearbyintf64" => Some(ccx.intrinsics.get_copy(&("llvm.nearbyint.f64"))),
|
||||
"roundf32" => Some(ccx.intrinsics.get_copy(&("llvm.round.f32"))),
|
||||
"roundf64" => Some(ccx.intrinsics.get_copy(&("llvm.round.f64"))),
|
||||
"ctpop8" => Some(ccx.intrinsics.get_copy(&("llvm.ctpop.i8"))),
|
||||
"ctpop16" => Some(ccx.intrinsics.get_copy(&("llvm.ctpop.i16"))),
|
||||
"ctpop32" => Some(ccx.intrinsics.get_copy(&("llvm.ctpop.i32"))),
|
||||
"ctpop64" => Some(ccx.intrinsics.get_copy(&("llvm.ctpop.i64"))),
|
||||
"bswap16" => Some(ccx.intrinsics.get_copy(&("llvm.bswap.i16"))),
|
||||
"bswap32" => Some(ccx.intrinsics.get_copy(&("llvm.bswap.i32"))),
|
||||
"bswap64" => Some(ccx.intrinsics.get_copy(&("llvm.bswap.i64"))),
|
||||
_ => None
|
||||
}
|
||||
let name = match token::get_ident(item.ident).get() {
|
||||
"sqrtf32" => "llvm.sqrt.f32",
|
||||
"sqrtf64" => "llvm.sqrt.f64",
|
||||
"powif32" => "llvm.powi.f32",
|
||||
"powif64" => "llvm.powi.f64",
|
||||
"sinf32" => "llvm.sin.f32",
|
||||
"sinf64" => "llvm.sin.f64",
|
||||
"cosf32" => "llvm.cos.f32",
|
||||
"cosf64" => "llvm.cos.f64",
|
||||
"powf32" => "llvm.pow.f32",
|
||||
"powf64" => "llvm.pow.f64",
|
||||
"expf32" => "llvm.exp.f32",
|
||||
"expf64" => "llvm.exp.f64",
|
||||
"exp2f32" => "llvm.exp2.f32",
|
||||
"exp2f64" => "llvm.exp2.f64",
|
||||
"logf32" => "llvm.log.f32",
|
||||
"logf64" => "llvm.log.f64",
|
||||
"log10f32" => "llvm.log10.f32",
|
||||
"log10f64" => "llvm.log10.f64",
|
||||
"log2f32" => "llvm.log2.f32",
|
||||
"log2f64" => "llvm.log2.f64",
|
||||
"fmaf32" => "llvm.fma.f32",
|
||||
"fmaf64" => "llvm.fma.f64",
|
||||
"fabsf32" => "llvm.fabs.f32",
|
||||
"fabsf64" => "llvm.fabs.f64",
|
||||
"copysignf32" => "llvm.copysign.f32",
|
||||
"copysignf64" => "llvm.copysign.f64",
|
||||
"floorf32" => "llvm.floor.f32",
|
||||
"floorf64" => "llvm.floor.f64",
|
||||
"ceilf32" => "llvm.ceil.f32",
|
||||
"ceilf64" => "llvm.ceil.f64",
|
||||
"truncf32" => "llvm.trunc.f32",
|
||||
"truncf64" => "llvm.trunc.f64",
|
||||
"rintf32" => "llvm.rint.f32",
|
||||
"rintf64" => "llvm.rint.f64",
|
||||
"nearbyintf32" => "llvm.nearbyint.f32",
|
||||
"nearbyintf64" => "llvm.nearbyint.f64",
|
||||
"roundf32" => "llvm.round.f32",
|
||||
"roundf64" => "llvm.round.f64",
|
||||
"ctpop8" => "llvm.ctpop.i8",
|
||||
"ctpop16" => "llvm.ctpop.i16",
|
||||
"ctpop32" => "llvm.ctpop.i32",
|
||||
"ctpop64" => "llvm.ctpop.i64",
|
||||
"bswap16" => "llvm.bswap.i16",
|
||||
"bswap32" => "llvm.bswap.i32",
|
||||
"bswap64" => "llvm.bswap.i64",
|
||||
_ => return None
|
||||
};
|
||||
Some(ccx.intrinsics.get_copy(&name))
|
||||
}
|
||||
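
The rewritten `get_simple_intrinsic` first translates the Rust-level intrinsic name into the LLVM intrinsic key and only then performs a single table lookup, rather than repeating `ccx.intrinsics.get_copy(...)` in every arm. The same structure in miniature (the table contents are invented for illustration):

use std::collections::HashMap;

fn lookup_simple_intrinsic(table: &HashMap<&str, u32>, name: &str) -> Option<u32> {
    // Translate the source-level name into the table key...
    let key = match name {
        "sqrtf32" => "llvm.sqrt.f32",
        "sqrtf64" => "llvm.sqrt.f64",
        _ => return None,
    };
    // ...then look it up exactly once.
    table.get(key).copied()
}

fn main() {
    let mut table = HashMap::new();
    table.insert("llvm.sqrt.f32", 1u32);
    table.insert("llvm.sqrt.f64", 2u32);
    assert_eq!(lookup_simple_intrinsic(&table, "sqrtf64"), Some(2));
    assert_eq!(lookup_simple_intrinsic(&table, "fmaf32"), None);
}
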
|
||||
pub fn trans_intrinsic(ccx: @CrateContext,
|
||||
decl: ValueRef,
|
||||
item: &ast::ForeignItem,
|
||||
path: ast_map::Path,
|
||||
substs: @param_substs,
|
||||
ref_id: Option<ast::NodeId>) {
|
||||
debug!("trans_intrinsic(item.ident={})", ccx.sess.str_of(item.ident));
|
||||
debug!("trans_intrinsic(item.ident={})", token::get_ident(item.ident));
|
||||
|
||||
fn with_overflow_instrinsic(bcx: &Block, name: &'static str, t: ty::t) {
|
||||
let first_real_arg = bcx.fcx.arg_pos(0u);
|
||||
|
@ -195,15 +193,8 @@ pub fn trans_intrinsic(ccx: @CrateContext,
|
|||
let output_type = ty::ty_fn_ret(ty::node_id_to_type(ccx.tcx, item.id));
|
||||
|
||||
let arena = TypedArena::new();
|
||||
let fcx = new_fn_ctxt(ccx,
|
||||
path,
|
||||
decl,
|
||||
item.id,
|
||||
false,
|
||||
output_type,
|
||||
Some(substs),
|
||||
Some(item.span),
|
||||
&arena);
|
||||
let fcx = new_fn_ctxt(ccx, decl, item.id, false, output_type,
|
||||
Some(substs), Some(item.span), &arena);
|
||||
init_function(&fcx, true, output_type, Some(substs));
|
||||
|
||||
set_always_inline(fcx.llfn);
|
||||
|
@ -211,13 +202,12 @@ pub fn trans_intrinsic(ccx: @CrateContext,
|
|||
let mut bcx = fcx.entry_bcx.get().unwrap();
|
||||
let first_real_arg = fcx.arg_pos(0u);
|
||||
|
||||
let nm = ccx.sess.str_of(item.ident);
|
||||
let name = nm.as_slice();
|
||||
let name = token::get_ident(item.ident);
|
||||
|
||||
// This requires that atomic intrinsics follow a specific naming pattern:
|
||||
// "atomic_<operation>[_<ordering>], and no ordering means SeqCst
|
||||
if name.starts_with("atomic_") {
|
||||
let split : ~[&str] = name.split('_').collect();
|
||||
if name.get().starts_with("atomic_") {
|
||||
let split: ~[&str] = name.get().split('_').collect();
|
||||
assert!(split.len() >= 2, "Atomic intrinsic not correct format");
|
||||
let order = if split.len() == 2 {
|
||||
lib::llvm::SequentiallyConsistent
|
||||
|
@ -282,7 +272,7 @@ pub fn trans_intrinsic(ccx: @CrateContext,
|
|||
return;
|
||||
}
|
||||
|
||||
match name {
|
||||
match name.get() {
|
||||
"abort" => {
|
||||
let llfn = bcx.ccx().intrinsics.get_copy(&("llvm.trap"));
|
||||
Call(bcx, llfn, [], []);
|
||||
|
@ -382,11 +372,9 @@ pub fn trans_intrinsic(ccx: @CrateContext,
|
|||
let in_type_size = machine::llbitsize_of_real(ccx, llintype);
|
||||
let out_type_size = machine::llbitsize_of_real(ccx, llouttype);
|
||||
if in_type_size != out_type_size {
|
||||
let sp = {
|
||||
match ccx.tcx.items.get(ref_id.unwrap()) {
|
||||
ast_map::NodeExpr(e) => e.span,
|
||||
_ => fail!("transmute has non-expr arg"),
|
||||
}
|
||||
let sp = match ccx.tcx.map.get(ref_id.unwrap()) {
|
||||
ast_map::NodeExpr(e) => e.span,
|
||||
_ => fail!("transmute has non-expr arg"),
|
||||
};
|
||||
let pluralize = |n| if 1 == n { "" } else { "s" };
|
||||
ccx.sess.span_fatal(sp,
|
||||
|
|
|
@ -35,9 +35,8 @@ use middle::trans::type_::Type;
|
|||
|
||||
use std::c_str::ToCStr;
|
||||
use std::vec;
|
||||
use syntax::ast_map::{Path, PathMod, PathName, PathPrettyName};
|
||||
use syntax::parse::token;
|
||||
use syntax::{ast, ast_map, ast_util, visit};
|
||||
use syntax::{ast, ast_map, visit};
|
||||
|
||||
/**
|
||||
The main "translation" pass for methods. Generates code
|
||||
|
@ -46,7 +45,6 @@ be generated once they are invoked with specific type parameters,
|
|||
see `trans::base::lval_static_fn()` or `trans::base::monomorphic_fn()`.
|
||||
*/
|
||||
pub fn trans_impl(ccx: @CrateContext,
|
||||
path: Path,
|
||||
name: ast::Ident,
|
||||
methods: &[@ast::Method],
|
||||
generics: &ast::Generics,
|
||||
|
@ -54,8 +52,7 @@ pub fn trans_impl(ccx: @CrateContext,
|
|||
let _icx = push_ctxt("meth::trans_impl");
|
||||
let tcx = ccx.tcx;
|
||||
|
||||
debug!("trans_impl(path={}, name={}, id={:?})",
|
||||
path.repr(tcx), name.repr(tcx), id);
|
||||
debug!("trans_impl(name={}, id={:?})", name.repr(tcx), id);
|
||||
|
||||
// Both here and below with generic methods, be sure to recurse and look for
|
||||
// items that we need to translate.
|
||||
|
@ -66,14 +63,10 @@ pub fn trans_impl(ccx: @CrateContext,
|
|||
}
|
||||
return;
|
||||
}
|
||||
let sub_path = vec::append_one(path, PathName(name));
|
||||
for method in methods.iter() {
|
||||
if method.generics.ty_params.len() == 0u {
|
||||
let llfn = get_item_val(ccx, method.id);
|
||||
let path = vec::append_one(sub_path.clone(),
|
||||
PathName(method.ident));
|
||||
|
||||
trans_fn(ccx, path, method.decl, method.body,
|
||||
trans_fn(ccx, method.decl, method.body,
|
||||
llfn, None, method.id, []);
|
||||
} else {
|
||||
let mut v = TransItemVisitor{ ccx: ccx };
|
||||
|
@ -85,17 +78,15 @@ pub fn trans_impl(ccx: @CrateContext,
|
|||
/// Translates a (possibly monomorphized) method body.
|
||||
///
|
||||
/// Parameters:
|
||||
/// * `path`: the path to the method
|
||||
/// * `method`: the AST node for the method
|
||||
/// * `param_substs`: if this is a generic method, the current values for
|
||||
/// type parameters and so forth, else None
|
||||
/// * `llfn`: the LLVM ValueRef for the method
|
||||
///
|
||||
/// FIXME(pcwalton) Can we take `path` by reference?
|
||||
pub fn trans_method(ccx: @CrateContext, path: Path, method: &ast::Method,
|
||||
pub fn trans_method(ccx: @CrateContext, method: &ast::Method,
|
||||
param_substs: Option<@param_substs>,
|
||||
llfn: ValueRef) -> ValueRef {
|
||||
trans_fn(ccx, path, method.decl, method.body,
|
||||
trans_fn(ccx, method.decl, method.body,
|
||||
llfn, param_substs, method.id, []);
|
||||
llfn
|
||||
}
|
||||
|
@ -185,23 +176,21 @@ pub fn trans_static_method_callee(bcx: &Block,
|
|||
generics.type_param_defs().len();
|
||||
|
||||
let mname = if method_id.krate == ast::LOCAL_CRATE {
|
||||
{
|
||||
match bcx.tcx().items.get(method_id.node) {
|
||||
ast_map::NodeTraitMethod(trait_method, _, _) => {
|
||||
ast_util::trait_method_to_ty_method(trait_method).ident
|
||||
}
|
||||
_ => fail!("callee is not a trait method")
|
||||
match bcx.tcx().map.get(method_id.node) {
|
||||
ast_map::NodeTraitMethod(method) => {
|
||||
let ident = match *method {
|
||||
ast::Required(ref m) => m.ident,
|
||||
ast::Provided(ref m) => m.ident
|
||||
};
|
||||
ident.name
|
||||
}
|
||||
_ => fail!("callee is not a trait method")
|
||||
}
|
||||
} else {
|
||||
let path = csearch::get_item_path(bcx.tcx(), method_id);
|
||||
match path[path.len()-1] {
|
||||
PathPrettyName(s, _) | PathName(s) => { s }
|
||||
PathMod(_) => { fail!("path doesn't have a name?") }
|
||||
}
|
||||
csearch::get_item_path(bcx.tcx(), method_id).last().unwrap().name()
|
||||
};
|
||||
debug!("trans_static_method_callee: method_id={:?}, callee_id={:?}, \
|
||||
name={}", method_id, callee_id, ccx.sess.str_of(mname));
|
||||
name={}", method_id, callee_id, token::get_name(mname));
|
||||
|
||||
let vtbls = {
|
||||
let vtable_map = ccx.maps.vtable_map.borrow();
|
||||
|
@ -213,7 +202,7 @@ pub fn trans_static_method_callee(bcx: &Block,
|
|||
typeck::vtable_static(impl_did, ref rcvr_substs, rcvr_origins) => {
|
||||
assert!(rcvr_substs.iter().all(|t| !ty::type_needs_infer(*t)));
|
||||
|
||||
let mth_id = method_with_name(ccx, impl_did, mname.name);
|
||||
let mth_id = method_with_name(ccx, impl_did, mname);
|
||||
let (callee_substs, callee_origins) =
|
||||
combine_impl_and_methods_tps(
|
||||
bcx, mth_id, callee_id,
|
||||
|
@ -542,7 +531,7 @@ fn emit_vtable_methods(bcx: &Block,
|
|||
if m.generics.has_type_params() ||
|
||||
ty::type_has_self(ty::mk_bare_fn(tcx, m.fty.clone())) {
|
||||
debug!("(making impl vtable) method has self or type params: {}",
|
||||
tcx.sess.str_of(ident));
|
||||
token::get_ident(ident));
|
||||
C_null(Type::nil().ptr_to())
|
||||
} else {
|
||||
trans_fn_ref_with_vtables(bcx, m_id, 0, substs, Some(vtables))
|
||||
|
|
|
@ -94,60 +94,34 @@ pub fn monomorphic_fn(ccx: @CrateContext,
|
|||
// calling a static provided method. This is sort of unfortunate.
|
||||
let mut is_static_provided = None;
|
||||
|
||||
let map_node = {
|
||||
session::expect(
|
||||
ccx.sess,
|
||||
ccx.tcx.items.find(fn_id.node),
|
||||
|| format!("while monomorphizing {:?}, couldn't find it in the \
|
||||
item map (may have attempted to monomorphize an item \
|
||||
defined in a different crate?)", fn_id))
|
||||
};
|
||||
let map_node = session::expect(
|
||||
ccx.sess,
|
||||
ccx.tcx.map.find(fn_id.node),
|
||||
|| format!("while monomorphizing {:?}, couldn't find it in the \
|
||||
item map (may have attempted to monomorphize an item \
|
||||
defined in a different crate?)", fn_id));
|
||||
|
||||
// Get the path so that we can create a symbol
|
||||
let (pt, name, span) = match map_node {
|
||||
ast_map::NodeItem(i, pt) => (pt, i.ident, i.span),
|
||||
ast_map::NodeVariant(ref v, enm, pt) => (pt, (*v).node.name, enm.span),
|
||||
ast_map::NodeMethod(m, _, pt) => (pt, m.ident, m.span),
|
||||
ast_map::NodeForeignItem(i, abis, _, pt) if abis.is_intrinsic()
|
||||
=> (pt, i.ident, i.span),
|
||||
ast_map::NodeForeignItem(..) => {
|
||||
// Foreign externs don't have to be monomorphized.
|
||||
return (get_item_val(ccx, fn_id.node), true);
|
||||
}
|
||||
ast_map::NodeTraitMethod(method, _, pt) => {
|
||||
match *method {
|
||||
ast::Provided(m) => {
|
||||
// If this is a static provided method, indicate that
|
||||
// and stash the number of params on the method.
|
||||
if m.explicit_self.node == ast::SelfStatic {
|
||||
is_static_provided = Some(m.generics.ty_params.len());
|
||||
match map_node {
|
||||
ast_map::NodeForeignItem(_) => {
|
||||
if !ccx.tcx.map.get_foreign_abis(fn_id.node).is_intrinsic() {
|
||||
// Foreign externs don't have to be monomorphized.
|
||||
return (get_item_val(ccx, fn_id.node), true);
|
||||
}
|
||||
}
|
||||
ast_map::NodeTraitMethod(method) => {
|
||||
match *method {
|
||||
ast::Provided(m) => {
|
||||
// If this is a static provided method, indicate that
|
||||
// and stash the number of params on the method.
|
||||
if m.explicit_self.node == ast::SelfStatic {
|
||||
is_static_provided = Some(m.generics.ty_params.len());
|
||||
}
|
||||
}
|
||||
|
||||
(pt, m.ident, m.span)
|
||||
}
|
||||
ast::Required(_) => {
|
||||
ccx.tcx.sess.bug("Can't monomorphize a required trait method")
|
||||
}
|
||||
}
|
||||
}
|
||||
ast_map::NodeExpr(..) => {
|
||||
ccx.tcx.sess.bug("Can't monomorphize an expr")
|
||||
}
|
||||
ast_map::NodeStmt(..) => {
|
||||
ccx.tcx.sess.bug("Can't monomorphize a stmt")
|
||||
}
|
||||
ast_map::NodeArg(..) => ccx.tcx.sess.bug("Can't monomorphize an arg"),
|
||||
ast_map::NodeBlock(..) => {
|
||||
ccx.tcx.sess.bug("Can't monomorphize a block")
|
||||
}
|
||||
ast_map::NodeLocal(..) => {
|
||||
ccx.tcx.sess.bug("Can't monomorphize a local")
|
||||
}
|
||||
ast_map::NodeCalleeScope(..) => {
|
||||
ccx.tcx.sess.bug("Can't monomorphize a callee-scope")
|
||||
}
|
||||
ast_map::NodeStructCtor(_, i, pt) => (pt, i.ident, i.span)
|
||||
};
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
debug!("monomorphic_fn about to subst into {}", llitem_ty.repr(ccx.tcx));
|
||||
let mono_ty = match is_static_provided {
|
||||
|
@ -202,15 +176,15 @@ pub fn monomorphic_fn(ccx: @CrateContext,
|
|||
// to be causing an infinite expansion.
|
||||
if depth > 30 {
|
||||
ccx.sess.span_fatal(
|
||||
span, "overly deep expansion of inlined function");
|
||||
ccx.tcx.map.span(fn_id.node),
|
||||
"overly deep expansion of inlined function");
|
||||
}
|
||||
monomorphizing.get().insert(fn_id, depth + 1);
|
||||
}
|
||||
|
||||
let (_, elt) = gensym_name(ccx.sess.str_of(name));
|
||||
let mut pt = (*pt).clone();
|
||||
pt.push(elt);
|
||||
let s = mangle_exported_name(ccx, pt.clone(), mono_ty);
|
||||
let s = ccx.tcx.map.with_path(fn_id.node, |path| {
|
||||
mangle_exported_name(ccx, path, mono_ty, fn_id.node)
|
||||
});
|
||||
debug!("monomorphize_fn mangled to {}", s);
|
||||
|
||||
let mk_lldecl = || {
|
||||
|
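
Monomorphization produces many copies of the same source item, one per type instantiation, so each copy needs a distinct exported symbol even though they share a path; that is why `mangle_exported_name` is now handed the path, the monomorphized type, and the node id. A toy illustration of why the extra inputs matter, folding a hash of the instantiated type into the symbol (the helper below is invented, not the real mangler):

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

/// Toy stand-in for mangle_exported_name: the same path can be
/// instantiated at many types, so the symbol includes a type hash to
/// keep each monomorphized copy distinct.
fn exported_symbol(path: &[&str], mono_ty: &str) -> String {
    let mut h = DefaultHasher::new();
    mono_ty.hash(&mut h);
    format!("{}::h{:x}", path.join("::"), h.finish())
}

fn main() {
    let a = exported_symbol(&["mycrate", "max"], "int");
    let b = exported_symbol(&["mycrate", "max"], "f64");
    assert_ne!(a, b); // same path, different instantiations, different symbols
}
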
@ -223,7 +197,7 @@ pub fn monomorphic_fn(ccx: @CrateContext,
|
|||
};
|
||||
|
||||
let lldecl = match map_node {
|
||||
ast_map::NodeItem(i, _) => {
|
||||
ast_map::NodeItem(i) => {
|
||||
match *i {
|
||||
ast::Item {
|
||||
node: ast::ItemFn(decl, _, _, _, body),
|
||||
|
@ -231,7 +205,7 @@ pub fn monomorphic_fn(ccx: @CrateContext,
|
|||
} => {
|
||||
let d = mk_lldecl();
|
||||
set_llvm_fn_attrs(i.attrs, d);
|
||||
trans_fn(ccx, pt, decl, body, d, Some(psubsts), fn_id.node, []);
|
||||
trans_fn(ccx, decl, body, d, Some(psubsts), fn_id.node, []);
|
||||
d
|
||||
}
|
||||
_ => {
|
||||
|
@ -239,26 +213,27 @@ pub fn monomorphic_fn(ccx: @CrateContext,
|
|||
}
|
||||
}
|
||||
}
|
||||
ast_map::NodeForeignItem(i, _, _, _) => {
|
||||
ast_map::NodeForeignItem(i) => {
|
||||
let simple = intrinsic::get_simple_intrinsic(ccx, i);
|
||||
match simple {
|
||||
Some(decl) => decl,
|
||||
None => {
|
||||
let d = mk_lldecl();
|
||||
intrinsic::trans_intrinsic(ccx, d, i, pt, psubsts, ref_id);
|
||||
intrinsic::trans_intrinsic(ccx, d, i, psubsts, ref_id);
|
||||
d
|
||||
}
|
||||
}
|
||||
}
|
||||
ast_map::NodeVariant(v, enum_item, _) => {
|
||||
let tvs = ty::enum_variants(ccx.tcx, local_def(enum_item.id));
|
||||
ast_map::NodeVariant(v) => {
|
||||
let parent = ccx.tcx.map.get_parent(fn_id.node);
|
||||
let tvs = ty::enum_variants(ccx.tcx, local_def(parent));
|
||||
let this_tv = *tvs.iter().find(|tv| { tv.id.node == fn_id.node}).unwrap();
|
||||
let d = mk_lldecl();
|
||||
set_inline_hint(d);
|
||||
match v.node.kind {
|
||||
ast::TupleVariantKind(ref args) => {
|
||||
trans_enum_variant(ccx,
|
||||
enum_item.id,
|
||||
parent,
|
||||
v,
|
||||
(*args).clone(),
|
||||
this_tv.disr_val,
|
||||
|
@ -270,19 +245,18 @@ pub fn monomorphic_fn(ccx: @CrateContext,
|
|||
}
|
||||
d
|
||||
}
|
||||
ast_map::NodeMethod(mth, _, _) => {
|
||||
ast_map::NodeMethod(mth) => {
|
||||
let d = mk_lldecl();
|
||||
set_llvm_fn_attrs(mth.attrs, d);
|
||||
trans_fn(ccx, pt, mth.decl, mth.body, d, Some(psubsts), mth.id, []);
|
||||
trans_fn(ccx, mth.decl, mth.body, d, Some(psubsts), mth.id, []);
|
||||
d
|
||||
}
|
||||
ast_map::NodeTraitMethod(method, _, pt) => {
|
||||
ast_map::NodeTraitMethod(method) => {
|
||||
match *method {
|
||||
ast::Provided(mth) => {
|
||||
let d = mk_lldecl();
|
||||
set_llvm_fn_attrs(mth.attrs, d);
|
||||
trans_fn(ccx, (*pt).clone(), mth.decl, mth.body,
|
||||
d, Some(psubsts), mth.id, []);
|
||||
trans_fn(ccx, mth.decl, mth.body, d, Some(psubsts), mth.id, []);
|
||||
d
|
||||
}
|
||||
_ => {
|
||||
|
@ -291,7 +265,7 @@ pub fn monomorphic_fn(ccx: @CrateContext,
|
|||
}
|
||||
}
|
||||
}
|
||||
ast_map::NodeStructCtor(struct_def, _, _) => {
|
||||
ast_map::NodeStructCtor(struct_def) => {
|
||||
let d = mk_lldecl();
|
||||
set_inline_hint(d);
|
||||
base::trans_tuple_struct(ccx,
|
||||
|
|
|
@ -31,7 +31,7 @@ use std::option::{Some,None};
|
|||
use std::vec;
|
||||
use syntax::ast::DefId;
|
||||
use syntax::ast;
|
||||
use syntax::ast_map::PathName;
|
||||
use syntax::ast_map;
|
||||
use syntax::parse::token::{InternedString, special_idents};
|
||||
use syntax::parse::token;
|
||||
|
||||
|
@ -94,7 +94,7 @@ impl<'a> Reflector<'a> {
|
|||
let fcx = self.bcx.fcx;
|
||||
let tcx = self.bcx.tcx();
|
||||
let mth_idx = ty::method_idx(
|
||||
tcx.sess.ident_of(~"visit_" + ty_name),
|
||||
token::str_to_ident(~"visit_" + ty_name),
|
||||
*self.visitor_methods).expect(format!("couldn't find visit method \
|
||||
for {}", ty_name));
|
||||
let mth_ty =
|
||||
|
@ -269,7 +269,7 @@ impl<'a> Reflector<'a> {
|
|||
for (i, field) in fields.iter().enumerate() {
|
||||
let extra = ~[
|
||||
this.c_uint(i),
|
||||
this.c_slice(token::get_ident(field.ident.name)),
|
||||
this.c_slice(token::get_ident(field.ident)),
|
||||
this.c_bool(named_fields)
|
||||
] + this.c_mt(&field.mt);
|
||||
this.visit("class_field", extra);
|
||||
|
@ -291,22 +291,13 @@ impl<'a> Reflector<'a> {
|
|||
mutbl: ast::MutImmutable });
|
||||
|
||||
let make_get_disr = || {
|
||||
let sub_path = bcx.fcx.path + &[PathName(special_idents::anon)];
|
||||
let sym = mangle_internal_name_by_path_and_seq(ccx,
|
||||
sub_path,
|
||||
"get_disr");
|
||||
let sym = mangle_internal_name_by_path_and_seq(
|
||||
ast_map::Values([].iter()).chain(None), "get_disr");
|
||||
|
||||
let llfdecl = decl_internal_rust_fn(ccx, false, [opaqueptrty], ty::mk_u64(), sym);
|
||||
let arena = TypedArena::new();
|
||||
let fcx = new_fn_ctxt(ccx,
|
||||
~[],
|
||||
llfdecl,
|
||||
-1, // id
|
||||
false,
|
||||
ty::mk_u64(),
|
||||
None,
|
||||
None,
|
||||
&arena);
|
||||
let fcx = new_fn_ctxt(ccx, llfdecl, -1, false,
|
||||
ty::mk_u64(), None, None, &arena);
|
||||
init_function(&fcx, false, ty::mk_u64(), None);
|
||||
|
||||
let arg = unsafe {
|
||||
|
@ -333,7 +324,7 @@ impl<'a> Reflector<'a> {
|
|||
+ self.c_size_and_align(t);
|
||||
self.bracketed("enum", enum_args, |this| {
|
||||
for (i, v) in variants.iter().enumerate() {
|
||||
let name = token::get_ident(v.name.name);
|
||||
let name = token::get_ident(v.name);
|
||||
let variant_args = ~[this.c_uint(i),
|
||||
C_u64(v.disr_val),
|
||||
this.c_uint(v.args.len()),
|
||||
|
|
|
@ -284,9 +284,7 @@ pub struct ctxt_ {
|
|||
trait_refs: RefCell<HashMap<NodeId, @TraitRef>>,
|
||||
trait_defs: RefCell<HashMap<DefId, @TraitDef>>,
|
||||
|
||||
/// Despite its name, `items` does not only map NodeId to an item but
|
||||
/// also to expr/stmt/local/arg/etc
|
||||
items: ast_map::Map,
|
||||
map: ast_map::Map,
|
||||
intrinsic_defs: RefCell<HashMap<ast::DefId, t>>,
|
||||
freevars: RefCell<freevars::freevar_map>,
|
||||
tcache: type_cache,
|
||||
|
@ -1066,7 +1064,7 @@ pub type node_type_table = RefCell<HashMap<uint,t>>;
|
|||
pub fn mk_ctxt(s: session::Session,
|
||||
dm: resolve::DefMap,
|
||||
named_region_map: @RefCell<resolve_lifetime::NamedRegionMap>,
|
||||
amap: ast_map::Map,
|
||||
map: ast_map::Map,
|
||||
freevars: freevars::freevar_map,
|
||||
region_maps: middle::region::RegionMaps,
|
||||
lang_items: @middle::lang_items::LanguageItems)
|
||||
|
@ -1085,7 +1083,7 @@ pub fn mk_ctxt(s: session::Session,
|
|||
node_type_substs: RefCell::new(HashMap::new()),
|
||||
trait_refs: RefCell::new(HashMap::new()),
|
||||
trait_defs: RefCell::new(HashMap::new()),
|
||||
items: amap,
|
||||
map: map,
|
||||
intrinsic_defs: RefCell::new(HashMap::new()),
|
||||
freevars: RefCell::new(freevars),
|
||||
tcache: RefCell::new(HashMap::new()),
|
||||
|
@ -2775,8 +2773,7 @@ pub fn node_id_to_trait_ref(cx: ctxt, id: ast::NodeId) -> @ty::TraitRef {
|
|||
Some(&t) => t,
|
||||
None => cx.sess.bug(
|
||||
format!("node_id_to_trait_ref: no trait ref for node `{}`",
|
||||
ast_map::node_id_to_str(cx.items, id,
|
||||
token::get_ident_interner())))
|
||||
cx.map.node_to_str(id)))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2790,8 +2787,7 @@ pub fn node_id_to_type(cx: ctxt, id: ast::NodeId) -> t {
|
|||
Some(t) => t,
|
||||
None => cx.sess.bug(
|
||||
format!("node_id_to_type: no type for node `{}`",
|
||||
ast_map::node_id_to_str(cx.items, id,
|
||||
token::get_ident_interner())))
|
||||
cx.map.node_to_str(id)))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -3001,7 +2997,7 @@ pub fn expr_ty_adjusted(cx: ctxt, expr: &ast::Expr) -> t {
|
|||
}
|
||||
|
||||
pub fn expr_span(cx: ctxt, id: NodeId) -> Span {
|
||||
match cx.items.find(id) {
|
||||
match cx.map.find(id) {
|
||||
Some(ast_map::NodeExpr(e)) => {
|
||||
e.span
|
||||
}
|
||||
|
@ -3017,12 +3013,11 @@ pub fn expr_span(cx: ctxt, id: NodeId) -> Span {
|
|||
}
|
||||
|
||||
pub fn local_var_name_str(cx: ctxt, id: NodeId) -> InternedString {
|
||||
match cx.items.find(id) {
|
||||
match cx.map.find(id) {
|
||||
Some(ast_map::NodeLocal(pat)) => {
|
||||
match pat.node {
|
||||
ast::PatIdent(_, ref path, _) => {
|
||||
let ident = ast_util::path_to_ident(path);
|
||||
token::get_ident(ident.name)
|
||||
token::get_ident(ast_util::path_to_ident(path))
|
||||
}
|
||||
_ => {
|
||||
cx.sess.bug(
|
||||
|
@ -3489,11 +3484,10 @@ pub fn field_idx_strict(tcx: ty::ctxt, name: ast::Name, fields: &[field])
|
|||
-> uint {
|
||||
let mut i = 0u;
|
||||
for f in fields.iter() { if f.ident.name == name { return i; } i += 1u; }
|
||||
let string = token::get_ident(name);
|
||||
tcx.sess.bug(format!(
|
||||
"no field named `{}` found in the list of fields `{:?}`",
|
||||
string.get(),
|
||||
fields.map(|f| tcx.sess.str_of(f.ident))));
|
||||
token::get_name(name),
|
||||
fields.map(|f| token::get_ident(f.ident).get().to_str())));
|
||||
}
|
||||
|
||||
pub fn method_idx(id: ast::Ident, meths: &[@Method]) -> Option<uint> {
|
||||
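field_idx_strict now formats names with token::get_name for a raw Name and token::get_ident(...).get() for an Ident, instead of cx.sess.str_of. A self-contained sketch of that split between Name-keyed and Ident-keyed lookups; the Interner shape here is assumed for illustration:

    #[derive(Clone, Copy)]
    struct Name(u32);

    #[derive(Clone, Copy)]
    struct Ident {
        name: Name,
    }

    struct Interner {
        strings: Vec<String>,
    }

    impl Interner {
        // get_name works from a bare Name index...
        fn get_name(&self, name: Name) -> &str {
            &self.strings[name.0 as usize]
        }

        // ...and get_ident is just get_name on the ident's interned name.
        fn get_ident(&self, ident: Ident) -> &str {
            self.get_name(ident.name)
        }
    }

    fn main() {
        let interner = Interner { strings: vec!["x".to_string(), "push".to_string()] };
        let field = Ident { name: Name(1) };
        assert_eq!(
            format!("no field named `{}` found", interner.get_ident(field)),
            "no field named `push` found"
        );
    }
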
|
@ -3639,8 +3633,8 @@ pub fn type_err_to_str(cx: ctxt, err: &type_err) -> ~str {
|
|||
terr_record_fields(values) => {
|
||||
format!("expected a record with field `{}` but found one with field \
|
||||
`{}`",
|
||||
cx.sess.str_of(values.expected),
|
||||
cx.sess.str_of(values.found))
|
||||
token::get_ident(values.expected),
|
||||
token::get_ident(values.found))
|
||||
}
|
||||
terr_arg_count => ~"incorrect number of function parameters",
|
||||
terr_regions_does_not_outlive(..) => {
|
||||
|
@ -3674,7 +3668,7 @@ pub fn type_err_to_str(cx: ctxt, err: &type_err) -> ~str {
|
|||
trait_store_to_str(cx, (*values).found))
|
||||
}
|
||||
terr_in_field(err, fname) => {
|
||||
format!("in field `{}`, {}", cx.sess.str_of(fname),
|
||||
format!("in field `{}`, {}", token::get_ident(fname),
|
||||
type_err_to_str(cx, err))
|
||||
}
|
||||
terr_sorts(values) => {
|
||||
|
@ -3768,8 +3762,8 @@ pub fn provided_source(cx: ctxt, id: ast::DefId) -> Option<ast::DefId> {
|
|||
pub fn provided_trait_methods(cx: ctxt, id: ast::DefId) -> ~[@Method] {
|
||||
if is_local(id) {
|
||||
{
|
||||
match cx.items.find(id.node) {
|
||||
Some(ast_map::NodeItem(item, _)) => {
|
||||
match cx.map.find(id.node) {
|
||||
Some(ast_map::NodeItem(item)) => {
|
||||
match item.node {
|
||||
ItemTrait(_, _, ref ms) => {
|
||||
let (_, p) = ast_util::split_trait_methods(*ms);
|
||||
|
@ -3897,24 +3891,21 @@ pub fn impl_trait_ref(cx: ctxt, id: ast::DefId) -> Option<@TraitRef> {
|
|||
|
||||
let ret = if id.krate == ast::LOCAL_CRATE {
|
||||
debug!("(impl_trait_ref) searching for trait impl {:?}", id);
|
||||
{
|
||||
match cx.items.find(id.node) {
|
||||
Some(ast_map::NodeItem(item, _)) => {
|
||||
match item.node {
|
||||
ast::ItemImpl(_, ref opt_trait, _, _) => {
|
||||
match opt_trait {
|
||||
&Some(ref t) => {
|
||||
Some(ty::node_id_to_trait_ref(cx,
|
||||
t.ref_id))
|
||||
}
|
||||
&None => None
|
||||
match cx.map.find(id.node) {
|
||||
Some(ast_map::NodeItem(item)) => {
|
||||
match item.node {
|
||||
ast::ItemImpl(_, ref opt_trait, _, _) => {
|
||||
match opt_trait {
|
||||
&Some(ref t) => {
|
||||
Some(ty::node_id_to_trait_ref(cx, t.ref_id))
|
||||
}
|
||||
&None => None
|
||||
}
|
||||
_ => None
|
||||
}
|
||||
_ => None
|
||||
}
|
||||
_ => None
|
||||
}
|
||||
_ => None
|
||||
}
|
||||
} else {
|
||||
csearch::get_impl_trait(cx, id)
|
||||
|
@ -4038,7 +4029,7 @@ pub fn substd_enum_variants(cx: ctxt,
|
|||
}
|
||||
|
||||
pub fn item_path_str(cx: ctxt, id: ast::DefId) -> ~str {
|
||||
ast_map::path_to_str(item_path(cx, id), token::get_ident_interner())
|
||||
with_path(cx, id, |path| ast_map::path_to_str(path))
|
||||
}
|
||||
|
||||
pub enum DtorKind {
|
||||
|
@ -4084,54 +4075,11 @@ pub fn has_dtor(cx: ctxt, struct_id: DefId) -> bool {
|
|||
ty_dtor(cx, struct_id).is_present()
|
||||
}
|
||||
|
||||
pub fn item_path(cx: ctxt, id: ast::DefId) -> ast_map::Path {
|
||||
if id.krate != ast::LOCAL_CRATE {
|
||||
return csearch::get_item_path(cx, id)
|
||||
}
|
||||
|
||||
// FIXME (#5521): uncomment this code and don't have a catch-all at the
|
||||
// end of the match statement. Favor explicitly listing
|
||||
// each variant.
|
||||
// let node = cx.items.get(&id.node);
|
||||
// match *node {
|
||||
match cx.items.get(id.node) {
|
||||
ast_map::NodeItem(item, path) => {
|
||||
let item_elt = match item.node {
|
||||
ItemMod(_) | ItemForeignMod(_) => {
|
||||
ast_map::PathMod(item.ident)
|
||||
}
|
||||
_ => ast_map::PathName(item.ident)
|
||||
};
|
||||
vec::append_one((*path).clone(), item_elt)
|
||||
}
|
||||
|
||||
ast_map::NodeForeignItem(nitem, _, _, path) => {
|
||||
vec::append_one((*path).clone(),
|
||||
ast_map::PathName(nitem.ident))
|
||||
}
|
||||
|
||||
ast_map::NodeMethod(method, _, path) => {
|
||||
vec::append_one((*path).clone(),
|
||||
ast_map::PathName(method.ident))
|
||||
}
|
||||
ast_map::NodeTraitMethod(trait_method, _, path) => {
|
||||
let method = ast_util::trait_method_to_ty_method(&*trait_method);
|
||||
vec::append_one((*path).clone(),
|
||||
ast_map::PathName(method.ident))
|
||||
}
|
||||
|
||||
ast_map::NodeVariant(ref variant, _, path) => {
|
||||
vec::append_one(path.init().to_owned(),
|
||||
ast_map::PathName((*variant).node.name))
|
||||
}
|
||||
|
||||
ast_map::NodeStructCtor(_, item, path) => {
|
||||
vec::append_one((*path).clone(), ast_map::PathName(item.ident))
|
||||
}
|
||||
|
||||
ref node => {
|
||||
cx.sess.bug(format!("cannot find item_path for node {:?}", node));
|
||||
}
|
||||
pub fn with_path<T>(cx: ctxt, id: ast::DefId, f: |ast_map::PathElems| -> T) -> T {
|
||||
if id.krate == ast::LOCAL_CRATE {
|
||||
cx.map.with_path(id.node, f)
|
||||
} else {
|
||||
f(ast_map::Values(csearch::get_item_path(cx, id).iter()).chain(None))
|
||||
}
|
||||
}
|
||||
|
||||
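with_path hands the closure a PathElems iterator in both arms: the local case comes from cx.map.with_path, while the external case wraps the csearch path in Values and chains it with None so the types line up. A small modern-Rust sketch of that chain-with-an-empty-tail trick, with Values simplified to a copying slice iterator over plain integers:

    // Copying wrapper over a borrowed slice iterator, standing in for ast_map::Values.
    struct Values<'a, T: Copy>(std::slice::Iter<'a, T>);

    impl<'a, T: Copy> Iterator for Values<'a, T> {
        type Item = T;
        fn next(&mut self) -> Option<T> {
            self.0.next().copied()
        }
    }

    // Both the local and the external arm can name this one concrete type.
    type PathElems<'a> = std::iter::Chain<Values<'a, u32>, std::option::IntoIter<u32>>;

    fn external_path(stored: &[u32]) -> PathElems<'_> {
        // The None tail is always empty; it only exists so this arm has the same
        // iterator type as the local arm, which chains real parent segments.
        Values(stored.iter()).chain(None::<u32>)
    }

    fn main() {
        assert_eq!(external_path(&[1, 2, 3]).collect::<Vec<_>>(), vec![1, 2, 3]);
    }
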
|
@ -4164,8 +4112,8 @@ pub fn enum_variants(cx: ctxt, id: ast::DefId) -> @~[@VariantInfo] {
|
|||
expr, since check_enum_variants also updates the enum_var_cache
|
||||
*/
|
||||
{
|
||||
match cx.items.get(id.node) {
|
||||
ast_map::NodeItem(item, _) => {
|
||||
match cx.map.get(id.node) {
|
||||
ast_map::NodeItem(item) => {
|
||||
match item.node {
|
||||
ast::ItemEnum(ref enum_definition, _) => {
|
||||
let mut last_discriminant: Option<Disr> = None;
|
||||
|
@ -4287,15 +4235,8 @@ pub fn lookup_trait_def(cx: ctxt, did: ast::DefId) -> @ty::TraitDef {
|
|||
// decoder to use iterators instead of higher-order functions.)
|
||||
pub fn each_attr(tcx: ctxt, did: DefId, f: |@MetaItem| -> bool) -> bool {
|
||||
if is_local(did) {
|
||||
{
|
||||
match tcx.items.find(did.node) {
|
||||
Some(ast_map::NodeItem(item, _)) => {
|
||||
item.attrs.iter().advance(|attr| f(attr.node.value))
|
||||
}
|
||||
_ => tcx.sess.bug(format!("has_attr: {:?} is not an item",
|
||||
did))
|
||||
}
|
||||
}
|
||||
let item = tcx.map.expect_item(did.node);
|
||||
item.attrs.iter().advance(|attr| f(attr.node.value))
|
||||
} else {
|
||||
let mut cont = true;
|
||||
csearch::get_item_attrs(tcx.cstore, did, |meta_items| {
|
||||
|
@ -4303,7 +4244,7 @@ pub fn each_attr(tcx: ctxt, did: DefId, f: |@MetaItem| -> bool) -> bool {
|
|||
cont = meta_items.iter().advance(|ptrptr| f(*ptrptr));
|
||||
}
|
||||
});
|
||||
return cont;
|
||||
cont
|
||||
}
|
||||
}
|
||||
|
||||
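each_attr collapses the old local-crate match into tcx.map.expect_item(did.node). A tiny sketch of an expect_item-style accessor over a dense node table; the Node variants and error text are illustrative assumptions:

    #[derive(Debug)]
    enum Node {
        Item(&'static str),
        Expr(&'static str),
    }

    struct Map {
        nodes: Vec<Option<Node>>,
    }

    impl Map {
        fn find(&self, id: usize) -> Option<&Node> {
            self.nodes.get(id).and_then(|n| n.as_ref())
        }

        // Fetch the node and fail loudly when it is missing or not an item,
        // replacing the open-coded match at each call site.
        fn expect_item(&self, id: usize) -> &'static str {
            match self.find(id) {
                Some(&Node::Item(name)) => name,
                other => panic!("expected item at node {}, found {:?}", id, other),
            }
        }
    }

    fn main() {
        let map = Map {
            nodes: vec![Some(Node::Item("struct Foo")), Some(Node::Expr("1 + 1"))],
        };
        assert_eq!(map.expect_item(0), "struct Foo");
    }
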
|
@ -4318,7 +4259,7 @@ pub fn has_attr(tcx: ctxt, did: DefId, attr: &str) -> bool {
|
|||
true
|
||||
}
|
||||
});
|
||||
return found;
|
||||
found
|
||||
}
|
||||
|
||||
/// Determine whether an item is annotated with `#[packed]`
|
||||
|
@ -4371,8 +4312,8 @@ pub fn lookup_field_type(tcx: ctxt,
|
|||
pub fn lookup_struct_fields(cx: ctxt, did: ast::DefId) -> ~[field_ty] {
|
||||
if did.krate == ast::LOCAL_CRATE {
|
||||
{
|
||||
match cx.items.find(did.node) {
|
||||
Some(ast_map::NodeItem(i,_)) => {
|
||||
match cx.map.find(did.node) {
|
||||
Some(ast_map::NodeItem(i)) => {
|
||||
match i.node {
|
||||
ast::ItemStruct(struct_def, _) => {
|
||||
struct_field_tys(struct_def.fields)
|
||||
|
@ -4380,7 +4321,7 @@ pub fn lookup_struct_fields(cx: ctxt, did: ast::DefId) -> ~[field_ty] {
|
|||
_ => cx.sess.bug("struct ID bound to non-struct")
|
||||
}
|
||||
}
|
||||
Some(ast_map::NodeVariant(ref variant, _, _)) => {
|
||||
Some(ast_map::NodeVariant(ref variant)) => {
|
||||
match (*variant).node.kind {
|
||||
ast::StructVariantKind(struct_def) => {
|
||||
struct_field_tys(struct_def.fields)
|
||||
|
@ -4394,8 +4335,7 @@ pub fn lookup_struct_fields(cx: ctxt, did: ast::DefId) -> ~[field_ty] {
|
|||
_ => {
|
||||
cx.sess.bug(
|
||||
format!("struct ID not bound to an item: {}",
|
||||
ast_map::node_id_to_str(cx.items, did.node,
|
||||
token::get_ident_interner())));
|
||||
cx.map.node_to_str(did.node)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -4428,8 +4368,7 @@ fn struct_field_tys(fields: &[StructField]) -> ~[field_ty] {
|
|||
}
|
||||
UnnamedField => {
|
||||
field_ty {
|
||||
name:
|
||||
syntax::parse::token::special_idents::unnamed_field.name,
|
||||
name: syntax::parse::token::special_idents::unnamed_field.name,
|
||||
id: ast_util::local_def(field.node.id),
|
||||
vis: ast::Public,
|
||||
}
|
||||
|
@ -4909,12 +4848,12 @@ pub fn populate_implementations_for_trait_if_necessary(
|
|||
/// If it implements no trait, return `None`.
|
||||
pub fn trait_id_of_impl(tcx: ctxt,
|
||||
def_id: ast::DefId) -> Option<ast::DefId> {
|
||||
let node = match tcx.items.find(def_id.node) {
|
||||
let node = match tcx.map.find(def_id.node) {
|
||||
Some(node) => node,
|
||||
None => return None
|
||||
};
|
||||
match node {
|
||||
ast_map::NodeItem(item, _) => {
|
||||
ast_map::NodeItem(item) => {
|
||||
match item.node {
|
||||
ast::ItemImpl(_, Some(ref trait_ref), _, _) => {
|
||||
Some(node_id_to_trait_ref(tcx, trait_ref.ref_id).def_id)
|
||||
@ -63,11 +63,10 @@ use util::ppaux::Repr;
|
|||
|
||||
use std::vec;
|
||||
use syntax::abi::AbiSet;
|
||||
use syntax::{ast, ast_map, ast_util};
|
||||
use syntax::{ast, ast_util};
|
||||
use syntax::codemap::Span;
|
||||
use syntax::opt_vec::OptVec;
|
||||
use syntax::opt_vec;
|
||||
use syntax::parse::token;
|
||||
use syntax::print::pprust::{lifetime_to_str, path_to_str};
|
||||
|
||||
pub trait AstConv {
|
||||
|
@ -111,9 +110,8 @@ pub fn ast_region_to_region(tcx: ty::ctxt, lifetime: &ast::Lifetime)
|
|||
};
|
||||
|
||||
debug!("ast_region_to_region(lifetime={} id={}) yields {}",
|
||||
lifetime_to_str(lifetime, tcx.sess.intr()),
|
||||
lifetime.id,
|
||||
r.repr(tcx));
|
||||
lifetime_to_str(lifetime),
|
||||
lifetime.id, r.repr(tcx));
|
||||
|
||||
r
|
||||
}
|
||||
|
@ -146,8 +144,7 @@ fn opt_ast_region_to_region<AC:AstConv,RS:RegionScope>(
|
|||
};
|
||||
|
||||
debug!("opt_ast_region_to_region(opt_lifetime={:?}) yields {}",
|
||||
opt_lifetime.as_ref().map(
|
||||
|e| lifetime_to_str(e, this.tcx().sess.intr())),
|
||||
opt_lifetime.as_ref().map(|e| lifetime_to_str(e)),
|
||||
r.repr(this.tcx()));
|
||||
|
||||
r
|
||||
|
@ -333,8 +330,7 @@ pub fn ast_ty_to_prim_ty(tcx: ty::ctxt, ast_ty: &ast::Ty) -> Option<ty::t> {
|
|||
let def_map = tcx.def_map.borrow();
|
||||
let a_def = match def_map.get().find(&id) {
|
||||
None => tcx.sess.span_fatal(
|
||||
ast_ty.span, format!("unbound path {}",
|
||||
path_to_str(path, tcx.sess.intr()))),
|
||||
ast_ty.span, format!("unbound path {}", path_to_str(path))),
|
||||
Some(&d) => d
|
||||
};
|
||||
match a_def {
|
||||
|
@ -564,8 +560,7 @@ pub fn ast_ty_to_ty<AC:AstConv, RS:RegionScope>(
|
|||
let def_map = tcx.def_map.borrow();
|
||||
let a_def = match def_map.get().find(&id) {
|
||||
None => tcx.sess.span_fatal(
|
||||
ast_ty.span, format!("unbound path {}",
|
||||
path_to_str(path, tcx.sess.intr()))),
|
||||
ast_ty.span, format!("unbound path {}", path_to_str(path))),
|
||||
Some(&d) => d
|
||||
};
|
||||
// Kind bounds on path types are only supported for traits.
|
||||
|
@ -579,7 +574,7 @@ pub fn ast_ty_to_ty<AC:AstConv, RS:RegionScope>(
|
|||
}
|
||||
match a_def {
|
||||
ast::DefTrait(_) => {
|
||||
let path_str = path_to_str(path, tcx.sess.intr());
|
||||
let path_str = path_to_str(path);
|
||||
tcx.sess.span_err(
|
||||
ast_ty.span,
|
||||
format!("reference to trait `{}` where a type is expected; \
|
||||
|
@ -605,8 +600,7 @@ pub fn ast_ty_to_ty<AC:AstConv, RS:RegionScope>(
|
|||
ast::DefMod(id) => {
|
||||
tcx.sess.span_fatal(ast_ty.span,
|
||||
format!("found module name used as a type: {}",
|
||||
ast_map::node_id_to_str(tcx.items, id.node,
|
||||
token::get_ident_interner())));
|
||||
tcx.map.node_to_str(id.node)));
|
||||
}
|
||||
ast::DefPrimTy(_) => {
|
||||
fail!("DefPrimTy arm missed in previous ast_ty_to_prim_ty call");
|
||||
|
|
|
@ -308,7 +308,7 @@ pub fn check_struct_pat_fields(pcx: &pat_ctxt,
|
|||
Some(&(_, true)) => {
|
||||
tcx.sess.span_err(span,
|
||||
format!("field `{}` bound twice in pattern",
|
||||
tcx.sess.str_of(field.ident)));
|
||||
token::get_ident(field.ident)));
|
||||
}
|
||||
Some(&(index, ref mut used)) => {
|
||||
*used = true;
|
||||
|
@ -321,14 +321,14 @@ pub fn check_struct_pat_fields(pcx: &pat_ctxt,
|
|||
found_fields.insert(index);
|
||||
}
|
||||
None => {
|
||||
let name = pprust::path_to_str(path, tcx.sess.intr());
|
||||
let name = pprust::path_to_str(path);
|
||||
// Check the pattern anyway, so that attempts to look
|
||||
// up its type won't fail
|
||||
check_pat(pcx, field.pat, ty::mk_err());
|
||||
tcx.sess.span_err(span,
|
||||
format!("struct `{}` does not have a field named `{}`",
|
||||
name,
|
||||
tcx.sess.str_of(field.ident)));
|
||||
token::get_ident(field.ident)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -340,10 +340,9 @@ pub fn check_struct_pat_fields(pcx: &pat_ctxt,
|
|||
continue;
|
||||
}
|
||||
|
||||
let string = token::get_ident(field.name);
|
||||
tcx.sess.span_err(span,
|
||||
format!("pattern does not mention field `{}`",
|
||||
string.get()));
|
||||
token::get_name(field.name)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -366,7 +365,7 @@ pub fn check_struct_pat(pcx: &pat_ctxt, pat_id: ast::NodeId, span: Span,
|
|||
// OK.
|
||||
}
|
||||
Some(&ast::DefStruct(..)) | Some(&ast::DefVariant(..)) => {
|
||||
let name = pprust::path_to_str(path, tcx.sess.intr());
|
||||
let name = pprust::path_to_str(path);
|
||||
tcx.sess.span_err(span,
|
||||
format!("mismatched types: expected `{}` but found `{}`",
|
||||
fcx.infcx().ty_to_str(expected),
|
||||
|
@ -405,7 +404,7 @@ pub fn check_struct_like_enum_variant_pat(pcx: &pat_ctxt,
|
|||
variant_id, substitutions, etc);
|
||||
}
|
||||
Some(&ast::DefStruct(..)) | Some(&ast::DefVariant(..)) => {
|
||||
let name = pprust::path_to_str(path, tcx.sess.intr());
|
||||
let name = pprust::path_to_str(path);
|
||||
tcx.sess.span_err(span,
|
||||
format!("mismatched types: expected `{}` but \
|
||||
found `{}`",
|
||||
@ -105,7 +105,6 @@ use syntax::ast::{DefId, SelfValue, SelfRegion};
|
|||
use syntax::ast::{SelfUniq, SelfStatic, NodeId};
|
||||
use syntax::ast::{MutMutable, MutImmutable};
|
||||
use syntax::ast;
|
||||
use syntax::ast_map;
|
||||
use syntax::parse::token;
|
||||
|
||||
#[deriving(Eq)]
|
||||
|
@ -556,9 +555,8 @@ impl<'a> LookupContext<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
let method_name = token::get_ident(self.m_name);
|
||||
debug!("push_candidates_from_impl: {} {} {}",
|
||||
method_name.get(),
|
||||
token::get_name(self.m_name),
|
||||
impl_info.ident.repr(self.tcx()),
|
||||
impl_info.methods.map(|m| m.ident).repr(self.tcx()));
|
||||
|
||||
|
@ -1298,21 +1296,7 @@ impl<'a> LookupContext<'a> {
|
|||
|
||||
fn report_static_candidate(&self, idx: uint, did: DefId) {
|
||||
let span = if did.krate == ast::LOCAL_CRATE {
|
||||
{
|
||||
match self.tcx().items.find(did.node) {
|
||||
Some(ast_map::NodeMethod(m, _, _)) => m.span,
|
||||
Some(ast_map::NodeTraitMethod(trait_method, _, _)) => {
|
||||
match *trait_method {
|
||||
ast::Provided(m) => m.span,
|
||||
_ => {
|
||||
fail!("report_static_candidate, bad item {:?}",
|
||||
did)
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => fail!("report_static_candidate: bad item {:?}", did)
|
||||
}
|
||||
}
|
||||
self.tcx().map.span(did.node)
|
||||
} else {
|
||||
self.expr.span
|
||||
};
|
||||
|
|
|
@ -121,7 +121,6 @@ use std::vec;
|
|||
use syntax::abi::AbiSet;
|
||||
use syntax::ast::{Provided, Required};
|
||||
use syntax::ast;
|
||||
use syntax::ast_map;
|
||||
use syntax::ast_util::local_def;
|
||||
use syntax::ast_util;
|
||||
use syntax::attr;
|
||||
|
@ -389,7 +388,7 @@ impl Visitor<()> for GatherLocalsVisitor {
|
|||
{
|
||||
let locals = self.fcx.inh.locals.borrow();
|
||||
debug!("Pattern binding {} is assigned to {}",
|
||||
self.tcx.sess.str_of(path.segments[0].identifier),
|
||||
token::get_ident(path.segments[0].identifier),
|
||||
self.fcx.infcx().ty_to_str(
|
||||
locals.get().get_copy(&p.id)));
|
||||
}
|
||||
|
@ -520,7 +519,7 @@ pub fn check_no_duplicate_fields(tcx: ty::ctxt,
|
|||
match orig_sp {
|
||||
Some(orig_sp) => {
|
||||
tcx.sess.span_err(sp, format!("duplicate field name {} in record type declaration",
|
||||
tcx.sess.str_of(id)));
|
||||
token::get_ident(id)));
|
||||
tcx.sess.span_note(orig_sp, "first declaration of this field occurred here");
|
||||
break;
|
||||
}
|
||||
|
@ -574,7 +573,7 @@ pub fn check_item(ccx: @CrateCtxt, it: &ast::Item) {
|
|||
check_bare_fn(ccx, decl, body, it.id, fn_tpt.ty, param_env);
|
||||
}
|
||||
ast::ItemImpl(_, ref opt_trait_ref, _, ref ms) => {
|
||||
debug!("ItemImpl {} with id {}", ccx.tcx.sess.str_of(it.ident), it.id);
|
||||
debug!("ItemImpl {} with id {}", token::get_ident(it.ident), it.id);
|
||||
|
||||
let impl_tpt = ty::lookup_item_type(ccx.tcx, ast_util::local_def(it.id));
|
||||
for m in ms.iter() {
|
||||
|
@ -723,9 +722,8 @@ fn check_impl_methods_against_trait(ccx: @CrateCtxt,
|
|||
tcx.sess.span_err(
|
||||
impl_method.span,
|
||||
format!("method `{}` is not a member of trait `{}`",
|
||||
tcx.sess.str_of(impl_method_ty.ident),
|
||||
pprust::path_to_str(&ast_trait_ref.path,
|
||||
tcx.sess.intr())));
|
||||
token::get_ident(impl_method_ty.ident),
|
||||
pprust::path_to_str(&ast_trait_ref.path)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -743,7 +741,7 @@ fn check_impl_methods_against_trait(ccx: @CrateCtxt,
|
|||
|m| m.ident.name == trait_method.ident.name);
|
||||
if !is_implemented && !is_provided {
|
||||
missing_methods.push(
|
||||
format!("`{}`", ccx.tcx.sess.str_of(trait_method.ident)));
|
||||
format!("`{}`", token::get_ident(trait_method.ident)));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -794,9 +792,8 @@ fn compare_impl_method(tcx: ty::ctxt,
|
|||
impl_m_span,
|
||||
format!("method `{}` has a `{}` declaration in the impl, \
|
||||
but not in the trait",
|
||||
tcx.sess.str_of(trait_m.ident),
|
||||
pprust::explicit_self_to_str(&impl_m.explicit_self,
|
||||
tcx.sess.intr())));
|
||||
token::get_ident(trait_m.ident),
|
||||
pprust::explicit_self_to_str(&impl_m.explicit_self)));
|
||||
return;
|
||||
}
|
||||
(_, &ast::SelfStatic) => {
|
||||
|
@ -804,9 +801,8 @@ fn compare_impl_method(tcx: ty::ctxt,
|
|||
impl_m_span,
|
||||
format!("method `{}` has a `{}` declaration in the trait, \
|
||||
but not in the impl",
|
||||
tcx.sess.str_of(trait_m.ident),
|
||||
pprust::explicit_self_to_str(&trait_m.explicit_self,
|
||||
tcx.sess.intr())));
|
||||
token::get_ident(trait_m.ident),
|
||||
pprust::explicit_self_to_str(&trait_m.explicit_self)));
|
||||
return;
|
||||
}
|
||||
_ => {
|
||||
|
@ -821,7 +817,7 @@ fn compare_impl_method(tcx: ty::ctxt,
|
|||
impl_m_span,
|
||||
format!("method `{}` has {} type parameter(s), but its trait \
|
||||
declaration has {} type parameter(s)",
|
||||
tcx.sess.str_of(trait_m.ident),
|
||||
token::get_ident(trait_m.ident),
|
||||
num_impl_m_type_params,
|
||||
num_trait_m_type_params));
|
||||
return;
|
||||
|
@ -832,7 +828,7 @@ fn compare_impl_method(tcx: ty::ctxt,
|
|||
impl_m_span,
|
||||
format!("method `{}` has {} parameter{} \
|
||||
but the declaration in trait `{}` has {}",
|
||||
tcx.sess.str_of(trait_m.ident),
|
||||
token::get_ident(trait_m.ident),
|
||||
impl_m.fty.sig.inputs.len(),
|
||||
if impl_m.fty.sig.inputs.len() == 1 { "" } else { "s" },
|
||||
ty::item_path_str(tcx, trait_m.def_id),
|
||||
|
@ -857,7 +853,7 @@ fn compare_impl_method(tcx: ty::ctxt,
|
|||
which is not required by \
|
||||
the corresponding type parameter \
|
||||
in the trait declaration",
|
||||
tcx.sess.str_of(trait_m.ident),
|
||||
token::get_ident(trait_m.ident),
|
||||
i,
|
||||
extra_bounds.user_string(tcx)));
|
||||
return;
|
||||
|
@ -875,7 +871,7 @@ fn compare_impl_method(tcx: ty::ctxt,
|
|||
type parameter {} has {} trait bound(s), but the \
|
||||
corresponding type parameter in \
|
||||
the trait declaration has {} trait bound(s)",
|
||||
tcx.sess.str_of(trait_m.ident),
|
||||
token::get_ident(trait_m.ident),
|
||||
i, impl_param_def.bounds.trait_bounds.len(),
|
||||
trait_param_def.bounds.trait_bounds.len()));
|
||||
return;
|
||||
|
@ -945,7 +941,7 @@ fn compare_impl_method(tcx: ty::ctxt,
|
|||
tcx.sess.span_err(
|
||||
impl_m_span,
|
||||
format!("method `{}` has an incompatible type: {}",
|
||||
tcx.sess.str_of(trait_m.ident),
|
||||
token::get_ident(trait_m.ident),
|
||||
ty::type_err_to_str(tcx, terr)));
|
||||
ty::note_and_explain_type_err(tcx, terr);
|
||||
}
|
||||
|
@ -1102,9 +1098,7 @@ impl FnCtxt {
|
|||
None => {
|
||||
self.tcx().sess.bug(
|
||||
format!("no type for node {}: {} in fcx {}",
|
||||
id, ast_map::node_id_to_str(
|
||||
self.tcx().items, id,
|
||||
token::get_ident_interner()),
|
||||
id, self.tcx().map.node_to_str(id),
|
||||
self.tag()));
|
||||
}
|
||||
}
|
||||
|
@ -1117,8 +1111,7 @@ impl FnCtxt {
|
|||
None => {
|
||||
self.tcx().sess.bug(
|
||||
format!("no type substs for node {}: {} in fcx {}",
|
||||
id, ast_map::node_id_to_str(self.tcx().items, id,
|
||||
token::get_ident_interner()),
|
||||
id, self.tcx().map.node_to_str(id),
|
||||
self.tag()));
|
||||
}
|
||||
}
|
||||
|
@ -1908,8 +1901,7 @@ pub fn check_expr_with_unifier(fcx: @FnCtxt,
|
|||
|actual| {
|
||||
format!("type `{}` does not implement any method in scope \
|
||||
named `{}`",
|
||||
actual,
|
||||
fcx.ccx.tcx.sess.str_of(method_name))
|
||||
actual, token::get_ident(method_name))
|
||||
},
|
||||
expr_t,
|
||||
None);
|
||||
|
@ -2336,11 +2328,9 @@ pub fn check_expr_with_unifier(fcx: @FnCtxt,
|
|||
fcx.type_error_message(
|
||||
expr.span,
|
||||
|actual| {
|
||||
let string = token::get_ident(field);
|
||||
format!("attempted to take value of method `{}` on type `{}` \
|
||||
(try writing an anonymous function)",
|
||||
string.get(),
|
||||
actual)
|
||||
token::get_name(field), actual)
|
||||
},
|
||||
expr_t, None);
|
||||
}
|
||||
|
@ -2349,11 +2339,9 @@ pub fn check_expr_with_unifier(fcx: @FnCtxt,
|
|||
fcx.type_error_message(
|
||||
expr.span,
|
||||
|actual| {
|
||||
let string = token::get_ident(field);
|
||||
format!("attempted access of field `{}` on type `{}`, \
|
||||
but no field with that name was found",
|
||||
string.get(),
|
||||
actual)
|
||||
token::get_name(field), actual)
|
||||
},
|
||||
expr_t, None);
|
||||
}
|
||||
|
@ -2392,7 +2380,7 @@ pub fn check_expr_with_unifier(fcx: @FnCtxt,
|
|||
field.ident.span,
|
||||
|actual| {
|
||||
format!("structure `{}` has no field named `{}`",
|
||||
actual, tcx.sess.str_of(field.ident.node))
|
||||
actual, token::get_ident(field.ident.node))
|
||||
}, struct_ty, None);
|
||||
error_happened = true;
|
||||
}
|
||||
|
@ -2400,7 +2388,7 @@ pub fn check_expr_with_unifier(fcx: @FnCtxt,
|
|||
tcx.sess.span_err(
|
||||
field.ident.span,
|
||||
format!("field `{}` specified more than once",
|
||||
tcx.sess.str_of(field.ident.node)));
|
||||
token::get_ident(field.ident.node)));
|
||||
error_happened = true;
|
||||
}
|
||||
Some((field_id, false)) => {
|
||||
|
@ -2433,8 +2421,7 @@ pub fn check_expr_with_unifier(fcx: @FnCtxt,
|
|||
let name = class_field.name;
|
||||
let (_, seen) = *class_field_map.get(&name);
|
||||
if !seen {
|
||||
let string = token::get_ident(name);
|
||||
missing_fields.push(~"`" + string.get() + "`");
|
||||
missing_fields.push(~"`" + token::get_name(name).get() + "`");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -3201,7 +3188,6 @@ pub fn check_expr_with_unifier(fcx: @FnCtxt,
|
|||
let resolved = structurally_resolved_type(fcx,
|
||||
expr.span,
|
||||
raw_base_t);
|
||||
let index_ident = tcx.sess.ident_of("index");
|
||||
let error_message = || {
|
||||
fcx.type_error_message(expr.span,
|
||||
|actual| {
|
||||
|
@ -3216,7 +3202,7 @@ pub fn check_expr_with_unifier(fcx: @FnCtxt,
|
|||
callee_id,
|
||||
expr,
|
||||
resolved,
|
||||
index_ident.name,
|
||||
token::intern("index"),
|
||||
[base, idx],
|
||||
DoDerefArgs,
|
||||
AutoderefReceiver,
|
||||
|
@ -3230,7 +3216,7 @@ pub fn check_expr_with_unifier(fcx: @FnCtxt,
|
|||
}
|
||||
|
||||
debug!("type of expr({}) {} is...", expr.id,
|
||||
syntax::print::pprust::expr_to_str(expr, tcx.sess.intr()));
|
||||
syntax::print::pprust::expr_to_str(expr));
|
||||
debug!("... {}, expected is {}",
|
||||
ppaux::ty_to_str(tcx, fcx.expr_ty(expr)),
|
||||
match expected {
|
||||
|
@ -3576,7 +3562,7 @@ pub fn check_enum_variants(ccx: @CrateCtxt,
|
|||
|
||||
match v.node.disr_expr {
|
||||
Some(e) => {
|
||||
debug!("disr expr, checking {}", pprust::expr_to_str(e, ccx.tcx.sess.intr()));
|
||||
debug!("disr expr, checking {}", pprust::expr_to_str(e));
|
||||
|
||||
let fcx = blank_fn_ctxt(ccx, rty, e.id);
|
||||
let declty = ty::mk_int_var(ccx.tcx, fcx.infcx().next_int_var_id());
|
||||
|
@ -4013,7 +3999,7 @@ pub fn check_bounds_are_used(ccx: @CrateCtxt,
|
|||
if !*b {
|
||||
ccx.tcx.sess.span_err(
|
||||
span, format!("type parameter `{}` is unused",
|
||||
ccx.tcx.sess.str_of(tps.get(i).ident)));
|
||||
token::get_ident(tps.get(i).ident)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -4024,10 +4010,9 @@ pub fn check_intrinsic_type(ccx: @CrateCtxt, it: &ast::ForeignItem) {
|
|||
}
|
||||
|
||||
let tcx = ccx.tcx;
|
||||
let nm = ccx.tcx.sess.str_of(it.ident);
|
||||
let name = nm.as_slice();
|
||||
let (n_tps, inputs, output) = if name.starts_with("atomic_") {
|
||||
let split : ~[&str] = name.split('_').collect();
|
||||
let name = token::get_ident(it.ident);
|
||||
let (n_tps, inputs, output) = if name.get().starts_with("atomic_") {
|
||||
let split : ~[&str] = name.get().split('_').collect();
|
||||
assert!(split.len() >= 2, "Atomic intrinsic not correct format");
|
||||
|
||||
//We only care about the operation here
|
||||
|
@ -4071,7 +4056,7 @@ pub fn check_intrinsic_type(ccx: @CrateCtxt, it: &ast::ForeignItem) {
|
|||
}
|
||||
|
||||
} else {
|
||||
match name {
|
||||
match name.get() {
|
||||
"abort" => (0, ~[], ty::mk_bot()),
|
||||
"breakpoint" => (0, ~[], ty::mk_nil()),
|
||||
"size_of" |
|
||||
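check_intrinsic_type now keeps the token::get_ident(it.ident) handle and slices it with .get() for the atomic_ prefix test and the match above. A rough sketch of that handle-plus-get pattern; InternedString here is a stand-in type, not the real libsyntax one:

    // Stand-in for the interned-string handle returned by token::get_ident.
    struct InternedString(std::rc::Rc<str>);

    impl InternedString {
        fn get(&self) -> &str {
            &self.0
        }
    }

    fn classify(name: &InternedString) -> &'static str {
        // Same shape as the intrinsic dispatch: prefix check first, then a match.
        if name.get().starts_with("atomic_") {
            "atomic intrinsic"
        } else {
            match name.get() {
                "abort" | "breakpoint" => "control intrinsic",
                _ => "other",
            }
        }
    }

    fn main() {
        let name = InternedString(std::rc::Rc::from("atomic_load"));
        assert_eq!(classify(&name), "atomic intrinsic");
    }
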
@ -561,7 +561,7 @@ pub fn location_info_for_item(item: &ast::Item) -> LocationInfo {
|
|||
|
||||
pub fn early_resolve_expr(ex: &ast::Expr, fcx: @FnCtxt, is_early: bool) {
|
||||
debug!("vtable: early_resolve_expr() ex with id {:?} (early: {}): {}",
|
||||
ex.id, is_early, expr_to_str(ex, fcx.tcx().sess.intr()));
|
||||
ex.id, is_early, expr_to_str(ex));
|
||||
let _indent = indenter();
|
||||
|
||||
let cx = fcx.ccx;
|
||||
|
|
|
@ -334,7 +334,7 @@ fn visit_pat(p: &ast::Pat, wbcx: &mut WbCtxt) {
|
|||
|
||||
resolve_type_vars_for_node(wbcx, p.span, p.id);
|
||||
debug!("Type for pattern binding {} (id {}) resolved to {}",
|
||||
pat_to_str(p, wbcx.fcx.ccx.tcx.sess.intr()), p.id,
|
||||
pat_to_str(p), p.id,
|
||||
wbcx.fcx.infcx().ty_to_str(
|
||||
ty::node_id_to_type(wbcx.fcx.ccx.tcx,
|
||||
p.id)));
|
||||
|
@ -347,7 +347,7 @@ fn visit_local(l: &ast::Local, wbcx: &mut WbCtxt) {
|
|||
match resolve_type(wbcx.fcx.infcx(), var_ty, resolve_all | force_all) {
|
||||
Ok(lty) => {
|
||||
debug!("Type for local {} (id {}) resolved to {}",
|
||||
pat_to_str(l.pat, wbcx.fcx.tcx().sess.intr()),
|
||||
pat_to_str(l.pat),
|
||||
l.id,
|
||||
wbcx.fcx.infcx().ty_to_str(lty));
|
||||
write_ty_to_tcx(wbcx.fcx.ccx.tcx, l.id, lty);
|
||||
|
|
|
@ -43,6 +43,7 @@ use syntax::ast_map;
|
|||
use syntax::ast_util::{def_id_of_def, local_def};
|
||||
use syntax::codemap::Span;
|
||||
use syntax::opt_vec;
|
||||
use syntax::parse::token;
|
||||
use syntax::visit;
|
||||
|
||||
use std::cell::RefCell;
|
||||
|
@ -155,8 +156,7 @@ struct CoherenceCheckVisitor<'a> {
|
|||
impl<'a> visit::Visitor<()> for CoherenceCheckVisitor<'a> {
|
||||
fn visit_item(&mut self, item: &Item, _: ()) {
|
||||
|
||||
// debug!("(checking coherence) item '{}'",
|
||||
// self.cc.crate_context.tcx.sess.str_of(item.ident));
|
||||
//debug!("(checking coherence) item '{}'", token::get_ident(item.ident));
|
||||
|
||||
match item.node {
|
||||
ItemImpl(_, ref opt_trait, _, _) => {
|
||||
|
@ -267,9 +267,8 @@ impl CoherenceChecker {
|
|||
// base type.
|
||||
|
||||
if associated_traits.len() == 0 {
|
||||
debug!("(checking implementation) no associated traits for item \
|
||||
'{}'",
|
||||
self.crate_context.tcx.sess.str_of(item.ident));
|
||||
debug!("(checking implementation) no associated traits for item '{}'",
|
||||
token::get_ident(item.ident));
|
||||
|
||||
match get_base_type_def_id(&self.inference_context,
|
||||
item.span,
|
||||
|
@ -293,7 +292,7 @@ impl CoherenceChecker {
|
|||
self.crate_context.tcx, associated_trait.ref_id);
|
||||
debug!("(checking implementation) adding impl for trait '{}', item '{}'",
|
||||
trait_ref.repr(self.crate_context.tcx),
|
||||
self.crate_context.tcx.sess.str_of(item.ident));
|
||||
token::get_ident(item.ident));
|
||||
|
||||
self.add_trait_impl(trait_ref.def_id, implementation);
|
||||
}
|
||||
|
@ -584,13 +583,13 @@ impl CoherenceChecker {
|
|||
|
||||
// Make sure that this type precisely names a nominal
|
||||
// type.
|
||||
match self.crate_context.tcx.items.find(def_id.node) {
|
||||
match self.crate_context.tcx.map.find(def_id.node) {
|
||||
None => {
|
||||
self.crate_context.tcx.sess.span_bug(
|
||||
original_type.span,
|
||||
"resolve didn't resolve this type?!");
|
||||
}
|
||||
Some(NodeItem(item, _)) => {
|
||||
Some(NodeItem(item)) => {
|
||||
match item.node {
|
||||
ItemStruct(..) | ItemEnum(..) => true,
|
||||
_ => false,
|
||||
|
@ -641,15 +640,7 @@ impl CoherenceChecker {
|
|||
|
||||
fn span_of_impl(&self, implementation: @Impl) -> Span {
|
||||
assert_eq!(implementation.did.krate, LOCAL_CRATE);
|
||||
match self.crate_context.tcx.items.find(implementation.did.node) {
|
||||
Some(NodeItem(item, _)) => {
|
||||
return item.span;
|
||||
}
|
||||
_ => {
|
||||
self.crate_context.tcx.sess.bug("span_of_impl() called on something that \
|
||||
wasn't an impl!");
|
||||
}
|
||||
}
|
||||
self.crate_context.tcx.map.span(implementation.did.node)
|
||||
}
|
||||
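span_of_impl (and report_static_candidate earlier) shrink to a single tcx.map.span(node_id) call. A minimal model of a span table owned by the map, with the storage layout assumed for illustration:

    #[derive(Clone, Copy, Debug, PartialEq)]
    struct Span {
        lo: u32,
        hi: u32,
    }

    // The map owns the spans, keyed densely by NodeId.
    struct Map {
        spans: Vec<Option<Span>>,
    }

    impl Map {
        fn span(&self, id: usize) -> Span {
            self.spans
                .get(id)
                .copied()
                .flatten()
                .unwrap_or_else(|| panic!("no span for node {}", id))
        }
    }

    fn main() {
        let map = Map { spans: vec![Some(Span { lo: 0, hi: 10 }), None] };
        assert_eq!(map.span(0), Span { lo: 0, hi: 10 });
    }
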
|
||||
// External crate handling
|
||||
|
@ -746,8 +737,8 @@ impl CoherenceChecker {
|
|||
// Destructors only work on nominal types.
|
||||
if impl_info.did.krate == ast::LOCAL_CRATE {
|
||||
{
|
||||
match tcx.items.find(impl_info.did.node) {
|
||||
Some(ast_map::NodeItem(item, _)) => {
|
||||
match tcx.map.find(impl_info.did.node) {
|
||||
Some(ast_map::NodeItem(item)) => {
|
||||
tcx.sess.span_err((*item).span,
|
||||
"the Drop trait may \
|
||||
only be implemented \
|
||||
|
|
|
@ -53,10 +53,11 @@ use syntax::ast_map;
|
|||
use syntax::ast_util::{local_def, split_trait_methods};
|
||||
use syntax::codemap::Span;
|
||||
use syntax::codemap;
|
||||
use syntax::parse::token::special_idents;
|
||||
use syntax::parse::token;
|
||||
use syntax::print::pprust::{path_to_str};
|
||||
use syntax::visit;
|
||||
use syntax::opt_vec::OptVec;
|
||||
use syntax::parse::token::special_idents;
|
||||
|
||||
struct CollectItemTypesVisitor {
|
||||
ccx: @CrateCtxt
|
||||
|
@ -111,14 +112,15 @@ impl AstConv for CrateCtxt {
|
|||
return csearch::get_type(self.tcx, id)
|
||||
}
|
||||
|
||||
match self.tcx.items.find(id.node) {
|
||||
Some(ast_map::NodeItem(item, _)) => ty_of_item(self, item),
|
||||
Some(ast_map::NodeForeignItem(foreign_item, abis, _, _)) => {
|
||||
match self.tcx.map.find(id.node) {
|
||||
Some(ast_map::NodeItem(item)) => ty_of_item(self, item),
|
||||
Some(ast_map::NodeForeignItem(foreign_item)) => {
|
||||
let abis = self.tcx.map.get_foreign_abis(id.node);
|
||||
ty_of_foreign_item(self, foreign_item, abis)
|
||||
}
|
||||
ref x => {
|
||||
self.tcx.sess.bug(format!("unexpected sort of item \
|
||||
in get_item_ty(): {:?}", (*x)));
|
||||
x => {
|
||||
self.tcx.sess.bug(format!("unexpected sort of node \
|
||||
in get_item_ty(): {:?}", x));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -128,8 +130,7 @@ impl AstConv for CrateCtxt {
|
|||
}
|
||||
|
||||
fn ty_infer(&self, span: Span) -> ty::t {
|
||||
self.tcx.sess.span_bug(span,
|
||||
"found `ty_infer` in unexpected place");
|
||||
self.tcx.sess.span_bug(span, "found `ty_infer` in unexpected place");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -185,8 +186,8 @@ pub fn get_enum_variant_types(ccx: &CrateCtxt,
|
|||
|
||||
pub fn ensure_trait_methods(ccx: &CrateCtxt, trait_id: ast::NodeId) {
|
||||
let tcx = ccx.tcx;
|
||||
match tcx.items.get(trait_id) {
|
||||
ast_map::NodeItem(item, _) => {
|
||||
match tcx.map.get(trait_id) {
|
||||
ast_map::NodeItem(item) => {
|
||||
match item.node {
|
||||
ast::ItemTrait(ref generics, _, ref ms) => {
|
||||
let trait_ty_generics = ty_generics(ccx, generics, 0);
|
||||
|
@ -553,7 +554,7 @@ pub fn ensure_no_ty_param_bounds(ccx: &CrateCtxt,
|
|||
|
||||
pub fn convert(ccx: &CrateCtxt, it: &ast::Item) {
|
||||
let tcx = ccx.tcx;
|
||||
debug!("convert: item {} with id {}", tcx.sess.str_of(it.ident), it.id);
|
||||
debug!("convert: item {} with id {}", token::get_ident(it.ident), it.id);
|
||||
match it.node {
|
||||
// These don't define types.
|
||||
ast::ItemForeignMod(_) | ast::ItemMod(_) | ast::ItemMac(_) => {}
|
||||
|
@ -716,13 +717,7 @@ pub fn convert_foreign(ccx: &CrateCtxt, i: &ast::ForeignItem) {
|
|||
// map, and I regard each time that I use it as a personal and
|
||||
// moral failing, but at the moment it seems like the only
|
||||
// convenient way to extract the ABI. - ndm
|
||||
let abis = match ccx.tcx.items.find(i.id) {
|
||||
Some(ast_map::NodeForeignItem(_, abis, _, _)) => abis,
|
||||
ref x => {
|
||||
ccx.tcx.sess.bug(format!("unexpected sort of item \
|
||||
in get_item_ty(): {:?}", (*x)));
|
||||
}
|
||||
};
|
||||
let abis = ccx.tcx.map.get_foreign_abis(i.id);
|
||||
|
||||
let tpt = ty_of_foreign_item(ccx, i, abis);
|
||||
write_ty_to_tcx(ccx.tcx, i.id, tpt.ty);
|
||||
|
@ -758,8 +753,7 @@ pub fn instantiate_trait_ref(ccx: &CrateCtxt,
|
|||
ccx.tcx.sess.span_fatal(
|
||||
ast_trait_ref.path.span,
|
||||
format!("{} is not a trait",
|
||||
path_to_str(&ast_trait_ref.path,
|
||||
ccx.tcx.sess.intr())));
|
||||
path_to_str(&ast_trait_ref.path)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -769,8 +763,8 @@ fn get_trait_def(ccx: &CrateCtxt, trait_id: ast::DefId) -> @ty::TraitDef {
|
|||
return ty::lookup_trait_def(ccx.tcx, trait_id)
|
||||
}
|
||||
|
||||
match ccx.tcx.items.get(trait_id.node) {
|
||||
ast_map::NodeItem(item, _) => trait_def_of_item(ccx, item),
|
||||
match ccx.tcx.map.get(trait_id.node) {
|
||||
ast_map::NodeItem(item) => trait_def_of_item(ccx, item),
|
||||
_ => ccx.tcx.sess.bug(format!("get_trait_def({}): not an item",
|
||||
trait_id.node))
|
||||
}
|
||||
|
@ -845,7 +839,7 @@ pub fn ty_of_item(ccx: &CrateCtxt, it: &ast::Item)
|
|||
ty: ty::mk_bare_fn(ccx.tcx, tofd)
|
||||
};
|
||||
debug!("type of {} (id {}) is {}",
|
||||
tcx.sess.str_of(it.ident),
|
||||
token::get_ident(it.ident),
|
||||
it.id,
|
||||
ppaux::ty_to_str(tcx, tpt.ty));
|
||||
|
||||
|
|
|
@ -337,8 +337,8 @@ fn check_main_fn_ty(ccx: &CrateCtxt,
|
|||
let main_t = ty::node_id_to_type(tcx, main_id);
|
||||
match ty::get(main_t).sty {
|
||||
ty::ty_bare_fn(..) => {
|
||||
match tcx.items.find(main_id) {
|
||||
Some(ast_map::NodeItem(it,_)) => {
|
||||
match tcx.map.find(main_id) {
|
||||
Some(ast_map::NodeItem(it)) => {
|
||||
match it.node {
|
||||
ast::ItemFn(_, _, _, ref ps, _)
|
||||
if ps.is_parameterized() => {
|
||||
|
@ -382,8 +382,8 @@ fn check_start_fn_ty(ccx: &CrateCtxt,
|
|||
let start_t = ty::node_id_to_type(tcx, start_id);
|
||||
match ty::get(start_t).sty {
|
||||
ty::ty_bare_fn(_) => {
|
||||
match tcx.items.find(start_id) {
|
||||
Some(ast_map::NodeItem(it,_)) => {
|
||||
match tcx.map.find(start_id) {
|
||||
Some(ast_map::NodeItem(it)) => {
|
||||
match it.node {
|
||||
ast::ItemFn(_,_,_,ref ps,_)
|
||||
if ps.is_parameterized() => {
|
||||
|
|
|
@ -198,9 +198,7 @@ use arena::Arena;
|
|||
use middle::ty;
|
||||
use std::vec;
|
||||
use syntax::ast;
|
||||
use syntax::ast_map;
|
||||
use syntax::ast_util;
|
||||
use syntax::parse::token;
|
||||
use syntax::opt_vec;
|
||||
use syntax::visit;
|
||||
use syntax::visit::Visitor;
|
||||
|
@ -534,9 +532,7 @@ impl<'a> ConstraintContext<'a> {
|
|||
None => {
|
||||
self.tcx().sess.bug(format!(
|
||||
"No inferred index entry for {}",
|
||||
ast_map::node_id_to_str(self.tcx().items,
|
||||
param_id,
|
||||
token::get_ident_interner())));
|
||||
self.tcx().map.node_to_str(param_id)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -940,7 +936,7 @@ impl<'a> SolveContext<'a> {
|
|||
// attribute and report an error with various results if found.
|
||||
if ty::has_attr(tcx, item_def_id, "rustc_variance") {
|
||||
let found = item_variances.repr(tcx);
|
||||
tcx.sess.span_err(ast_map::node_span(tcx.items, item_id), found);
|
||||
tcx.sess.span_err(tcx.map.span(item_id), found);
|
||||
}
|
||||
|
||||
let mut item_variance_map = tcx.item_variance_map.borrow_mut();
|
||||
|
|
|
@ -70,7 +70,7 @@ pub fn explain_region_and_span(cx: ctxt, region: ty::Region)
|
|||
-> (~str, Option<Span>) {
|
||||
return match region {
|
||||
ReScope(node_id) => {
|
||||
match cx.items.find(node_id) {
|
||||
match cx.map.find(node_id) {
|
||||
Some(ast_map::NodeBlock(ref blk)) => {
|
||||
explain_span(cx, "block", blk.span)
|
||||
}
|
||||
|
@ -90,7 +90,7 @@ pub fn explain_region_and_span(cx: ctxt, region: ty::Region)
|
|||
Some(ast_map::NodeStmt(stmt)) => {
|
||||
explain_span(cx, "statement", stmt.span)
|
||||
}
|
||||
Some(ast_map::NodeItem(it, _)) if (match it.node {
|
||||
Some(ast_map::NodeItem(it)) if (match it.node {
|
||||
ast::ItemFn(..) => true, _ => false}) => {
|
||||
explain_span(cx, "function body", it.span)
|
||||
}
|
||||
|
@ -111,12 +111,12 @@ pub fn explain_region_and_span(cx: ctxt, region: ty::Region)
|
|||
bound_region_ptr_to_str(cx, fr.bound_region))
|
||||
};
|
||||
|
||||
match cx.items.find(fr.scope_id) {
|
||||
match cx.map.find(fr.scope_id) {
|
||||
Some(ast_map::NodeBlock(ref blk)) => {
|
||||
let (msg, opt_span) = explain_span(cx, "block", blk.span);
|
||||
(format!("{} {}", prefix, msg), opt_span)
|
||||
}
|
||||
Some(ast_map::NodeItem(it, _)) if match it.node {
|
||||
Some(ast_map::NodeItem(it)) if match it.node {
|
||||
ast::ItemImpl(..) => true, _ => false} => {
|
||||
let (msg, opt_span) = explain_span(cx, "impl", it.span);
|
||||
(format!("{} {}", prefix, msg), opt_span)
|
||||
|
@ -162,15 +162,15 @@ pub fn bound_region_to_str(cx: ctxt,
|
|||
}
|
||||
|
||||
match br {
|
||||
BrNamed(_, ident) => format!("{}'{}{}", prefix,
|
||||
cx.sess.str_of(ident), space_str),
|
||||
BrAnon(_) => prefix.to_str(),
|
||||
BrFresh(_) => prefix.to_str(),
|
||||
BrNamed(_, ident) => format!("{}'{}{}", prefix,
|
||||
token::get_ident(ident), space_str),
|
||||
BrAnon(_) => prefix.to_str(),
|
||||
BrFresh(_) => prefix.to_str(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn ReScope_id_to_str(cx: ctxt, node_id: ast::NodeId) -> ~str {
|
||||
match cx.items.find(node_id) {
|
||||
match cx.map.find(node_id) {
|
||||
Some(ast_map::NodeBlock(ref blk)) => {
|
||||
format!("<block at {}>",
|
||||
cx.sess.codemap.span_to_str(blk.span))
|
||||
|
@ -201,10 +201,7 @@ pub fn ReScope_id_to_str(cx: ctxt, node_id: ast::NodeId) -> ~str {
|
|||
None => {
|
||||
format!("<unknown-{}>", node_id)
|
||||
}
|
||||
_ => { cx.sess.bug(
|
||||
format!("ReScope refers to {}",
|
||||
ast_map::node_id_to_str(cx.items, node_id,
|
||||
token::get_ident_interner()))) }
|
||||
_ => cx.sess.bug(format!("ReScope refers to {}", cx.map.node_to_str(node_id)))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -228,7 +225,7 @@ pub fn region_to_str(cx: ctxt, prefix: &str, space: bool, region: Region) -> ~st
|
|||
// `explain_region()` or `note_and_explain_region()`.
|
||||
match region {
|
||||
ty::ReScope(_) => prefix.to_str(),
|
||||
ty::ReEarlyBound(_, _, ident) => cx.sess.str_of(ident).to_owned(),
|
||||
ty::ReEarlyBound(_, _, ident) => token::get_ident(ident).get().to_str(),
|
||||
ty::ReLateBound(_, br) => bound_region_to_str(cx, prefix, space, br),
|
||||
ty::ReFree(ref fr) => bound_region_to_str(cx, prefix, space, fr.bound_region),
|
||||
ty::ReInfer(ReSkolemized(_, br)) => {
|
||||
|
@ -329,11 +326,11 @@ pub fn ty_to_str(cx: ctxt, typ: t) -> ~str {
|
|||
s.push_str("fn");
|
||||
|
||||
match ident {
|
||||
Some(i) => {
|
||||
s.push_char(' ');
|
||||
s.push_str(cx.sess.str_of(i));
|
||||
}
|
||||
_ => { }
|
||||
Some(i) => {
|
||||
s.push_char(' ');
|
||||
s.push_str(token::get_ident(i).get());
|
||||
}
|
||||
_ => { }
|
||||
}
|
||||
|
||||
push_sig_to_str(cx, &mut s, '(', ')', sig);
|
||||
|
@ -432,8 +429,7 @@ pub fn ty_to_str(cx: ctxt, typ: t) -> ~str {
|
|||
// if there is an id, print that instead of the structural type:
|
||||
/*for def_id in ty::type_def_id(typ).iter() {
|
||||
// note that this typedef cannot have type parameters
|
||||
return ast_map::path_to_str(ty::item_path(cx, *def_id),
|
||||
cx.sess.intr());
|
||||
return ty::item_path_str(cx, *def_id);
|
||||
}*/
|
||||
|
||||
// pretty print the structural type representation:
|
||||
|
@ -470,23 +466,23 @@ pub fn ty_to_str(cx: ctxt, typ: t) -> ~str {
|
|||
let ty_param_defs = cx.ty_param_defs.borrow();
|
||||
let param_def = ty_param_defs.get().find(&did.node);
|
||||
let ident = match param_def {
|
||||
Some(def) => cx.sess.str_of(def.ident).to_owned(),
|
||||
None => {
|
||||
// This should not happen...
|
||||
format!("BUG[{:?}]", id)
|
||||
}
|
||||
Some(def) => token::get_ident(def.ident).get().to_str(),
|
||||
// This should not happen...
|
||||
None => format!("BUG[{:?}]", id)
|
||||
};
|
||||
if !cx.sess.verbose() { ident } else { format!("{}:{:?}", ident, did) }
|
||||
if !cx.sess.verbose() {
|
||||
ident
|
||||
} else {
|
||||
format!("{}:{:?}", ident, did)
|
||||
}
|
||||
}
|
||||
ty_self(..) => ~"Self",
|
||||
ty_enum(did, ref substs) | ty_struct(did, ref substs) => {
|
||||
let path = ty::item_path(cx, did);
|
||||
let base = ast_map::path_to_str(path, cx.sess.intr());
|
||||
let base = ty::item_path_str(cx, did);
|
||||
parameterized(cx, base, &substs.regions, substs.tps, did, false)
|
||||
}
|
||||
ty_trait(did, ref substs, s, mutbl, ref bounds) => {
|
||||
let path = ty::item_path(cx, did);
|
||||
let base = ast_map::path_to_str(path, cx.sess.intr());
|
||||
let base = ty::item_path_str(cx, did);
|
||||
let ty = parameterized(cx, base, &substs.regions,
|
||||
substs.tps, did, true);
|
||||
let bound_sep = if bounds.is_empty() { "" } else { ":" };
|
||||
|
@ -632,9 +628,9 @@ impl Repr for ty::TypeParameterDef {
|
|||
}
|
||||
|
||||
impl Repr for ty::RegionParameterDef {
|
||||
fn repr(&self, tcx: ctxt) -> ~str {
|
||||
fn repr(&self, _tcx: ctxt) -> ~str {
|
||||
format!("RegionParameterDef({}, {:?})",
|
||||
tcx.sess.str_of(self.ident),
|
||||
token::get_ident(self.ident),
|
||||
self.def_id)
|
||||
}
|
||||
}
|
||||
|
@ -689,35 +685,30 @@ impl Repr for ty::TraitRef {
|
|||
}
|
||||
|
||||
impl Repr for ast::Expr {
|
||||
fn repr(&self, tcx: ctxt) -> ~str {
|
||||
format!("expr({}: {})",
|
||||
self.id,
|
||||
pprust::expr_to_str(self, tcx.sess.intr()))
|
||||
fn repr(&self, _tcx: ctxt) -> ~str {
|
||||
format!("expr({}: {})", self.id, pprust::expr_to_str(self))
|
||||
}
|
||||
}
|
||||
|
||||
impl Repr for ast::Item {
|
||||
fn repr(&self, tcx: ctxt) -> ~str {
|
||||
format!("item({})",
|
||||
ast_map::node_id_to_str(tcx.items,
|
||||
self.id,
|
||||
token::get_ident_interner()))
|
||||
format!("item({})", tcx.map.node_to_str(self.id))
|
||||
}
|
||||
}
|
||||
|
||||
impl Repr for ast::Stmt {
|
||||
fn repr(&self, tcx: ctxt) -> ~str {
|
||||
fn repr(&self, _tcx: ctxt) -> ~str {
|
||||
format!("stmt({}: {})",
|
||||
ast_util::stmt_id(self),
|
||||
pprust::stmt_to_str(self, tcx.sess.intr()))
|
||||
pprust::stmt_to_str(self))
|
||||
}
|
||||
}
|
||||
|
||||
impl Repr for ast::Pat {
|
||||
fn repr(&self, tcx: ctxt) -> ~str {
|
||||
fn repr(&self, _tcx: ctxt) -> ~str {
|
||||
format!("pat({}: {})",
|
||||
self.id,
|
||||
pprust::pat_to_str(self, tcx.sess.intr()))
|
||||
pprust::pat_to_str(self))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -783,7 +774,7 @@ impl Repr for ast::DefId {
|
|||
// and otherwise fallback to just printing the crate/node pair
|
||||
if self.krate == ast::LOCAL_CRATE {
|
||||
{
|
||||
match tcx.items.find(self.node) {
|
||||
match tcx.map.find(self.node) {
|
||||
Some(ast_map::NodeItem(..)) |
|
||||
Some(ast_map::NodeForeignItem(..)) |
|
||||
Some(ast_map::NodeMethod(..)) |
|
||||
|
@ -848,8 +839,7 @@ impl Repr for ty::Method {
|
|||
|
||||
impl Repr for ast::Ident {
|
||||
fn repr(&self, _tcx: ctxt) -> ~str {
|
||||
let string = token::get_ident(self.name);
|
||||
string.get().to_str()
|
||||
token::get_ident(*self).get().to_str()
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -943,16 +933,6 @@ impl Repr for ty::vstore {
|
|||
}
|
||||
}
|
||||
|
||||
impl Repr for ast_map::PathElem {
|
||||
fn repr(&self, tcx: ctxt) -> ~str {
|
||||
match *self {
|
||||
ast_map::PathMod(id) => id.repr(tcx),
|
||||
ast_map::PathName(id) => id.repr(tcx),
|
||||
ast_map::PathPrettyName(id, _) => id.repr(tcx),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Repr for ty::BuiltinBound {
|
||||
fn repr(&self, _tcx: ctxt) -> ~str {
|
||||
format!("{:?}", *self)
|
||||
|
@ -1004,8 +984,7 @@ impl UserString for ty::BuiltinBounds {
|
|||
|
||||
impl UserString for ty::TraitRef {
|
||||
fn user_string(&self, tcx: ctxt) -> ~str {
|
||||
let path = ty::item_path(tcx, self.def_id);
|
||||
let base = ast_map::path_to_str(path, tcx.sess.intr());
|
||||
let base = ty::item_path_str(tcx, self.def_id);
|
||||
if tcx.sess.verbose() && self.substs.self_ty.is_some() {
|
||||
let mut all_tps = self.substs.tps.clone();
|
||||
for &t in self.substs.self_ty.iter() { all_tps.push(t); }
|
||||
|
|
|
@ -13,7 +13,6 @@
|
|||
|
||||
use syntax;
|
||||
use syntax::ast;
|
||||
use syntax::ast_map;
|
||||
use syntax::ast_util;
|
||||
use syntax::attr;
|
||||
use syntax::attr::AttributeMethods;
|
||||
|
@ -887,7 +886,7 @@ fn path_to_str(p: &ast::Path) -> ~str {
|
|||
|
||||
let mut s = ~"";
|
||||
let mut first = true;
|
||||
for i in p.segments.iter().map(|x| token::get_ident(x.identifier.name)) {
|
||||
for i in p.segments.iter().map(|x| token::get_ident(x.identifier)) {
|
||||
if !first || p.global {
|
||||
s.push_str("::");
|
||||
} else {
|
||||
|
@ -900,8 +899,7 @@ fn path_to_str(p: &ast::Path) -> ~str {
|
|||
|
||||
impl Clean<~str> for ast::Ident {
|
||||
fn clean(&self) -> ~str {
|
||||
let string = token::get_ident(self.name);
|
||||
string.get().to_owned()
|
||||
token::get_ident(*self).get().to_owned()
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1212,8 +1210,7 @@ fn resolve_type(path: Path, tpbs: Option<~[TyParamBound]>,
|
|||
let d = match def_map.get().find(&id) {
|
||||
Some(k) => k,
|
||||
None => {
|
||||
debug!("could not find {:?} in defmap (`{}`)", id,
|
||||
syntax::ast_map::node_id_to_str(tycx.items, id, cx.sess.intr()));
|
||||
debug!("could not find {:?} in defmap (`{}`)", id, tycx.map.node_to_str(id));
|
||||
fail!("Unexpected failure: unresolved id not in defmap (this is a bug!)")
|
||||
}
|
||||
};
|
||||
|
@ -1243,12 +1240,7 @@ fn resolve_type(path: Path, tpbs: Option<~[TyParamBound]>,
|
|||
ResolvedPath{ path: path, typarams: tpbs, id: def_id.node }
|
||||
} else {
|
||||
let fqn = csearch::get_item_path(tycx, def_id);
|
||||
let fqn = fqn.move_iter().map(|i| {
|
||||
match i {
|
||||
ast_map::PathMod(id) | ast_map::PathName(id) |
|
||||
ast_map::PathPrettyName(id, _) => id.clean()
|
||||
}
|
||||
}).to_owned_vec();
|
||||
let fqn = fqn.move_iter().map(|i| i.to_str()).to_owned_vec();
|
||||
ExternalPath{ path: path, typarams: tpbs, fqn: fqn, kind: kind,
|
||||
krate: def_id.krate }
|
||||
}
|
||||
|
|
|
@ -188,9 +188,9 @@ impl<'a> RustdocVisitor<'a> {
|
|||
};
|
||||
if analysis.public_items.contains(&def.node) { return false }
|
||||
|
||||
let item = self.cx.tycx.unwrap().items.get(def.node);
|
||||
let item = self.cx.tycx.unwrap().map.get(def.node);
|
||||
match item {
|
||||
ast_map::NodeItem(it, _) => {
|
||||
ast_map::NodeItem(it) => {
|
||||
if glob {
|
||||
match it.node {
|
||||
ast::ItemMod(ref m) => {
|
||||
|
|
|
@ -125,10 +125,9 @@ pub type Name = u32;
|
|||
/// A mark represents a unique id associated with a macro expansion
|
||||
pub type Mrk = u32;
|
||||
|
||||
impl<S:Encoder> Encodable<S> for Ident {
|
||||
impl<S: Encoder> Encodable<S> for Ident {
|
||||
fn encode(&self, s: &mut S) {
|
||||
let string = token::get_ident(self.name);
|
||||
s.emit_str(string.get());
|
||||
s.emit_str(token::get_ident(*self).get());
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -10,161 +10,93 @@
|
|||
|
||||
use abi::AbiSet;
|
||||
use ast::*;
|
||||
use ast;
|
||||
use ast_util;
|
||||
use codemap::Span;
|
||||
use diagnostic::SpanHandler;
|
||||
use fold::Folder;
|
||||
use fold;
|
||||
use parse::token::{get_ident_interner, IdentInterner};
|
||||
use parse::token;
|
||||
use print::pprust;
|
||||
use util::small_vector::SmallVector;
|
||||
|
||||
use std::logging;
|
||||
use std::cell::RefCell;
|
||||
use collections::SmallIntMap;
|
||||
use std::iter;
|
||||
use std::vec;
|
||||
|
||||
#[deriving(Clone, Eq)]
|
||||
pub enum PathElem {
|
||||
PathMod(Ident),
|
||||
PathName(Ident),
|
||||
|
||||
// A pretty name can come from an `impl` block. We attempt to select a
|
||||
// reasonable name for debuggers to see, but to guarantee uniqueness with
|
||||
// other paths the hash should also be taken into account during symbol
|
||||
// generation.
|
||||
PathPrettyName(Ident, u64),
|
||||
PathMod(Name),
|
||||
PathName(Name)
|
||||
}
|
||||
|
||||
impl PathElem {
|
||||
pub fn ident(&self) -> Ident {
|
||||
pub fn name(&self) -> Name {
|
||||
match *self {
|
||||
PathMod(ident) |
|
||||
PathName(ident) |
|
||||
PathPrettyName(ident, _) => ident
|
||||
PathMod(name) | PathName(name) => name
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub type Path = ~[PathElem];
|
||||
impl ToStr for PathElem {
|
||||
fn to_str(&self) -> ~str {
|
||||
token::get_name(self.name()).get().to_str()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn path_to_str_with_sep(p: &[PathElem], sep: &str, itr: @IdentInterner)
|
||||
-> ~str {
|
||||
let strs = p.map(|e| {
|
||||
match *e {
|
||||
PathMod(s) | PathName(s) | PathPrettyName(s, _) => {
|
||||
itr.get(s.name)
|
||||
#[deriving(Clone)]
|
||||
struct LinkedPathNode<'a> {
|
||||
node: PathElem,
|
||||
next: LinkedPath<'a>,
|
||||
}
|
||||
|
||||
type LinkedPath<'a> = Option<&'a LinkedPathNode<'a>>;
|
||||
|
||||
impl<'a> Iterator<PathElem> for LinkedPath<'a> {
|
||||
fn next(&mut self) -> Option<PathElem> {
|
||||
match *self {
|
||||
Some(node) => {
|
||||
*self = node.next;
|
||||
Some(node.node)
|
||||
}
|
||||
}
|
||||
});
|
||||
strs.connect(sep)
|
||||
}
|
||||
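The new LinkedPathNode/LinkedPath pair threads extra path segments through borrowed, stack-allocated nodes and walks them as an iterator. A self-contained modern-Rust rendition of the same idea, with the element type reduced to a plain integer:

    #[derive(Clone, Copy)]
    struct LinkedPathNode<'a> {
        node: u32, // stands in for a PathElem
        next: Option<&'a LinkedPathNode<'a>>,
    }

    // Walking the chain yields each element by value, newest segment first.
    struct LinkedPath<'a>(Option<&'a LinkedPathNode<'a>>);

    impl<'a> Iterator for LinkedPath<'a> {
        type Item = u32;
        fn next(&mut self) -> Option<u32> {
            let cur = self.0?;
            self.0 = cur.next;
            Some(cur.node)
        }
    }

    fn main() {
        let root = LinkedPathNode { node: 1, next: None };
        let child = LinkedPathNode { node: 2, next: Some(&root) };
        assert_eq!(LinkedPath(Some(&child)).collect::<Vec<_>>(), vec![2, 1]);
    }
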
|
||||
pub fn path_ident_to_str(p: &Path, i: Ident, itr: @IdentInterner) -> ~str {
|
||||
if p.is_empty() {
|
||||
itr.get(i.name).into_owned()
|
||||
} else {
|
||||
let string = itr.get(i.name);
|
||||
format!("{}::{}", path_to_str(*p, itr), string.as_slice())
|
||||
}
|
||||
}
|
||||
|
||||
pub fn path_to_str(p: &[PathElem], itr: @IdentInterner) -> ~str {
|
||||
path_to_str_with_sep(p, "::", itr)
|
||||
}
|
||||
|
||||
pub fn path_elem_to_str(pe: PathElem, itr: @IdentInterner) -> ~str {
|
||||
match pe {
|
||||
PathMod(s) | PathName(s) | PathPrettyName(s, _) => {
|
||||
itr.get(s.name).into_owned()
|
||||
None => None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// write a "pretty" version of `ty` to `out`. This is designed so
|
||||
/// that symbols of `impl`'d methods give some hint of where they came
|
||||
/// from, even if it's hard to read (previously they would all just be
|
||||
/// listed as `__extensions__::method_name::hash`, with no indication
|
||||
/// of the type).
|
||||
// FIXME: these dollar signs and the names in general are actually a
|
||||
// relic of $ being one of the very few valid symbol names on
|
||||
// unix. These kinds of details shouldn't be exposed way up here
|
||||
// in the ast.
|
||||
fn pretty_ty(ty: &Ty, itr: @IdentInterner, out: &mut ~str) {
|
||||
let (prefix, subty) = match ty.node {
|
||||
TyUniq(ty) => ("$UP$", &*ty),
|
||||
TyBox(ty) => ("$SP$", &*ty),
|
||||
TyPtr(MutTy { ty, mutbl }) => (if mutbl == MutMutable {"$RPmut$"} else {"$RP$"},
|
||||
&*ty),
|
||||
TyRptr(_, MutTy { ty, mutbl }) => (if mutbl == MutMutable {"$BPmut$"} else {"$BP$"},
|
||||
&*ty),
|
||||
// HACK(eddyb) move this into libstd (value wrapper for vec::Items).
|
||||
#[deriving(Clone)]
|
||||
pub struct Values<'a, T>(vec::Items<'a, T>);
|
||||
|
||||
TyVec(ty) => ("$VEC$", &*ty),
|
||||
TyFixedLengthVec(ty, _) => ("$FIXEDVEC$", &*ty),
|
||||
|
||||
// these can't be represented as <prefix><contained ty>, so
|
||||
// need custom handling.
|
||||
TyNil => { out.push_str("$NIL$"); return }
|
||||
TyPath(ref path, _, _) => {
|
||||
out.push_str(itr.get(path.segments
|
||||
.last()
|
||||
.unwrap()
|
||||
.identifier
|
||||
.name).as_slice());
|
||||
return
|
||||
}
|
||||
TyTup(ref tys) => {
|
||||
out.push_str(format!("$TUP_{}$", tys.len()));
|
||||
for subty in tys.iter() {
|
||||
pretty_ty(*subty, itr, out);
|
||||
out.push_char('$');
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// meh, better than nothing.
|
||||
TyBot => { out.push_str("$BOT$"); return }
|
||||
TyClosure(..) => { out.push_str("$CLOSURE$"); return }
|
||||
TyBareFn(..) => { out.push_str("$FN$"); return }
|
||||
TyTypeof(..) => { out.push_str("$TYPEOF$"); return }
|
||||
TyInfer(..) => { out.push_str("$INFER$"); return }
|
||||
|
||||
};
|
||||
|
||||
out.push_str(prefix);
|
||||
pretty_ty(subty, itr, out);
|
||||
impl<'a, T: Pod> Iterator<T> for Values<'a, T> {
|
||||
fn next(&mut self) -> Option<T> {
|
||||
let &Values(ref mut items) = self;
|
||||
items.next().map(|&x| x)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn impl_pretty_name(trait_ref: &Option<TraitRef>, ty: &Ty) -> PathElem {
|
||||
let itr = get_ident_interner();
|
||||
/// The type of the iterator used by with_path.
|
||||
pub type PathElems<'a, 'b> = iter::Chain<Values<'a, PathElem>, LinkedPath<'b>>;
|
||||
|
||||
let hash = (trait_ref, ty).hash();
|
||||
let mut pretty;
|
||||
match *trait_ref {
|
||||
None => pretty = ~"",
|
||||
Some(ref trait_ref) => {
|
||||
pretty = itr.get(trait_ref.path.segments.last().unwrap().identifier.name)
|
||||
.into_owned();
|
||||
pretty.push_char('$');
|
||||
pub fn path_to_str<PI: Iterator<PathElem>>(mut path: PI) -> ~str {
|
||||
let itr = token::get_ident_interner();
|
||||
|
||||
path.fold(~"", |mut s, e| {
|
||||
let e = itr.get(e.name());
|
||||
if !s.is_empty() {
|
||||
s.push_str("::");
|
||||
}
|
||||
};
|
||||
pretty_ty(ty, itr, &mut pretty);
|
||||
|
||||
PathPrettyName(Ident::new(itr.gensym(pretty)), hash)
|
||||
s.push_str(e.as_slice());
|
||||
s
|
||||
})
|
||||
}
|
||||
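path_to_str now folds any PathElem iterator into a "::"-joined string, and PathElem carries an interned Name rather than a full Ident. A runnable sketch of both pieces, using a plain slice as a stand-in interner:

    // Path elements now carry an interned Name index instead of a full Ident.
    #[derive(Clone, Copy)]
    enum PathElem {
        PathMod(u32),
        PathName(u32),
    }

    impl PathElem {
        fn name(&self) -> u32 {
            match *self {
                PathElem::PathMod(name) | PathElem::PathName(name) => name,
            }
        }
    }

    // Fold any element iterator straight into a "::"-joined string, without
    // ever materialising an owned path vector.
    fn path_to_str<I: Iterator<Item = PathElem>>(path: I, interner: &[&str]) -> String {
        path.fold(String::new(), |mut s, e| {
            if !s.is_empty() {
                s.push_str("::");
            }
            s.push_str(interner[e.name() as usize]);
            s
        })
    }

    fn main() {
        let interner = ["std", "vec", "Vec"];
        let path = [PathElem::PathMod(0), PathElem::PathMod(1), PathElem::PathName(2)];
        assert_eq!(path_to_str(path.iter().copied(), &interner), "std::vec::Vec");
    }
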
|
||||
#[deriving(Clone)]
|
||||
pub enum Node {
|
||||
NodeItem(@Item, @Path),
|
||||
NodeForeignItem(@ForeignItem, AbiSet, Visibility, @Path),
|
||||
NodeTraitMethod(@TraitMethod, DefId /* trait did */,
|
||||
@Path /* path to the trait */),
|
||||
NodeMethod(@Method, DefId /* impl did */, @Path /* path to the impl */),
|
||||
|
||||
/// NodeVariant represents a variant of an enum, e.g., for
|
||||
/// `enum A { B, C, D }`, there would be a NodeItem for `A`, and a
|
||||
/// NodeVariant item for each of `B`, `C`, and `D`.
|
||||
NodeVariant(P<Variant>, @Item, @Path),
|
||||
NodeItem(@Item),
|
||||
NodeForeignItem(@ForeignItem),
|
||||
NodeTraitMethod(@TraitMethod),
|
||||
NodeMethod(@Method),
|
||||
NodeVariant(P<Variant>),
|
||||
NodeExpr(@Expr),
|
||||
NodeStmt(@Stmt),
|
||||
NodeArg(@Pat),
|
||||
|
@ -172,27 +104,76 @@ pub enum Node {
|
|||
NodeBlock(P<Block>),
|
||||
|
||||
/// NodeStructCtor represents a tuple struct.
|
||||
NodeStructCtor(@StructDef, @Item, @Path),
|
||||
NodeCalleeScope(@Expr)
|
||||
NodeStructCtor(@StructDef),
|
||||
NodeCalleeScope(@Expr),
|
||||
}
|
||||
|
||||
impl Node {
    pub fn with_attrs<T>(&self, f: |Option<&[Attribute]>| -> T) -> T {
        let attrs = match *self {
            NodeItem(i, _) => Some(i.attrs.as_slice()),
            NodeForeignItem(fi, _, _, _) => Some(fi.attrs.as_slice()),
            NodeTraitMethod(tm, _, _) => match *tm {
                Required(ref type_m) => Some(type_m.attrs.as_slice()),
                Provided(m) => Some(m.attrs.as_slice())
            },
            NodeMethod(m, _, _) => Some(m.attrs.as_slice()),
            NodeVariant(ref v, _, _) => Some(v.node.attrs.as_slice()),
            // unit/tuple structs take the attributes straight from
            // the struct definition.
            NodeStructCtor(_, strct, _) => Some(strct.attrs.as_slice()),
            _ => None
        };
        f(attrs)

// The odd layout is to bring down the total size.
#[deriving(Clone)]
enum MapEntry {
    // Placeholder for holes in the map.
    NotPresent,

    // All the node types, with a parent ID.
    EntryItem(NodeId, @Item),
    EntryForeignItem(NodeId, @ForeignItem),
    EntryTraitMethod(NodeId, @TraitMethod),
    EntryMethod(NodeId, @Method),
    EntryVariant(NodeId, P<Variant>),
    EntryExpr(NodeId, @Expr),
    EntryStmt(NodeId, @Stmt),
    EntryArg(NodeId, @Pat),
    EntryLocal(NodeId, @Pat),
    EntryBlock(NodeId, P<Block>),
    EntryStructCtor(NodeId, @StructDef),
    EntryCalleeScope(NodeId, @Expr),

    // Roots for node trees.
    RootCrate,
    RootInlinedParent(P<InlinedParent>)
}

struct InlinedParent {
    path: ~[PathElem],
    // Required by NodeTraitMethod and NodeMethod.
    def_id: DefId
}

impl MapEntry {
    fn parent(&self) -> Option<NodeId> {
        Some(match *self {
            EntryItem(id, _) => id,
            EntryForeignItem(id, _) => id,
            EntryTraitMethod(id, _) => id,
            EntryMethod(id, _) => id,
            EntryVariant(id, _) => id,
            EntryExpr(id, _) => id,
            EntryStmt(id, _) => id,
            EntryArg(id, _) => id,
            EntryLocal(id, _) => id,
            EntryBlock(id, _) => id,
            EntryStructCtor(id, _) => id,
            EntryCalleeScope(id, _) => id,
            _ => return None
        })
    }

    fn to_node(&self) -> Option<Node> {
        Some(match *self {
            EntryItem(_, p) => NodeItem(p),
            EntryForeignItem(_, p) => NodeForeignItem(p),
            EntryTraitMethod(_, p) => NodeTraitMethod(p),
            EntryMethod(_, p) => NodeMethod(p),
            EntryVariant(_, p) => NodeVariant(p),
            EntryExpr(_, p) => NodeExpr(p),
            EntryStmt(_, p) => NodeStmt(p),
            EntryArg(_, p) => NodeArg(p),
            EntryLocal(_, p) => NodeLocal(p),
            EntryBlock(_, p) => NodeBlock(p),
            EntryStructCtor(_, p) => NodeStructCtor(p),
            EntryCalleeScope(_, p) => NodeCalleeScope(p),
            _ => return None
        })
    }
}

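MapEntry packs the parent NodeId into every variant so the whole map can be a flat vector of small entries. A minimal modern-Rust analog of that layout and its parent() accessor (a sketch with illustrative variants, not the real libsyntax types):

// Sketch: each entry carries its parent id inline; roots report no parent.
#[derive(Clone, Copy, Debug, PartialEq)]
enum MapEntry {
    NotPresent,
    EntryItem(u32),
    EntryExpr(u32),
    RootCrate,
}

impl MapEntry {
    fn parent(&self) -> Option<u32> {
        match *self {
            MapEntry::EntryItem(id) | MapEntry::EntryExpr(id) => Some(id),
            _ => None,
        }
    }
}

fn main() {
    assert_eq!(MapEntry::EntryExpr(7).parent(), Some(7));
    assert_eq!(MapEntry::RootCrate.parent(), None);
}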
@ -202,33 +183,201 @@ pub struct Map {
    /// a NodeId is in the map, but empirically the occupancy is about
    /// 75-80%, so there's not too much overhead (certainly less than
    /// a hashmap, since they (at the time of writing) have a maximum
    /// of 75% occupancy). (The additional overhead of the Option<>
    /// inside the SmallIntMap could be removed by adding an extra
    /// empty variant to Node and storing a vector here, but that was
    /// found to not make much difference.)
    /// of 75% occupancy).
    ///
    /// Also, indexing is pretty quick when you've got a vector and
    /// plain old integers.
    priv map: @RefCell<SmallIntMap<Node>>
    priv map: RefCell<~[MapEntry]>
}

impl Map {
    fn find_entry(&self, id: NodeId) -> Option<MapEntry> {
        let map = self.map.borrow();
        map.get().get(id as uint).map(|x| *x)
    }

    /// Retrieve the Node corresponding to `id`, failing if it cannot
    /// be found.
    pub fn get(&self, id: ast::NodeId) -> Node {
        let map = self.map.borrow();
        *map.get().get(&(id as uint))
    pub fn get(&self, id: NodeId) -> Node {
        match self.find(id) {
            Some(node) => node,
            None => fail!("couldn't find node id {} in the AST map", id)
        }
    }

    /// Retrieve the Node corresponding to `id`, returning None if
    /// cannot be found.
    pub fn find(&self, id: ast::NodeId) -> Option<Node> {
        let map = self.map.borrow();
        map.get().find(&(id as uint)).map(|&n| n)
    pub fn find(&self, id: NodeId) -> Option<Node> {
        self.find_entry(id).and_then(|x| x.to_node())
    }

    pub fn get_parent(&self, id: NodeId) -> NodeId {
        self.find_entry(id).and_then(|x| x.parent()).unwrap_or(id)
    }

    pub fn get_parent_did(&self, id: NodeId) -> DefId {
        let parent = self.get_parent(id);
        match self.find_entry(parent) {
            Some(RootInlinedParent(data)) => data.def_id,
            _ => ast_util::local_def(parent)
        }
    }

    pub fn get_foreign_abis(&self, id: NodeId) -> AbiSet {
        let parent = self.get_parent(id);
        let abis = match self.find_entry(parent) {
            Some(EntryItem(_, i)) => match i.node {
                ItemForeignMod(ref nm) => Some(nm.abis),
                _ => None
            },
            // Wrong but OK, because the only inlined foreign items are intrinsics.
            Some(RootInlinedParent(_)) => Some(AbiSet::Intrinsic()),
            _ => None
        };
        match abis {
            Some(abis) => abis,
            None => fail!("expected foreign mod or inlined parent, found {}",
                          self.node_to_str(parent))
        }
    }

    pub fn get_foreign_vis(&self, id: NodeId) -> Visibility {
        let vis = self.expect_foreign_item(id).vis;
        match self.find(self.get_parent(id)) {
            Some(NodeItem(i)) => vis.inherit_from(i.vis),
            _ => vis
        }
    }

    pub fn expect_item(&self, id: NodeId) -> @Item {
        match self.find(id) {
            Some(NodeItem(item)) => item,
            _ => fail!("expected item, found {}", self.node_to_str(id))
        }
    }

    pub fn expect_foreign_item(&self, id: NodeId) -> @ForeignItem {
        match self.find(id) {
            Some(NodeForeignItem(item)) => item,
            _ => fail!("expected foreign item, found {}", self.node_to_str(id))
        }
    }

    pub fn get_path_elem(&self, id: NodeId) -> PathElem {
        match self.get(id) {
            NodeItem(item) => {
                match item.node {
                    ItemMod(_) | ItemForeignMod(_) => {
                        PathMod(item.ident.name)
                    }
                    _ => PathName(item.ident.name)
                }
            }
            NodeForeignItem(i) => PathName(i.ident.name),
            NodeMethod(m) => PathName(m.ident.name),
            NodeTraitMethod(tm) => match *tm {
                Required(ref m) => PathName(m.ident.name),
                Provided(ref m) => PathName(m.ident.name)
            },
            NodeVariant(v) => PathName(v.node.name.name),
            node => fail!("no path elem for {:?}", node)
        }
    }

    pub fn with_path<T>(&self, id: NodeId, f: |PathElems| -> T) -> T {
        self.with_path_next(id, None, f)
    }

    pub fn path_to_str(&self, id: NodeId) -> ~str {
        self.with_path(id, |path| path_to_str(path))
    }

    fn path_to_str_with_ident(&self, id: NodeId, i: Ident) -> ~str {
        self.with_path(id, |path| {
            path_to_str(path.chain(Some(PathName(i.name)).move_iter()))
        })
    }

    fn with_path_next<T>(&self, id: NodeId, next: LinkedPath, f: |PathElems| -> T) -> T {
        let parent = self.get_parent(id);
        let parent = match self.find_entry(id) {
            Some(EntryForeignItem(..)) | Some(EntryVariant(..)) => {
                // Anonymous extern items, enum variants and struct ctors
                // go in the parent scope.
                self.get_parent(parent)
            }
            // But tuple struct ctors don't have names, so use the path of its
            // parent, the struct item. Similarly with closure expressions.
            Some(EntryStructCtor(..)) | Some(EntryExpr(..)) => {
                return self.with_path_next(parent, next, f);
            }
            _ => parent
        };
        if parent == id {
            match self.find_entry(id) {
                Some(RootInlinedParent(data)) => {
                    f(Values(data.path.iter()).chain(next))
                }
                _ => f(Values([].iter()).chain(next))
            }
        } else {
            self.with_path_next(parent, Some(&LinkedPathNode {
                node: self.get_path_elem(id),
                next: next
            }), f)
        }
    }

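with_path_next above is the heart of "Paths without storing them": it recurses toward the crate root, threading a stack-allocated linked list of the path elements seen so far, and only renders the path inside the callback. A simplified modern-Rust sketch of that pattern (the parent table, element names, and rendering to a string are illustrative assumptions):

// Sketch: rebuild "foo::bar"-style paths by walking parent links, using a
// linked list that lives entirely on the call stack, as LinkedPathNode does.
struct LinkedNode<'a> {
    elem: &'a str,
    next: Option<&'a LinkedNode<'a>>,
}

struct Map {
    // (parent id, name) for each node; a node that is its own parent is a root.
    nodes: Vec<(usize, &'static str)>,
}

impl Map {
    fn with_path<T>(&self, id: usize, f: impl FnOnce(&str) -> T) -> T {
        self.with_path_next(id, None, f)
    }

    fn with_path_next<T>(&self, id: usize, next: Option<&LinkedNode>,
                         f: impl FnOnce(&str) -> T) -> T {
        let (parent, name) = self.nodes[id];
        if parent == id {
            // Reached the root: render the accumulated list front-to-back.
            let mut out = String::new();
            let mut cur = next;
            while let Some(n) = cur {
                if !out.is_empty() { out.push_str("::"); }
                out.push_str(n.elem);
                cur = n.next;
            }
            f(&out)
        } else {
            self.with_path_next(parent, Some(&LinkedNode { elem: name, next }), f)
        }
    }
}

fn main() {
    let map = Map { nodes: vec![(0, "crate"), (0, "foo"), (1, "bar")] };
    assert_eq!(map.with_path(2, |p| p.to_string()), "foo::bar");
}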
    pub fn with_attrs<T>(&self, id: NodeId, f: |Option<&[Attribute]>| -> T) -> T {
        let attrs = match self.get(id) {
            NodeItem(i) => Some(i.attrs.as_slice()),
            NodeForeignItem(fi) => Some(fi.attrs.as_slice()),
            NodeTraitMethod(tm) => match *tm {
                Required(ref type_m) => Some(type_m.attrs.as_slice()),
                Provided(m) => Some(m.attrs.as_slice())
            },
            NodeMethod(m) => Some(m.attrs.as_slice()),
            NodeVariant(ref v) => Some(v.node.attrs.as_slice()),
            // unit/tuple structs take the attributes straight from
            // the struct definition.
            // FIXME(eddyb) make this work again (requires access to the map).
            NodeStructCtor(_) => {
                return self.with_attrs(self.get_parent(id), f);
            }
            _ => None
        };
        f(attrs)
    }

    pub fn span(&self, id: NodeId) -> Span {
        match self.find(id) {
            Some(NodeItem(item)) => item.span,
            Some(NodeForeignItem(foreign_item)) => foreign_item.span,
            Some(NodeTraitMethod(trait_method)) => {
                match *trait_method {
                    Required(ref type_method) => type_method.span,
                    Provided(ref method) => method.span,
                }
            }
            Some(NodeMethod(method)) => method.span,
            Some(NodeVariant(variant)) => variant.span,
            Some(NodeExpr(expr)) => expr.span,
            Some(NodeStmt(stmt)) => stmt.span,
            Some(NodeArg(pat)) | Some(NodeLocal(pat)) => pat.span,
            Some(NodeBlock(block)) => block.span,
            Some(NodeStructCtor(_)) => self.expect_item(self.get_parent(id)).span,
            Some(NodeCalleeScope(expr)) => expr.span,
            _ => fail!("node_span: could not find span for id {}", id),
        }
    }

    pub fn node_to_str(&self, id: NodeId) -> ~str {
        node_id_to_str(self, id)
    }
}

pub trait FoldOps {
|
||||
fn new_id(&self, id: ast::NodeId) -> ast::NodeId {
|
||||
fn new_id(&self, id: NodeId) -> NodeId {
|
||||
id
|
||||
}
|
||||
fn new_span(&self, span: Span) -> Span {
|
||||
|
@ -236,23 +385,28 @@ pub trait FoldOps {
|
|||
}
|
||||
}
|
||||
|
||||
pub struct Ctx<F> {
|
||||
map: Map,
|
||||
path: Path,
|
||||
diag: @SpanHandler,
|
||||
pub struct Ctx<'a, F> {
|
||||
map: &'a Map,
|
||||
// The node in which we are currently mapping (an item or a method).
|
||||
// When equal to DUMMY_NODE_ID, the next mapped node becomes the parent.
|
||||
parent: NodeId,
|
||||
fold_ops: F
|
||||
}
|
||||
|
||||
impl<F> Ctx<F> {
|
||||
fn insert(&self, id: ast::NodeId, node: Node) {
|
||||
impl<'a, F> Ctx<'a, F> {
|
||||
fn insert(&self, id: NodeId, entry: MapEntry) {
|
||||
let mut map = self.map.map.borrow_mut();
|
||||
map.get().insert(id as uint, node);
|
||||
map.get().grow_set(id as uint, &NotPresent, entry);
|
||||
}
|
||||
}
|
||||
|
||||
impl<F: FoldOps> Folder for Ctx<F> {
|
||||
fn new_id(&mut self, id: ast::NodeId) -> ast::NodeId {
|
||||
self.fold_ops.new_id(id)
|
||||
impl<'a, F: FoldOps> Folder for Ctx<'a, F> {
|
||||
fn new_id(&mut self, id: NodeId) -> NodeId {
|
||||
let id = self.fold_ops.new_id(id);
|
||||
if self.parent == DUMMY_NODE_ID {
|
||||
self.parent = id;
|
||||
}
|
||||
id
|
||||
}
|
||||
|
||||
fn new_span(&mut self, span: Span) -> Span {
|
||||
|
@ -260,75 +414,52 @@ impl<F: FoldOps> Folder for Ctx<F> {
|
|||
}
|
||||
|
||||
fn fold_item(&mut self, i: @Item) -> SmallVector<@Item> {
|
||||
// clone is FIXME #2543
|
||||
let item_path = @self.path.clone();
|
||||
self.path.push(match i.node {
|
||||
ItemImpl(_, ref maybe_trait, ty, _) => {
|
||||
// Right now the ident on impls is __extensions__ which isn't
|
||||
// very pretty when debugging, so attempt to select a better
|
||||
// name to use.
|
||||
impl_pretty_name(maybe_trait, ty)
|
||||
}
|
||||
ItemMod(_) | ItemForeignMod(_) => PathMod(i.ident),
|
||||
_ => PathName(i.ident)
|
||||
});
|
||||
let parent = self.parent;
|
||||
self.parent = DUMMY_NODE_ID;
|
||||
|
||||
let i = fold::noop_fold_item(i, self).expect_one("expected one item");
|
||||
self.insert(i.id, NodeItem(i, item_path));
|
||||
assert_eq!(self.parent, i.id);
|
||||
|
||||
match i.node {
|
||||
ItemImpl(_, _, _, ref ms) => {
|
||||
// clone is FIXME #2543
|
||||
let p = @self.path.clone();
|
||||
let impl_did = ast_util::local_def(i.id);
|
||||
for &m in ms.iter() {
|
||||
self.insert(m.id, NodeMethod(m, impl_did, p));
|
||||
self.insert(m.id, EntryMethod(self.parent, m));
|
||||
}
|
||||
|
||||
}
|
||||
ItemEnum(ref enum_definition, _) => {
|
||||
// clone is FIXME #2543
|
||||
let p = @self.path.clone();
|
||||
for &v in enum_definition.variants.iter() {
|
||||
self.insert(v.node.id, NodeVariant(v, i, p));
|
||||
self.insert(v.node.id, EntryVariant(self.parent, v));
|
||||
}
|
||||
}
|
||||
ItemForeignMod(ref nm) => {
|
||||
for nitem in nm.items.iter() {
|
||||
// Compute the visibility for this native item.
|
||||
let visibility = nitem.vis.inherit_from(i.vis);
|
||||
|
||||
self.insert(nitem.id,
|
||||
// Anonymous extern mods go in the parent scope.
|
||||
NodeForeignItem(*nitem, nm.abis, visibility, item_path));
|
||||
for &nitem in nm.items.iter() {
|
||||
self.insert(nitem.id, EntryForeignItem(self.parent, nitem));
|
||||
}
|
||||
}
|
||||
ItemStruct(struct_def, _) => {
|
||||
// If this is a tuple-like struct, register the constructor.
|
||||
match struct_def.ctor_id {
|
||||
None => {}
|
||||
Some(ctor_id) => {
|
||||
// clone is FIXME #2543
|
||||
let p = @self.path.clone();
|
||||
self.insert(ctor_id, NodeStructCtor(struct_def, i, p));
|
||||
self.insert(ctor_id, EntryStructCtor(self.parent,
|
||||
struct_def));
|
||||
}
|
||||
None => {}
|
||||
}
|
||||
}
|
||||
ItemTrait(_, ref traits, ref methods) => {
|
||||
for t in traits.iter() {
|
||||
self.insert(t.ref_id, NodeItem(i, item_path));
|
||||
self.insert(t.ref_id, EntryItem(self.parent, i));
|
||||
}
|
||||
|
||||
// clone is FIXME #2543
|
||||
let p = @self.path.clone();
|
||||
for tm in methods.iter() {
|
||||
let d_id = ast_util::local_def(i.id);
|
||||
match *tm {
|
||||
Required(ref m) => {
|
||||
self.insert(m.id, NodeTraitMethod(@(*tm).clone(), d_id, p));
|
||||
self.insert(m.id, EntryTraitMethod(self.parent,
|
||||
@(*tm).clone()));
|
||||
}
|
||||
Provided(m) => {
|
||||
self.insert(m.id, NodeTraitMethod(@Provided(m), d_id, p));
|
||||
self.insert(m.id, EntryTraitMethod(self.parent,
|
||||
@Provided(m)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -336,7 +467,8 @@ impl<F: FoldOps> Folder for Ctx<F> {
|
|||
_ => {}
|
||||
}
|
||||
|
||||
self.path.pop().unwrap();
|
||||
self.parent = parent;
|
||||
self.insert(i.id, EntryItem(self.parent, i));
|
||||
|
||||
SmallVector::one(i)
|
||||
}
|
||||
|
@ -346,7 +478,7 @@ impl<F: FoldOps> Folder for Ctx<F> {
|
|||
match pat.node {
|
||||
PatIdent(..) => {
|
||||
// Note: this is at least *potentially* a pattern...
|
||||
self.insert(pat.id, NodeLocal(pat));
|
||||
self.insert(pat.id, EntryLocal(self.parent, pat));
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
@ -357,14 +489,11 @@ impl<F: FoldOps> Folder for Ctx<F> {
|
|||
fn fold_expr(&mut self, expr: @Expr) -> @Expr {
|
||||
let expr = fold::noop_fold_expr(expr, self);
|
||||
|
||||
self.insert(expr.id, NodeExpr(expr));
|
||||
self.insert(expr.id, EntryExpr(self.parent, expr));
|
||||
|
||||
// Expressions which are or might be calls:
|
||||
{
|
||||
let r = expr.get_callee_id();
|
||||
for callee_id in r.iter() {
|
||||
self.insert(*callee_id, NodeCalleeScope(expr));
|
||||
}
|
||||
for callee_id in expr.get_callee_id().iter() {
|
||||
self.insert(*callee_id, EntryCalleeScope(self.parent, expr));
|
||||
}
|
||||
|
||||
expr
|
||||
|
@ -372,196 +501,173 @@ impl<F: FoldOps> Folder for Ctx<F> {
|
|||
|
||||
fn fold_stmt(&mut self, stmt: &Stmt) -> SmallVector<@Stmt> {
|
||||
let stmt = fold::noop_fold_stmt(stmt, self).expect_one("expected one statement");
|
||||
self.insert(ast_util::stmt_id(stmt), NodeStmt(stmt));
|
||||
self.insert(ast_util::stmt_id(stmt), EntryStmt(self.parent, stmt));
|
||||
SmallVector::one(stmt)
|
||||
}
|
||||
|
||||
fn fold_method(&mut self, m: @Method) -> @Method {
|
||||
self.path.push(PathName(m.ident));
|
||||
let parent = self.parent;
|
||||
self.parent = DUMMY_NODE_ID;
|
||||
let m = fold::noop_fold_method(m, self);
|
||||
self.path.pop();
|
||||
assert_eq!(self.parent, m.id);
|
||||
self.parent = parent;
|
||||
m
|
||||
}
|
||||
|
||||
fn fold_fn_decl(&mut self, decl: &FnDecl) -> P<FnDecl> {
|
||||
let decl = fold::noop_fold_fn_decl(decl, self);
|
||||
for a in decl.inputs.iter() {
|
||||
self.insert(a.id, NodeArg(a.pat));
|
||||
self.insert(a.id, EntryArg(self.parent, a.pat));
|
||||
}
|
||||
decl
|
||||
}
|
||||
|
||||
fn fold_block(&mut self, block: P<Block>) -> P<Block> {
|
||||
let block = fold::noop_fold_block(block, self);
|
||||
self.insert(block.id, NodeBlock(block));
|
||||
self.insert(block.id, EntryBlock(self.parent, block));
|
||||
block
|
||||
}
|
||||
}
|
||||
|
||||
pub fn map_crate<F: 'static + FoldOps>(diag: @SpanHandler, c: Crate,
                                       fold_ops: F) -> (Crate, Map) {
    let mut cx = Ctx {
        map: Map { map: @RefCell::new(SmallIntMap::new()) },
        path: ~[],
        diag: diag,
        fold_ops: fold_ops
pub fn map_crate<F: FoldOps>(krate: Crate, fold_ops: F) -> (Crate, Map) {
    let map = Map { map: RefCell::new(~[]) };
    let krate = {
        let mut cx = Ctx {
            map: &map,
            parent: CRATE_NODE_ID,
            fold_ops: fold_ops
        };
        cx.insert(CRATE_NODE_ID, RootCrate);
        cx.fold_crate(krate)
    };
    let krate = cx.fold_crate(c);

    if log_enabled!(logging::DEBUG) {
        let map = cx.map.map.borrow();
        // this only makes sense for ordered stores; note the
        let map = map.map.borrow();
        // This only makes sense for ordered stores; note the
        // enumerate to count the number of entries.
        let (entries_less_1, (largest_id, _)) =
            map.get().iter().enumerate().last().expect("AST map was empty after folding?");
        let (entries_less_1, _) = map.get().iter().filter(|&x| {
            match *x {
                NotPresent => false,
                _ => true
            }
        }).enumerate().last().expect("AST map was empty after folding?");

        let entries = entries_less_1 + 1;
        let vector_length = largest_id + 1;
        let vector_length = map.get().len();
        debug!("The AST map has {} entries with a maximum of {}: occupancy {:.1}%",
               entries, vector_length, (entries as f64 / vector_length as f64) * 100.);
    }

    (krate, cx.map)
    (krate, map)
}

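The debug block above just counts non-placeholder slots against the vector's length. A standalone sketch of that occupancy computation over a dense, id-indexed vector (Option slots stand in for the NotPresent placeholder; not the real MapEntry type):

// Sketch: occupancy of a dense id-indexed map with placeholder holes.
fn occupancy<T>(slots: &[Option<T>]) -> f64 {
    let entries = slots.iter().filter(|s| s.is_some()).count();
    entries as f64 / slots.len() as f64 * 100.0
}

fn main() {
    let slots = vec![Some("crate"), None, Some("item"), Some("expr")];
    // 3 of 4 slots are occupied: 75%.
    assert_eq!(occupancy(&slots), 75.0);
}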
// Used for items loaded from external crate that are being inlined into this
|
||||
// crate. The `path` should be the path to the item but should not include
|
||||
// the item itself.
|
||||
pub fn map_decoded_item<F: 'static + FoldOps>(diag: @SpanHandler,
|
||||
map: Map,
|
||||
path: Path,
|
||||
fold_ops: F,
|
||||
fold_ii: |&mut Ctx<F>| -> InlinedItem)
|
||||
-> InlinedItem {
|
||||
// I believe it is ok for the local IDs of inlined items from other crates
|
||||
// to overlap with the local ids from this crate, so just generate the ids
|
||||
// starting from 0.
|
||||
pub fn map_decoded_item<F: FoldOps>(map: &Map,
|
||||
path: ~[PathElem],
|
||||
fold_ops: F,
|
||||
fold: |&mut Ctx<F>| -> InlinedItem)
|
||||
-> InlinedItem {
|
||||
let mut cx = Ctx {
|
||||
map: map,
|
||||
path: path.clone(),
|
||||
diag: diag,
|
||||
parent: DUMMY_NODE_ID,
|
||||
fold_ops: fold_ops
|
||||
};
|
||||
|
||||
let ii = fold_ii(&mut cx);
|
||||
// Generate a NodeId for the RootInlinedParent inserted below.
|
||||
cx.new_id(DUMMY_NODE_ID);
|
||||
|
||||
// Methods get added to the AST map when their impl is visited. Since we
|
||||
// don't decode and instantiate the impl, but just the method, we have to
|
||||
// add it to the table now. Likewise with foreign items.
|
||||
let mut def_id = DefId { krate: LOCAL_CRATE, node: DUMMY_NODE_ID };
|
||||
let ii = fold(&mut cx);
|
||||
match ii {
|
||||
IIItem(..) => {} // fallthrough
|
||||
IIForeign(i) => {
|
||||
cx.insert(i.id, NodeForeignItem(i,
|
||||
AbiSet::Intrinsic(),
|
||||
i.vis, // Wrong but OK
|
||||
@path));
|
||||
}
|
||||
IIItem(_) => {}
|
||||
IIMethod(impl_did, is_provided, m) => {
|
||||
let entry = if is_provided {
|
||||
NodeTraitMethod(@Provided(m), impl_did, @path)
|
||||
EntryTraitMethod(cx.parent, @Provided(m))
|
||||
} else {
|
||||
NodeMethod(m, impl_did, @path)
|
||||
EntryMethod(cx.parent, m)
|
||||
};
|
||||
cx.insert(m.id, entry);
|
||||
def_id = impl_did;
|
||||
}
|
||||
IIForeign(i) => {
|
||||
cx.insert(i.id, EntryForeignItem(cx.parent, i));
|
||||
}
|
||||
}
|
||||
|
||||
cx.insert(cx.parent, RootInlinedParent(P(InlinedParent {
|
||||
path: path,
|
||||
def_id: def_id
|
||||
})));
|
||||
|
||||
ii
|
||||
}
|
||||
|
||||
pub fn node_id_to_str(map: Map, id: NodeId, itr: @IdentInterner) -> ~str {
|
||||
fn node_id_to_str(map: &Map, id: NodeId) -> ~str {
|
||||
match map.find(id) {
|
||||
None => {
|
||||
format!("unknown node (id={})", id)
|
||||
}
|
||||
Some(NodeItem(item, path)) => {
|
||||
let path_str = path_ident_to_str(path, item.ident, itr);
|
||||
let item_str = match item.node {
|
||||
ItemStatic(..) => ~"static",
|
||||
ItemFn(..) => ~"fn",
|
||||
ItemMod(..) => ~"mod",
|
||||
ItemForeignMod(..) => ~"foreign mod",
|
||||
ItemTy(..) => ~"ty",
|
||||
ItemEnum(..) => ~"enum",
|
||||
ItemStruct(..) => ~"struct",
|
||||
ItemTrait(..) => ~"trait",
|
||||
ItemImpl(..) => ~"impl",
|
||||
ItemMac(..) => ~"macro"
|
||||
};
|
||||
format!("{} {} (id={})", item_str, path_str, id)
|
||||
}
|
||||
Some(NodeForeignItem(item, abi, _, path)) => {
|
||||
format!("foreign item {} with abi {:?} (id={})",
|
||||
path_ident_to_str(path, item.ident, itr), abi, id)
|
||||
}
|
||||
Some(NodeMethod(m, _, path)) => {
|
||||
let name = itr.get(m.ident.name);
|
||||
format!("method {} in {} (id={})",
|
||||
name.as_slice(), path_to_str(*path, itr), id)
|
||||
}
|
||||
Some(NodeTraitMethod(ref tm, _, path)) => {
|
||||
let m = ast_util::trait_method_to_ty_method(&**tm);
|
||||
let name = itr.get(m.ident.name);
|
||||
format!("method {} in {} (id={})",
|
||||
name.as_slice(), path_to_str(*path, itr), id)
|
||||
}
|
||||
Some(NodeVariant(ref variant, _, path)) => {
|
||||
let name = itr.get(variant.node.name.name);
|
||||
format!("variant {} in {} (id={})",
|
||||
name.as_slice(),
|
||||
path_to_str(*path, itr), id)
|
||||
}
|
||||
Some(NodeExpr(expr)) => {
|
||||
format!("expr {} (id={})", pprust::expr_to_str(expr, itr), id)
|
||||
}
|
||||
Some(NodeCalleeScope(expr)) => {
|
||||
format!("callee_scope {} (id={})", pprust::expr_to_str(expr, itr), id)
|
||||
}
|
||||
Some(NodeStmt(stmt)) => {
|
||||
format!("stmt {} (id={})",
|
||||
pprust::stmt_to_str(stmt, itr), id)
|
||||
}
|
||||
Some(NodeArg(pat)) => {
|
||||
format!("arg {} (id={})", pprust::pat_to_str(pat, itr), id)
|
||||
}
|
||||
Some(NodeLocal(pat)) => {
|
||||
format!("local {} (id={})", pprust::pat_to_str(pat, itr), id)
|
||||
}
|
||||
Some(NodeBlock(block)) => {
|
||||
format!("block {} (id={})", pprust::block_to_str(block, itr), id)
|
||||
}
|
||||
Some(NodeStructCtor(_, _, path)) => {
|
||||
format!("struct_ctor {} (id={})", path_to_str(*path, itr), id)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn node_item_query<Result>(items: Map, id: NodeId, query: |@Item| -> Result, error_msg: ~str)
|
||||
-> Result {
|
||||
match items.find(id) {
|
||||
Some(NodeItem(it, _)) => query(it),
|
||||
_ => fail!("{}", error_msg)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn node_span(items: Map, id: ast::NodeId) -> Span {
|
||||
match items.find(id) {
|
||||
Some(NodeItem(item, _)) => item.span,
|
||||
Some(NodeForeignItem(foreign_item, _, _, _)) => foreign_item.span,
|
||||
Some(NodeTraitMethod(trait_method, _, _)) => {
|
||||
match *trait_method {
|
||||
Required(ref type_method) => type_method.span,
|
||||
Provided(ref method) => method.span,
|
||||
}
|
||||
Some(NodeItem(item)) => {
|
||||
let path_str = map.path_to_str_with_ident(id, item.ident);
|
||||
let item_str = match item.node {
|
||||
ItemStatic(..) => "static",
|
||||
ItemFn(..) => "fn",
|
||||
ItemMod(..) => "mod",
|
||||
ItemForeignMod(..) => "foreign mod",
|
||||
ItemTy(..) => "ty",
|
||||
ItemEnum(..) => "enum",
|
||||
ItemStruct(..) => "struct",
|
||||
ItemTrait(..) => "trait",
|
||||
ItemImpl(..) => "impl",
|
||||
ItemMac(..) => "macro"
|
||||
};
|
||||
format!("{} {} (id={})", item_str, path_str, id)
|
||||
}
|
||||
Some(NodeForeignItem(item)) => {
|
||||
let path_str = map.path_to_str_with_ident(id, item.ident);
|
||||
format!("foreign item {} (id={})", path_str, id)
|
||||
}
|
||||
Some(NodeMethod(m)) => {
|
||||
format!("method {} in {} (id={})",
|
||||
token::get_ident(m.ident),
|
||||
map.path_to_str(id), id)
|
||||
}
|
||||
Some(NodeTraitMethod(ref tm)) => {
|
||||
let m = ast_util::trait_method_to_ty_method(&**tm);
|
||||
format!("method {} in {} (id={})",
|
||||
token::get_ident(m.ident),
|
||||
map.path_to_str(id), id)
|
||||
}
|
||||
Some(NodeVariant(ref variant)) => {
|
||||
format!("variant {} in {} (id={})",
|
||||
token::get_ident(variant.node.name),
|
||||
map.path_to_str(id), id)
|
||||
}
|
||||
Some(NodeExpr(expr)) => {
|
||||
format!("expr {} (id={})", pprust::expr_to_str(expr), id)
|
||||
}
|
||||
Some(NodeCalleeScope(expr)) => {
|
||||
format!("callee_scope {} (id={})", pprust::expr_to_str(expr), id)
|
||||
}
|
||||
Some(NodeStmt(stmt)) => {
|
||||
format!("stmt {} (id={})", pprust::stmt_to_str(stmt), id)
|
||||
}
|
||||
Some(NodeArg(pat)) => {
|
||||
format!("arg {} (id={})", pprust::pat_to_str(pat), id)
|
||||
}
|
||||
Some(NodeLocal(pat)) => {
|
||||
format!("local {} (id={})", pprust::pat_to_str(pat), id)
|
||||
}
|
||||
Some(NodeBlock(block)) => {
|
||||
format!("block {} (id={})", pprust::block_to_str(block), id)
|
||||
}
|
||||
Some(NodeStructCtor(_)) => {
|
||||
format!("struct_ctor {} (id={})", map.path_to_str(id), id)
|
||||
}
|
||||
None => {
|
||||
format!("unknown node (id={})", id)
|
||||
}
|
||||
Some(NodeMethod(method, _, _)) => method.span,
|
||||
Some(NodeVariant(variant, _, _)) => variant.span,
|
||||
Some(NodeExpr(expr)) => expr.span,
|
||||
Some(NodeStmt(stmt)) => stmt.span,
|
||||
Some(NodeArg(pat)) | Some(NodeLocal(pat)) => pat.span,
|
||||
Some(NodeBlock(block)) => block.span,
|
||||
Some(NodeStructCtor(_, item, _)) => item.span,
|
||||
Some(NodeCalleeScope(expr)) => expr.span,
|
||||
None => fail!("node_span: could not find id {}", id),
|
||||
}
|
||||
}
|
||||
|
|
|
@ -14,6 +14,7 @@ use ast_util;
|
|||
use codemap::Span;
|
||||
use opt_vec;
|
||||
use parse::token;
|
||||
use print::pprust;
|
||||
use visit::Visitor;
|
||||
use visit;
|
||||
|
||||
|
@ -26,8 +27,7 @@ use std::local_data;
|
|||
pub fn path_name_i(idents: &[Ident]) -> ~str {
|
||||
// FIXME: Bad copies (#2543 -- same for everything else that says "bad")
|
||||
idents.map(|i| {
|
||||
let string = token::get_ident(i.name);
|
||||
string.get().to_str()
|
||||
token::get_ident(*i).get().to_str()
|
||||
}).connect("::")
|
||||
}
|
||||
|
||||
|
@ -246,6 +246,23 @@ pub fn unguarded_pat(a: &Arm) -> Option<~[@Pat]> {
    }
}

/// Generate a "pretty" name for an `impl` from its type and trait.
/// This is designed so that symbols of `impl`'d methods give some
/// hint of where they came from, (previously they would all just be
/// listed as `__extensions__::method_name::hash`, with no indication
/// of the type).
pub fn impl_pretty_name(trait_ref: &Option<TraitRef>, ty: &Ty) -> Ident {
    let mut pretty = pprust::ty_to_str(ty);
    match *trait_ref {
        Some(ref trait_ref) => {
            pretty.push_char('.');
            pretty.push_str(pprust::path_to_str(&trait_ref.path));
        }
        None => {}
    }
    token::gensym_ident(pretty)
}

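The new ast_util::impl_pretty_name builds the ident as pretty-printed self type, then '.', then the trait path. A small sketch of that string construction (plain strings stand in for the pprust output and for ident gensymming):

// Sketch: assemble an impl "pretty name" the way impl_pretty_name does.
fn impl_pretty_name(trait_path: Option<&str>, ty_str: &str) -> String {
    let mut pretty = ty_str.to_string();
    if let Some(path) = trait_path {
        pretty.push('.');
        pretty.push_str(path);
    }
    pretty
}

fn main() {
    assert_eq!(impl_pretty_name(Some("std::clone::Clone"), "MyStruct"),
               "MyStruct.std::clone::Clone");
    assert_eq!(impl_pretty_name(None, "MyStruct"), "MyStruct");
}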
pub fn public_methods(ms: ~[@Method]) -> ~[@Method] {
|
||||
ms.move_iter().filter(|m| {
|
||||
match m.vis {
|
||||
|
|
|
@ -440,8 +440,7 @@ pub fn get_single_str_from_tts(cx: &ExtCtxt,
|
|||
match tts[0] {
|
||||
ast::TTTok(_, token::LIT_STR(ident))
|
||||
| ast::TTTok(_, token::LIT_STR_RAW(ident, _)) => {
|
||||
let interned_str = token::get_ident(ident.name);
|
||||
return Some(interned_str.get().to_str())
|
||||
return Some(token::get_ident(ident).get().to_str())
|
||||
}
|
||||
_ => cx.span_err(sp, format!("{} requires a string.", name)),
|
||||
}
|
||||
|
|
|
@ -31,8 +31,7 @@ pub fn expand_syntax_ext(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
|
|||
} else {
|
||||
match *e {
|
||||
ast::TTTok(_, token::IDENT(ident,_)) => {
|
||||
let interned_str = token::get_ident(ident.name);
|
||||
res_str.push_str(interned_str.get())
|
||||
res_str.push_str(token::get_ident(ident).get())
|
||||
}
|
||||
_ => {
|
||||
cx.span_err(sp, "concat_idents! requires ident args.");
|
||||
|
|
|
@ -86,8 +86,7 @@ fn decodable_substructure(cx: &mut ExtCtxt, trait_span: Span,
|
|||
decoder,
|
||||
cx.ident_of("read_struct"),
|
||||
~[
|
||||
cx.expr_str(trait_span,
|
||||
token::get_ident(substr.type_ident.name)),
|
||||
cx.expr_str(trait_span, token::get_ident(substr.type_ident)),
|
||||
cx.expr_uint(trait_span, nfields),
|
||||
cx.lambda_expr_1(trait_span, result, blkarg)
|
||||
])
|
||||
|
@ -100,8 +99,7 @@ fn decodable_substructure(cx: &mut ExtCtxt, trait_span: Span,
|
|||
let rvariant_arg = cx.ident_of("read_enum_variant_arg");
|
||||
|
||||
for (i, &(name, v_span, ref parts)) in fields.iter().enumerate() {
|
||||
variants.push(cx.expr_str(v_span,
|
||||
token::get_ident(name.name)));
|
||||
variants.push(cx.expr_str(v_span, token::get_ident(name)));
|
||||
|
||||
let decoded = decode_static_fields(cx,
|
||||
v_span,
|
||||
|
@ -130,8 +128,7 @@ fn decodable_substructure(cx: &mut ExtCtxt, trait_span: Span,
|
|||
decoder,
|
||||
cx.ident_of("read_enum"),
|
||||
~[
|
||||
cx.expr_str(trait_span,
|
||||
token::get_ident(substr.type_ident.name)),
|
||||
cx.expr_str(trait_span, token::get_ident(substr.type_ident)),
|
||||
cx.lambda_expr_1(trait_span, result, blkarg)
|
||||
])
|
||||
}
|
||||
|
@ -166,7 +163,7 @@ fn decode_static_fields(cx: &mut ExtCtxt,
|
|||
Named(ref fields) => {
|
||||
// use the field's span to get nicer error messages.
|
||||
let fields = fields.iter().enumerate().map(|(i, &(name, span))| {
|
||||
let arg = getarg(cx, span, token::get_ident(name.name), i);
|
||||
let arg = getarg(cx, span, token::get_ident(name), i);
|
||||
cx.field_imm(span, name, arg)
|
||||
}).collect();
|
||||
cx.expr_struct_ident(trait_span, outer_pat_ident, fields)
|
||||
|
|
|
@ -132,7 +132,7 @@ fn encodable_substructure(cx: &mut ExtCtxt, trait_span: Span,
|
|||
..
|
||||
}) in fields.iter().enumerate() {
|
||||
let name = match name {
|
||||
Some(id) => token::get_ident(id.name),
|
||||
Some(id) => token::get_ident(id),
|
||||
None => {
|
||||
token::intern_and_get_ident(format!("_field{}", i))
|
||||
}
|
||||
|
@ -152,8 +152,7 @@ fn encodable_substructure(cx: &mut ExtCtxt, trait_span: Span,
|
|||
encoder,
|
||||
cx.ident_of("emit_struct"),
|
||||
~[
|
||||
cx.expr_str(trait_span,
|
||||
token::get_ident(substr.type_ident.name)),
|
||||
cx.expr_str(trait_span, token::get_ident(substr.type_ident)),
|
||||
cx.expr_uint(trait_span, fields.len()),
|
||||
blk
|
||||
])
|
||||
|
@ -179,8 +178,7 @@ fn encodable_substructure(cx: &mut ExtCtxt, trait_span: Span,
|
|||
}
|
||||
|
||||
let blk = cx.lambda_stmts_1(trait_span, stmts, blkarg);
|
||||
let name = cx.expr_str(trait_span,
|
||||
token::get_ident(variant.node.name.name));
|
||||
let name = cx.expr_str(trait_span, token::get_ident(variant.node.name));
|
||||
let call = cx.expr_method_call(trait_span, blkencoder,
|
||||
cx.ident_of("emit_enum_variant"),
|
||||
~[name,
|
||||
|
@ -192,8 +190,7 @@ fn encodable_substructure(cx: &mut ExtCtxt, trait_span: Span,
|
|||
encoder,
|
||||
cx.ident_of("emit_enum"),
|
||||
~[
|
||||
cx.expr_str(trait_span,
|
||||
token::get_ident(substr.type_ident.name)),
|
||||
cx.expr_str(trait_span, token::get_ident(substr.type_ident)),
|
||||
blk
|
||||
]);
|
||||
cx.expr_block(cx.block(trait_span, ~[me], Some(ret)))
|
||||
|
|
|
@ -178,7 +178,7 @@ StaticEnum(<ast::EnumDef of C>, ~[(<ident of C0>, <span of C0>, Unnamed(~[<span
|
|||
|
||||
use ast;
|
||||
use ast::{P, EnumDef, Expr, Ident, Generics, StructDef};
|
||||
|
||||
use ast_util;
|
||||
use ext::base::ExtCtxt;
|
||||
use ext::build::AstBuilder;
|
||||
use codemap;
|
||||
|
@ -405,11 +405,13 @@ impl<'a> TraitDef<'a> {
|
|||
ast::LitStr(token::intern_and_get_ident(
|
||||
"Automatically derived."),
|
||||
ast::CookedStr)));
|
||||
let opt_trait_ref = Some(trait_ref);
|
||||
let ident = ast_util::impl_pretty_name(&opt_trait_ref, self_type);
|
||||
cx.item(
|
||||
self.span,
|
||||
::parse::token::special_idents::clownshoes_extensions,
|
||||
ident,
|
||||
~[doc_attr],
|
||||
ast::ItemImpl(trait_generics, Some(trait_ref),
|
||||
ast::ItemImpl(trait_generics, opt_trait_ref,
|
||||
self_type, methods.map(|x| *x)))
|
||||
}
|
||||
|
||||
|
|
|
@ -67,7 +67,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span,
|
|||
}
|
||||
};
|
||||
|
||||
let mut format_string = token::get_ident(name.name).get().to_owned();
|
||||
let mut format_string = token::get_ident(name).get().to_owned();
|
||||
// the internal fields we're actually formatting
|
||||
let mut exprs = ~[];
|
||||
|
||||
|
@ -99,7 +99,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span,
|
|||
for (i, field) in fields.iter().enumerate() {
|
||||
if i != 0 { format_string.push_str(","); }
|
||||
|
||||
let name = token::get_ident(field.name.unwrap().name);
|
||||
let name = token::get_ident(field.name.unwrap());
|
||||
format_string.push_str(" ");
|
||||
format_string.push_str(name.get());
|
||||
format_string.push_str(": {}");
|
||||
|
|
|
@ -57,10 +57,10 @@ fn to_str_substructure(cx: &mut ExtCtxt, span: Span, substr: &Substructure)
|
|||
name: ast::Ident,
|
||||
fields: &[FieldInfo]| {
|
||||
if fields.len() == 0 {
|
||||
cx.expr_str_uniq(span, token::get_ident(name.name))
|
||||
cx.expr_str_uniq(span, token::get_ident(name))
|
||||
} else {
|
||||
let buf = cx.ident_of("buf");
|
||||
let interned_str = token::get_ident(name.name);
|
||||
let interned_str = token::get_ident(name);
|
||||
let start =
|
||||
token::intern_and_get_ident(interned_str.get() + start);
|
||||
let init = cx.expr_str_uniq(span, start);
|
||||
|
@ -81,7 +81,7 @@ fn to_str_substructure(cx: &mut ExtCtxt, span: Span, substr: &Substructure)
|
|||
match name {
|
||||
None => {}
|
||||
Some(id) => {
|
||||
let interned_id = token::get_ident(id.name);
|
||||
let interned_id = token::get_ident(id);
|
||||
let name = interned_id.get() + ": ";
|
||||
push(cx.expr_str(span,
|
||||
token::intern_and_get_ident(name)));
|
||||
|
|
|
@ -54,8 +54,8 @@ pub fn expand_expr(e: @ast::Expr, fld: &mut MacroExpander) -> @ast::Expr {
|
|||
// let compilation continue
|
||||
return e;
|
||||
}
|
||||
let extname = &pth.segments[0].identifier;
|
||||
let extnamestr = token::get_ident(extname.name);
|
||||
let extname = pth.segments[0].identifier;
|
||||
let extnamestr = token::get_ident(extname);
|
||||
// leaving explicit deref here to highlight unbox op:
|
||||
let marked_after = match fld.extsbox.find(&extname.name) {
|
||||
None => {
|
||||
|
@ -297,26 +297,25 @@ pub fn expand_item_mac(it: @ast::Item, fld: &mut MacroExpander)
|
|||
_ => fld.cx.span_bug(it.span, "invalid item macro invocation")
|
||||
};
|
||||
|
||||
let extname = &pth.segments[0].identifier;
|
||||
let extnamestr = token::get_ident(extname.name);
|
||||
let extname = pth.segments[0].identifier;
|
||||
let extnamestr = token::get_ident(extname);
|
||||
let fm = fresh_mark();
|
||||
let expanded = match fld.extsbox.find(&extname.name) {
|
||||
None => {
|
||||
fld.cx.span_err(pth.span,
|
||||
format!("macro undefined: '{}!'",
|
||||
extnamestr.get()));
|
||||
extnamestr));
|
||||
// let compilation continue
|
||||
return SmallVector::zero();
|
||||
}
|
||||
|
||||
Some(&NormalTT(ref expander, span)) => {
|
||||
if it.ident.name != parse::token::special_idents::invalid.name {
|
||||
let string = token::get_ident(it.ident.name);
|
||||
fld.cx.span_err(pth.span,
|
||||
format!("macro {}! expects no ident argument, \
|
||||
given '{}'",
|
||||
extnamestr.get(),
|
||||
string.get()));
|
||||
extnamestr,
|
||||
token::get_ident(it.ident)));
|
||||
return SmallVector::zero();
|
||||
}
|
||||
fld.cx.bt_push(ExpnInfo {
|
||||
|
@ -418,13 +417,10 @@ fn load_extern_macros(krate: &ast::ViewItem, fld: &mut MacroExpander) {
|
|||
let MacroCrate { lib, cnum } = fld.cx.loader.load_crate(krate);
|
||||
|
||||
let crate_name = match krate.node {
|
||||
ast::ViewItemExternMod(ref name, _, _) => {
|
||||
let string = token::get_ident(name.name);
|
||||
string.get().to_str()
|
||||
},
|
||||
_ => unreachable!(),
|
||||
ast::ViewItemExternMod(name, _, _) => name,
|
||||
_ => unreachable!()
|
||||
};
|
||||
let name = format!("<{} macros>", crate_name);
|
||||
let name = format!("<{} macros>", token::get_ident(crate_name));
|
||||
|
||||
let exported_macros = fld.cx.loader.get_exported_macros(cnum);
|
||||
for source in exported_macros.iter() {
|
||||
|
@ -496,12 +492,11 @@ pub fn expand_stmt(s: &Stmt, fld: &mut MacroExpander) -> SmallVector<@Stmt> {
|
|||
fld.cx.span_err(pth.span, "expected macro name without module separators");
|
||||
return SmallVector::zero();
|
||||
}
|
||||
let extname = &pth.segments[0].identifier;
|
||||
let extnamestr = token::get_ident(extname.name);
|
||||
let extname = pth.segments[0].identifier;
|
||||
let extnamestr = token::get_ident(extname);
|
||||
let marked_after = match fld.extsbox.find(&extname.name) {
|
||||
None => {
|
||||
fld.cx.span_err(pth.span, format!("macro undefined: '{}'",
|
||||
extnamestr.get()));
|
||||
fld.cx.span_err(pth.span, format!("macro undefined: '{}'", extnamestr));
|
||||
return SmallVector::zero();
|
||||
}
|
||||
|
||||
|
@ -535,7 +530,7 @@ pub fn expand_stmt(s: &Stmt, fld: &mut MacroExpander) -> SmallVector<@Stmt> {
|
|||
_ => {
|
||||
fld.cx.span_err(pth.span,
|
||||
format!("non-stmt macro in stmt pos: {}",
|
||||
extnamestr.get()));
|
||||
extnamestr));
|
||||
return SmallVector::zero();
|
||||
}
|
||||
};
|
||||
|
@ -545,7 +540,7 @@ pub fn expand_stmt(s: &Stmt, fld: &mut MacroExpander) -> SmallVector<@Stmt> {
|
|||
|
||||
_ => {
|
||||
fld.cx.span_err(pth.span, format!("'{}' is not a tt-style macro",
|
||||
extnamestr.get()));
|
||||
extnamestr));
|
||||
return SmallVector::zero();
|
||||
}
|
||||
};
|
||||
|
@ -1186,9 +1181,7 @@ mod test {
|
|||
println!("uh oh, matches but shouldn't:");
|
||||
println!("varref: {:?}",varref);
|
||||
// good lord, you can't make a path with 0 segments, can you?
|
||||
let string = token::get_ident(varref.segments[0]
|
||||
.identifier
|
||||
.name);
|
||||
let string = token::get_ident(varref.segments[0].identifier);
|
||||
println!("varref's first segment's uint: {}, and string: \"{}\"",
|
||||
varref.segments[0].identifier.name,
|
||||
string.get());
|
||||
|
@ -1213,10 +1206,7 @@ foo_module!()
|
|||
let bindings = name_finder.ident_accumulator;
|
||||
|
||||
let cxbinds: ~[&ast::Ident] =
|
||||
bindings.iter().filter(|b| {
|
||||
let string = token::get_ident(b.name);
|
||||
"xx" == string.get()
|
||||
}).collect();
|
||||
bindings.iter().filter(|b| "xx" == token::get_ident(**b).get()).collect();
|
||||
let cxbind = match cxbinds {
|
||||
[b] => b,
|
||||
_ => fail!("expected just one binding for ext_cx")
|
||||
|
@ -1228,12 +1218,9 @@ foo_module!()
|
|||
let varrefs = path_finder.path_accumulator;
|
||||
|
||||
// the xx binding should bind all of the xx varrefs:
|
||||
for (idx,v) in varrefs.iter().filter(|p|{
|
||||
for (idx,v) in varrefs.iter().filter(|p| {
|
||||
p.segments.len() == 1
|
||||
&& {
|
||||
let string = token::get_ident(p.segments[0].identifier.name);
|
||||
"xx" == string.get()
|
||||
}
|
||||
&& "xx" == token::get_ident(p.segments[0].identifier).get()
|
||||
}).enumerate() {
|
||||
if mtwt_resolve(v.segments[0].identifier) != resolved_binding {
|
||||
println!("uh oh, xx binding didn't match xx varref:");
|
||||
|
|
|
@ -112,7 +112,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span,
|
|||
return (extra, None);
|
||||
}
|
||||
};
|
||||
let interned_name = token::get_ident(ident.name);
|
||||
let interned_name = token::get_ident(ident);
|
||||
let name = interned_name.get();
|
||||
p.expect(&token::EQ);
|
||||
let e = p.parse_expr();
|
||||
|
|
|
@ -13,7 +13,6 @@ use codemap;
|
|||
use ext::base::*;
|
||||
use ext::base;
|
||||
use print;
|
||||
use parse::token::{get_ident_interner};
|
||||
|
||||
pub fn expand_syntax_ext(cx: &mut ExtCtxt,
|
||||
sp: codemap::Span,
|
||||
|
@ -21,10 +20,7 @@ pub fn expand_syntax_ext(cx: &mut ExtCtxt,
|
|||
-> base::MacResult {
|
||||
|
||||
cx.print_backtrace();
|
||||
println!("{}",
|
||||
print::pprust::tt_to_str(
|
||||
&ast::TTDelim(@tt.to_owned()),
|
||||
get_ident_interner()));
|
||||
println!("{}", print::pprust::tt_to_str(&ast::TTDelim(@tt.to_owned())));
|
||||
|
||||
//trivial expression
|
||||
MRExpr(@ast::Expr {
|
||||
|
|
|
@ -71,14 +71,13 @@ pub mod rt {
|
|||
|
||||
impl ToSource for ast::Ident {
|
||||
fn to_source(&self) -> ~str {
|
||||
let this = get_ident(self.name);
|
||||
this.get().to_owned()
|
||||
get_ident(*self).get().to_str()
|
||||
}
|
||||
}
|
||||
|
||||
impl ToSource for @ast::Item {
|
||||
fn to_source(&self) -> ~str {
|
||||
pprust::item_to_str(*self, get_ident_interner())
|
||||
pprust::item_to_str(*self)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -90,7 +89,7 @@ pub mod rt {
|
|||
|
||||
impl ToSource for ast::Ty {
|
||||
fn to_source(&self) -> ~str {
|
||||
pprust::ty_to_str(self, get_ident_interner())
|
||||
pprust::ty_to_str(self)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -102,19 +101,19 @@ pub mod rt {
|
|||
|
||||
impl ToSource for Generics {
|
||||
fn to_source(&self) -> ~str {
|
||||
pprust::generics_to_str(self, get_ident_interner())
|
||||
pprust::generics_to_str(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl ToSource for @ast::Expr {
|
||||
fn to_source(&self) -> ~str {
|
||||
pprust::expr_to_str(*self, get_ident_interner())
|
||||
pprust::expr_to_str(*self)
|
||||
}
|
||||
}
|
||||
|
||||
impl ToSource for ast::Block {
|
||||
fn to_source(&self) -> ~str {
|
||||
pprust::block_to_str(self, get_ident_interner())
|
||||
pprust::block_to_str(self)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -349,7 +348,7 @@ fn id_ext(str: &str) -> ast::Ident {
|
|||
|
||||
// Lift an ident to the expr that evaluates to that ident.
|
||||
fn mk_ident(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> @ast::Expr {
|
||||
let e_str = cx.expr_str(sp, token::get_ident(ident.name));
|
||||
let e_str = cx.expr_str(sp, token::get_ident(ident));
|
||||
cx.expr_method_call(sp,
|
||||
cx.expr_ident(sp, id_ext("ext_cx")),
|
||||
id_ext("ident_of"),
|
||||
|
|
|
@ -16,7 +16,6 @@ use ext::base::*;
|
|||
use ext::base;
|
||||
use ext::build::AstBuilder;
|
||||
use parse;
|
||||
use parse::token::get_ident_interner;
|
||||
use parse::token;
|
||||
use print::pprust;
|
||||
|
||||
|
@ -64,7 +63,7 @@ pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
|
|||
|
||||
pub fn expand_stringify(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
|
||||
-> base::MacResult {
|
||||
let s = pprust::tts_to_str(tts, get_ident_interner());
|
||||
let s = pprust::tts_to_str(tts);
|
||||
base::MRExpr(cx.expr_str(sp, token::intern_and_get_ident(s)))
|
||||
}
|
||||
|
||||
|
@ -72,10 +71,7 @@ pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
|
|||
-> base::MacResult {
|
||||
base::check_zero_tts(cx, sp, tts, "module_path!");
|
||||
let string = cx.mod_path()
|
||||
.map(|x| {
|
||||
let interned_str = token::get_ident(x.name);
|
||||
interned_str.get().to_str()
|
||||
})
|
||||
.map(|x| token::get_ident(*x).get().to_str())
|
||||
.connect("::");
|
||||
base::MRExpr(cx.expr_str(sp, token::intern_and_get_ident(string)))
|
||||
}
|
||||
|
|
|
@ -18,7 +18,7 @@ use parse::lexer::*; //resolve bug?
|
|||
use parse::ParseSess;
|
||||
use parse::attr::ParserAttr;
|
||||
use parse::parser::{LifetimeAndTypesWithoutColons, Parser};
|
||||
use parse::token::{Token, EOF, to_str, Nonterminal, get_ident_interner};
|
||||
use parse::token::{Token, EOF, Nonterminal};
|
||||
use parse::token;
|
||||
|
||||
use std::hashmap::HashMap;
|
||||
|
@ -180,14 +180,15 @@ pub fn nameize(p_s: @ParseSess, ms: &[Matcher], res: &[@NamedMatch])
|
|||
};
|
||||
}
|
||||
codemap::Spanned {
|
||||
node: MatchNonterminal(ref bind_name, _, idx), span: sp
|
||||
node: MatchNonterminal(bind_name, _, idx),
|
||||
span
|
||||
} => {
|
||||
if ret_val.contains_key(bind_name) {
|
||||
let string = token::get_ident(bind_name.name);
|
||||
if ret_val.contains_key(&bind_name) {
|
||||
let string = token::get_ident(bind_name);
|
||||
p_s.span_diagnostic
|
||||
.span_fatal(sp, "duplicated bind name: " + string.get())
|
||||
.span_fatal(span, "duplicated bind name: " + string.get())
|
||||
}
|
||||
ret_val.insert(*bind_name, res[idx]);
|
||||
ret_val.insert(bind_name, res[idx]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -364,12 +365,10 @@ pub fn parse<R: Reader>(sess: @ParseSess,
|
|||
|| bb_eis.len() > 1u {
|
||||
let nts = bb_eis.map(|ei| {
|
||||
match ei.elts[ei.idx].node {
|
||||
MatchNonterminal(ref bind,ref name,_) => {
|
||||
let bind_string = token::get_ident(bind.name);
|
||||
let name_string = token::get_ident(name.name);
|
||||
MatchNonterminal(bind, name, _) => {
|
||||
format!("{} ('{}')",
|
||||
name_string.get(),
|
||||
bind_string.get())
|
||||
token::get_ident(name),
|
||||
token::get_ident(bind))
|
||||
}
|
||||
_ => fail!()
|
||||
} }).connect(" or ");
|
||||
|
@ -379,7 +378,7 @@ pub fn parse<R: Reader>(sess: @ParseSess,
|
|||
nts, next_eis.len()));
|
||||
} else if bb_eis.len() == 0u && next_eis.len() == 0u {
|
||||
return Failure(sp, format!("no rules expected the token `{}`",
|
||||
to_str(get_ident_interner(), &tok)));
|
||||
token::to_str(&tok)));
|
||||
} else if next_eis.len() > 0u {
|
||||
/* Now process the next token */
|
||||
while next_eis.len() > 0u {
|
||||
|
@ -391,8 +390,8 @@ pub fn parse<R: Reader>(sess: @ParseSess,
|
|||
|
||||
let mut ei = bb_eis.pop().unwrap();
|
||||
match ei.elts[ei.idx].node {
|
||||
MatchNonterminal(_, ref name, idx) => {
|
||||
let name_string = token::get_ident(name.name);
|
||||
MatchNonterminal(_, name, idx) => {
|
||||
let name_string = token::get_ident(name);
|
||||
ei.matches[idx].push(@MatchedNonterminal(
|
||||
parse_nt(&mut rust_parser, name_string.get())));
|
||||
ei.idx += 1u;
|
||||
|
@ -426,7 +425,7 @@ pub fn parse_nt(p: &mut Parser, name: &str) -> Nonterminal {
|
|||
"ident" => match p.token {
|
||||
token::IDENT(sn,b) => { p.bump(); token::NtIdent(~sn,b) }
|
||||
_ => {
|
||||
let token_str = token::to_str(get_ident_interner(), &p.token);
|
||||
let token_str = token::to_str(&p.token);
|
||||
p.fatal(~"expected ident, found " + token_str)
|
||||
}
|
||||
},
|
||||
|
|
|
@ -21,7 +21,7 @@ use ext::tt::macro_parser::{parse, parse_or_else};
|
|||
use parse::lexer::new_tt_reader;
|
||||
use parse::parser::Parser;
|
||||
use parse::attr::ParserAttr;
|
||||
use parse::token::{get_ident_interner, special_idents, gensym_ident};
|
||||
use parse::token::{special_idents, gensym_ident};
|
||||
use parse::token::{FAT_ARROW, SEMI, NtMatchers, NtTT, EOF};
|
||||
use parse::token;
|
||||
use print;
|
||||
|
@ -113,11 +113,9 @@ fn generic_extension(cx: &ExtCtxt,
|
|||
rhses: &[@NamedMatch])
|
||||
-> MacResult {
|
||||
if cx.trace_macros() {
|
||||
let interned_name = token::get_ident(name.name);
|
||||
println!("{}! \\{ {} \\}",
|
||||
interned_name.get(),
|
||||
print::pprust::tt_to_str(&TTDelim(@arg.to_owned()),
|
||||
get_ident_interner()));
|
||||
token::get_ident(name),
|
||||
print::pprust::tt_to_str(&TTDelim(@arg.to_owned())));
|
||||
}
|
||||
|
||||
// Which arm's failure should we report? (the one furthest along)
|
||||
|
@ -231,7 +229,7 @@ pub fn add_new_extension(cx: &mut ExtCtxt,
|
|||
};
|
||||
|
||||
return MRDef(MacroDef {
|
||||
name: token::get_ident(name.name).get().to_str(),
|
||||
name: token::get_ident(name).get().to_str(),
|
||||
ext: NormalTT(exp, Some(sp))
|
||||
});
|
||||
}
|
||||
|
|
|
@ -19,7 +19,6 @@ use parse::lexer::TokenAndSpan;
|
|||
|
||||
use std::cell::{Cell, RefCell};
|
||||
use std::hashmap::HashMap;
|
||||
use std::option;
|
||||
|
||||
///an unzipping of `TokenTree`s
|
||||
struct TtFrame {
|
||||
|
@ -57,7 +56,7 @@ pub fn new_tt_reader(sp_diag: @SpanHandler,
|
|||
idx: Cell::new(0u),
|
||||
dotdotdoted: false,
|
||||
sep: None,
|
||||
up: option::None
|
||||
up: None
|
||||
}),
|
||||
interpolations: match interp { /* just a convienience */
|
||||
None => RefCell::new(HashMap::new()),
|
||||
|
@ -122,10 +121,9 @@ fn lookup_cur_matched(r: &TtReader, name: Ident) -> @NamedMatch {
|
|||
match matched_opt {
|
||||
Some(s) => lookup_cur_matched_by_matched(r, s),
|
||||
None => {
|
||||
let name_string = token::get_ident(name.name);
|
||||
r.sp_diag.span_fatal(r.cur_span.get(),
|
||||
format!("unknown macro variable `{}`",
|
||||
name_string.get()));
|
||||
token::get_ident(name)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -141,16 +139,16 @@ fn lis_merge(lhs: LockstepIterSize, rhs: LockstepIterSize) -> LockstepIterSize {
|
|||
match lhs {
|
||||
LisUnconstrained => rhs.clone(),
|
||||
LisContradiction(_) => lhs.clone(),
|
||||
LisConstraint(l_len, ref l_id) => match rhs {
|
||||
LisConstraint(l_len, l_id) => match rhs {
|
||||
LisUnconstrained => lhs.clone(),
|
||||
LisContradiction(_) => rhs.clone(),
|
||||
LisConstraint(r_len, _) if l_len == r_len => lhs.clone(),
|
||||
LisConstraint(r_len, ref r_id) => {
|
||||
let l_n = token::get_ident(l_id.name);
|
||||
let r_n = token::get_ident(r_id.name);
|
||||
LisConstraint(r_len, r_id) => {
|
||||
let l_n = token::get_ident(l_id);
|
||||
let r_n = token::get_ident(r_id);
|
||||
LisContradiction(format!("inconsistent lockstep iteration: \
|
||||
'{}' has {} items, but '{}' has {}",
|
||||
l_n.get(), l_len, r_n.get(), r_len))
|
||||
l_n, l_len, r_n, r_len))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -240,7 +238,7 @@ pub fn tt_next_token(r: &TtReader) -> TokenAndSpan {
|
|||
idx: Cell::new(0u),
|
||||
dotdotdoted: false,
|
||||
sep: None,
|
||||
up: option::Some(r.stack.get())
|
||||
up: Some(r.stack.get())
|
||||
});
|
||||
// if this could be 0-length, we'd need to potentially recur here
|
||||
}
|
||||
|
@ -314,11 +312,10 @@ pub fn tt_next_token(r: &TtReader) -> TokenAndSpan {
|
|||
return ret_val;
|
||||
}
|
||||
MatchedSeq(..) => {
|
||||
let string = token::get_ident(ident.name);
|
||||
r.sp_diag.span_fatal(
|
||||
r.cur_span.get(), /* blame the macro writer */
|
||||
format!("variable '{}' is still repeating at this depth",
|
||||
string.get()));
|
||||
token::get_ident(ident)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -10,6 +10,7 @@
|
|||
|
||||
use ast::*;
|
||||
use ast;
|
||||
use ast_util;
|
||||
use codemap::{respan, Span, Spanned};
|
||||
use parse::token;
|
||||
use opt_vec::OptVec;
|
||||
|
@ -261,10 +262,10 @@ pub trait Folder {
|
|||
|
||||
fn fold_local(&mut self, l: @Local) -> @Local {
|
||||
@Local {
|
||||
id: self.new_id(l.id), // Needs to be first, for ast_map.
|
||||
ty: self.fold_ty(l.ty),
|
||||
pat: self.fold_pat(l.pat),
|
||||
init: l.init.map(|e| self.fold_expr(e)),
|
||||
id: self.new_id(l.id),
|
||||
span: self.new_span(l.span),
|
||||
}
|
||||
}
|
||||
|
@ -344,9 +345,9 @@ fn fold_attribute_<T: Folder>(at: Attribute, fld: &mut T) -> Attribute {
|
|||
//used in noop_fold_foreign_item and noop_fold_fn_decl
|
||||
fn fold_arg_<T: Folder>(a: &Arg, fld: &mut T) -> Arg {
|
||||
Arg {
|
||||
id: fld.new_id(a.id), // Needs to be first, for ast_map.
|
||||
ty: fld.fold_ty(a.ty),
|
||||
pat: fld.fold_pat(a.pat),
|
||||
id: fld.new_id(a.id),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -514,10 +515,10 @@ pub fn noop_fold_block<T: Folder>(b: P<Block>, folder: &mut T) -> P<Block> {
|
|||
let view_items = b.view_items.map(|x| folder.fold_view_item(x));
|
||||
let stmts = b.stmts.iter().flat_map(|s| folder.fold_stmt(*s).move_iter()).collect();
|
||||
P(Block {
|
||||
id: folder.new_id(b.id), // Needs to be first, for ast_map.
|
||||
view_items: view_items,
|
||||
stmts: stmts,
|
||||
expr: b.expr.map(|x| folder.fold_expr(x)),
|
||||
id: folder.new_id(b.id),
|
||||
rules: b.rules,
|
||||
span: folder.new_span(b.span),
|
||||
})
|
||||
|
@ -579,13 +580,13 @@ pub fn noop_fold_item_underscore<T: Folder>(i: &Item_, folder: &mut T) -> Item_
|
|||
|
||||
pub fn noop_fold_type_method<T: Folder>(m: &TypeMethod, fld: &mut T) -> TypeMethod {
|
||||
TypeMethod {
|
||||
id: fld.new_id(m.id), // Needs to be first, for ast_map.
|
||||
ident: fld.fold_ident(m.ident),
|
||||
attrs: m.attrs.map(|a| fold_attribute_(*a, fld)),
|
||||
purity: m.purity,
|
||||
decl: fld.fold_fn_decl(m.decl),
|
||||
generics: fold_generics(&m.generics, fld),
|
||||
explicit_self: fld.fold_explicit_self(&m.explicit_self),
|
||||
id: fld.new_id(m.id),
|
||||
span: fld.new_span(m.span),
|
||||
}
|
||||
}
|
||||
|
@ -609,11 +610,21 @@ pub fn noop_fold_crate<T: Folder>(c: Crate, folder: &mut T) -> Crate {
|
|||
}
|
||||
|
||||
pub fn noop_fold_item<T: Folder>(i: &Item, folder: &mut T) -> SmallVector<@Item> {
|
||||
let id = folder.new_id(i.id); // Needs to be first, for ast_map.
|
||||
let node = folder.fold_item_underscore(&i.node);
|
||||
let ident = match node {
|
||||
// The node may have changed, recompute the "pretty" impl name.
|
||||
ItemImpl(_, ref maybe_trait, ty, _) => {
|
||||
ast_util::impl_pretty_name(maybe_trait, ty)
|
||||
}
|
||||
_ => i.ident
|
||||
};
|
||||
|
||||
SmallVector::one(@Item {
|
||||
ident: folder.fold_ident(i.ident),
|
||||
id: id,
|
||||
ident: folder.fold_ident(ident),
|
||||
attrs: i.attrs.map(|e| fold_attribute_(*e, folder)),
|
||||
id: folder.new_id(i.id),
|
||||
node: folder.fold_item_underscore(&i.node),
|
||||
node: node,
|
||||
vis: i.vis,
|
||||
span: folder.new_span(i.span)
|
||||
})
|
||||
|
@ -621,6 +632,7 @@ pub fn noop_fold_item<T: Folder>(i: &Item, folder: &mut T) -> SmallVector<@Item>
|
|||
|
||||
pub fn noop_fold_foreign_item<T: Folder>(ni: &ForeignItem, folder: &mut T) -> @ForeignItem {
|
||||
@ForeignItem {
|
||||
id: folder.new_id(ni.id), // Needs to be first, for ast_map.
|
||||
ident: folder.fold_ident(ni.ident),
|
||||
attrs: ni.attrs.map(|x| fold_attribute_(*x, folder)),
|
||||
node: match ni.node {
|
||||
|
@ -636,7 +648,6 @@ pub fn noop_fold_foreign_item<T: Folder>(ni: &ForeignItem, folder: &mut T) -> @F
|
|||
ForeignItemStatic(folder.fold_ty(t), m)
|
||||
}
|
||||
},
|
||||
id: folder.new_id(ni.id),
|
||||
span: folder.new_span(ni.span),
|
||||
vis: ni.vis,
|
||||
}
|
||||
|
@ -644,6 +655,7 @@ pub fn noop_fold_foreign_item<T: Folder>(ni: &ForeignItem, folder: &mut T) -> @F
|
|||
|
||||
pub fn noop_fold_method<T: Folder>(m: &Method, folder: &mut T) -> @Method {
|
||||
@Method {
|
||||
id: folder.new_id(m.id), // Needs to be first, for ast_map.
|
||||
ident: folder.fold_ident(m.ident),
|
||||
attrs: m.attrs.map(|a| fold_attribute_(*a, folder)),
|
||||
generics: fold_generics(&m.generics, folder),
|
||||
|
@ -651,7 +663,6 @@ pub fn noop_fold_method<T: Folder>(m: &Method, folder: &mut T) -> @Method {
|
|||
purity: m.purity,
|
||||
decl: folder.fold_fn_decl(m.decl),
|
||||
body: folder.fold_block(m.body),
|
||||
id: folder.new_id(m.id),
|
||||
span: folder.new_span(m.span),
|
||||
vis: m.vis
|
||||
}
|
||||
|
@ -894,8 +905,7 @@ mod test {
|
|||
~"#[a] mod b {fn c (d : e, f : g) {h!(i,j,k);l;m}}");
|
||||
assert_pred!(matches_codepattern,
|
||||
"matches_codepattern",
|
||||
pprust::to_str(&mut zz_fold.fold_crate(ast),fake_print_crate,
|
||||
token::get_ident_interner()),
|
||||
pprust::to_str(&mut zz_fold.fold_crate(ast),fake_print_crate),
|
||||
~"#[a]mod zz{fn zz(zz:zz,zz:zz){zz!(zz,zz,zz);zz;zz}}");
|
||||
}
|
||||
|
||||
|
@ -907,8 +917,7 @@ mod test {
|
|||
(g $(d $d $e)+))} ");
|
||||
assert_pred!(matches_codepattern,
|
||||
"matches_codepattern",
|
||||
pprust::to_str(&mut zz_fold.fold_crate(ast),fake_print_crate,
|
||||
token::get_ident_interner()),
|
||||
pprust::to_str(&mut zz_fold.fold_crate(ast),fake_print_crate),
|
||||
~"zz!zz((zz$zz:zz$(zz $zz:zz)zz+=>(zz$(zz$zz$zz)+)))");
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -16,7 +16,6 @@ use parse::lexer::{StringReader, bump, is_eof, nextch_is, TokenAndSpan};
use parse::lexer::{is_line_non_doc_comment, is_block_non_doc_comment};
use parse::lexer;
use parse::token;
use parse::token::{get_ident_interner};

use std::io;
use std::str;

@@ -385,7 +384,7 @@ pub fn gather_comments_and_literals(span_diagnostic:
                literals.push(Literal {lit: s.to_owned(), pos: sp.lo});
            })
        } else {
            debug!("tok: {}", token::to_str(get_ident_interner(), &tok));
            debug!("tok: {}", token::to_str(&tok));
        }
        first_read = false;
    }

@@ -9,7 +9,6 @@
// except according to those terms.

use parse::token;
use parse::token::{get_ident_interner};

// SeqSep : a sequence separator (token)
// and whether a trailing separator is allowed.

@@ -36,10 +35,3 @@ pub fn seq_sep_none() -> SeqSep {
        trailing_sep_allowed: false,
    }
}

// maps any token back to a string. not necessary if you know it's
// an identifier....
pub fn token_to_str(token: &token::Token) -> ~str {
    token::to_str(get_ident_interner(), token)
}

@@ -175,8 +175,7 @@ impl ParserObsoleteMethods for Parser {
    fn is_obsolete_ident(&mut self, ident: &str) -> bool {
        match self.token {
            token::IDENT(sid, _) => {
                let interned_string = token::get_ident(sid.name);
                interned_string.equiv(&ident)
                token::get_ident(sid).equiv(&ident)
            }
            _ => false
        }

@@ -71,9 +71,9 @@ use parse::common::{seq_sep_trailing_disallowed, seq_sep_trailing_allowed};
use parse::lexer::Reader;
use parse::lexer::TokenAndSpan;
use parse::obsolete::*;
use parse::token::{INTERPOLATED, InternedString, can_begin_expr, get_ident};
use parse::token::{get_ident_interner, is_ident, is_ident_or_path};
use parse::token::{is_plain_ident, keywords, special_idents, token_to_binop};
use parse::token::{INTERPOLATED, InternedString, can_begin_expr};
use parse::token::{is_ident, is_ident_or_path, is_plain_ident};
use parse::token::{keywords, special_idents, token_to_binop};
use parse::token;
use parse::{new_sub_parser_from_file, ParseSess};
use opt_vec;

@@ -288,7 +288,6 @@ struct ParsedItemsAndViewItems {
pub fn Parser(sess: @ParseSess, cfg: ast::CrateConfig, rdr: ~Reader:)
              -> Parser {
    let tok0 = rdr.next_token();
    let interner = get_ident_interner();
    let span = tok0.sp;
    let placeholder = TokenAndSpan {
        tok: token::UNDERSCORE,

@@ -297,7 +296,7 @@ pub fn Parser(sess: @ParseSess, cfg: ast::CrateConfig, rdr: ~Reader:)

    Parser {
        reader: rdr,
        interner: interner,
        interner: token::get_ident_interner(),
        sess: sess,
        cfg: cfg,
        token: tok0.tok,

@@ -359,7 +358,7 @@ fn is_plain_ident_or_underscore(t: &token::Token) -> bool {
impl Parser {
    // convert a token to a string using self's reader
    pub fn token_to_str(token: &token::Token) -> ~str {
        token::to_str(get_ident_interner(), token)
        token::to_str(token)
    }

    // convert the current token to a string using self's reader

@@ -531,12 +530,10 @@ impl Parser {
    // otherwise, eat it.
    pub fn expect_keyword(&mut self, kw: keywords::Keyword) {
        if !self.eat_keyword(kw) {
            let id_ident = kw.to_ident();
            let id_interned_str = token::get_ident(id_ident.name);
            let id_interned_str = token::get_ident(kw.to_ident());
            let token_str = self.this_token_to_str();
            self.fatal(format!("expected `{}`, found `{}`",
                               id_interned_str.get(),
                               token_str))
                               id_interned_str, token_str))
        }
    }

@@ -804,7 +801,7 @@ impl Parser {
    }

    pub fn id_to_interned_str(&mut self, id: Ident) -> InternedString {
        get_ident(id.name)
        token::get_ident(id)
    }

    // Is the current token one of the keywords that signals a bare function

@@ -3425,8 +3422,7 @@ impl Parser {
        loop {
            match self.token {
                token::LIFETIME(lifetime) => {
                    let lifetime_interned_string =
                        token::get_ident(lifetime.name);
                    let lifetime_interned_string = token::get_ident(lifetime);
                    if lifetime_interned_string.equiv(&("static")) {
                        result.push(RegionTyParamBound);
                    } else {

@@ -3876,10 +3872,6 @@ impl Parser {
        // First, parse type parameters if necessary.
        let generics = self.parse_generics();

        // This is a new-style impl declaration.
        // FIXME: clownshoes
        let ident = special_idents::clownshoes_extensions;

        // Special case: if the next identifier that follows is '(', don't
        // allow this to be parsed as a trait.
        let could_be_trait = self.token != token::LPAREN;

@@ -3923,6 +3915,8 @@ impl Parser {
            method_attrs = None;
        }

        let ident = ast_util::impl_pretty_name(&opt_trait, ty);

        (ident, ItemImpl(generics, opt_trait, ty, meths), Some(inner_attrs))
    }

@@ -3959,9 +3953,8 @@ impl Parser {
                fields.push(self.parse_struct_decl_field());
            }
            if fields.len() == 0 {
                let string = get_ident_interner().get(class_name.name);
                self.fatal(format!("unit-like struct definition should be written as `struct {};`",
                                   string.as_slice()));
                                   token::get_ident(class_name)));
            }
            self.bump();
        } else if self.token == token::LPAREN {

@@ -4159,7 +4152,7 @@ impl Parser {
                                   outer_attrs, "path") {
            Some(d) => dir_path.join(d),
            None => {
                let mod_string = token::get_ident(id.name);
                let mod_string = token::get_ident(id);
                let mod_name = mod_string.get().to_owned();
                let default_path_str = mod_name + ".rs";
                let secondary_path_str = mod_name + "/mod.rs";

@@ -4378,7 +4371,7 @@ impl Parser {

        let item = self.mk_item(lo,
                                self.last_span.hi,
                                special_idents::clownshoes_foreign_mod,
                                special_idents::invalid,
                                ItemForeignMod(m),
                                visibility,
                                maybe_append(attrs, Some(inner)));

@@ -4498,7 +4491,7 @@ impl Parser {
            token::LIT_STR(s)
            | token::LIT_STR_RAW(s, _) => {
                self.bump();
                let identifier_string = token::get_ident(s.name);
                let identifier_string = token::get_ident(s);
                let the_string = identifier_string.get();
                let mut abis = AbiSet::empty();
                for word in the_string.words() {

@@ -9,7 +9,7 @@
// except according to those terms.

use ast;
use ast::{P, Name, Mrk};
use ast::{P, Ident, Name, Mrk};
use ast_util;
use parse::token;
use util::interner::{RcStr, StrInterner};

@@ -133,7 +133,7 @@ pub fn binop_to_str(o: BinOp) -> ~str {
    }
}

pub fn to_str(input: @IdentInterner, t: &Token) -> ~str {
pub fn to_str(t: &Token) -> ~str {
    match *t {
        EQ => ~"=",
        LT => ~"<",

@@ -187,50 +187,42 @@ pub fn to_str(input: @IdentInterner, t: &Token) -> ~str {
            u.to_str() + ast_util::uint_ty_to_str(t)
        }
        LIT_INT_UNSUFFIXED(i) => { i.to_str() }
        LIT_FLOAT(ref s, t) => {
            let body_string = get_ident(s.name);
            let mut body = body_string.get().to_str();
        LIT_FLOAT(s, t) => {
            let mut body = get_ident(s).get().to_str();
            if body.ends_with(".") {
                body.push_char('0'); // `10.f` is not a float literal
            }
            body + ast_util::float_ty_to_str(t)
        }
        LIT_FLOAT_UNSUFFIXED(ref s) => {
            let body_string = get_ident(s.name);
            let mut body = body_string.get().to_owned();
        LIT_FLOAT_UNSUFFIXED(s) => {
            let mut body = get_ident(s).get().to_str();
            if body.ends_with(".") {
                body.push_char('0'); // `10.f` is not a float literal
            }
            body
        }
        LIT_STR(ref s) => {
            let literal_string = get_ident(s.name);
            format!("\"{}\"", literal_string.get().escape_default())
        LIT_STR(s) => {
            format!("\"{}\"", get_ident(s).get().escape_default())
        }
        LIT_STR_RAW(ref s, n) => {
            let literal_string = get_ident(s.name);
        LIT_STR_RAW(s, n) => {
            format!("r{delim}\"{string}\"{delim}",
                    delim="#".repeat(n), string=literal_string.get())
                    delim="#".repeat(n), string=get_ident(s))
        }

        /* Name components */
        IDENT(s, _) => input.get(s.name).into_owned(),
        IDENT(s, _) => get_ident(s).get().to_str(),
        LIFETIME(s) => {
            let name = input.get(s.name);
            format!("'{}", name.as_slice())
            format!("'{}", get_ident(s))
        }
        UNDERSCORE => ~"_",

        /* Other */
        DOC_COMMENT(ref s) => {
            let comment_string = get_ident(s.name);
            comment_string.get().to_str()
        }
        DOC_COMMENT(s) => get_ident(s).get().to_str(),
        EOF => ~"<eof>",
        INTERPOLATED(ref nt) => {
            match nt {
                &NtExpr(e) => ::print::pprust::expr_to_str(e, input),
                &NtAttr(e) => ::print::pprust::attribute_to_str(e, input),
                &NtExpr(e) => ::print::pprust::expr_to_str(e),
                &NtAttr(e) => ::print::pprust::attribute_to_str(e),
                _ => {
                    ~"an interpolated " +
                    match *nt {

@@ -398,7 +390,7 @@ macro_rules! declare_special_idents_and_keywords {(
        }
    }

    fn mk_fresh_ident_interner() -> @IdentInterner {
    fn mk_fresh_ident_interner() -> IdentInterner {
        // The indices here must correspond to the numbers in
        // special_idents, in Keyword to_ident(), and in static
        // constants below.

@@ -408,92 +400,85 @@ macro_rules! declare_special_idents_and_keywords {(
            $( $rk_str, )*
        ];

        @interner::StrInterner::prefill(init_vec)
        interner::StrInterner::prefill(init_vec)
    }
}}

// If the special idents get renumbered, remember to modify these two as appropriate
static SELF_KEYWORD_NAME: Name = 3;
static STATIC_KEYWORD_NAME: Name = 10;
static SELF_KEYWORD_NAME: Name = 1;
static STATIC_KEYWORD_NAME: Name = 2;

declare_special_idents_and_keywords! {
    pub mod special_idents {
        // These ones are statics

        (0, anon, "anon");
        (1, invalid, ""); // ''
        (2, clownshoes_extensions, "__extensions__");

        (super::SELF_KEYWORD_NAME, self_, "self"); // 'self'
        (0, invalid, "");
        (super::SELF_KEYWORD_NAME, self_, "self");
        (super::STATIC_KEYWORD_NAME, statik, "static");

        // for matcher NTs
        (4, tt, "tt");
        (5, matchers, "matchers");
        (3, tt, "tt");
        (4, matchers, "matchers");

        // outside of libsyntax
        (6, arg, "arg");
        (7, clownshoe_abi, "__rust_abi");
        (8, main, "main");
        (9, opaque, "<opaque>");
        (super::STATIC_KEYWORD_NAME, statik, "static");
        (11, clownshoes_foreign_mod, "__foreign_mod__");
        (12, unnamed_field, "<unnamed_field>");
        (13, type_self, "Self"); // `Self`
        (5, clownshoe_abi, "__rust_abi");
        (6, opaque, "<opaque>");
        (7, unnamed_field, "<unnamed_field>");
        (8, type_self, "Self");
    }

    pub mod keywords {
        // These ones are variants of the Keyword enum

        'strict:
        (14, As, "as");
        (15, Break, "break");
        (16, Const, "const");
        (17, Else, "else");
        (18, Enum, "enum");
        (19, Extern, "extern");
        (20, False, "false");
        (21, Fn, "fn");
        (22, For, "for");
        (23, If, "if");
        (24, Impl, "impl");
        (25, In, "in");
        (26, Let, "let");
        (27, __LogLevel, "__log_level");
        (28, Loop, "loop");
        (29, Match, "match");
        (30, Mod, "mod");
        (31, Crate, "crate");
        (32, Mut, "mut");
        (33, Once, "once");
        (34, Priv, "priv");
        (35, Pub, "pub");
        (36, Ref, "ref");
        (37, Return, "return");
        (9, As, "as");
        (10, Break, "break");
        (11, Const, "const");
        (12, Crate, "crate");
        (13, Else, "else");
        (14, Enum, "enum");
        (15, Extern, "extern");
        (16, False, "false");
        (17, Fn, "fn");
        (18, For, "for");
        (19, If, "if");
        (20, Impl, "impl");
        (21, In, "in");
        (22, Let, "let");
        (23, __LogLevel, "__log_level");
        (24, Loop, "loop");
        (25, Match, "match");
        (26, Mod, "mod");
        (27, Mut, "mut");
        (28, Once, "once");
        (29, Priv, "priv");
        (30, Pub, "pub");
        (31, Ref, "ref");
        (32, Return, "return");
        // Static and Self are also special idents (prefill de-dupes)
        (super::STATIC_KEYWORD_NAME, Static, "static");
        (super::SELF_KEYWORD_NAME, Self, "self");
        (38, Struct, "struct");
        (39, Super, "super");
        (40, True, "true");
        (41, Trait, "trait");
        (42, Type, "type");
        (43, Unsafe, "unsafe");
        (44, Use, "use");
        (45, While, "while");
        (46, Continue, "continue");
        (47, Proc, "proc");
        (48, Box, "box");
        (33, Struct, "struct");
        (34, Super, "super");
        (35, True, "true");
        (36, Trait, "trait");
        (37, Type, "type");
        (38, Unsafe, "unsafe");
        (39, Use, "use");
        (40, While, "while");
        (41, Continue, "continue");
        (42, Proc, "proc");
        (43, Box, "box");

        'reserved:
        (49, Alignof, "alignof");
        (50, Be, "be");
        (51, Offsetof, "offsetof");
        (52, Pure, "pure");
        (53, Sizeof, "sizeof");
        (54, Typeof, "typeof");
        (55, Unsized, "unsized");
        (56, Yield, "yield");
        (57, Do, "do");
        (44, Alignof, "alignof");
        (45, Be, "be");
        (46, Offsetof, "offsetof");
        (47, Pure, "pure");
        (48, Sizeof, "sizeof");
        (49, Typeof, "typeof");
        (50, Unsized, "unsized");
        (51, Yield, "yield");
        (52, Do, "do");
    }
}

@@ -531,12 +516,12 @@ pub type IdentInterner = StrInterner;
// if an interner exists in TLS, return it. Otherwise, prepare a
// fresh one.
pub fn get_ident_interner() -> @IdentInterner {
    local_data_key!(key: @@::parse::token::IdentInterner)
    local_data_key!(key: @::parse::token::IdentInterner)
    match local_data::get(key, |k| k.map(|k| *k)) {
        Some(interner) => *interner,
        Some(interner) => interner,
        None => {
            let interner = mk_fresh_ident_interner();
            local_data::set(key, @interner);
            let interner = @mk_fresh_ident_interner();
            local_data::set(key, interner);
            interner
        }
    }

@@ -603,8 +588,7 @@ impl<'a> Equiv<&'a str> for InternedString {

impl<D:Decoder> Decodable<D> for InternedString {
    fn decode(d: &mut D) -> InternedString {
        let interner = get_ident_interner();
        get_ident(interner.intern(d.read_str()))
        get_name(get_ident_interner().intern(d.read_str()))
    }
}

@@ -614,54 +598,55 @@ impl<E:Encoder> Encodable<E> for InternedString {
    }
}

/// Returns the string contents of a name, using the task-local interner.
#[inline]
pub fn get_name(name: Name) -> InternedString {
    let interner = get_ident_interner();
    InternedString::new_from_rc_str(interner.get(name))
}

/// Returns the string contents of an identifier, using the task-local
/// interner.
#[inline]
pub fn get_ident(idx: Name) -> InternedString {
    let interner = get_ident_interner();
    InternedString::new_from_rc_str(interner.get(idx))
pub fn get_ident(ident: Ident) -> InternedString {
    get_name(ident.name)
}

/// Interns and returns the string contents of an identifier, using the
/// task-local interner.
#[inline]
pub fn intern_and_get_ident(s: &str) -> InternedString {
    get_ident(intern(s))
    get_name(intern(s))
}

/* for when we don't care about the contents; doesn't interact with TLD or
   serialization */
pub fn mk_fake_ident_interner() -> @IdentInterner {
    @interner::StrInterner::new()
}

// maps a string to its interned representation
/// Maps a string to its interned representation.
#[inline]
pub fn intern(str : &str) -> Name {
    let interner = get_ident_interner();
    interner.intern(str)
pub fn intern(s: &str) -> Name {
    get_ident_interner().intern(s)
}

// gensyms a new uint, using the current interner
pub fn gensym(str : &str) -> Name {
    let interner = get_ident_interner();
    interner.gensym(str)
/// gensym's a new uint, using the current interner.
#[inline]
pub fn gensym(s: &str) -> Name {
    get_ident_interner().gensym(s)
}

// maps a string to an identifier with an empty syntax context
pub fn str_to_ident(str : &str) -> ast::Ident {
    ast::Ident::new(intern(str))
/// Maps a string to an identifier with an empty syntax context.
#[inline]
pub fn str_to_ident(s: &str) -> ast::Ident {
    ast::Ident::new(intern(s))
}

// maps a string to a gensym'ed identifier
pub fn gensym_ident(str : &str) -> ast::Ident {
    ast::Ident::new(gensym(str))
/// Maps a string to a gensym'ed identifier.
#[inline]
pub fn gensym_ident(s: &str) -> ast::Ident {
    ast::Ident::new(gensym(s))
}

// create a fresh name that maps to the same string as the old one.
// note that this guarantees that str_ptr_eq(ident_to_str(src),interner_get(fresh_name(src)));
// that is, that the new name and the old one are connected to ptr_eq strings.
pub fn fresh_name(src : &ast::Ident) -> Name {
pub fn fresh_name(src: &ast::Ident) -> Name {
    let interner = get_ident_interner();
    interner.gensym_copy(src.name)
    // following: debug version. Could work in final except that it's incompatible with

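For reference, a minimal sketch (not part of this diff) of the interner helpers in parse::token after this change; the strings and bindings used here are only illustrative:

    // intern a string and read it back through the task-local interner
    let name = token::intern("foo");                 // -> Name
    assert!(token::get_name(name).equiv(&("foo")));  // -> InternedString

    // get_ident now takes a whole Ident rather than a raw Name
    let id = token::str_to_ident("bar");
    assert!(token::get_ident(id).get() == "bar");
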
@@ -83,18 +83,15 @@ pub fn end(s: &mut State) -> io::IoResult<()> {
    pp::end(&mut s.s)
}

pub fn rust_printer(writer: ~io::Writer, intr: @IdentInterner) -> State<'static> {
    rust_printer_annotated(writer, intr, &NoAnn)
pub fn rust_printer(writer: ~io::Writer) -> State<'static> {
    rust_printer_annotated(writer, &NoAnn)
}

pub fn rust_printer_annotated<'a>(writer: ~io::Writer,
                                  intr: @IdentInterner,
                                  ann: &'a PpAnn)
                                  -> State<'a> {
pub fn rust_printer_annotated<'a>(writer: ~io::Writer, ann: &'a PpAnn) -> State<'a> {
    State {
        s: pp::mk_printer(writer, default_columns),
        cm: None,
        intr: intr,
        intr: token::get_ident_interner(),
        comments: None,
        literals: None,
        cur_cmnt_and_lit: CurrentCommentAndLiteral {

@@ -114,7 +111,6 @@ pub static default_columns: uint = 78u;
// it can scan the input text for comments and literals to
// copy forward.
pub fn print_crate(cm: @CodeMap,
                   intr: @IdentInterner,
                   span_diagnostic: @diagnostic::SpanHandler,
                   krate: &ast::Crate,
                   filename: ~str,

@@ -130,7 +126,7 @@ pub fn print_crate(cm: @CodeMap,
    let mut s = State {
        s: pp::mk_printer(out, default_columns),
        cm: Some(cm),
        intr: intr,
        intr: token::get_ident_interner(),
        comments: Some(cmnts),
        // If the code is post expansion, don't use the table of
        // literals, since it doesn't correspond with the literals

@@ -157,52 +153,51 @@ pub fn print_crate_(s: &mut State, krate: &ast::Crate) -> io::IoResult<()> {
    Ok(())
}

pub fn ty_to_str(ty: &ast::Ty, intr: @IdentInterner) -> ~str {
    to_str(ty, print_type, intr)
pub fn ty_to_str(ty: &ast::Ty) -> ~str {
    to_str(ty, print_type)
}

pub fn pat_to_str(pat: &ast::Pat, intr: @IdentInterner) -> ~str {
    to_str(pat, print_pat, intr)
pub fn pat_to_str(pat: &ast::Pat) -> ~str {
    to_str(pat, print_pat)
}

pub fn expr_to_str(e: &ast::Expr, intr: @IdentInterner) -> ~str {
    to_str(e, print_expr, intr)
pub fn expr_to_str(e: &ast::Expr) -> ~str {
    to_str(e, print_expr)
}

pub fn lifetime_to_str(e: &ast::Lifetime, intr: @IdentInterner) -> ~str {
    to_str(e, print_lifetime, intr)
pub fn lifetime_to_str(e: &ast::Lifetime) -> ~str {
    to_str(e, print_lifetime)
}

pub fn tt_to_str(tt: &ast::TokenTree, intr: @IdentInterner) -> ~str {
    to_str(tt, print_tt, intr)
pub fn tt_to_str(tt: &ast::TokenTree) -> ~str {
    to_str(tt, print_tt)
}

pub fn tts_to_str(tts: &[ast::TokenTree], intr: @IdentInterner) -> ~str {
    to_str(&tts, print_tts, intr)
pub fn tts_to_str(tts: &[ast::TokenTree]) -> ~str {
    to_str(&tts, print_tts)
}

pub fn stmt_to_str(s: &ast::Stmt, intr: @IdentInterner) -> ~str {
    to_str(s, print_stmt, intr)
pub fn stmt_to_str(s: &ast::Stmt) -> ~str {
    to_str(s, print_stmt)
}

pub fn item_to_str(i: &ast::Item, intr: @IdentInterner) -> ~str {
    to_str(i, print_item, intr)
pub fn item_to_str(i: &ast::Item) -> ~str {
    to_str(i, print_item)
}

pub fn generics_to_str(generics: &ast::Generics,
                       intr: @IdentInterner) -> ~str {
    to_str(generics, print_generics, intr)
pub fn generics_to_str(generics: &ast::Generics) -> ~str {
    to_str(generics, print_generics)
}

pub fn path_to_str(p: &ast::Path, intr: @IdentInterner) -> ~str {
    to_str(p, |a,b| print_path(a, b, false), intr)
pub fn path_to_str(p: &ast::Path) -> ~str {
    to_str(p, |a,b| print_path(a, b, false))
}

pub fn fun_to_str(decl: &ast::FnDecl, purity: ast::Purity, name: ast::Ident,
                  opt_explicit_self: Option<ast::ExplicitSelf_>,
                  generics: &ast::Generics, intr: @IdentInterner) -> ~str {
                  generics: &ast::Generics) -> ~str {
    let wr = ~MemWriter::new();
    let mut s = rust_printer(wr as ~io::Writer, intr);
    let mut s = rust_printer(wr as ~io::Writer);
    print_fn(&mut s, decl, Some(purity), AbiSet::Rust(),
             name, generics, opt_explicit_self, ast::Inherited).unwrap();
    end(&mut s).unwrap(); // Close the head box

@@ -213,9 +208,9 @@ pub fn fun_to_str(decl: &ast::FnDecl, purity: ast::Purity, name: ast::Ident,
    }
}

pub fn block_to_str(blk: &ast::Block, intr: @IdentInterner) -> ~str {
pub fn block_to_str(blk: &ast::Block) -> ~str {
    let wr = ~MemWriter::new();
    let mut s = rust_printer(wr as ~io::Writer, intr);
    let mut s = rust_printer(wr as ~io::Writer);
    // containing cbox, will be closed by print-block at }
    cbox(&mut s, indent_unit).unwrap();
    // head-ibox, will be closed by print-block after {

@@ -227,16 +222,16 @@ pub fn block_to_str(blk: &ast::Block, intr: @IdentInterner) -> ~str {
    }
}

pub fn meta_item_to_str(mi: &ast::MetaItem, intr: @IdentInterner) -> ~str {
    to_str(mi, print_meta_item, intr)
pub fn meta_item_to_str(mi: &ast::MetaItem) -> ~str {
    to_str(mi, print_meta_item)
}

pub fn attribute_to_str(attr: &ast::Attribute, intr: @IdentInterner) -> ~str {
    to_str(attr, print_attribute, intr)
pub fn attribute_to_str(attr: &ast::Attribute) -> ~str {
    to_str(attr, print_attribute)
}

pub fn variant_to_str(var: &ast::Variant, intr: @IdentInterner) -> ~str {
    to_str(var, print_variant, intr)
pub fn variant_to_str(var: &ast::Variant) -> ~str {
    to_str(var, print_variant)
}

pub fn cbox(s: &mut State, u: uint) -> io::IoResult<()> {

@@ -817,7 +812,7 @@ pub fn print_tt(s: &mut State, tt: &ast::TokenTree) -> io::IoResult<()> {
    match *tt {
        ast::TTDelim(ref tts) => print_tts(s, &(tts.as_slice())),
        ast::TTTok(_, ref tk) => {
            word(&mut s.s, parse::token::to_str(s.intr, tk))
            word(&mut s.s, parse::token::to_str(tk))
        }
        ast::TTSeq(_, ref tts, ref sep, zerok) => {
            if_ok!(word(&mut s.s, "$("));

@@ -827,7 +822,7 @@ pub fn print_tt(s: &mut State, tt: &ast::TokenTree) -> io::IoResult<()> {
            if_ok!(word(&mut s.s, ")"));
            match *sep {
                Some(ref tk) => {
                    if_ok!(word(&mut s.s, parse::token::to_str(s.intr, tk)));
                    if_ok!(word(&mut s.s, parse::token::to_str(tk)));
                }
                None => ()
            }

@@ -1615,13 +1610,11 @@ pub fn print_decl(s: &mut State, decl: &ast::Decl) -> io::IoResult<()> {
}

pub fn print_ident(s: &mut State, ident: ast::Ident) -> io::IoResult<()> {
    let string = token::get_ident(ident.name);
    word(&mut s.s, string.get())
    word(&mut s.s, token::get_ident(ident).get())
}

pub fn print_name(s: &mut State, name: ast::Name) -> io::IoResult<()> {
    let string = token::get_ident(name);
    word(&mut s.s, string.get())
    word(&mut s.s, token::get_name(name).get())
}

pub fn print_for_decl(s: &mut State, loc: &ast::Local,

@@ -1692,15 +1685,14 @@ fn print_path_(s: &mut State,
    Ok(())
}

pub fn print_path(s: &mut State, path: &ast::Path,
                  colons_before_params: bool) -> io::IoResult<()> {
fn print_path(s: &mut State, path: &ast::Path,
              colons_before_params: bool) -> io::IoResult<()> {
    print_path_(s, path, colons_before_params, &None)
}

pub fn print_bounded_path(s: &mut State, path: &ast::Path,
                          bounds: &Option<OptVec<ast::TyParamBound>>)
                          -> io::IoResult<()>
{
fn print_bounded_path(s: &mut State, path: &ast::Path,
                      bounds: &Option<OptVec<ast::TyParamBound>>)
                      -> io::IoResult<()> {
    print_path_(s, path, false, bounds)
}

@@ -1818,11 +1810,10 @@ pub fn print_pat(s: &mut State, pat: &ast::Pat) -> io::IoResult<()> {
    Ok(())
}

pub fn explicit_self_to_str(explicit_self: &ast::ExplicitSelf_,
                            intr: @IdentInterner) -> ~str {
pub fn explicit_self_to_str(explicit_self: &ast::ExplicitSelf_) -> ~str {
    to_str(explicit_self, |a, &b| {
        print_explicit_self(a, b, ast::MutImmutable).map(|_| ())
    }, intr)
    })
}

// Returns whether it printed anything

@@ -2346,7 +2337,7 @@ pub fn print_literal(s: &mut State, lit: &ast::Lit) -> io::IoResult<()> {
}

pub fn lit_to_str(l: &ast::Lit) -> ~str {
    return to_str(l, print_literal, parse::token::mk_fake_ident_interner());
    to_str(l, print_literal)
}

pub fn next_lit(s: &mut State, pos: BytePos) -> Option<comments::Literal> {

@@ -2450,10 +2441,9 @@ unsafe fn get_mem_writer(writer: &mut ~io::Writer) -> ~str {
    result
}

pub fn to_str<T>(t: &T, f: |&mut State, &T| -> io::IoResult<()>,
                 intr: @IdentInterner) -> ~str {
pub fn to_str<T>(t: &T, f: |&mut State, &T| -> io::IoResult<()>) -> ~str {
    let wr = ~MemWriter::new();
    let mut s = rust_printer(wr as ~io::Writer, intr);
    let mut s = rust_printer(wr as ~io::Writer);
    f(&mut s, t).unwrap();
    eof(&mut s.s).unwrap();
    unsafe {

@@ -2600,7 +2590,7 @@ mod test {
        };
        let generics = ast_util::empty_generics();
        assert_eq!(&fun_to_str(&decl, ast::ImpureFn, abba_ident,
                               None, &generics, token::get_ident_interner()),
                               None, &generics),
                   &~"fn abba()");
    }

@@ -2618,7 +2608,7 @@ mod test {
            vis: ast::Public,
        });

        let varstr = variant_to_str(&var,token::get_ident_interner());
        let varstr = variant_to_str(&var);
        assert_eq!(&varstr,&~"pub principal_skinner");
    }
}

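For reference, a minimal sketch (not part of this diff) of how the pprust entry points are called after this change; the printer now fetches the task-local interner itself, so callers simply drop the old @IdentInterner argument. The bindings `item` and `expr` stand for any AST item and expression already in hand:

    // before: pprust::item_to_str(item, token::get_ident_interner())
    let item_str = pprust::item_to_str(item);
    // before: pprust::expr_to_str(expr, intr)
    let expr_str = pprust::expr_to_str(expr);
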
@@ -21,16 +21,16 @@ use std::hashmap::HashMap;
use std::rc::Rc;

pub struct Interner<T> {
    priv map: @RefCell<HashMap<T, Name>>,
    priv vect: @RefCell<~[T]>,
    priv map: RefCell<HashMap<T, Name>>,
    priv vect: RefCell<~[T]>,
}

// when traits can extend traits, we should extend index<Name,T> to get []
impl<T:Eq + IterBytes + Hash + Freeze + Clone + 'static> Interner<T> {
    pub fn new() -> Interner<T> {
        Interner {
            map: @RefCell::new(HashMap::new()),
            vect: @RefCell::new(~[]),
            map: RefCell::new(HashMap::new()),
            vect: RefCell::new(~[]),
        }
    }

@@ -123,18 +123,18 @@ impl RcStr {
}

// A StrInterner differs from Interner<String> in that it accepts
// references rather than @ ones, resulting in less allocation.
// &str rather than RcStr, resulting in less allocation.
pub struct StrInterner {
    priv map: @RefCell<HashMap<RcStr, Name>>,
    priv vect: @RefCell<~[RcStr]>,
    priv map: RefCell<HashMap<RcStr, Name>>,
    priv vect: RefCell<~[RcStr]>,
}

// when traits can extend traits, we should extend index<Name,T> to get []
impl StrInterner {
    pub fn new() -> StrInterner {
        StrInterner {
            map: @RefCell::new(HashMap::new()),
            vect: @RefCell::new(~[]),
            map: RefCell::new(HashMap::new()),
            vect: RefCell::new(~[]),
        }
    }

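For reference, a minimal sketch (not part of this diff) of using a StrInterner after this change: the hash map and vector are plain RefCells owned by the interner value, so interning still works through a shared borrow via interior mutability. The strings below are only illustrative:

    let interner = StrInterner::new();
    let a = interner.intern("hello");
    let b = interner.intern("hello");
    assert_eq!(a, b);                              // same string, same Name
    assert_eq!(interner.get(a).as_slice(), "hello");
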
@@ -43,7 +43,7 @@ pub enum FnKind<'a> {
pub fn name_of_fn(fk: &FnKind) -> Ident {
    match *fk {
        FkItemFn(name, _, _, _) | FkMethod(name, _, _) => name,
        FkFnBlock(..) => parse::token::special_idents::anon
        FkFnBlock(..) => parse::token::special_idents::invalid
    }
}

@@ -15,6 +15,6 @@ use ambig_impl_2_lib::me;
trait me {
    fn me(&self) -> uint;
}
impl me for uint { fn me(&self) -> uint { *self } } //~ NOTE is `me$uint::me`
impl me for uint { fn me(&self) -> uint { *self } } //~ NOTE is `uint.me::me`
fn main() { 1u.me(); } //~ ERROR multiple applicable methods in scope
//~^ NOTE is `ambig_impl_2_lib::me$uint::me`
//~^ NOTE is `ambig_impl_2_lib::uint.me::me`

@@ -13,11 +13,11 @@ trait foo {
}

impl foo for ~[uint] {
    fn foo(&self) -> int {1} //~ NOTE candidate #1 is `foo$$UP$$VEC$uint::foo`
    fn foo(&self) -> int {1} //~ NOTE candidate #1 is `~[uint].foo::foo`
}

impl foo for ~[int] {
    fn foo(&self) -> int {2} //~ NOTE candidate #2 is `foo$$UP$$VEC$int::foo`
    fn foo(&self) -> int {2} //~ NOTE candidate #2 is `~[int].foo::foo`
}

fn main() {