librustc: Fix up fallout from the automatic conversion.

commit 43c07244b3
parent 3b6e9d4a7a

98 changed files with 1172 additions and 758 deletions
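Nearly every hunk below is the same kind of fixup: the parent commit mechanically changed `~[T]` fields and locals to `Vec<T>`, so call sites that still expect a borrowed slice now need an explicit `.as_slice()`, owned results are rebuilt with `.move_iter().collect()`, and indexing moves to `.get()`. A minimal sketch of the slice-borrowing half of that pattern, written in present-day Rust rather than the 2014-era `std::vec_ng` API used in the diff (the names here are illustrative, not from the commit):

// Minimal sketch, not the original rustc code: a function that still takes a
// slice, and a caller whose field became a growable Vec after the conversion.
fn link_with(args: &[String]) {
    println!("linking with {} arguments", args.len());
}

fn main() {
    let mut args: Vec<String> = Vec::new(); // previously an owned `~[~str]`
    args.push("-L/usr/lib".to_string());
    // The fixup commit adds calls like this wherever a Vec now flows into a
    // slice-taking API; `&args` or `&args[..]` would work equally well today.
    link_with(args.as_slice());
}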
@@ -98,9 +98,17 @@ impl Archive {
 let archive = os::make_absolute(&self.dst);
 run_ar(self.sess, "x", Some(loc.path()), [&archive,
 &Path::new(file)]);
-fs::File::open(&loc.path().join(file)).read_to_end().unwrap()
+let result: Vec<u8> =
+fs::File::open(&loc.path().join(file)).read_to_end()
+.unwrap()
+.move_iter()
+.collect();
+result
 } else {
-run_ar(self.sess, "p", None, [&self.dst, &Path::new(file)]).output
+run_ar(self.sess,
+"p",
+None,
+[&self.dst, &Path::new(file)]).output.move_iter().collect()
 }
 }

@@ -124,7 +132,7 @@ impl Archive {
 if lto {
 ignore.push(object.as_slice());
 }
-self.add_archive(rlib, name, ignore)
+self.add_archive(rlib, name, ignore.as_slice())
 }

 /// Adds an arbitrary file to this archive
@@ -34,6 +34,7 @@ use std::str;
 use std::io;
 use std::io::Process;
 use std::io::fs;
+use std::vec_ng::Vec;
 use flate;
 use serialize::hex::ToHex;
 use extra::tempfile::TempDir;
@@ -106,6 +107,7 @@ pub mod write {
 use std::io::Process;
 use std::libc::{c_uint, c_int};
 use std::str;
+use std::vec_ng::Vec;

 // On android, we by default compile for armv7 processors. This enables
 // things like double word CAS instructions (rather than emulating them)
@@ -222,7 +224,7 @@ pub mod write {

 if sess.lto() {
 time(sess.time_passes(), "all lto passes", (), |()|
-lto::run(sess, llmod, tm, trans.reachable));
+lto::run(sess, llmod, tm, trans.reachable.as_slice()));

 if sess.opts.cg.save_temps {
 output.with_extension("lto.bc").with_c_str(|buf| {
@@ -931,7 +933,8 @@ fn link_rlib(sess: Session,
 // the same filename for metadata (stomping over one another)
 let tmpdir = TempDir::new("rustc").expect("needs a temp dir");
 let metadata = tmpdir.path().join(METADATA_FILENAME);
-match fs::File::create(&metadata).write(trans.metadata) {
+match fs::File::create(&metadata).write(trans.metadata
+.as_slice()) {
 Ok(..) => {}
 Err(e) => {
 sess.err(format!("failed to write {}: {}",
@@ -1035,7 +1038,7 @@ fn link_natively(sess: Session, dylib: bool, obj_filename: &Path,
 // Invoke the system linker
 debug!("{} {}", cc_prog, cc_args.connect(" "));
 let prog = time(sess.time_passes(), "running linker", (), |()|
-Process::output(cc_prog, cc_args));
+Process::output(cc_prog, cc_args.as_slice()));
 match prog {
 Ok(prog) => {
 if !prog.status.success() {
@@ -1198,7 +1201,7 @@ fn link_args(sess: Session,
 // where extern libraries might live, based on the
 // addl_lib_search_paths
 if !sess.opts.cg.no_rpath {
-args.push_all(rpath::get_rpath_flags(sess, out_filename));
+args.push_all(rpath::get_rpath_flags(sess, out_filename).as_slice());
 }

 // Stack growth requires statically linking a __morestack function
@@ -1210,7 +1213,7 @@ fn link_args(sess: Session,

 // Finally add all the linker arguments provided on the command line along
 // with any #[link_args] attributes found inside the crate
-args.push_all(sess.opts.cg.link_args);
+args.push_all(sess.opts.cg.link_args.as_slice());
 let used_link_args = sess.cstore.get_used_link_args();
 let used_link_args = used_link_args.borrow();
 for arg in used_link_args.get().iter() {
@@ -11,6 +11,7 @@
 use back::target_strs;
 use driver::session::sess_os_to_meta_os;
 use metadata::loader::meta_section_name;
+use std::vec_ng::Vec;
 use syntax::abi;

 pub fn get_target_strs(target_triple: ~str, target_os: abi::Os) -> target_strs::t {
@@ -15,6 +15,7 @@ use metadata::filesearch;

 use collections::HashSet;
 use std::{os, vec};
+use std::vec_ng::Vec;
 use syntax::abi;

 fn not_win32(os: abi::Os) -> bool {
@@ -49,7 +50,7 @@ pub fn get_rpath_flags(sess: session::Session, out_filename: &Path) -> Vec<~str>

 let rpaths = get_rpaths(os, sysroot, output, libs,
 sess.opts.target_triple);
-flags.push_all(rpaths_to_flags(rpaths));
+flags.push_all(rpaths_to_flags(rpaths.as_slice()).as_slice());
 flags
 }

@@ -100,16 +101,16 @@ fn get_rpaths(os: abi::Os,
 }
 }

-log_rpaths("relative", rel_rpaths);
-log_rpaths("absolute", abs_rpaths);
-log_rpaths("fallback", fallback_rpaths);
+log_rpaths("relative", rel_rpaths.as_slice());
+log_rpaths("absolute", abs_rpaths.as_slice());
+log_rpaths("fallback", fallback_rpaths.as_slice());

 let mut rpaths = rel_rpaths;
-rpaths.push_all(abs_rpaths);
-rpaths.push_all(fallback_rpaths);
+rpaths.push_all(abs_rpaths.as_slice());
+rpaths.push_all(fallback_rpaths.as_slice());

 // Remove duplicates
-let rpaths = minimize_rpaths(rpaths);
+let rpaths = minimize_rpaths(rpaths.as_slice());
 return rpaths;
 }

@@ -10,6 +10,8 @@

 #[allow(non_camel_case_types)];

+use std::vec_ng::Vec;
+
 pub struct t {
 module_asm: ~str,
 meta_sect_name: ~str,
@@ -11,8 +11,8 @@

 use back::link;
 use back::{arm, x86, x86_64, mips};
-use driver::session::{Aggressive, CrateTypeExecutable, FullDebugInfo, LimitedDebugInfo,
-NoDebugInfo};
+use driver::session::{Aggressive, CrateTypeExecutable, CrateType,
+FullDebugInfo, LimitedDebugInfo, NoDebugInfo};
 use driver::session::{Session, Session_, No, Less, Default};
 use driver::session;
 use front;
@@ -36,7 +36,6 @@ use std::io;
 use std::io::fs;
 use std::io::MemReader;
 use std::os;
-use std::vec;
 use std::vec_ng::Vec;
 use std::vec_ng;
 use collections::HashMap;
@@ -434,7 +433,7 @@ pub fn phase_5_run_llvm_passes(sess: Session,
 time(sess.time_passes(), "LLVM passes", (), |_|
 link::write::run_passes(sess,
 trans,
-sess.opts.output_types,
+sess.opts.output_types.as_slice(),
 outputs));
 }
 }
@@ -767,18 +766,21 @@ pub fn host_triple() -> ~str {

 pub fn build_session_options(matches: &getopts::Matches)
 -> @session::Options {
-let crate_types = matches.opt_strs("crate-type").flat_map(|s| {
-s.split(',').map(|part| {
-match part {
+let mut crate_types: Vec<CrateType> = Vec::new();
+let unparsed_crate_types = matches.opt_strs("crate-type");
+for unparsed_crate_type in unparsed_crate_types.iter() {
+for part in unparsed_crate_type.split(',') {
+let new_part = match part {
 "lib" => session::default_lib_output(),
 "rlib" => session::CrateTypeRlib,
 "staticlib" => session::CrateTypeStaticlib,
 "dylib" => session::CrateTypeDylib,
 "bin" => session::CrateTypeExecutable,
 _ => early_error(format!("unknown crate type: `{}`", part))
+};
+crate_types.push(new_part)
+}
 }
-}).collect()
-});

 let parse_only = matches.opt_present("parse-only");
 let no_trans = matches.opt_present("no-trans");
@@ -793,7 +795,9 @@ pub fn build_session_options(matches: &getopts::Matches)

 let level_short = level_name.slice_chars(0, 1);
 let level_short = level_short.to_ascii().to_upper().into_str();
-let flags = vec_ng::append(matches.opt_strs(level_short),
+let flags = vec_ng::append(matches.opt_strs(level_short)
+.move_iter()
+.collect(),
 matches.opt_strs(level_name));
 for lint_name in flags.iter() {
 let lint_name = lint_name.replace("-", "_");
@@ -828,23 +832,24 @@ pub fn build_session_options(matches: &getopts::Matches)
 unsafe { llvm::LLVMSetDebug(1); }
 }

-let mut output_types = if parse_only || no_trans {
-Vec::new()
-} else {
-matches.opt_strs("emit").flat_map(|s| {
-s.split(',').map(|part| {
-match part.as_slice() {
+let mut output_types = Vec::new();
+if !parse_only && !no_trans {
+let unparsed_output_types = matches.opt_strs("emit");
+for unparsed_output_type in unparsed_output_types.iter() {
+for part in unparsed_output_type.split(',') {
+let output_type = match part.as_slice() {
 "asm" => link::OutputTypeAssembly,
 "ir" => link::OutputTypeLlvmAssembly,
 "bc" => link::OutputTypeBitcode,
 "obj" => link::OutputTypeObject,
 "link" => link::OutputTypeExe,
 _ => early_error(format!("unknown emission type: `{}`", part))
-}
-}).collect()
-})
 };
-output_types.sort();
+output_types.push(output_type)
+}
+}
+};
+output_types.as_mut_slice().sort();
 output_types.dedup();
 if output_types.len() == 0 {
 output_types.push(link::OutputTypeExe);
@@ -890,7 +895,7 @@ pub fn build_session_options(matches: &getopts::Matches)
 Path::new(s.as_slice())
 }).move_iter().collect();

-let cfg = parse_cfgspecs(matches.opt_strs("cfg"));
+let cfg = parse_cfgspecs(matches.opt_strs("cfg").move_iter().collect());
 let test = matches.opt_present("test");
 let write_dependency_info = (matches.opt_present("dep-info"),
 matches.opt_str("dep-info").map(|p| Path::new(p)));
@@ -1187,7 +1192,7 @@ mod test {
 #[test]
 fn test_switch_implies_cfg_test() {
 let matches =
-&match getopts([~"--test"], optgroups()) {
+&match getopts([~"--test"], optgroups().as_slice()) {
 Ok(m) => m,
 Err(f) => fail!("test_switch_implies_cfg_test: {}", f.to_err_msg())
 };
@@ -1202,7 +1207,8 @@ mod test {
 #[test]
 fn test_switch_implies_cfg_test_unless_cfg_test() {
 let matches =
-&match getopts([~"--test", ~"--cfg=test"], optgroups()) {
+&match getopts([~"--test", ~"--cfg=test"],
+optgroups().as_slice()) {
 Ok(m) => m,
 Err(f) => {
 fail!("test_switch_implies_cfg_test_unless_cfg_test: {}",
@@ -320,19 +320,9 @@ pub fn basic_options() -> @Options {
 crate_types: Vec::new(),
 gc: false,
 optimize: No,
-<<<<<<< HEAD
 debuginfo: NoDebugInfo,
-lint_opts: ~[],
-output_types: ~[],
-||||||| merged common ancestors
-debuginfo: false,
-lint_opts: ~[],
-output_types: ~[],
-=======
-debuginfo: false,
 lint_opts: Vec::new(),
 output_types: Vec::new(),
->>>>>>> librustc: Automatically change uses of `~[T]` to `Vec<T>` in rustc.
 addl_lib_search_paths: @RefCell::new(HashSet::new()),
 maybe_sysroot: None,
 target_triple: host_triple(),
@@ -403,7 +393,8 @@ macro_rules! cgoptions(
 }
 }

-fn parse_list(slot: &mut Vec<~str> , v: Option<&str>) -> bool {
+fn parse_list(slot: &mut ::std::vec_ng::Vec<~str>, v: Option<&str>)
+-> bool {
 match v {
 Some(s) => {
 for s in s.words() {
@@ -489,7 +480,7 @@ pub fn collect_crate_types(session: &Session,
 // If we're generating a test executable, then ignore all other output
 // styles at all other locations
 if session.opts.test {
-return Vec<CrateTypeExecutable> ;
+return vec!(CrateTypeExecutable)
 }
 let mut base = session.opts.crate_types.clone();
 let mut iter = attrs.iter().filter_map(|a| {
@@ -525,7 +516,7 @@ pub fn collect_crate_types(session: &Session,
 if base.len() == 0 {
 base.push(CrateTypeExecutable);
 }
-base.sort();
+base.as_mut_slice().sort();
 base.dedup();
 return base;
 }
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.

-
+use std::vec_ng::Vec;
 use syntax::fold::Folder;
 use syntax::{ast, fold, attr};
 use syntax::codemap;
@@ -31,6 +31,7 @@ use syntax::parse::token;
 use driver::session::Session;

 use std::cell::Cell;
+use std::vec_ng::Vec;

 /// This is a list of all known features since the beginning of time. This list
 /// can never shrink, it may only be expanded (in order to prevent old programs
@@ -93,7 +93,7 @@ impl<'a> fold::Folder for TestHarnessGenerator<'a> {
 path.get().push(i.ident);
 }
 debug!("current path: {}",
-ast_util::path_name_i(self.cx.path.get()));
+ast_util::path_name_i(self.cx.path.get().as_slice()));

 if is_test_fn(&self.cx, i) || is_bench_fn(i) {
 match i.node {
@@ -432,11 +432,12 @@ fn mk_test_desc_and_fn_rec(cx: &TestCtxt, test: &Test) -> @ast::Expr {
 let span = test.span;
 let path = test.path.clone();

-debug!("encoding {}", ast_util::path_name_i(path));
+debug!("encoding {}", ast_util::path_name_i(path.as_slice()));

 let name_lit: ast::Lit =
 nospan(ast::LitStr(token::intern_and_get_ident(
-ast_util::path_name_i(path)), ast::CookedStr));
+ast_util::path_name_i(path.as_slice())),
+ast::CookedStr));

 let name_expr = @ast::Expr {
 id: ast::DUMMY_NODE_ID,
@@ -53,8 +53,8 @@ use std::io;
 use std::os;
 use std::str;
 use std::task;
-use std::vec;
 use std::vec_ng::Vec;
+use std::vec_ng;
 use syntax::ast;
 use syntax::diagnostic::Emitter;
 use syntax::diagnostic;
@@ -149,7 +149,7 @@ Additional help:
 -C help Print codegen options
 -W help Print 'lint' options and default settings
 -Z help Print internal options for debugging rustc\n",
-getopts::usage(message, d::optgroups()));
+getopts::usage(message, d::optgroups().as_slice()));
 }

 pub fn describe_warnings() {
@@ -165,7 +165,7 @@ Available lint options:
 let mut lint_dict = lint_dict.move_iter()
 .map(|(k, v)| (v, k))
 .collect::<Vec<(lint::LintSpec, &'static str)> >();
-lint_dict.sort();
+lint_dict.as_mut_slice().sort();

 let mut max_key = 0;
 for &(_, name) in lint_dict.iter() {
@@ -224,7 +224,7 @@ pub fn run_compiler(args: &[~str]) {
 if args.is_empty() { usage(binary); return; }

 let matches =
-&match getopts::getopts(args, d::optgroups()) {
+&match getopts::getopts(args, d::optgroups().as_slice()) {
 Ok(m) => m,
 Err(f) => {
 d::early_error(f.to_err_msg());
@@ -236,7 +236,9 @@ pub fn run_compiler(args: &[~str]) {
 return;
 }

-let lint_flags = vec_ng::append(matches.opt_strs("W"),
+let lint_flags = vec_ng::append(matches.opt_strs("W")
+.move_iter()
+.collect(),
 matches.opt_strs("warn"));
 if lint_flags.iter().any(|x| x == &~"help") {
 describe_warnings();
@@ -312,8 +314,8 @@ pub fn run_compiler(args: &[~str]) {
 if crate_id || crate_name || crate_file_name {
 let attrs = parse_crate_attrs(sess, &input);
 let t_outputs = d::build_output_filenames(&input, &odir, &ofile,
-attrs, sess);
-let id = link::find_crate_id(attrs, &t_outputs);
+attrs.as_slice(), sess);
+let id = link::find_crate_id(attrs.as_slice(), &t_outputs);

 if crate_id {
 println!("{}", id.to_str());
@@ -322,7 +324,8 @@ pub fn run_compiler(args: &[~str]) {
 println!("{}", id.name);
 }
 if crate_file_name {
-let crate_types = session::collect_crate_types(&sess, attrs);
+let crate_types = session::collect_crate_types(&sess,
+attrs.as_slice());
 for &style in crate_types.iter() {
 let fname = link::filename_for_input(&sess, style, &id,
 &t_outputs.with_extension(""));
@@ -58,8 +58,10 @@ pub fn read_crates(sess: Session,
 visit::walk_crate(&mut v, krate, ());
 }
 let crate_cache = e.crate_cache.borrow();
-dump_crates(*crate_cache.get());
-warn_if_multiple_versions(&mut e, sess.diagnostic(), *crate_cache.get());
+dump_crates(crate_cache.get().as_slice());
+warn_if_multiple_versions(&mut e,
+sess.diagnostic(),
+crate_cache.get().as_slice());
 }

 struct ReadCrateVisitor<'a> {
@@ -121,7 +123,7 @@ fn warn_if_multiple_versions(e: &mut Env,
 struct Env {
 sess: Session,
 os: loader::Os,
-crate_cache: @RefCell<vec!(cache_entry)>,
+crate_cache: @RefCell<Vec<cache_entry>>,
 next_crate_num: ast::CrateNum,
 intr: @IdentInterner
 }
@@ -18,9 +18,10 @@ use metadata::decoder;
 use middle::ty;
 use middle::typeck;

-use std::vec;
 use reader = serialize::ebml::reader;
 use std::rc::Rc;
+use std::vec_ng::Vec;
+use std::vec_ng;
 use syntax::ast;
 use syntax::ast_map;
 use syntax::diagnostic::expect;
@@ -93,7 +94,8 @@ pub fn get_item_path(tcx: ty::ctxt, def: ast::DefId) -> Vec<ast_map::PathElem> {

 // FIXME #1920: This path is not always correct if the crate is not linked
 // into the root namespace.
-vec_ng::append(vec!(ast_map::PathMod(token::intern(cdata.name))), path)
+vec_ng::append(vec!(ast_map::PathMod(token::intern(cdata.name))),
+path.as_slice())
 }

 pub enum found_ast {
@@ -18,6 +18,7 @@ use metadata::decoder;
 use metadata::loader;

 use std::cell::RefCell;
+use std::vec_ng::Vec;
 use collections::HashMap;
 use extra::c_vec::CVec;
 use syntax::ast;
@@ -33,7 +33,7 @@ use std::io;
 use std::io::extensions::u64_from_be_bytes;
 use std::option;
 use std::rc::Rc;
-use std::vec;
+use std::vec_ng::Vec;
 use serialize::ebml::reader;
 use serialize::ebml;
 use serialize::Decodable;
@@ -304,7 +304,7 @@ fn item_path(item_doc: ebml::Doc) -> Vec<ast_map::PathElem> {
 let len_doc = reader::get_doc(path_doc, tag_path_len);
 let len = reader::doc_as_u32(len_doc) as uint;

-let mut result = vec::with_capacity(len);
+let mut result = Vec::with_capacity(len);
 reader::docs(path_doc, |tag, elt_doc| {
 if tag == tag_path_elem_mod {
 let s = elt_doc.as_str_slice();
@@ -682,7 +682,7 @@ pub fn maybe_get_item_ast(cdata: Cmd, tcx: ty::ctxt, id: ast::NodeId,
 -> csearch::found_ast {
 debug!("Looking up item: {}", id);
 let item_doc = lookup_item(id, cdata.data());
-let path = item_path(item_doc).init().to_owned();
+let path = Vec::from_slice(item_path(item_doc).init());
 match decode_inlined_item(cdata, tcx, path, item_doc) {
 Ok(ref ii) => csearch::found(*ii),
 Err(path) => {
@@ -1072,7 +1072,7 @@ fn get_attributes(md: ebml::Doc) -> Vec<ast::Attribute> {
 // Currently it's only possible to have a single meta item on
 // an attribute
 assert_eq!(meta_items.len(), 1u);
-let meta_item = meta_items[0];
+let meta_item = *meta_items.get(0);
 attrs.push(
 codemap::Spanned {
 node: ast::Attribute_ {
@@ -32,6 +32,7 @@ use std::hash;
 use std::hash::Hash;
 use std::io::MemWriter;
 use std::str;
+use std::vec_ng::Vec;
 use collections::HashMap;
 use syntax::abi::AbiSet;
 use syntax::ast::*;
@@ -367,9 +368,9 @@ fn encode_enum_variant_info(ecx: &EncodeContext,
 encode_index(ebml_w, bkts, write_i64);
 }
 }
-if vi[i].disr_val != disr_val {
-encode_disr_val(ecx, ebml_w, vi[i].disr_val);
-disr_val = vi[i].disr_val;
+if vi.get(i).disr_val != disr_val {
+encode_disr_val(ecx, ebml_w, vi.get(i).disr_val);
+disr_val = vi.get(i).disr_val;
 }
 encode_bounds_and_type(ebml_w, ecx,
 &lookup_item_type(ecx.tcx, def_id));
@@ -1390,7 +1391,7 @@ fn create_index<T:Clone + Hash + 'static>(
 }
 for elt in index.iter() {
 let h = hash::hash(&elt.val) as uint;
-let mut bucket = buckets[h % 256].borrow_mut();
+let mut bucket = buckets.get_mut(h % 256).borrow_mut();
 bucket.get().push((*elt).clone());
 }

@@ -1770,7 +1771,7 @@ pub static metadata_encoding_version : &'static [u8] =
 pub fn encode_metadata(parms: EncodeParams, krate: &Crate) -> Vec<u8> {
 let mut wr = MemWriter::new();
 encode_metadata_inner(&mut wr, parms, krate);
-wr.unwrap()
+wr.unwrap().move_iter().collect()
 }

 fn encode_metadata_inner(wr: &mut MemWriter, parms: EncodeParams, krate: &Crate) {
@@ -1822,7 +1823,7 @@ fn encode_metadata_inner(wr: &mut MemWriter, parms: EncodeParams, krate: &Crate)

 let mut i = ebml_w.writer.tell().unwrap();
 let crate_attrs = synthesize_crate_attrs(&ecx, krate);
-encode_attributes(&mut ebml_w, crate_attrs);
+encode_attributes(&mut ebml_w, crate_attrs.as_slice());
 ecx.stats.attr_bytes.set(ebml_w.writer.tell().unwrap() - i);

 i = ebml_w.writer.tell().unwrap();
@@ -14,6 +14,7 @@ use std::cell::RefCell;
 use std::option;
 use std::os;
 use std::io::fs;
+use std::vec_ng::Vec;
 use collections::HashSet;

 pub enum FileMatch { FileMatches, FileDoesntMatch }
@@ -31,6 +31,7 @@ use std::io;
 use std::os::consts::{macos, freebsd, linux, android, win32};
 use std::str;
 use std::vec;
+use std::vec_ng::Vec;

 use collections::{HashMap, HashSet};
 use flate;
@@ -205,7 +206,7 @@ impl<'a> Context<'a> {
 // libraries or not.
 match libraries.len() {
 0 => None,
-1 => Some(libraries[0]),
+1 => Some(libraries.move_iter().next().unwrap()),
 _ => {
 self.sess.span_err(self.span,
 format!("multiple matching crates for `{}`",
@@ -20,6 +20,7 @@ use middle::ty;

 use std::str;
 use std::uint;
+use std::vec_ng::Vec;
 use syntax::abi::AbiSet;
 use syntax::abi;
 use syntax::ast;
@@ -19,6 +19,7 @@ use std::io;
 use std::io::MemWriter;
 use std::str;
 use std::fmt;
+use std::vec_ng::Vec;

 use middle::ty::param_ty;
 use middle::ty;
@@ -654,7 +654,7 @@ pub fn encode_vtable_res(ecx: &e::EncodeContext,
 // ty::t doesn't work, and there is no way (atm) to have
 // hand-written encoding routines combine with auto-generated
 // ones. perhaps we should fix this.
-ebml_w.emit_from_vec(*dr, |ebml_w, param_tables| {
+ebml_w.emit_from_vec(dr.as_slice(), |ebml_w, param_tables| {
 encode_vtable_param_res(ecx, ebml_w, *param_tables);
 })
 }
@@ -662,7 +662,7 @@ pub fn encode_vtable_res(ecx: &e::EncodeContext,
 pub fn encode_vtable_param_res(ecx: &e::EncodeContext,
 ebml_w: &mut writer::Encoder,
 param_tables: typeck::vtable_param_res) {
-ebml_w.emit_from_vec(*param_tables, |ebml_w, vtable_origin| {
+ebml_w.emit_from_vec(param_tables.as_slice(), |ebml_w, vtable_origin| {
 encode_vtable_origin(ecx, ebml_w, vtable_origin)
 })
 }
@@ -679,7 +679,7 @@ pub fn encode_vtable_origin(ecx: &e::EncodeContext,
 ebml_w.emit_def_id(def_id)
 });
 ebml_w.emit_enum_variant_arg(1u, |ebml_w| {
-ebml_w.emit_tys(ecx, *tys);
+ebml_w.emit_tys(ecx, tys.as_slice());
 });
 ebml_w.emit_enum_variant_arg(2u, |ebml_w| {
 encode_vtable_res(ecx, ebml_w, vtable_res);
@@ -718,6 +718,8 @@ impl<'a> vtable_decoder_helpers for reader::Decoder<'a> {
 -> typeck::vtable_res {
 @self.read_to_vec(|this|
 this.read_vtable_param_res(tcx, cdata))
+.move_iter()
+.collect()
 }

 fn read_vtable_param_res(&mut self,
@@ -725,6 +727,8 @@ impl<'a> vtable_decoder_helpers for reader::Decoder<'a> {
 -> typeck::vtable_param_res {
 @self.read_to_vec(|this|
 this.read_vtable_origin(tcx, cdata))
+.move_iter()
+.collect()
 }

 fn read_vtable_origin(&mut self,
@@ -985,7 +989,7 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext,
 ebml_w.tag(c::tag_table_node_type_subst, |ebml_w| {
 ebml_w.id(id);
 ebml_w.tag(c::tag_table_val, |ebml_w| {
-ebml_w.emit_tys(ecx, **tys)
+ebml_w.emit_tys(ecx, tys.as_slice())
 })
 })
 }
@@ -998,7 +1002,7 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext,
 ebml_w.tag(c::tag_table_freevars, |ebml_w| {
 ebml_w.id(id);
 ebml_w.tag(c::tag_table_val, |ebml_w| {
-ebml_w.emit_from_vec(**fv, |ebml_w, fv_entry| {
+ebml_w.emit_from_vec(fv.as_slice(), |ebml_w, fv_entry| {
 encode_freevar_entry(ebml_w, *fv_entry)
 })
 })
@@ -1077,7 +1081,8 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext,
 ebml_w.tag(c::tag_table_capture_map, |ebml_w| {
 ebml_w.id(id);
 ebml_w.tag(c::tag_table_val, |ebml_w| {
-ebml_w.emit_from_vec(*cap_vars.borrow(), |ebml_w, cap_var| {
+ebml_w.emit_from_vec(cap_vars.borrow().as_slice(),
+|ebml_w, cap_var| {
 cap_var.encode(ebml_w);
 })
 })
@@ -1139,6 +1144,8 @@ impl<'a> ebml_decoder_decoder_helpers for reader::Decoder<'a> {
 tcx: ty::ctxt,
 cdata: @cstore::crate_metadata) -> Vec<ty::t> {
 self.read_to_vec(|this| this.read_ty_noxcx(tcx, cdata) )
+.move_iter()
+.collect()
 }

 fn read_ty(&mut self, xcx: @ExtendedDecodeContext) -> ty::t {
@@ -1170,7 +1177,7 @@ impl<'a> ebml_decoder_decoder_helpers for reader::Decoder<'a> {
 }

 fn read_tys(&mut self, xcx: @ExtendedDecodeContext) -> Vec<ty::t> {
-self.read_to_vec(|this| this.read_ty(xcx) )
+self.read_to_vec(|this| this.read_ty(xcx)).move_iter().collect()
 }

 fn read_type_param_def(&mut self, xcx: @ExtendedDecodeContext)
@@ -1197,7 +1204,9 @@ impl<'a> ebml_decoder_decoder_helpers for reader::Decoder<'a> {
 0,
 |this| {
 Rc::new(this.read_to_vec(|this|
-this.read_type_param_def(xcx)))
+this.read_type_param_def(xcx))
+.move_iter()
+.collect())
 }),
 region_param_defs:
 this.read_struct_field("region_param_defs",
@@ -1357,7 +1366,7 @@ fn decode_side_tables(xcx: @ExtendedDecodeContext,
 c::tag_table_freevars => {
 let fv_info = @val_dsr.read_to_vec(|val_dsr| {
 @val_dsr.read_freevar_entry(xcx)
-});
+}).move_iter().collect();
 let mut freevars = dcx.tcx.freevars.borrow_mut();
 freevars.get().insert(id, fv_info);
 }
@@ -1394,7 +1403,9 @@ fn decode_side_tables(xcx: @ExtendedDecodeContext,
 }
 c::tag_table_capture_map => {
 let cvars =
-val_dsr.read_to_vec(|val_dsr| val_dsr.read_capture_var(xcx));
+val_dsr.read_to_vec(|val_dsr| val_dsr.read_capture_var(xcx))
+.move_iter()
+.collect();
 let mut capture_map = dcx.maps
 .capture_map
 .borrow_mut();
@@ -22,6 +22,7 @@ use mc = middle::mem_categorization;
 use middle::borrowck::*;
 use middle::moves;
 use middle::ty;
+use std::vec_ng::Vec;
 use syntax::ast;
 use syntax::ast_util;
 use syntax::codemap::Span;
@@ -27,6 +27,7 @@ use util::common::indenter;
 use util::ppaux::{Repr};

 use std::cell::RefCell;
+use std::vec_ng::Vec;
 use syntax::ast;
 use syntax::ast_util;
 use syntax::ast_util::IdRange;
@@ -12,7 +12,8 @@
 * Computes the restrictions that result from a borrow.
 */

-use std::vec;
+use std::vec_ng::Vec;
+use std::vec_ng;
 use middle::borrowck::*;
 use mc = middle::mem_categorization;
 use middle::ty;
@@ -173,9 +174,11 @@ impl<'a> RestrictionsContext<'a> {
 Safe => Safe,
 SafeIf(base_lp, base_vec) => {
 let lp = @LpExtend(base_lp, mc, elem);
-SafeIf(lp, vec::append_one(base_vec,
-Restriction {loan_path: lp,
-set: restrictions}))
+SafeIf(lp, vec_ng::append_one(base_vec,
+Restriction {
+loan_path: lp,
+set: restrictions
+}))
 }
 }
 }
@@ -24,6 +24,7 @@ use std::cell::{Cell, RefCell};
 use collections::HashMap;
 use std::ops::{BitOr, BitAnd};
 use std::result::{Result};
+use std::vec_ng::Vec;
 use syntax::ast;
 use syntax::ast_map;
 use syntax::ast_util;
@@ -146,7 +147,7 @@ fn borrowck_fn(this: &mut BorrowckCtxt,
 body);

 check_loans::check_loans(this, &loan_dfcx, flowed_moves,
-*all_loans.get(), body);
+all_loans.get().as_slice(), body);

 visit::walk_fn(this, fk, decl, body, sp, id, ());
 }
@@ -17,6 +17,7 @@ comments in the section "Moves and initialization" and in `doc.rs`.

 use std::cell::RefCell;
 use std::uint;
+use std::vec_ng::Vec;
 use collections::{HashMap, HashSet};
 use middle::borrowck::*;
 use middle::dataflow::DataFlowContext;
@@ -184,47 +185,47 @@ impl MoveData {

 fn path_loan_path(&self, index: MovePathIndex) -> @LoanPath {
 let paths = self.paths.borrow();
-paths.get()[index.get()].loan_path
+paths.get().get(index.get()).loan_path
 }

 fn path_parent(&self, index: MovePathIndex) -> MovePathIndex {
 let paths = self.paths.borrow();
-paths.get()[index.get()].parent
+paths.get().get(index.get()).parent
 }

 fn path_first_move(&self, index: MovePathIndex) -> MoveIndex {
 let paths = self.paths.borrow();
-paths.get()[index.get()].first_move
+paths.get().get(index.get()).first_move
 }

 fn path_first_child(&self, index: MovePathIndex) -> MovePathIndex {
 let paths = self.paths.borrow();
-paths.get()[index.get()].first_child
+paths.get().get(index.get()).first_child
 }

 fn path_next_sibling(&self, index: MovePathIndex) -> MovePathIndex {
 let paths = self.paths.borrow();
-paths.get()[index.get()].next_sibling
+paths.get().get(index.get()).next_sibling
 }

 fn set_path_first_move(&self,
 index: MovePathIndex,
 first_move: MoveIndex) {
 let mut paths = self.paths.borrow_mut();
-paths.get()[index.get()].first_move = first_move
+paths.get().get_mut(index.get()).first_move = first_move
 }

 fn set_path_first_child(&self,
 index: MovePathIndex,
 first_child: MovePathIndex) {
 let mut paths = self.paths.borrow_mut();
-paths.get()[index.get()].first_child = first_child
+paths.get().get_mut(index.get()).first_child = first_child
 }

 fn move_next_move(&self, index: MoveIndex) -> MoveIndex {
 //! Type safe indexing operator
 let moves = self.moves.borrow();
-moves.get()[index.get()].next_move
+moves.get().get(index.get()).next_move
 }

 fn is_var_path(&self, index: MovePathIndex) -> bool {
@@ -605,7 +606,7 @@ impl FlowedMoveData {

 self.dfcx_moves.each_gen_bit_frozen(id, |index| {
 let moves = self.move_data.moves.borrow();
-let move = &moves.get()[index];
+let move = moves.get().get(index);
 let moved_path = move.path;
 f(move, self.move_data.path_loan_path(moved_path))
 })
@@ -644,7 +645,7 @@ impl FlowedMoveData {

 self.dfcx_moves.each_bit_on_entry_frozen(id, |index| {
 let moves = self.move_data.moves.borrow();
-let move = &moves.get()[index];
+let move = moves.get().get(index);
 let moved_path = move.path;
 if base_indices.iter().any(|x| x == &moved_path) {
 // Scenario 1 or 2: `loan_path` or some base path of
@@ -702,7 +703,7 @@ impl FlowedMoveData {

 self.dfcx_assign.each_bit_on_entry_frozen(id, |index| {
 let var_assignments = self.move_data.var_assignments.borrow();
-let assignment = &var_assignments.get()[index];
+let assignment = var_assignments.get().get(index);
 if assignment.path == loan_path_index && !f(assignment) {
 false
 } else {
@@ -12,6 +12,7 @@ use middle::cfg::*;
 use middle::graph;
 use middle::typeck;
 use middle::ty;
+use std::vec_ng::Vec;
 use syntax::ast;
 use syntax::ast_util;
 use syntax::opt_vec;
@@ -328,7 +329,7 @@ impl CFGBuilder {

 ast::ExprRet(v) => {
 let v_exit = self.opt_expr(v, pred);
-let loop_scope = self.loop_scopes[0];
+let loop_scope = *self.loop_scopes.get(0);
 self.add_exiting_edge(expr, v_exit,
 loop_scope, loop_scope.break_index);
 self.add_node(expr.id, [])
@@ -377,7 +378,7 @@ impl CFGBuilder {
 let base_exit = self.opt_expr(base, pred);
 let field_exprs: Vec<@ast::Expr> =
 fields.iter().map(|f| f.expr).collect();
-self.straightline(expr, base_exit, field_exprs)
+self.straightline(expr, base_exit, field_exprs.as_slice())
 }

 ast::ExprRepeat(elem, count, _) => {
@@ -15,6 +15,7 @@ use middle::ty;
 use middle::typeck;
 use util::ppaux;

+use std::vec_ng::Vec;
 use syntax::ast::*;
 use syntax::{ast_util, ast_map};
 use syntax::visit::Visitor;
@@ -21,7 +21,8 @@ use util::ppaux::ty_to_str;

 use std::cmp;
 use std::iter;
-use std::vec;
+use std::vec_ng::Vec;
+use std::vec_ng;
 use syntax::ast::*;
 use syntax::ast_util::{unguarded_pat, walk_pat};
 use syntax::codemap::{DUMMY_SP, Span};
@@ -152,7 +153,7 @@ fn check_arms(cx: &MatchCheckCtxt, arms: &[Arm]) {
 });

 let v = vec!(*pat);
-match is_useful(cx, &seen, v) {
+match is_useful(cx, &seen, v.as_slice()) {
 not_useful => {
 cx.tcx.sess.span_err(pat.span, "unreachable pattern");
 }
@@ -250,10 +251,14 @@ enum ctor {
 // Note: is_useful doesn't work on empty types, as the paper notes.
 // So it assumes that v is non-empty.
 fn is_useful(cx: &MatchCheckCtxt, m: &matrix, v: &[@Pat]) -> useful {
-if m.len() == 0u { return useful_; }
-if m[0].len() == 0u { return not_useful; }
-let real_pat = match m.iter().find(|r| r[0].id != 0) {
-Some(r) => r[0], None => v[0]
+if m.len() == 0u {
+return useful_;
+}
+if m.get(0).len() == 0u {
+return not_useful
+}
+let real_pat = match m.iter().find(|r| r.get(0).id != 0) {
+Some(r) => *r.get(0), None => v[0]
 };
 let left_ty = if real_pat.id == 0 { ty::mk_nil() }
 else { ty::node_id_to_type(cx.tcx, real_pat.id) };
@@ -290,7 +295,7 @@ fn is_useful(cx: &MatchCheckCtxt, m: &matrix, v: &[@Pat]) -> useful {
 }
 ty::ty_unboxed_vec(..) | ty::ty_vec(..) => {
 let max_len = m.rev_iter().fold(0, |max_len, r| {
-match r[0].node {
+match r.get(0).node {
 PatVec(ref before, _, ref after) => {
 cmp::max(before.len() + after.len(), max_len)
 }
@@ -313,7 +318,9 @@ fn is_useful(cx: &MatchCheckCtxt, m: &matrix, v: &[@Pat]) -> useful {
 }
 Some(ref ctor) => {
 match is_useful(cx,
-&m.iter().filter_map(|r| default(cx, *r)).collect::<matrix>(),
+&m.iter().filter_map(|r| {
+default(cx, r.as_slice())
+}).collect::<matrix>(),
 v.tail()) {
 useful_ => useful(left_ty, (*ctor).clone()),
 ref u => (*u).clone(),
@@ -335,9 +342,11 @@ fn is_useful_specialized(cx: &MatchCheckCtxt,
 arity: uint,
 lty: ty::t)
 -> useful {
-let ms = m.iter().filter_map(|r| specialize(cx, *r, &ctor, arity, lty)).collect::<matrix>();
+let ms = m.iter().filter_map(|r| {
+specialize(cx, r.as_slice(), &ctor, arity, lty)
+}).collect::<matrix>();
 let could_be_useful = is_useful(
-cx, &ms, specialize(cx, v, &ctor, arity, lty).unwrap());
+cx, &ms, specialize(cx, v, &ctor, arity, lty).unwrap().as_slice());
 match could_be_useful {
 useful_ => useful(lty, ctor),
 ref u => (*u).clone(),
@@ -408,14 +417,14 @@ fn missing_ctor(cx: &MatchCheckCtxt,
 ty::ty_box(_) | ty::ty_uniq(_) | ty::ty_rptr(..) | ty::ty_tup(_) |
 ty::ty_struct(..) => {
 for r in m.iter() {
-if !is_wild(cx, r[0]) { return None; }
+if !is_wild(cx, *r.get(0)) { return None; }
 }
 return Some(single);
 }
 ty::ty_enum(eid, _) => {
 let mut found = Vec::new();
 for r in m.iter() {
-let r = pat_ctor_id(cx, r[0]);
+let r = pat_ctor_id(cx, *r.get(0));
 for id in r.iter() {
 if !found.contains(id) {
 found.push((*id).clone());
@@ -437,7 +446,7 @@ fn missing_ctor(cx: &MatchCheckCtxt,
 let mut true_found = false;
 let mut false_found = false;
 for r in m.iter() {
-match pat_ctor_id(cx, r[0]) {
+match pat_ctor_id(cx, *r.get(0)) {
 None => (),
 Some(val(const_bool(true))) => true_found = true,
 Some(val(const_bool(false))) => false_found = true,
@@ -452,7 +461,7 @@ fn missing_ctor(cx: &MatchCheckCtxt,
 let mut missing = true;
 let mut wrong = false;
 for r in m.iter() {
-match r[0].node {
+match r.get(0).node {
 PatVec(ref before, ref slice, ref after) => {
 let count = before.len() + after.len();
 if (count < n && slice.is_none()) || count > n {
@@ -475,7 +484,7 @@ fn missing_ctor(cx: &MatchCheckCtxt,

 // Find the lengths and slices of all vector patterns.
 let mut vec_pat_lens = m.iter().filter_map(|r| {
-match r[0].node {
+match r.get(0).node {
 PatVec(ref before, ref slice, ref after) => {
 Some((before.len() + after.len(), slice.is_some()))
 }
@@ -566,10 +575,11 @@ fn specialize(cx: &MatchCheckCtxt,
 Pat{id: pat_id, node: n, span: pat_span} =>
 match n {
 PatWild => {
-Some(vec_ng::append(vec::from_elem(arity, wild()), r.tail()))
+Some(vec_ng::append(Vec::from_elem(arity, wild()), r.tail()))
 }
 PatWildMulti => {
-Some(vec_ng::append(vec::from_elem(arity, wild_multi()), r.tail()))
+Some(vec_ng::append(Vec::from_elem(arity, wild_multi()),
+r.tail()))
 }
 PatIdent(_, _, _) => {
 let opt_def = {
@@ -579,7 +589,7 @@ fn specialize(cx: &MatchCheckCtxt,
 match opt_def {
 Some(DefVariant(_, id, _)) => {
 if variant(id) == *ctor_id {
-Some(r.tail().to_owned())
+Some(Vec::from_slice(r.tail()))
 } else {
 None
 }
@@ -617,7 +627,7 @@ fn specialize(cx: &MatchCheckCtxt,
 _ => fail!("type error")
 };
 if match_ {
-Some(r.tail().to_owned())
+Some(Vec::from_slice(r.tail()))
 } else {
 None
 }
@@ -625,7 +635,7 @@ fn specialize(cx: &MatchCheckCtxt,
 _ => {
 Some(
 vec_ng::append(
-vec::from_elem(arity, wild()),
+Vec::from_elem(arity, wild()),
 r.tail()
 )
 )
@@ -668,7 +678,7 @@ fn specialize(cx: &MatchCheckCtxt,
 _ => fail!("type error")
 };
 if match_ {
-Some(r.tail().to_owned())
+Some(Vec::from_slice(r.tail()))
 } else {
 None
 }
@@ -676,7 +686,7 @@ fn specialize(cx: &MatchCheckCtxt,
 DefVariant(_, id, _) if variant(id) == *ctor_id => {
 let args = match args {
 Some(args) => args.iter().map(|x| *x).collect(),
-None => vec::from_elem(arity, wild())
+None => Vec::from_elem(arity, wild())
 };
 Some(vec_ng::append(args, r.tail()))
 }
@@ -689,7 +699,7 @@ fn specialize(cx: &MatchCheckCtxt,
 Some(args) => {
 new_args = args.iter().map(|x| *x).collect()
 }
-None => new_args = vec::from_elem(arity, wild())
+None => new_args = Vec::from_elem(arity, wild())
 }
Some(vec_ng::append(new_args, r.tail()))
|
Some(vec_ng::append(new_args, r.tail()))
|
||||||
}
|
}
|
||||||
|
@ -781,13 +791,17 @@ fn specialize(cx: &MatchCheckCtxt,
|
||||||
single => true,
|
single => true,
|
||||||
_ => fail!("type error")
|
_ => fail!("type error")
|
||||||
};
|
};
|
||||||
if match_ { Some(r.tail().to_owned()) } else { None }
|
if match_ {
|
||||||
|
Some(Vec::from_slice(r.tail()))
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
}
|
}
|
||||||
PatRange(lo, hi) => {
|
PatRange(lo, hi) => {
|
||||||
let (c_lo, c_hi) = match *ctor_id {
|
let (c_lo, c_hi) = match *ctor_id {
|
||||||
val(ref v) => ((*v).clone(), (*v).clone()),
|
val(ref v) => ((*v).clone(), (*v).clone()),
|
||||||
range(ref lo, ref hi) => ((*lo).clone(), (*hi).clone()),
|
range(ref lo, ref hi) => ((*lo).clone(), (*hi).clone()),
|
||||||
single => return Some(r.tail().to_owned()),
|
single => return Some(Vec::from_slice(r.tail())),
|
||||||
_ => fail!("type error")
|
_ => fail!("type error")
|
||||||
};
|
};
|
||||||
let v_lo = eval_const_expr(cx.tcx, lo);
|
let v_lo = eval_const_expr(cx.tcx, lo);
|
||||||
|
@ -797,7 +811,7 @@ fn specialize(cx: &MatchCheckCtxt,
|
||||||
let m2 = compare_const_vals(&c_hi, &v_hi);
|
let m2 = compare_const_vals(&c_hi, &v_hi);
|
||||||
match (m1, m2) {
|
match (m1, m2) {
|
||||||
(Some(val1), Some(val2)) if val1 >= 0 && val2 <= 0 => {
|
(Some(val1), Some(val2)) if val1 >= 0 && val2 <= 0 => {
|
||||||
Some(r.tail().to_owned())
|
Some(Vec::from_slice(r.tail()))
|
||||||
},
|
},
|
||||||
(Some(_), Some(_)) => None,
|
(Some(_), Some(_)) => None,
|
||||||
_ => {
|
_ => {
|
||||||
|
@ -850,8 +864,11 @@ fn specialize(cx: &MatchCheckCtxt,
|
||||||
}
|
}
|
||||||
|
|
||||||
fn default(cx: &MatchCheckCtxt, r: &[@Pat]) -> Option<Vec<@Pat> > {
|
fn default(cx: &MatchCheckCtxt, r: &[@Pat]) -> Option<Vec<@Pat> > {
|
||||||
if is_wild(cx, r[0]) { Some(r.tail().to_owned()) }
|
if is_wild(cx, r[0]) {
|
||||||
else { None }
|
Some(Vec::from_slice(r.tail()))
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn check_local(v: &mut CheckMatchVisitor,
|
fn check_local(v: &mut CheckMatchVisitor,
|
||||||
|
|
|
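
The check_match hunks above repeatedly trade `r.tail().to_owned()` for `Vec::from_slice(r.tail())` and `r[0]` for `*r.get(0)`, i.e. matrix rows become `Vec<@Pat>` instead of `~[@Pat]`. A rough modern-Rust sketch of the same "copy the tail of a slice into an owned vector" step (the 2014 `Vec::from_slice`/`tail` calls are gone today; `to_vec` and range indexing stand in for them, and `Pat` below is a made-up placeholder type):

    // Illustrative only: `Pat` is a stand-in for the compiler's pattern type.
    #[derive(Clone, Debug, PartialEq)]
    enum Pat {
        Wild,
        Lit(i32),
    }

    // Drop the first column of a row and return an owned copy of the rest,
    // mirroring `Some(Vec::from_slice(r.tail()))` in the hunks above.
    fn default_row(r: &[Pat]) -> Option<Vec<Pat>> {
        // `r[0]` here plays the role of `*r.get(0)` in the 2014 Vec API.
        if r[0] == Pat::Wild {
            Some(r[1..].to_vec())
        } else {
            None
        }
    }

    fn main() {
        let row = vec![Pat::Wild, Pat::Lit(1), Pat::Lit(2)];
        assert_eq!(default_row(&row), Some(vec![Pat::Lit(1), Pat::Lit(2)]));
        assert_eq!(default_row(&[Pat::Lit(0), Pat::Wild]), None);
    }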

@@ -27,6 +27,7 @@ use syntax::{ast, ast_map, ast_util};
 use std::cell::RefCell;
 use collections::HashMap;
 use std::rc::Rc;
+use std::vec_ng::Vec;

 //
 // This pass classifies expressions by their constant-ness.

@@ -20,6 +20,7 @@
 use std::io;
 use std::uint;
 use std::vec;
+use std::vec_ng::Vec;
 use syntax::ast;
 use syntax::ast_util;
 use syntax::ast_util::IdRange;
@@ -79,7 +80,8 @@ struct PropagationContext<'a, O> {

 struct LoopScope<'a> {
 loop_id: ast::NodeId,
-break_bits: Vec<uint> }
+break_bits: Vec<uint>
+}

 impl<O:DataFlowOperator> pprust::PpAnn for DataFlowContext<O> {
 fn pre(&self, node: pprust::AnnNode) -> io::IoResult<()> {
@@ -467,12 +469,12 @@ impl<'a, O:DataFlowOperator> PropagationContext<'a, O> {
 let mut body_bits = in_out.to_owned();
 loop_scopes.push(LoopScope {
 loop_id: expr.id,
-break_bits: in_out.to_owned()
+break_bits: Vec::from_slice(in_out)
 });
 self.walk_block(blk, body_bits, loop_scopes);
 self.add_to_entry_set(expr.id, body_bits);
 let new_loop_scope = loop_scopes.pop().unwrap();
-copy_bits(new_loop_scope.break_bits, in_out);
+copy_bits(new_loop_scope.break_bits.as_slice(), in_out);
 }

 ast::ExprForLoop(..) => fail!("non-desugared expr_for_loop"),
@@ -491,14 +493,14 @@ impl<'a, O:DataFlowOperator> PropagationContext<'a, O> {
 self.reset(in_out);
 loop_scopes.push(LoopScope {
 loop_id: expr.id,
-break_bits: in_out.to_owned()
+break_bits: Vec::from_slice(in_out)
 });
 self.walk_block(blk, body_bits, loop_scopes);
 self.add_to_entry_set(expr.id, body_bits);

 let new_loop_scope = loop_scopes.pop().unwrap();
 assert_eq!(new_loop_scope.loop_id, expr.id);
-copy_bits(new_loop_scope.break_bits, in_out);
+copy_bits(new_loop_scope.break_bits.as_slice(), in_out);
 }

 ast::ExprMatch(discr, ref arms) => {
@@ -689,7 +691,9 @@ impl<'a, O:DataFlowOperator> PropagationContext<'a, O> {
 in_out: &mut [uint]) {
 self.pop_scopes(from_expr, to_scope, in_out);
 self.dfcx.apply_kill(from_expr.id, in_out);
-join_bits(&self.dfcx.oper, in_out, to_scope.break_bits);
+join_bits(&self.dfcx.oper,
+in_out,
+to_scope.break_bits.as_mut_slice());
 debug!("break_from_to(from_expr={}, to_scope={}) final break_bits={}",
 from_expr.repr(self.tcx()),
 to_scope.loop_id,
@@ -767,10 +771,12 @@ impl<'a, O:DataFlowOperator> PropagationContext<'a, O> {
 }
 }

-fn find_scope<'a>(&self,
+fn find_scope<'a,'b>(
+&self,
 expr: &ast::Expr,
 label: Option<ast::Ident>,
-loop_scopes: &'a mut Vec<LoopScope> ) -> &'a mut LoopScope {
+loop_scopes: &'a mut Vec<LoopScope<'b>>)
+-> &'a mut LoopScope<'b> {
 let index = match label {
 None => {
 let len = loop_scopes.len();
@@ -800,7 +806,7 @@ impl<'a, O:DataFlowOperator> PropagationContext<'a, O> {
 }
 };

-&mut loop_scopes[index]
+loop_scopes.get_mut(index)
 }

 fn is_method_call(&self, expr: &ast::Expr) -> bool {
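
The dataflow hunks store an owned copy of the borrowed bit slice in each `LoopScope` (`Vec::from_slice(in_out)`) and convert back with `.as_slice()`/`.as_mut_slice()` when the bits are merged out again. A hedged sketch of that owned-in, slice-out shape in today's Rust (the type and function names below are stand-ins, not the compiler's):

    // Hypothetical stand-ins for the dataflow types in the hunks above.
    struct LoopScope {
        loop_id: u32,
        break_bits: Vec<usize>, // owned copy of the entry bits
    }

    fn copy_bits(src: &[usize], dst: &mut [usize]) {
        dst.copy_from_slice(src);
    }

    fn walk_loop(in_out: &mut [usize], scopes: &mut Vec<LoopScope>) {
        // `Vec::from_slice(in_out)` in the diff; `to_vec` today.
        scopes.push(LoopScope { loop_id: 1, break_bits: in_out.to_vec() });

        // ... the loop body would be walked here ...

        let scope = scopes.pop().unwrap();
        assert_eq!(scope.loop_id, 1);
        // `break_bits.as_slice()` in the diff; `&Vec<usize>` coerces to
        // `&[usize]` on its own in modern Rust.
        copy_bits(&scope.break_bits, in_out);
    }

    fn main() {
        let mut bits = vec![0b1010usize, 0b0001];
        let mut scopes = Vec::new();
        walk_loop(&mut bits, &mut scopes);
        assert_eq!(bits, vec![0b1010, 0b0001]);
        assert!(scopes.is_empty());
    }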

@@ -19,6 +19,7 @@ use middle::typeck;
 use util::nodemap::NodeSet;

 use collections::HashSet;
+use std::vec_ng::Vec;
 use syntax::ast;
 use syntax::ast_map;
 use syntax::ast_util::{local_def, def_id_of_def, is_local};

@@ -11,6 +11,7 @@

 use driver::session;
 use driver::session::Session;
+use std::vec_ng::Vec;
 use syntax::ast::{Crate, Name, NodeId, Item, ItemFn};
 use syntax::ast_map;
 use syntax::attr;

@@ -17,6 +17,7 @@ use middle::resolve;
 use middle::ty;
 use util::nodemap::{NodeMap, NodeSet};

+use std::vec_ng::Vec;
 use syntax::codemap::Span;
 use syntax::{ast, ast_util};
 use syntax::visit;

@@ -35,7 +35,7 @@ be indexed by the direction (see the type `Direction`).
 */

 use std::uint;
-use std::vec;
+use std::vec_ng::Vec;

 pub struct Graph<N,E> {
 priv nodes: Vec<Node<N>> ,
@@ -77,13 +77,18 @@ impl EdgeIndex {

 impl<N,E> Graph<N,E> {
 pub fn new() -> Graph<N,E> {
-Graph {nodes: Vec::new(), edges: Vec::new()}
+Graph {
+nodes: Vec::new(),
+edges: Vec::new(),
+}
 }

 pub fn with_capacity(num_nodes: uint,
 num_edges: uint) -> Graph<N,E> {
-Graph {nodes: vec::with_capacity(num_nodes),
-edges: vec::with_capacity(num_edges)}
+Graph {
+nodes: Vec::with_capacity(num_nodes),
+edges: Vec::with_capacity(num_edges),
+}
 }

 ///////////////////////////////////////////////////////////////////////////
@@ -91,13 +96,13 @@ impl<N,E> Graph<N,E> {

 #[inline]
 pub fn all_nodes<'a>(&'a self) -> &'a [Node<N>] {
-let nodes: &'a [Node<N>] = self.nodes;
+let nodes: &'a [Node<N>] = self.nodes.as_slice();
 nodes
 }

 #[inline]
 pub fn all_edges<'a>(&'a self) -> &'a [Edge<E>] {
-let edges: &'a [Edge<E>] = self.edges;
+let edges: &'a [Edge<E>] = self.edges.as_slice();
 edges
 }

@@ -118,15 +123,15 @@ impl<N,E> Graph<N,E> {
 }

 pub fn mut_node_data<'a>(&'a mut self, idx: NodeIndex) -> &'a mut N {
-&mut self.nodes[idx.get()].data
+&mut self.nodes.get_mut(idx.get()).data
 }

 pub fn node_data<'a>(&'a self, idx: NodeIndex) -> &'a N {
-&self.nodes[idx.get()].data
+&self.nodes.get(idx.get()).data
 }

 pub fn node<'a>(&'a self, idx: NodeIndex) -> &'a Node<N> {
-&self.nodes[idx.get()]
+self.nodes.get(idx.get())
 }

 ///////////////////////////////////////////////////////////////////////////
@@ -143,8 +148,10 @@ impl<N,E> Graph<N,E> {
 let idx = self.next_edge_index();

 // read current first of the list of edges from each node
-let source_first = self.nodes[source.get()].first_edge[Outgoing.repr];
-let target_first = self.nodes[target.get()].first_edge[Incoming.repr];
+let source_first = self.nodes.get(source.get())
+.first_edge[Outgoing.repr];
+let target_first = self.nodes.get(target.get())
+.first_edge[Incoming.repr];

 // create the new edge, with the previous firsts from each node
 // as the next pointers
@@ -156,22 +163,22 @@ impl<N,E> Graph<N,E> {
 });

 // adjust the firsts for each node target be the next object.
-self.nodes[source.get()].first_edge[Outgoing.repr] = idx;
-self.nodes[target.get()].first_edge[Incoming.repr] = idx;
+self.nodes.get_mut(source.get()).first_edge[Outgoing.repr] = idx;
+self.nodes.get_mut(target.get()).first_edge[Incoming.repr] = idx;

 return idx;
 }

 pub fn mut_edge_data<'a>(&'a mut self, idx: EdgeIndex) -> &'a mut E {
-&mut self.edges[idx.get()].data
+&mut self.edges.get_mut(idx.get()).data
 }

 pub fn edge_data<'a>(&'a self, idx: EdgeIndex) -> &'a E {
-&self.edges[idx.get()].data
+&self.edges.get(idx.get()).data
 }

 pub fn edge<'a>(&'a self, idx: EdgeIndex) -> &'a Edge<E> {
-&self.edges[idx.get()]
+self.edges.get(idx.get())
 }

 pub fn first_adjacent(&self, node: NodeIndex, dir: Direction) -> EdgeIndex {
@@ -179,7 +186,7 @@ impl<N,E> Graph<N,E> {
 //! This is useful if you wish to modify the graph while walking
 //! the linked list of edges.

-self.nodes[node.get()].first_edge[dir.repr]
+self.nodes.get(node.get()).first_edge[dir.repr]
 }

 pub fn next_adjacent(&self, edge: EdgeIndex, dir: Direction) -> EdgeIndex {
@@ -187,7 +194,7 @@ impl<N,E> Graph<N,E> {
 //! This is useful if you wish to modify the graph while walking
 //! the linked list of edges.

-self.edges[edge.get()].next_edge[dir.repr]
+self.edges.get(edge.get()).next_edge[dir.repr]
 }

 ///////////////////////////////////////////////////////////////////////////
@@ -231,7 +238,7 @@ impl<N,E> Graph<N,E> {

 let mut edge_idx = self.first_adjacent(node, dir);
 while edge_idx != InvalidEdgeIndex {
-let edge = &self.edges[edge_idx.get()];
+let edge = self.edges.get(edge_idx.get());
 if !f(edge_idx, edge) {
 return false;
 }
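
The graph hunks swap direct indexing (`self.nodes[idx.get()]`) for the `Vec` accessors of the time, `.get(idx)`/`.get_mut(idx)`, which then returned plain references. In current Rust the roles are reversed: indexing is back and `get` returns an `Option`. A small sketch of both spellings on a cut-down graph type (illustrative only, not the compiler's types):

    // Minimal sketch of the accessor pattern in the Graph hunks, written
    // against today's Vec API (indexing panics out of bounds, `get` returns
    // an Option).
    struct Node<N> {
        data: N,
    }

    struct Graph<N> {
        nodes: Vec<Node<N>>,
    }

    impl<N> Graph<N> {
        fn with_capacity(num_nodes: usize) -> Graph<N> {
            Graph { nodes: Vec::with_capacity(num_nodes) }
        }

        fn add_node(&mut self, data: N) -> usize {
            self.nodes.push(Node { data });
            self.nodes.len() - 1
        }

        // `&self.nodes[idx].data` in the old code; `self.nodes.get(idx)` in
        // the 2014 API returned a plain reference rather than an Option.
        fn node_data(&self, idx: usize) -> &N {
            &self.nodes[idx].data
        }

        fn mut_node_data(&mut self, idx: usize) -> &mut N {
            &mut self.nodes[idx].data
        }

        fn try_node_data(&self, idx: usize) -> Option<&N> {
            self.nodes.get(idx).map(|n| &n.data)
        }
    }

    fn main() {
        let mut g: Graph<&'static str> = Graph::with_capacity(4);
        let a = g.add_node("a");
        *g.mut_node_data(a) = "b";
        assert_eq!(*g.node_data(a), "b");
        assert!(g.try_node_data(7).is_none());
    }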

@@ -16,6 +16,7 @@ use middle::typeck;
 use util::ppaux::{Repr, ty_to_str};
 use util::ppaux::UserString;

+use std::vec_ng::Vec;
 use syntax::ast::*;
 use syntax::attr;
 use syntax::codemap::Span;

@@ -33,6 +33,7 @@ use syntax::visit;
 use collections::HashMap;
 use std::iter::Enumerate;
 use std::vec;
+use std::vec_ng::Vec;

 // The actual lang items defined come at the end of this file in one handy table.
 // So you probably just want to nip down to the end.
@@ -72,12 +73,14 @@ impl LanguageItems {
 }

 pub fn require(&self, it: LangItem) -> Result<ast::DefId, ~str> {
-match self.items[it as uint] {
-Some(id) => Ok(id),
-None => Err(format!("requires `{}` lang_item",
+match self.items.get(it as uint) {
+&Some(id) => Ok(id),
+&None => {
+Err(format!("requires `{}` lang_item",
 LanguageItems::item_name(it as uint)))
 }
 }
+}

 pub fn to_builtin_kind(&self, id: ast::DefId) -> Option<BuiltinBound> {
 if Some(id) == self.freeze_trait() {
@@ -95,7 +98,7 @@ impl LanguageItems {

 $(
 pub fn $method(&self) -> Option<ast::DefId> {
-self.items[$variant as uint]
+*self.items.get($variant as uint)
 }
 )*
 }
@@ -147,18 +150,18 @@ impl LanguageItemCollector {

 pub fn collect_item(&mut self, item_index: uint, item_def_id: ast::DefId) {
 // Check for duplicates.
-match self.items.items[item_index] {
-Some(original_def_id) if original_def_id != item_def_id => {
+match self.items.items.get(item_index) {
+&Some(original_def_id) if original_def_id != item_def_id => {
 self.session.err(format!("duplicate entry for `{}`",
 LanguageItems::item_name(item_index)));
 }
-Some(_) | None => {
+&Some(_) | &None => {
 // OK.
 }
 }

 // Matched.
-self.items.items[item_index] = Some(item_def_id);
+*self.items.items.get_mut(item_index) = Some(item_def_id);
 }

 pub fn collect_local_language_items(&mut self, krate: &ast::Crate) {
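
Because `self.items.get(i)` yields a reference to the stored `Option<DefId>` rather than a copy, the lang-item hunks add `&` to every pattern. A minimal sketch of matching through such a reference with today's `slice::get`, which wraps the result in one more `Option` (the `DefId` alias and error text are invented for the example):

    // Matching through a reference, as the require() hunk does once
    // `items.get(i)` yields `&Option<DefId>` instead of a copy.
    type DefId = u32;

    fn require(items: &[Option<DefId>], it: usize) -> Result<DefId, String> {
        match items.get(it) {
            Some(&Some(id)) => Ok(id),
            Some(&None) | None => Err(format!("requires lang item #{}", it)),
        }
    }

    fn main() {
        let items = vec![Some(10), None];
        assert_eq!(require(&items, 0), Ok(10));
        assert!(require(&items, 1).is_err());
        assert!(require(&items, 5).is_err());
    }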

@@ -45,7 +45,6 @@ use middle::ty;
 use middle::typeck::astconv::{ast_ty_to_ty, AstConv};
 use middle::typeck::infer;
 use middle::typeck;
-use std::to_str::ToStr;
 use util::ppaux::{ty_to_str};

 use std::cmp;
@@ -54,10 +53,12 @@ use std::i16;
 use std::i32;
 use std::i64;
 use std::i8;
+use std::to_str::ToStr;
 use std::u16;
 use std::u32;
 use std::u64;
 use std::u8;
+use std::vec_ng::Vec;
 use collections::SmallIntMap;
 use syntax::ast_map;
 use syntax::ast_util::IdVisitingOperation;
@@ -1091,7 +1092,7 @@ fn check_unused_result(cx: &Context, s: &ast::Stmt) {
 }
 } else {
 csearch::get_item_attrs(cx.tcx.sess.cstore, did, |attrs| {
-if attr::contains_name(attrs, "must_use") {
+if attr::contains_name(attrs.as_slice(), "must_use") {
 cx.span_lint(UnusedMustUse, s.span,
 "unused result which must be used");
 warned = true;

@@ -116,7 +116,7 @@ use std::fmt;
 use std::io;
 use std::str;
 use std::uint;
-use std::vec;
+use std::vec_ng::Vec;
 use syntax::ast::*;
 use syntax::codemap::Span;
 use syntax::parse::token::special_idents;
@@ -260,7 +260,7 @@ pub struct IrMaps {
 num_vars: Cell<uint>,
 live_node_map: RefCell<NodeMap<LiveNode>>,
 variable_map: RefCell<NodeMap<Variable>>,
-capture_info_map: RefCell<NodeMap<@~[CaptureInfo]>>,
+capture_info_map: RefCell<NodeMap<@Vec<CaptureInfo> >>,
 var_kinds: RefCell<Vec<VarKind> >,
 lnks: RefCell<Vec<LiveNodeKind> >,
 }
@@ -339,11 +339,11 @@ impl IrMaps {

 pub fn variable_name(&self, var: Variable) -> ~str {
 let var_kinds = self.var_kinds.borrow();
-match var_kinds.get()[var.get()] {
-Local(LocalInfo { ident: nm, .. }) | Arg(_, nm) => {
+match var_kinds.get().get(var.get()) {
+&Local(LocalInfo { ident: nm, .. }) | &Arg(_, nm) => {
 token::get_ident(nm).get().to_str()
 },
-ImplicitRet => ~"<implicit-ret>"
+&ImplicitRet => ~"<implicit-ret>"
 }
 }

@@ -364,7 +364,7 @@ impl IrMaps {

 pub fn lnk(&self, ln: LiveNode) -> LiveNodeKind {
 let lnks = self.lnks.borrow();
-lnks.get()[ln.get()]
+*lnks.get().get(ln.get())
 }
 }

@@ -607,9 +607,9 @@ fn Liveness(ir: @IrMaps, specials: Specials) -> Liveness {
 ir: ir,
 tcx: ir.tcx,
 s: specials,
-successors: @RefCell::new(vec::from_elem(ir.num_live_nodes.get(),
+successors: @RefCell::new(Vec::from_elem(ir.num_live_nodes.get(),
 invalid_node())),
-users: @RefCell::new(vec::from_elem(ir.num_live_nodes.get() *
+users: @RefCell::new(Vec::from_elem(ir.num_live_nodes.get() *
 ir.num_vars.get(),
 invalid_users())),
 loop_scope: @RefCell::new(Vec::new()),
@@ -686,7 +686,7 @@ impl Liveness {
 -> Option<LiveNodeKind> {
 assert!(ln.is_valid());
 let users = self.users.borrow();
-let reader = users.get()[self.idx(ln, var)].reader;
+let reader = users.get().get(self.idx(ln, var)).reader;
 if reader.is_valid() {Some(self.ir.lnk(reader))} else {None}
 }

@@ -697,7 +697,7 @@ impl Liveness {
 -> Option<LiveNodeKind> {
 let successor = {
 let successors = self.successors.borrow();
-successors.get()[ln.get()]
+*successors.get().get(ln.get())
 };
 self.live_on_entry(successor, var)
 }
@@ -705,14 +705,14 @@ impl Liveness {
 pub fn used_on_entry(&self, ln: LiveNode, var: Variable) -> bool {
 assert!(ln.is_valid());
 let users = self.users.borrow();
-users.get()[self.idx(ln, var)].used
+users.get().get(self.idx(ln, var)).used
 }

 pub fn assigned_on_entry(&self, ln: LiveNode, var: Variable)
 -> Option<LiveNodeKind> {
 assert!(ln.is_valid());
 let users = self.users.borrow();
-let writer = users.get()[self.idx(ln, var)].writer;
+let writer = users.get().get(self.idx(ln, var)).writer;
 if writer.is_valid() {Some(self.ir.lnk(writer))} else {None}
 }

@@ -720,7 +720,7 @@ impl Liveness {
 -> Option<LiveNodeKind> {
 let successor = {
 let successors = self.successors.borrow();
-successors.get()[ln.get()]
+*successors.get().get(ln.get())
 };
 self.assigned_on_entry(successor, var)
 }
@@ -795,14 +795,14 @@ impl Liveness {
 write!(wr,
 "[ln({}) of kind {:?} reads",
 ln.get(),
-lnks.and_then(|lnks| Some(lnks.get()[ln.get()])));
+lnks.and_then(|lnks| Some(*lnks.get().get(ln.get()))));
 }
 let users = self.users.try_borrow();
 match users {
 Some(users) => {
-self.write_vars(wr, ln, |idx| users.get()[idx].reader);
+self.write_vars(wr, ln, |idx| users.get().get(idx).reader);
 write!(wr, " writes");
-self.write_vars(wr, ln, |idx| users.get()[idx].writer);
+self.write_vars(wr, ln, |idx| users.get().get(idx).writer);
 }
 None => {
 write!(wr, " (users borrowed)");
@@ -811,7 +811,9 @@ impl Liveness {
 let successors = self.successors.try_borrow();
 match successors {
 Some(successors) => {
-write!(wr, " precedes {}]", successors.get()[ln.get()].to_str());
+write!(wr,
+" precedes {}]",
+successors.get().get(ln.get()).to_str());
 }
 None => {
 write!(wr, " precedes (successors borrowed)]");
@@ -824,7 +826,7 @@ impl Liveness {
 pub fn init_empty(&self, ln: LiveNode, succ_ln: LiveNode) {
 {
 let mut successors = self.successors.borrow_mut();
-successors.get()[ln.get()] = succ_ln;
+*successors.get().get_mut(ln.get()) = succ_ln;
 }

 // It is not necessary to initialize the
@@ -841,12 +843,12 @@ impl Liveness {
 // more efficient version of init_empty() / merge_from_succ()
 {
 let mut successors = self.successors.borrow_mut();
-successors.get()[ln.get()] = succ_ln;
+*successors.get().get_mut(ln.get()) = succ_ln;
 }

 self.indices2(ln, succ_ln, |idx, succ_idx| {
 let mut users = self.users.borrow_mut();
-users.get()[idx] = users.get()[succ_idx]
+*users.get().get_mut(idx) = *users.get().get(succ_idx)
 });
 debug!("init_from_succ(ln={}, succ={})",
 self.ln_str(ln), self.ln_str(succ_ln));
@@ -862,12 +864,12 @@ impl Liveness {
 let mut changed = false;
 self.indices2(ln, succ_ln, |idx, succ_idx| {
 let mut users = self.users.borrow_mut();
-changed |= copy_if_invalid(users.get()[succ_idx].reader,
-&mut users.get()[idx].reader);
-changed |= copy_if_invalid(users.get()[succ_idx].writer,
-&mut users.get()[idx].writer);
-if users.get()[succ_idx].used && !users.get()[idx].used {
-users.get()[idx].used = true;
+changed |= copy_if_invalid(users.get().get(succ_idx).reader,
+&mut users.get().get_mut(idx).reader);
+changed |= copy_if_invalid(users.get().get(succ_idx).writer,
+&mut users.get().get_mut(idx).writer);
+if users.get().get(succ_idx).used && !users.get().get(idx).used {
+users.get().get_mut(idx).used = true;
 changed = true;
 }
 });
@@ -893,8 +895,8 @@ impl Liveness {
 pub fn define(&self, writer: LiveNode, var: Variable) {
 let idx = self.idx(writer, var);
 let mut users = self.users.borrow_mut();
-users.get()[idx].reader = invalid_node();
-users.get()[idx].writer = invalid_node();
+users.get().get_mut(idx).reader = invalid_node();
+users.get().get_mut(idx).writer = invalid_node();

 debug!("{} defines {} (idx={}): {}", writer.to_str(), var.to_str(),
 idx, self.ln_str(writer));
@@ -904,7 +906,7 @@ impl Liveness {
 pub fn acc(&self, ln: LiveNode, var: Variable, acc: uint) {
 let idx = self.idx(ln, var);
 let mut users = self.users.borrow_mut();
-let user = &mut users.get()[idx];
+let user = users.get().get_mut(idx);

 if (acc & ACC_WRITE) != 0 {
 user.reader = invalid_node();
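
The liveness hunks keep the users table in a `RefCell<Vec<_>>` and route every update through `borrow_mut()` plus `get`/`get_mut`. A rough modern analogue, assuming plain indexing where the 2014 API needed `get_mut(idx)` (field and type names below are placeholders, not the compiler's):

    use std::cell::RefCell;

    #[derive(Clone, Copy)]
    struct User {
        reader: u32,
        writer: u32,
        used: bool,
    }

    struct Liveness {
        users: RefCell<Vec<User>>,
    }

    impl Liveness {
        // `users.get().get_mut(idx).reader = invalid_node()` in the diff.
        fn define(&self, idx: usize) {
            let mut users = self.users.borrow_mut();
            users[idx].reader = u32::MAX;
            users[idx].writer = u32::MAX;
        }

        // `users.get().get(idx).used` in the diff.
        fn used_on_entry(&self, idx: usize) -> bool {
            self.users.borrow()[idx].used
        }
    }

    fn main() {
        let lv = Liveness {
            users: RefCell::new(vec![User { reader: 1, writer: 2, used: true }]),
        };
        lv.define(0);
        assert!(lv.used_on_entry(0));
        assert_eq!(lv.users.borrow()[0].reader, u32::MAX);
        assert_eq!(lv.users.borrow()[0].writer, u32::MAX);
    }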

@@ -65,6 +65,7 @@
 use middle::ty;
 use util::ppaux::{ty_to_str, region_ptr_to_str, Repr};

+use std::vec_ng::Vec;
 use syntax::ast::{MutImmutable, MutMutable};
 use syntax::ast;
 use syntax::codemap::Span;

@@ -139,6 +139,7 @@ use util::nodemap::{NodeMap, NodeSet};

 use std::cell::RefCell;
 use std::rc::Rc;
+use std::vec_ng::Vec;
 use syntax::ast::*;
 use syntax::ast_util;
 use syntax::visit;

@@ -12,6 +12,7 @@
 use middle::resolve;

 use collections::HashMap;
+use std::vec_ng::Vec;
 use syntax::ast::*;
 use syntax::ast_util::{path_to_ident, walk_pat};
 use syntax::codemap::Span;

@@ -21,6 +21,7 @@ use middle::privacy;
 use util::nodemap::NodeSet;

 use std::cell::RefCell;
+use std::vec_ng::Vec;
 use collections::HashSet;
 use syntax::ast;
 use syntax::ast_map;

@@ -27,6 +27,7 @@ use middle::ty;
 use util::nodemap::NodeMap;

 use std::cell::RefCell;
+use std::vec_ng::Vec;
 use collections::{HashMap, HashSet};
 use syntax::codemap::Span;
 use syntax::{ast, visit};
@@ -287,7 +288,7 @@ impl RegionMaps {
 let mut i = 0;
 while i < queue.len() {
 let free_region_map = self.free_region_map.borrow();
-match free_region_map.get().find(&queue[i]) {
+match free_region_map.get().find(queue.get(i)) {
 Some(parents) => {
 for parent in parents.iter() {
 if *parent == sup {
@@ -369,7 +370,7 @@ impl RegionMaps {
 // where they diverge. If one vector is a suffix of the other,
 // then the corresponding scope is a superscope of the other.

-if a_ancestors[a_index] != b_ancestors[b_index] {
+if *a_ancestors.get(a_index) != *b_ancestors.get(b_index) {
 return None;
 }

@@ -380,8 +381,8 @@ impl RegionMaps {
 if b_index == 0u { return Some(scope_b); }
 a_index -= 1u;
 b_index -= 1u;
-if a_ancestors[a_index] != b_ancestors[b_index] {
-return Some(a_ancestors[a_index + 1u]);
+if *a_ancestors.get(a_index) != *b_ancestors.get(b_index) {
+return Some(*a_ancestors.get(a_index + 1u));
 }
 }


@@ -34,6 +34,7 @@ use syntax::visit::Visitor;
 use std::cell::{Cell, RefCell};
 use std::uint;
 use std::mem::replace;
+use std::vec_ng::Vec;
 use collections::{HashMap, HashSet};

 // Definition mapping
@@ -826,7 +827,7 @@ fn Resolver(session: Session,

 primitive_type_table: @PrimitiveTypeTable(),

-namespaces: Vec<TypeNS, ValueNS > ,
+namespaces: vec!(TypeNS, ValueNS),

 def_map: @RefCell::new(NodeMap::new()),
 export_map2: @RefCell::new(NodeMap::new()),
@@ -1972,7 +1973,7 @@ impl Resolver {
 SingleImport(target, _) => {
 debug!("(building import directive) building import \
 directive: {}::{}",
-self.idents_to_str(directive.module_path),
+self.idents_to_str(directive.module_path.as_slice()),
 token::get_ident(target));

 let mut import_resolutions = module_.import_resolutions
@@ -2085,13 +2086,14 @@ impl Resolver {
 let import_count = imports.get().len();
 while module.resolved_import_count.get() < import_count {
 let import_index = module.resolved_import_count.get();
-let import_directive = imports.get()[import_index];
+let import_directive = *imports.get().get(import_index);
 match self.resolve_import_for_module(module, import_directive) {
 Failed => {
 // We presumably emitted an error. Continue.
 let msg = format!("failed to resolve import `{}`",
 self.import_path_to_str(
-import_directive.module_path,
+import_directive.module_path
+.as_slice(),
 *import_directive.subclass));
 self.resolve_error(import_directive.span, msg);
 }
@@ -2128,7 +2130,7 @@ impl Resolver {
 .iter()
 .map(|seg| seg.identifier)
 .collect();
-self.idents_to_str(identifiers)
+self.idents_to_str(identifiers.as_slice())
 }

 fn import_directive_subclass_to_str(&mut self,
@@ -2169,7 +2171,7 @@ impl Resolver {

 debug!("(resolving import for module) resolving import `{}::...` in \
 `{}`",
-self.idents_to_str(*module_path),
+self.idents_to_str(module_path.as_slice()),
 self.module_to_str(module_));

 // First, resolve the module path for the directive, if necessary.
@@ -2178,7 +2180,7 @@ impl Resolver {
 Some((self.graph_root.get_module(), LastMod(AllPublic)))
 } else {
 match self.resolve_module_path(module_,
-*module_path,
+module_path.as_slice(),
 DontUseLexicalScope,
 import_directive.span,
 ImportSearch) {
@@ -3274,15 +3276,15 @@ impl Resolver {
 if index != import_count {
 let sn = self.session
 .codemap
-.span_to_snippet(imports.get()[index].span)
+.span_to_snippet(imports.get().get(index).span)
 .unwrap();
 if sn.contains("::") {
-self.resolve_error(imports.get()[index].span,
+self.resolve_error(imports.get().get(index).span,
 "unresolved import");
 } else {
 let err = format!("unresolved import (maybe you meant `{}::*`?)",
 sn.slice(0, sn.len()));
-self.resolve_error(imports.get()[index].span, err);
+self.resolve_error(imports.get().get(index).span, err);
 }
 }

@@ -3520,7 +3522,7 @@ impl Resolver {

 let mut rib_index = rib_index + 1;
 while rib_index < ribs.len() {
-match ribs[rib_index].kind {
+match ribs.get(rib_index).kind {
 NormalRibKind => {
 // Nothing to do. Continue.
 }
@@ -3621,7 +3623,7 @@ impl Resolver {
 while i != 0 {
 i -= 1;
 let binding_opt = {
-let bindings = ribs[i].bindings.borrow();
+let bindings = ribs.get(i).bindings.borrow();
 bindings.get().find_copy(&name)
 };
 match binding_opt {
@@ -4492,8 +4494,9 @@ impl Resolver {
 {
 let mut value_ribs =
 this.value_ribs.borrow_mut();
-let last_rib = value_ribs.get()[
-value_ribs.get().len() - 1];
+let length = value_ribs.get().len();
+let last_rib = value_ribs.get().get(
+length - 1);
 let mut bindings =
 last_rib.bindings.borrow_mut();
 bindings.get().insert(renamed,
@@ -4518,8 +4521,9 @@ impl Resolver {
 {
 let mut value_ribs =
 this.value_ribs.borrow_mut();
-let last_rib = value_ribs.get()[
-value_ribs.get().len() - 1];
+let length = value_ribs.get().len();
+let last_rib = value_ribs.get().get(
+length - 1);
 let mut bindings =
 last_rib.bindings.borrow_mut();
 bindings.get().insert(renamed,
@@ -5064,7 +5068,7 @@ impl Resolver {
 while j != 0 {
 j -= 1;
 let value_ribs = this.value_ribs.borrow();
-let bindings = value_ribs.get()[j].bindings.borrow();
+let bindings = value_ribs.get().get(j).bindings.borrow();
 for (&k, _) in bindings.get().iter() {
 maybes.push(token::get_name(k));
 values.push(uint::MAX);
@@ -5073,20 +5077,20 @@ impl Resolver {

 let mut smallest = 0;
 for (i, other) in maybes.iter().enumerate() {
-values[i] = name.lev_distance(other.get());
+*values.get_mut(i) = name.lev_distance(other.get());

-if values[i] <= values[smallest] {
+if *values.get(i) <= *values.get(smallest) {
 smallest = i;
 }
 }

 if values.len() > 0 &&
-values[smallest] != uint::MAX &&
-values[smallest] < name.len() + 2 &&
-values[smallest] <= max_distance &&
-name != maybes[smallest].get() {
+*values.get(smallest) != uint::MAX &&
+*values.get(smallest) < name.len() + 2 &&
+*values.get(smallest) <= max_distance &&
+name != maybes.get(smallest).get() {

-Some(maybes[smallest].get().to_str())
+Some(maybes.get(smallest).get().to_str())

 } else {
 None
@@ -5212,8 +5216,8 @@ impl Resolver {
 let def_like = DlDef(DefLabel(expr.id));
 {
 let mut label_ribs = this.label_ribs.borrow_mut();
-let rib = label_ribs.get()[label_ribs.get().len() -
-1];
+let length = label_ribs.get().len();
+let rib = label_ribs.get().get(length - 1);
 let mut bindings = rib.bindings.borrow_mut();
 let renamed = mtwt::resolve(label);
 bindings.get().insert(renamed, def_like);
@@ -5516,7 +5520,9 @@ impl Resolver {
 if idents.len() == 0 {
 return ~"???";
 }
-return self.idents_to_str(idents.move_rev_iter().collect::<Vec<ast::Ident> >());
+return self.idents_to_str(idents.move_rev_iter()
+.collect::<Vec<ast::Ident>>()
+.as_slice());
 }

 #[allow(dead_code)] // useful for debugging
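
A recurring shape in the resolver hunks: collect an iterator into an owned `Vec`, then hand the result to a helper that only needs a slice, via `.as_slice()`. A sketch under today's coercion rules, where `&Vec<T>` flattens to `&[T]` on its own (`idents_to_str`/`path_to_str` here are stand-ins, not the resolver's methods):

    fn idents_to_str(idents: &[String]) -> String {
        idents.join("::")
    }

    fn path_to_str(segments: &[&str]) -> String {
        // Build the owned Vec first, as the resolver does with `.collect()`.
        let identifiers: Vec<String> =
            segments.iter().map(|s| s.to_string()).collect();
        // `.as_slice()` in the diff; `&identifiers` coerces to `&[String]`
        // in modern Rust.
        idents_to_str(&identifiers)
    }

    fn main() {
        assert_eq!(path_to_str(&["std", "vec_ng", "Vec"]), "std::vec_ng::Vec");
    }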

@@ -16,6 +16,7 @@ use middle::ty_fold::TypeFolder;
 use util::ppaux::Repr;

 use std::rc::Rc;
+use std::vec_ng::Vec;
 use syntax::codemap::Span;
 use syntax::opt_vec::OptVec;

@@ -88,7 +89,7 @@ impl<'a> TypeFolder for SubstFolder<'a> {
 match ty::get(t).sty {
 ty::ty_param(p) => {
 if p.idx < self.substs.tps.len() {
-self.substs.tps[p.idx]
+*self.substs.tps.get(p.idx)
 } else {
 let root_msg = match self.root_ty {
 Some(root) => format!(" in the substitution of `{}`",
@ -223,9 +223,10 @@ use middle::ty;
|
||||||
use util::common::indenter;
|
use util::common::indenter;
|
||||||
use util::ppaux::{Repr, vec_map_to_str};
|
use util::ppaux::{Repr, vec_map_to_str};
|
||||||
|
|
||||||
use std::cell::Cell;
|
|
||||||
use collections::HashMap;
|
use collections::HashMap;
|
||||||
use std::vec;
|
use std::cell::Cell;
|
||||||
|
use std::vec_ng::Vec;
|
||||||
|
use std::vec_ng;
|
||||||
use syntax::ast;
|
use syntax::ast;
|
||||||
use syntax::ast::Ident;
|
use syntax::ast::Ident;
|
||||||
use syntax::ast_util::path_to_ident;
|
use syntax::ast_util::path_to_ident;
|
||||||
|
@ -438,7 +439,7 @@ impl<'a,'b> Repr for Match<'a,'b> {
|
||||||
|
|
||||||
fn has_nested_bindings(m: &[Match], col: uint) -> bool {
|
fn has_nested_bindings(m: &[Match], col: uint) -> bool {
|
||||||
for br in m.iter() {
|
for br in m.iter() {
|
||||||
match br.pats[col].node {
|
match br.pats.get(col).node {
|
||||||
ast::PatIdent(_, _, Some(_)) => return true,
|
ast::PatIdent(_, _, Some(_)) => return true,
|
||||||
_ => ()
|
_ => ()
|
||||||
}
|
}
|
||||||
|
@ -451,7 +452,7 @@ fn expand_nested_bindings<'r,'b>(
|
||||||
m: &[Match<'r,'b>],
|
m: &[Match<'r,'b>],
|
||||||
col: uint,
|
col: uint,
|
||||||
val: ValueRef)
|
val: ValueRef)
|
||||||
-> vec!(Match<'r,'b>) {
|
-> Vec<Match<'r,'b>> {
|
||||||
debug!("expand_nested_bindings(bcx={}, m={}, col={}, val={})",
|
debug!("expand_nested_bindings(bcx={}, m={}, col={}, val={})",
|
||||||
bcx.to_str(),
|
bcx.to_str(),
|
||||||
m.repr(bcx.tcx()),
|
m.repr(bcx.tcx()),
|
||||||
|
@ -459,14 +460,14 @@ fn expand_nested_bindings<'r,'b>(
|
||||||
bcx.val_to_str(val));
|
bcx.val_to_str(val));
|
||||||
let _indenter = indenter();
|
let _indenter = indenter();
|
||||||
|
|
||||||
m.map(|br| {
|
m.iter().map(|br| {
|
||||||
match br.pats[col].node {
|
match br.pats.get(col).node {
|
||||||
ast::PatIdent(_, ref path, Some(inner)) => {
|
ast::PatIdent(_, ref path, Some(inner)) => {
|
||||||
let pats = vec_ng::append(
|
let pats = vec_ng::append(
|
||||||
br.pats.slice(0u, col).to_owned(),
|
Vec::from_slice(br.pats.slice(0u, col)),
|
||||||
vec_ng::append(vec!(inner),
|
vec_ng::append(vec!(inner),
|
||||||
br.pats.slice(col + 1u,
|
br.pats.slice(col + 1u,
|
||||||
br.pats.len())));
|
br.pats.len())).as_slice());
|
||||||
|
|
||||||
let mut res = Match {
|
let mut res = Match {
|
||||||
pats: pats,
|
pats: pats,
|
||||||
|
@ -478,7 +479,7 @@ fn expand_nested_bindings<'r,'b>(
|
||||||
}
|
}
|
||||||
_ => (*br).clone(),
|
_ => (*br).clone(),
|
||||||
}
|
}
|
||||||
})
|
}).collect()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn assert_is_binding_or_wild(bcx: &Block, p: @ast::Pat) {
|
fn assert_is_binding_or_wild(bcx: &Block, p: @ast::Pat) {
|
||||||
|
@ -499,7 +500,7 @@ fn enter_match<'r,'b>(
|
||||||
col: uint,
|
col: uint,
|
||||||
val: ValueRef,
|
val: ValueRef,
|
||||||
e: enter_pat)
|
e: enter_pat)
|
||||||
-> vec!(Match<'r,'b>) {
|
-> Vec<Match<'r,'b>> {
|
||||||
debug!("enter_match(bcx={}, m={}, col={}, val={})",
|
debug!("enter_match(bcx={}, m={}, col={}, val={})",
|
||||||
bcx.to_str(),
|
bcx.to_str(),
|
||||||
m.repr(bcx.tcx()),
|
m.repr(bcx.tcx()),
|
||||||
|
@ -509,14 +510,14 @@ fn enter_match<'r,'b>(
|
||||||
|
|
||||||
let mut result = Vec::new();
|
let mut result = Vec::new();
|
||||||
for br in m.iter() {
|
for br in m.iter() {
|
||||||
match e(br.pats[col]) {
|
match e(*br.pats.get(col)) {
|
||||||
Some(sub) => {
|
Some(sub) => {
|
||||||
let pats =
|
let pats =
|
||||||
vec_ng::append(
|
vec_ng::append(
|
||||||
vec_ng::append(sub, br.pats.slice(0u, col)),
|
vec_ng::append(sub, br.pats.slice(0u, col)),
|
||||||
br.pats.slice(col + 1u, br.pats.len()));
|
br.pats.slice(col + 1u, br.pats.len()));
|
||||||
|
|
||||||
let this = br.pats[col];
|
let this = *br.pats.get(col);
|
||||||
let mut bound_ptrs = br.bound_ptrs.clone();
|
let mut bound_ptrs = br.bound_ptrs.clone();
|
||||||
match this.node {
|
match this.node {
|
||||||
ast::PatIdent(_, ref path, None) => {
|
ast::PatIdent(_, ref path, None) => {
|
||||||
|
@ -549,7 +550,7 @@ fn enter_default<'r,'b>(
|
||||||
col: uint,
|
col: uint,
|
||||||
val: ValueRef,
|
val: ValueRef,
|
||||||
chk: &FailureHandler)
|
chk: &FailureHandler)
|
||||||
-> vec!(Match<'r,'b>) {
|
-> Vec<Match<'r,'b>> {
|
||||||
debug!("enter_default(bcx={}, m={}, col={}, val={})",
|
debug!("enter_default(bcx={}, m={}, col={}, val={})",
|
||||||
bcx.to_str(),
|
bcx.to_str(),
|
||||||
m.repr(bcx.tcx()),
|
m.repr(bcx.tcx()),
|
||||||
|
@ -620,7 +621,7 @@ fn enter_opt<'r,'b>(
|
||||||
col: uint,
|
col: uint,
|
||||||
variant_size: uint,
|
variant_size: uint,
|
||||||
val: ValueRef)
|
val: ValueRef)
|
||||||
-> vec!(Match<'r,'b>) {
|
-> Vec<Match<'r,'b>> {
|
||||||
debug!("enter_opt(bcx={}, m={}, opt={:?}, col={}, val={})",
|
debug!("enter_opt(bcx={}, m={}, opt={:?}, col={}, val={})",
|
||||||
bcx.to_str(),
|
bcx.to_str(),
|
||||||
m.repr(bcx.tcx()),
|
m.repr(bcx.tcx()),
|
||||||
|
@ -651,7 +652,7 @@ fn enter_opt<'r,'b>(
|
||||||
if opt_eq(tcx, &variant_opt(bcx, p.id), opt) {
|
if opt_eq(tcx, &variant_opt(bcx, p.id), opt) {
|
||||||
// FIXME: Must we clone?
|
// FIXME: Must we clone?
|
||||||
match *subpats {
|
match *subpats {
|
||||||
None => Some(vec::from_elem(variant_size, dummy)),
|
None => Some(Vec::from_elem(variant_size, dummy)),
|
||||||
Some(ref subpats) => {
|
Some(ref subpats) => {
|
||||||
Some((*subpats).iter().map(|x| *x).collect())
|
Some((*subpats).iter().map(|x| *x).collect())
|
||||||
}
|
}
|
||||||
|
@ -761,7 +762,7 @@ fn enter_opt<'r,'b>(
|
||||||
// cause the default match to fire spuriously.
|
// cause the default match to fire spuriously.
|
||||||
match *opt {
|
match *opt {
|
||||||
vec_len(..) => None,
|
vec_len(..) => None,
|
||||||
_ => Some(vec::from_elem(variant_size, dummy))
|
_ => Some(Vec::from_elem(variant_size, dummy))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
@ -777,7 +778,7 @@ fn enter_rec_or_struct<'r,'b>(
|
||||||
col: uint,
|
col: uint,
|
||||||
fields: &[ast::Ident],
|
fields: &[ast::Ident],
|
||||||
val: ValueRef)
|
val: ValueRef)
|
||||||
-> vec!(Match<'r,'b>) {
|
-> Vec<Match<'r,'b>> {
|
||||||
debug!("enter_rec_or_struct(bcx={}, m={}, col={}, val={})",
|
debug!("enter_rec_or_struct(bcx={}, m={}, col={}, val={})",
|
||||||
bcx.to_str(),
|
bcx.to_str(),
|
||||||
m.repr(bcx.tcx()),
|
m.repr(bcx.tcx()),
|
||||||
|
@ -800,7 +801,7 @@ fn enter_rec_or_struct<'r,'b>(
|
||||||
}
|
}
|
||||||
_ => {
|
_ => {
|
||||||
assert_is_binding_or_wild(bcx, p);
|
assert_is_binding_or_wild(bcx, p);
|
||||||
Some(vec::from_elem(fields.len(), dummy))
|
Some(Vec::from_elem(fields.len(), dummy))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
|
@ -813,7 +814,7 @@ fn enter_tup<'r,'b>(
|
||||||
col: uint,
|
col: uint,
|
||||||
val: ValueRef,
|
val: ValueRef,
|
||||||
n_elts: uint)
|
n_elts: uint)
|
||||||
-> vec!(Match<'r,'b>) {
|
-> Vec<Match<'r,'b>> {
|
||||||
debug!("enter_tup(bcx={}, m={}, col={}, val={})",
|
debug!("enter_tup(bcx={}, m={}, col={}, val={})",
|
||||||
bcx.to_str(),
|
bcx.to_str(),
|
||||||
m.repr(bcx.tcx()),
|
m.repr(bcx.tcx()),
|
||||||
|
@ -833,7 +834,7 @@ fn enter_tup<'r,'b>(
|
||||||
}
|
}
|
||||||
_ => {
|
_ => {
|
||||||
assert_is_binding_or_wild(bcx, p);
|
assert_is_binding_or_wild(bcx, p);
|
||||||
Some(vec::from_elem(n_elts, dummy))
|
Some(Vec::from_elem(n_elts, dummy))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
|
@ -846,7 +847,7 @@ fn enter_tuple_struct<'r,'b>(
|
||||||
col: uint,
|
col: uint,
|
||||||
val: ValueRef,
|
val: ValueRef,
|
||||||
n_elts: uint)
|
n_elts: uint)
|
||||||
-> vec!(Match<'r,'b>) {
|
-> Vec<Match<'r,'b>> {
|
||||||
debug!("enter_tuple_struct(bcx={}, m={}, col={}, val={})",
|
debug!("enter_tuple_struct(bcx={}, m={}, col={}, val={})",
|
||||||
bcx.to_str(),
|
bcx.to_str(),
|
||||||
m.repr(bcx.tcx()),
|
m.repr(bcx.tcx()),
|
||||||
|
@ -862,7 +863,7 @@ fn enter_tuple_struct<'r,'b>(
|
||||||
}
|
}
|
||||||
_ => {
|
_ => {
|
||||||
assert_is_binding_or_wild(bcx, p);
|
assert_is_binding_or_wild(bcx, p);
|
||||||
Some(vec::from_elem(n_elts, dummy))
|
Some(Vec::from_elem(n_elts, dummy))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
|
@ -874,7 +875,7 @@ fn enter_uniq<'r,'b>(
|
||||||
m: &[Match<'r,'b>],
|
m: &[Match<'r,'b>],
|
||||||
col: uint,
|
col: uint,
|
||||||
val: ValueRef)
|
val: ValueRef)
|
||||||
-> vec!(Match<'r,'b>) {
|
-> Vec<Match<'r,'b>> {
|
||||||
debug!("enter_uniq(bcx={}, m={}, col={}, val={})",
|
debug!("enter_uniq(bcx={}, m={}, col={}, val={})",
|
||||||
bcx.to_str(),
|
bcx.to_str(),
|
||||||
m.repr(bcx.tcx()),
|
m.repr(bcx.tcx()),
|
||||||
|
@ -903,7 +904,7 @@ fn enter_region<'r,
|
||||||
m: &[Match<'r,'b>],
|
m: &[Match<'r,'b>],
|
||||||
col: uint,
|
col: uint,
|
||||||
val: ValueRef)
|
val: ValueRef)
|
||||||
-> vec!(Match<'r,'b>) {
|
-> Vec<Match<'r,'b>> {
|
||||||
debug!("enter_region(bcx={}, m={}, col={}, val={})",
|
debug!("enter_region(bcx={}, m={}, col={}, val={})",
|
||||||
bcx.to_str(),
|
bcx.to_str(),
|
||||||
m.repr(bcx.tcx()),
|
m.repr(bcx.tcx()),
|
||||||
|
@@ -945,15 +946,18 @@ fn get_options(bcx: &Block, m: &[Match], col: uint) -> Vec<Opt> {
// to add, then extend its range. Otherwise, make a new
// vec_len with a range just covering the new entry.
Some(&vec_len(len2, vlo2, (start, end)))
-if len == len2 && vlo == vlo2 =>
-set[set.len() - 1] = vec_len(len, vlo, (start, end+1)),
+if len == len2 && vlo == vlo2 => {
+let length = set.len();
+*set.get_mut(length - 1) =
+vec_len(len, vlo, (start, end+1))
+}
_ => set.push(vec_len(len, vlo, (i, i)))
}
}

let mut found = Vec::new();
for (i, br) in m.iter().enumerate() {
-let cur = br.pats[col];
+let cur = *br.pats.get(col);
match cur.node {
ast::PatLit(l) => {
add_to_set(ccx.tcx, &mut found, lit(ExprLit(l)));
@@ -1030,7 +1034,7 @@ fn extract_variant_args<'a>(
val: ValueRef)
-> ExtractedBlock<'a> {
let _icx = push_ctxt("match::extract_variant_args");
-let args = vec::from_fn(adt::num_args(repr, disr_val), |i| {
+let args = Vec::from_fn(adt::num_args(repr, disr_val), |i| {
adt::trans_field_ptr(bcx, repr, val, disr_val, i)
});

@@ -1065,7 +1069,7 @@ fn extract_vec_elems<'a>(
let (base, len) = vec_datum.get_vec_base_and_len(bcx);
let vt = tvec::vec_types(bcx, node_id_type(bcx, pat_id));

-let mut elems = vec::from_fn(elem_count, |i| {
+let mut elems = Vec::from_fn(elem_count, |i| {
match slice {
None => GEPi(bcx, base, [i]),
Some(n) if i < n => GEPi(bcx, base, [i]),
@@ -1091,7 +1095,7 @@ fn extract_vec_elems<'a>(
Store(bcx, slice_begin,
GEPi(bcx, scratch.val, [0u, abi::slice_elt_base]));
Store(bcx, slice_len, GEPi(bcx, scratch.val, [0u, abi::slice_elt_len]));
-elems[n] = scratch.val;
+*elems.get_mut(n) = scratch.val;
}

ExtractedBlock { vals: elems, bcx: bcx }
@@ -1111,9 +1115,9 @@ fn collect_record_or_struct_fields<'a>(
let mut fields: Vec<ast::Ident> = Vec::new();
let mut found = false;
for br in m.iter() {
-match br.pats[col].node {
+match br.pats.get(col).node {
ast::PatStruct(_, ref fs, _) => {
-match ty::get(node_id_type(bcx, br.pats[col].id)).sty {
+match ty::get(node_id_type(bcx, br.pats.get(col).id)).sty {
ty::ty_struct(..) => {
extend(&mut fields, fs.as_slice());
found = true;
@@ -1142,7 +1146,7 @@ fn collect_record_or_struct_fields<'a>(

fn pats_require_rooting(bcx: &Block, m: &[Match], col: uint) -> bool {
m.iter().any(|br| {
-let pat_id = br.pats[col].id;
+let pat_id = br.pats.get(col).id;
let key = root_map_key {id: pat_id, derefs: 0u };
let root_map = bcx.ccx().maps.root_map.borrow();
root_map.get().contains_key(&key)
@@ -1156,7 +1160,7 @@ fn pats_require_rooting(bcx: &Block, m: &[Match], col: uint) -> bool {
macro_rules! any_pat (
($m:expr, $pattern:pat) => (
($m).iter().any(|br| {
-match br.pats[col].node {
+match br.pats.get(col).node {
$pattern => true,
_ => false
}
@@ -1178,7 +1182,7 @@ fn any_tup_pat(m: &[Match], col: uint) -> bool {

fn any_tuple_struct_pat(bcx: &Block, m: &[Match], col: uint) -> bool {
m.iter().any(|br| {
-let pat = br.pats[col];
+let pat = *br.pats.get(col);
match pat.node {
ast::PatEnum(_, Some(_)) => {
let def_map = bcx.tcx().def_map.borrow();
@@ -1255,10 +1259,10 @@ fn pick_col(m: &[Match]) -> uint {
_ => 0u
}
}
-let mut scores = vec::from_elem(m[0].pats.len(), 0u);
+let mut scores = Vec::from_elem(m[0].pats.len(), 0u);
for br in m.iter() {
for (i, p) in br.pats.iter().enumerate() {
-scores[i] += score(*p);
+*scores.get_mut(i) += score(*p);
}
}
let mut max_score = 0u;
@@ -1511,7 +1515,12 @@ fn compile_submatch<'r,

if has_nested_bindings(m, col) {
let expanded = expand_nested_bindings(bcx, m, col, val);
-compile_submatch_continue(bcx, expanded, vals, chk, col, val)
+compile_submatch_continue(bcx,
+expanded.as_slice(),
+vals,
+chk,
+col,
+val)
} else {
compile_submatch_continue(bcx, m, vals, chk, col, val)
}
@@ -1529,7 +1538,7 @@ fn compile_submatch_continue<'r,
let tcx = bcx.tcx();
let dm = tcx.def_map;

-let vals_left = vec_ng::append(vals.slice(0u, col).to_owned(),
+let vals_left = vec_ng::append(Vec::from_slice(vals.slice(0u, col)),
vals.slice(col + 1u, vals.len()));
let ccx = bcx.fcx.ccx;
let mut pat_id = 0;
@@ -1537,7 +1546,7 @@ fn compile_submatch_continue<'r,
// Find a real id (we're adding placeholder wildcard patterns, but
// each column is guaranteed to have at least one real pattern)
if pat_id == 0 {
-pat_id = br.pats[col].id;
+pat_id = br.pats.get(col).id;
}
}

@@ -1556,8 +1565,14 @@ fn compile_submatch_continue<'r,
});
compile_submatch(
bcx,
-enter_rec_or_struct(bcx, dm, m, col, *rec_fields, val),
-vec_ng::append(rec_vals, vals_left),
+enter_rec_or_struct(bcx,
+dm,
+m,
+col,
+rec_fields.as_slice(),
+val).as_slice(),
+vec_ng::append(rec_vals,
+vals_left.as_slice()).as_slice(),
chk);
});
return;
@@ -1572,11 +1587,19 @@ fn compile_submatch_continue<'r,
ty::ty_tup(ref elts) => elts.len(),
_ => ccx.sess.bug("non-tuple type in tuple pattern")
};
-let tup_vals = vec::from_fn(n_tup_elts, |i| {
+let tup_vals = Vec::from_fn(n_tup_elts, |i| {
adt::trans_field_ptr(bcx, tup_repr, val, 0, i)
});
-compile_submatch(bcx, enter_tup(bcx, dm, m, col, val, n_tup_elts),
-vec_ng::append(tup_vals, vals_left), chk);
+compile_submatch(bcx,
+enter_tup(bcx,
+dm,
+m,
+col,
+val,
+n_tup_elts).as_slice(),
+vec_ng::append(tup_vals,
+vals_left.as_slice()).as_slice(),
+chk);
return;
}

@@ -1594,28 +1617,35 @@ fn compile_submatch_continue<'r,
}

let struct_repr = adt::represent_type(bcx.ccx(), struct_ty);
-let llstructvals = vec::from_fn(struct_element_count, |i| {
+let llstructvals = Vec::from_fn(struct_element_count, |i| {
adt::trans_field_ptr(bcx, struct_repr, val, 0, i)
});
compile_submatch(bcx,
enter_tuple_struct(bcx, dm, m, col, val,
-struct_element_count),
-vec_ng::append(llstructvals, vals_left),
+struct_element_count).as_slice(),
+vec_ng::append(llstructvals,
+vals_left.as_slice()).as_slice(),
chk);
return;
}

if any_uniq_pat(m, col) {
let llbox = Load(bcx, val);
-compile_submatch(bcx, enter_uniq(bcx, dm, m, col, val),
-vec_ng::append(vec!(llbox), vals_left), chk);
+compile_submatch(bcx,
+enter_uniq(bcx, dm, m, col, val).as_slice(),
+vec_ng::append(vec!(llbox),
+vals_left.as_slice()).as_slice(),
+chk);
return;
}

if any_region_pat(m, col) {
let loaded_val = Load(bcx, val);
-compile_submatch(bcx, enter_region(bcx, dm, m, col, val),
-vec_ng::append(vec!(loaded_val), vals_left), chk);
+compile_submatch(bcx,
+enter_region(bcx, dm, m, col, val).as_slice(),
+vec_ng::append(vec!(loaded_val),
+vals_left.as_slice()).as_slice(),
+chk);
return;
}

@@ -1626,7 +1656,7 @@ fn compile_submatch_continue<'r,
let mut test_val = val;
debug!("test_val={}", bcx.val_to_str(test_val));
if opts.len() > 0u {
-match opts[0] {
+match *opts.get(0) {
var(_, repr) => {
let (the_kind, val_opt) = adt::trans_switch(bcx, repr, val);
kind = the_kind;
@@ -1795,12 +1825,20 @@ fn compile_submatch_continue<'r,
lit(_) | range(_, _) => ()
}
let opt_ms = enter_opt(opt_cx, m, opt, col, size, val);
-let opt_vals = vec_ng::append(unpacked, vals_left);
+let opt_vals = vec_ng::append(unpacked, vals_left.as_slice());

match branch_chk {
-None => compile_submatch(opt_cx, opt_ms, opt_vals, chk),
+None => {
+compile_submatch(opt_cx,
+opt_ms.as_slice(),
+opt_vals.as_slice(),
+chk)
+}
Some(branch_chk) => {
-compile_submatch(opt_cx, opt_ms, opt_vals, &branch_chk)
+compile_submatch(opt_cx,
+opt_ms.as_slice(),
+opt_vals.as_slice(),
+&branch_chk)
}
}
}
@@ -1811,7 +1849,10 @@ fn compile_submatch_continue<'r,
Br(bcx, else_cx.llbb);
}
if kind != single {
-compile_submatch(else_cx, defaults, vals_left, chk);
+compile_submatch(else_cx,
+defaults.as_slice(),
+vals_left.as_slice(),
+chk);
}
}
}
@@ -1921,7 +1962,7 @@ fn trans_match_inner<'a>(scope_cx: &'a Block<'a>,
}
};
let lldiscr = discr_datum.val;
-compile_submatch(bcx, matches, [lldiscr], &chk);
+compile_submatch(bcx, matches.as_slice(), [lldiscr], &chk);

let mut arm_cxs = Vec::new();
for arm_data in arm_datas.iter() {
@@ -1944,7 +1985,7 @@ fn trans_match_inner<'a>(scope_cx: &'a Block<'a>,
arm_cxs.push(bcx);
}

-bcx = scope_cx.fcx.join_blocks(match_id, arm_cxs);
+bcx = scope_cx.fcx.join_blocks(match_id, arm_cxs.as_slice());
return bcx;
}

@@ -53,17 +53,18 @@ use middle::trans::_match;
use middle::trans::build::*;
use middle::trans::common::*;
use middle::trans::machine;
+use middle::trans::type_::Type;
use middle::trans::type_of;
use middle::ty;
use middle::ty::Disr;
+use std::vec_ng::Vec;
+use std::vec_ng;
use syntax::abi::{X86, X86_64, Arm, Mips};
use syntax::ast;
use syntax::attr;
use syntax::attr::IntType;
use util::ppaux::ty_to_str;

-use middle::trans::type_::Type;

type Hint = attr::ReprAttr;

@@ -136,7 +137,7 @@ pub fn represent_type(cx: &CrateContext, t: ty::t) -> @Repr {
fn represent_type_uncached(cx: &CrateContext, t: ty::t) -> Repr {
match ty::get(t).sty {
ty::ty_tup(ref elems) => {
-return Univariant(mk_struct(cx, *elems, false), false)
+return Univariant(mk_struct(cx, elems.as_slice(), false), false)
}
ty::ty_struct(def_id, ref substs) => {
let fields = ty::lookup_struct_fields(cx.tcx, def_id);
@@ -147,7 +148,7 @@ fn represent_type_uncached(cx: &CrateContext, t: ty::t) -> Repr {
let dtor = ty::ty_dtor(cx.tcx, def_id).has_drop_flag();
if dtor { ftys.push(ty::mk_bool()); }

-return Univariant(mk_struct(cx, ftys, packed), dtor)
+return Univariant(mk_struct(cx, ftys.as_slice(), packed), dtor)
}
ty::ty_enum(def_id, ref substs) => {
let cases = get_cases(cx.tcx, def_id, substs);
@@ -185,23 +186,29 @@ fn represent_type_uncached(cx: &CrateContext, t: ty::t) -> Repr {
// Equivalent to a struct/tuple/newtype.
// (Typechecking will reject discriminant-sizing attrs.)
assert_eq!(hint, attr::ReprAny);
-return Univariant(mk_struct(cx, cases[0].tys, false), false)
+return Univariant(mk_struct(cx,
+cases.get(0).tys.as_slice(),
+false),
+false)
}

if cases.len() == 2 && hint == attr::ReprAny {
// Nullable pointer optimization
let mut discr = 0;
while discr < 2 {
-if cases[1 - discr].is_zerolen(cx) {
-match cases[discr].find_ptr() {
+if cases.get(1 - discr).is_zerolen(cx) {
+match cases.get(discr).find_ptr() {
Some(ptrfield) => {
return NullablePointer {
-nndiscr: discr,
+nndiscr: discr as u64,
nonnull: mk_struct(cx,
-cases[discr].tys,
+cases.get(discr)
+.tys
+.as_slice(),
false),
ptrfield: ptrfield,
-nullfields: cases[1 - discr].tys.clone()
+nullfields: cases.get(1 - discr).tys
+.clone()
}
}
None => { }
@@ -216,8 +223,12 @@ fn represent_type_uncached(cx: &CrateContext, t: ty::t) -> Repr {
let bounds = IntBounds { ulo: 0, uhi: (cases.len() - 1) as u64,
slo: 0, shi: (cases.len() - 1) as i64 };
let ity = range_to_inttype(cx, hint, &bounds);
+return General(ity, cases.map(|c| {
let discr = vec!(ty_of_inttype(ity));
-return General(ity, cases.map(|c| mk_struct(cx, discr + c.tys, false)))
+mk_struct(cx,
+vec_ng::append(discr, c.tys.as_slice()).as_slice(),
+false)
+}))
}
_ => cx.sess.bug("adt::represent_type called on non-ADT type")
}
@@ -256,7 +267,7 @@ pub fn is_ffi_safe(tcx: ty::ctxt, def_id: ast::DefId) -> bool {
struct Case { discr: Disr, tys: Vec<ty::t> }
impl Case {
fn is_zerolen(&self, cx: &CrateContext) -> bool {
-mk_struct(cx, self.tys, false).size == 0
+mk_struct(cx, self.tys.as_slice(), false).size == 0
}
fn find_ptr(&self) -> Option<uint> {
self.tys.iter().position(|&ty| mono_data_classify(ty) == MonoNonNull)
@@ -280,7 +291,7 @@ fn mk_struct(cx: &CrateContext, tys: &[ty::t], packed: bool) -> Struct {
size: machine::llsize_of_alloc(cx, llty_rec) /*bad*/as u64,
align: machine::llalign_of_min(cx, llty_rec) /*bad*/as u64,
packed: packed,
-fields: tys.to_owned(),
+fields: Vec::from_slice(tys),
}
}

@@ -393,7 +404,8 @@ pub fn finish_type_of(cx: &CrateContext, r: &Repr, llty: &mut Type) {
match *r {
CEnum(..) | General(..) => { }
Univariant(ref st, _) | NullablePointer{ nonnull: ref st, .. } =>
-llty.set_struct_body(struct_llfields(cx, st, false), st.packed)
+llty.set_struct_body(struct_llfields(cx, st, false).as_slice(),
+st.packed)
}
}

@@ -402,7 +414,10 @@ fn generic_type_of(cx: &CrateContext, r: &Repr, name: Option<&str>, sizing: bool
CEnum(ity, _, _) => ll_inttype(cx, ity),
Univariant(ref st, _) | NullablePointer{ nonnull: ref st, .. } => {
match name {
-None => Type::struct_(struct_llfields(cx, st, sizing), st.packed),
+None => {
+Type::struct_(struct_llfields(cx, st, sizing).as_slice(),
+st.packed)
+}
Some(name) => { assert_eq!(sizing, false); Type::named_struct(name) }
}
}
@@ -441,10 +456,10 @@ fn generic_type_of(cx: &CrateContext, r: &Repr, name: Option<&str>, sizing: bool
Type::array(&discr_ty, align / discr_size - 1),
pad_ty);
match name {
-None => Type::struct_(fields, false),
+None => Type::struct_(fields.as_slice(), false),
Some(name) => {
let mut llty = Type::named_struct(name);
-llty.set_struct_body(fields, false);
+llty.set_struct_body(fields.as_slice(), false);
llty
}
}
@@ -517,7 +532,7 @@ fn nullable_bitdiscr(bcx: &Block, nonnull: &Struct, nndiscr: Disr, ptrfield: uin
scrutinee: ValueRef) -> ValueRef {
let cmp = if nndiscr == 0 { IntEQ } else { IntNE };
let llptr = Load(bcx, GEPi(bcx, scrutinee, [0, ptrfield]));
-let llptrty = type_of::type_of(bcx.ccx(), nonnull.fields[ptrfield]);
+let llptrty = type_of::type_of(bcx.ccx(), *nonnull.fields.get(ptrfield));
ICmp(bcx, cmp, llptr, C_null(llptrty))
}

@@ -598,7 +613,8 @@ pub fn trans_start_init(bcx: &Block, r: &Repr, val: ValueRef, discr: Disr) {
NullablePointer{ nonnull: ref nonnull, nndiscr, ptrfield, .. } => {
if discr != nndiscr {
let llptrptr = GEPi(bcx, val, [0, ptrfield]);
-let llptrty = type_of::type_of(bcx.ccx(), nonnull.fields[ptrfield]);
+let llptrty = type_of::type_of(bcx.ccx(),
+*nonnull.fields.get(ptrfield));
Store(bcx, C_null(llptrty), llptrptr)
}
}
@@ -623,7 +639,7 @@ pub fn num_args(r: &Repr, discr: Disr) -> uint {
assert_eq!(discr, 0);
st.fields.len() - (if dtor { 1 } else { 0 })
}
-General(_, ref cases) => cases[discr].fields.len() - 1,
+General(_, ref cases) => cases.get(discr as uint).fields.len() - 1,
NullablePointer{ nonnull: ref nonnull, nndiscr,
nullfields: ref nullfields, .. } => {
if discr == nndiscr { nonnull.fields.len() } else { nullfields.len() }
@@ -638,11 +654,11 @@ pub fn deref_ty(ccx: &CrateContext, r: &Repr) -> ty::t {
ccx.sess.bug("deref of c-like enum")
}
Univariant(ref st, _) => {
-st.fields[0]
+*st.fields.get(0)
}
General(_, ref cases) => {
assert!(cases.len() == 1);
-cases[0].fields[0]
+*cases.get(0).fields.get(0)
}
NullablePointer{ .. } => {
ccx.sess.bug("deref of nullable ptr")
@@ -665,7 +681,7 @@ pub fn trans_field_ptr(bcx: &Block, r: &Repr, val: ValueRef, discr: Disr,
struct_field_ptr(bcx, st, val, ix, false)
}
General(_, ref cases) => {
-struct_field_ptr(bcx, &cases[discr], val, ix + 1, true)
+struct_field_ptr(bcx, cases.get(discr as uint), val, ix + 1, true)
}
NullablePointer{ nonnull: ref nonnull, nullfields: ref nullfields,
nndiscr, .. } => {
@@ -674,7 +690,7 @@ pub fn trans_field_ptr(bcx: &Block, r: &Repr, val: ValueRef, discr: Disr,
} else {
// The unit-like case might have a nonzero number of unit-like fields.
// (e.g., Result or Either with () as one side.)
-let ty = type_of::type_of(bcx.ccx(), nullfields[ix]);
+let ty = type_of::type_of(bcx.ccx(), *nullfields.get(ix));
assert_eq!(machine::llsize_of_alloc(bcx.ccx(), ty), 0);
// The contents of memory at this pointer can't matter, but use
// the value that's "reasonable" in case of pointer comparison.
@@ -690,7 +706,7 @@ fn struct_field_ptr(bcx: &Block, st: &Struct, val: ValueRef, ix: uint,

let val = if needs_cast {
let fields = st.fields.map(|&ty| type_of::type_of(ccx, ty));
-let real_ty = Type::struct_(fields, st.packed);
+let real_ty = Type::struct_(fields.as_slice(), st.packed);
PointerCast(bcx, val, real_ty.ptr_to())
} else {
val
@@ -737,27 +753,40 @@ pub fn trans_const(ccx: &CrateContext, r: &Repr, discr: Disr,
C_integral(ll_inttype(ccx, ity), discr as u64, true)
}
General(ity, ref cases) => {
-let case = &cases[discr];
+let case = cases.get(discr as uint);
let max_sz = cases.iter().map(|x| x.size).max().unwrap();
let lldiscr = C_integral(ll_inttype(ccx, ity), discr as u64, true);
-let contents = build_const_struct(ccx, case, vec!(lldiscr) + vals);
-C_struct(contents + &[padding(max_sz - case.size)], false)
+let contents = build_const_struct(ccx,
+case,
+vec_ng::append(
+vec!(lldiscr),
+vals).as_slice());
+C_struct(vec_ng::append(
+contents,
+&[padding(max_sz - case.size)]).as_slice(),
+false)
}
Univariant(ref st, _dro) => {
assert!(discr == 0);
let contents = build_const_struct(ccx, st, vals);
-C_struct(contents, st.packed)
+C_struct(contents.as_slice(), st.packed)
}
NullablePointer{ nonnull: ref nonnull, nndiscr, .. } => {
if discr == nndiscr {
-C_struct(build_const_struct(ccx, nonnull, vals), false)
+C_struct(build_const_struct(ccx,
+nonnull,
+vals.as_slice()).as_slice(),
+false)
} else {
let vals = nonnull.fields.map(|&ty| {
// Always use null even if it's not the `ptrfield`th
// field; see #8506.
C_null(type_of::sizing_type_of(ccx, ty))
-}).collect::<Vec<ValueRef> >();
+}).move_iter().collect::<Vec<ValueRef> >();
-C_struct(build_const_struct(ccx, nonnull, vals), false)
+C_struct(build_const_struct(ccx,
+nonnull,
+vals.as_slice()).as_slice(),
+false)
}
}
}

@@ -12,8 +12,6 @@
# Translation of inline assembly.
*/

-use std::c_str::ToCStr;

use lib;
use middle::trans::build::*;
use middle::trans::callee;
@@ -22,9 +20,10 @@ use middle::trans::cleanup;
use middle::trans::cleanup::CleanupMethods;
use middle::trans::expr;
use middle::trans::type_of;

use middle::trans::type_::Type;

+use std::c_str::ToCStr;
+use std::vec_ng::Vec;
use syntax::ast;

// Take an inline assembly expression and splat it out via LLVM
@@ -88,9 +87,9 @@ pub fn trans_inline_asm<'a>(bcx: &'a Block<'a>, ia: &ast::InlineAsm)
let output_type = if num_outputs == 0 {
Type::void()
} else if num_outputs == 1 {
-output_types[0]
+*output_types.get(0)
} else {
-Type::struct_(output_types, false)
+Type::struct_(output_types.as_slice(), false)
};

let dialect = match ia.dialect {
@@ -73,11 +73,12 @@ use util::sha2::Sha256;
use util::nodemap::NodeMap;

use arena::TypedArena;
+use collections::HashMap;
use std::c_str::ToCStr;
use std::cell::{Cell, RefCell};
-use collections::HashMap;
use std::libc::c_uint;
use std::local_data;
+use std::vec_ng::Vec;
use syntax::abi::{X86, X86_64, Arm, Mips, Rust, RustIntrinsic, OsWin32};
use syntax::ast_map::PathName;
use syntax::ast_util::{local_def, is_local};
@@ -99,7 +100,7 @@ local_data_key!(task_local_insn_key: Vec<&'static str> )
pub fn with_insn_ctxt(blk: |&[&'static str]|) {
local_data::get(task_local_insn_key, |c| {
match c {
-Some(ctx) => blk(*ctx),
+Some(ctx) => blk(ctx.as_slice()),
None => ()
}
})
@@ -543,7 +544,7 @@ pub fn get_res_dtor(ccx: @CrateContext,
let tsubsts = ty::substs {
regions: ty::ErasedRegions,
self_ty: None,
-tps: substs.to_owned()
+tps: Vec::from_slice(substs),
};

let vtables = typeck::check::vtable::trans_resolve_method(ccx.tcx, did.node, &tsubsts);
@@ -752,8 +753,8 @@ pub fn iter_structural_ty<'r,

match adt::trans_switch(cx, repr, av) {
(_match::single, None) => {
-cx = iter_variant(cx, repr, av, variants[0],
-substs.tps, f);
+cx = iter_variant(cx, repr, av, *variants.get(0),
+substs.tps.as_slice(), f);
}
(_match::switch, Some(lldiscrim_a)) => {
cx = f(cx, lldiscrim_a, ty::mk_int());
@@ -775,8 +776,12 @@ pub fn iter_structural_ty<'r,
in iter_structural_ty")
}
let variant_cx =
-iter_variant(variant_cx, repr, av, *variant,
-substs.tps, |x,y,z| f(x,y,z));
+iter_variant(variant_cx,
+repr,
+av,
+*variant,
+substs.tps.as_slice(),
+|x,y,z| f(x,y,z));
Br(variant_cx, next_cx.llbb);
}
cx = next_cx;
@@ -876,7 +881,11 @@ pub fn trans_external_path(ccx: &CrateContext, did: ast::DefId, t: ty::t) -> Val
match fn_ty.abis.for_target(ccx.sess.targ_cfg.os,
ccx.sess.targ_cfg.arch) {
Some(Rust) | Some(RustIntrinsic) => {
-get_extern_rust_fn(ccx, fn_ty.sig.inputs, fn_ty.sig.output, name, did)
+get_extern_rust_fn(ccx,
+fn_ty.sig.inputs.as_slice(),
+fn_ty.sig.output,
+name,
+did)
}
Some(..) | None => {
let c = foreign::llvm_calling_convention(ccx, fn_ty.abis);
@@ -889,7 +898,11 @@ pub fn trans_external_path(ccx: &CrateContext, did: ast::DefId, t: ty::t) -> Val
}
}
ty::ty_closure(ref f) => {
-get_extern_rust_fn(ccx, f.sig.inputs, f.sig.output, name, did)
+get_extern_rust_fn(ccx,
+f.sig.inputs.as_slice(),
+f.sig.output,
+name,
+did)
}
_ => {
let llty = type_of(ccx, t);
@@ -935,7 +948,7 @@ pub fn invoke<'a>(

let llresult = Invoke(bcx,
llfn,
-llargs,
+llargs.as_slice(),
normal_bcx.llbb,
landing_pad,
attributes);
@@ -951,7 +964,7 @@ pub fn invoke<'a>(
None => debuginfo::clear_source_location(bcx.fcx)
};

-let llresult = Call(bcx, llfn, llargs, attributes);
+let llresult = Call(bcx, llfn, llargs.as_slice(), attributes);
return (llresult, bcx);
}
}
@@ -1231,7 +1244,10 @@ pub fn new_fn_ctxt<'a>(ccx: @CrateContext,
let substd_output_type = match param_substs {
None => output_type,
Some(substs) => {
-ty::subst_tps(ccx.tcx, substs.tys, substs.self_ty, output_type)
+ty::subst_tps(ccx.tcx,
+substs.tys.as_slice(),
+substs.self_ty,
+output_type)
}
};
let uses_outptr = type_of::return_uses_outptr(ccx, substd_output_type);
@@ -1289,7 +1305,7 @@ pub fn init_function<'a>(
None => output_type,
Some(substs) => {
ty::subst_tps(fcx.ccx.tcx,
-substs.tys,
+substs.tys.as_slice(),
substs.self_ty,
output_type)
}
@@ -1472,7 +1488,7 @@ pub fn trans_closure<'a>(ccx: @CrateContext,

// Set up arguments to the function.
let arg_tys = ty::ty_fn_args(node_id_type(bcx, id));
-let arg_datums = create_datums_for_fn_args(&fcx, arg_tys);
+let arg_datums = create_datums_for_fn_args(&fcx, arg_tys.as_slice());

bcx = copy_args_to_allocas(&fcx,
arg_scope,
@@ -1583,7 +1599,7 @@ fn trans_enum_variant_or_tuple_like_struct(ccx: @CrateContext,
let no_substs: &[ty::t] = [];
let ty_param_substs = match param_substs {
Some(ref substs) => {
-let v: &[ty::t] = substs.tys;
+let v: &[ty::t] = substs.tys.as_slice();
v
}
None => {
@@ -1612,7 +1628,7 @@ fn trans_enum_variant_or_tuple_like_struct(ccx: @CrateContext,

let arg_tys = ty::ty_fn_args(ctor_ty);

-let arg_datums = create_datums_for_fn_args(&fcx, arg_tys);
+let arg_datums = create_datums_for_fn_args(&fcx, arg_tys.as_slice());

let bcx = fcx.entry_bcx.get().unwrap();

@@ -1636,7 +1652,7 @@ pub fn trans_enum_def(ccx: @CrateContext, enum_definition: &ast::EnumDef,
id: ast::NodeId, vi: @Vec<@ty::VariantInfo> ,
i: &mut uint) {
for &variant in enum_definition.variants.iter() {
-let disr_val = vi[*i].disr_val;
+let disr_val = vi.get(*i).disr_val;
*i += 1;

match variant.node.kind {
@@ -1801,7 +1817,11 @@ fn register_fn(ccx: @CrateContext,
_ => fail!("expected bare rust fn or an intrinsic")
};

-let llfn = decl_rust_fn(ccx, false, f.sig.inputs, f.sig.output, sym);
+let llfn = decl_rust_fn(ccx,
+false,
+f.sig.inputs.as_slice(),
+f.sig.output,
+sym);
finish_register_fn(ccx, sp, sym, node_id, llfn);
llfn
}
@@ -1893,8 +1913,10 @@ pub fn create_entry_wrapper(ccx: @CrateContext,
(rust_main, args)
};

-let result = llvm::LLVMBuildCall(bld, start_fn,
-args.as_ptr(), args.len() as c_uint,
+let result = llvm::LLVMBuildCall(bld,
+start_fn,
+args.as_ptr(),
+args.len() as c_uint,
noname());

llvm::LLVMBuildRet(bld, result);
@@ -2476,7 +2498,7 @@ pub fn create_module_map(ccx: &CrateContext) -> (ValueRef, uint) {
elts.push(elt);
}
unsafe {
-llvm::LLVMSetInitializer(map, C_array(elttype, elts));
+llvm::LLVMSetInitializer(map, C_array(elttype, elts.as_slice()));
}
return (map, keys.len())
}
@@ -2564,7 +2586,8 @@ pub fn fill_crate_map(ccx: @CrateContext, map: ValueRef) {
});
lib::llvm::SetLinkage(vec_elements, lib::llvm::InternalLinkage);

-llvm::LLVMSetInitializer(vec_elements, C_array(ccx.int_type, subcrates));
+llvm::LLVMSetInitializer(vec_elements,
+C_array(ccx.int_type, subcrates.as_slice()));
let (mod_map, mod_count) = create_module_map(ccx);

llvm::LLVMSetInitializer(map, C_struct(
@@ -2613,7 +2636,7 @@ pub fn write_metadata(cx: &CrateContext, krate: &ast::Crate) -> Vec<u8> {
let encode_parms = crate_ctxt_to_encode_parms(cx, encode_inlined_item);
let metadata = encoder::encode_metadata(encode_parms, krate);
let compressed = encoder::metadata_encoding_version +
-flate::deflate_bytes(metadata).as_slice();
+flate::deflate_bytes(metadata.as_slice()).as_slice();
let llmeta = C_bytes(compressed);
let llconst = C_struct([llmeta], false);
let name = format!("rust_metadata_{}_{}_{}", cx.link_meta.crateid.name,
@@ -2744,12 +2767,12 @@ pub fn trans_crate(sess: session::Session,
let link_meta = ccx.link_meta.clone();
let llmod = ccx.llmod;

-let mut reachable = {
+let mut reachable: Vec<~str> = {
let reachable_map = ccx.reachable.borrow();
reachable_map.get().iter().filter_map(|id| {
let item_symbols = ccx.item_symbols.borrow();
item_symbols.get().find(id).map(|s| s.to_owned())
-}).to_owned_vec()
+}).collect()
};

// Make sure that some other crucial symbols are not eliminated from the
@@ -17,8 +17,10 @@ use middle::trans::base;
use middle::trans::common::*;
use middle::trans::machine::llalign_of_pref;
use middle::trans::type_::Type;
-use std::libc::{c_uint, c_ulonglong, c_char};
use collections::HashMap;
+use std::libc::{c_uint, c_ulonglong, c_char};
+use std::vec_ng::Vec;
use syntax::codemap::Span;

pub struct Builder<'a> {
@@ -542,7 +544,7 @@ impl<'a> Builder<'a> {
} else {
let v = ixs.iter().map(|i| C_i32(*i as i32)).collect::<Vec<ValueRef> >();
self.count_insn("gepi");
-self.inbounds_gep(base, v)
+self.inbounds_gep(base, v.as_slice())
}
}

@@ -16,6 +16,7 @@ use middle::trans::cabi_x86_64;
use middle::trans::cabi_arm;
use middle::trans::cabi_mips;
use middle::trans::type_::Type;
+use std::vec_ng::Vec;
use syntax::abi::{X86, X86_64, Arm, Mips};

#[deriving(Clone, Eq)]

@@ -14,11 +14,11 @@ use lib::llvm::{llvm, Integer, Pointer, Float, Double, Struct, Array};
use lib::llvm::StructRetAttribute;
use middle::trans::cabi::{FnType, ArgType};
use middle::trans::context::CrateContext;

use middle::trans::type_::Type;

use std::cmp;
use std::option::{None, Some};
+use std::vec_ng::Vec;

fn align_up_to(off: uint, a: uint) -> uint {
return (off + a - 1u) / a * a;

@@ -17,9 +17,10 @@ use lib::llvm::StructRetAttribute;
use middle::trans::context::CrateContext;
use middle::trans::context::task_llcx;
use middle::trans::cabi::*;

use middle::trans::type_::Type;

+use std::vec_ng::Vec;

fn align_up_to(off: uint, a: uint) -> uint {
return (off + a - 1u) / a * a;
}
@@ -155,7 +156,7 @@ fn coerce_to_int(size: uint) -> Vec<Type> {
fn struct_ty(ty: Type) -> Type {
let size = ty_size(ty) * 8;
let fields = coerce_to_int(size);
-return Type::struct_(fields, false);
+return Type::struct_(fields.as_slice(), false);
}

pub fn compute_abi_info(_ccx: &CrateContext,

@@ -15,6 +15,7 @@ use super::cabi::*;
use super::common::*;
use super::machine::*;
use middle::trans::type_::Type;
+use std::vec_ng::Vec;

pub fn compute_abi_info(ccx: &CrateContext,
atys: &[Type],

@@ -18,11 +18,10 @@ use lib::llvm::{Struct, Array, Attribute};
use lib::llvm::{StructRetAttribute, ByValAttribute};
use middle::trans::cabi::*;
use middle::trans::context::CrateContext;

use middle::trans::type_::Type;

use std::cmp;
-use std::vec;
+use std::vec_ng::Vec;

#[deriving(Clone, Eq)]
enum RegClass {
@@ -220,7 +219,7 @@ fn classify_ty(ty: Type) -> Vec<RegClass> {
unify(cls, ix + off / 8u, SSEDs);
}
Struct => {
-classify_struct(ty.field_types(), cls, ix, off);
+classify_struct(ty.field_types().as_slice(), cls, ix, off);
}
Array => {
let len = ty.array_length();
@@ -282,13 +281,13 @@ fn classify_ty(ty: Type) -> Vec<RegClass> {
}

let words = (ty_size(ty) + 7) / 8;
-let mut cls = vec::from_elem(words, NoClass);
+let mut cls = Vec::from_elem(words, NoClass);
if words > 4 {
-all_mem(cls);
+all_mem(cls.as_mut_slice());
return cls;
}
-classify(ty, cls, 0, 0);
-fixup(ty, cls);
+classify(ty, cls.as_mut_slice(), 0, 0);
+fixup(ty, cls.as_mut_slice());
return cls;
}

@@ -329,7 +328,7 @@ fn llreg_ty(cls: &[RegClass]) -> Type {
}
i += 1u;
}
-return Type::struct_(tys, false);
+return Type::struct_(tys.as_slice(), false);
}

pub fn compute_abi_info(_ccx: &CrateContext,
@@ -342,10 +341,13 @@ pub fn compute_abi_info(_ccx: &CrateContext,
-> ArgType {
if !ty.is_reg_ty() {
let cls = classify_ty(ty);
-if is_mem_cls(cls) {
+if is_mem_cls(cls.as_slice()) {
ArgType::indirect(ty, Some(attr))
} else {
-ArgType::direct(ty, Some(llreg_ty(cls)), None, None)
+ArgType::direct(ty,
+Some(llreg_ty(cls.as_slice())),
+None,
+None)
}
} else {
ArgType::direct(ty, None, None, None)
@ -48,6 +48,8 @@ use util::ppaux::Repr;
|
||||||
|
|
||||||
use middle::trans::type_::Type;
|
use middle::trans::type_::Type;
|
||||||
|
|
||||||
|
use std::vec_ng::Vec;
|
||||||
|
use std::vec_ng;
|
||||||
use syntax::ast;
|
use syntax::ast;
|
||||||
use syntax::abi::AbiSet;
|
use syntax::abi::AbiSet;
|
||||||
use syntax::ast_map;
|
use syntax::ast_map;
|
||||||
|
@ -174,7 +176,12 @@ pub fn trans_fn_ref(bcx: &Block, def_id: ast::DefId,
|
||||||
debug!("trans_fn_ref(def_id={}, ref_id={:?}, type_params={}, vtables={})",
|
debug!("trans_fn_ref(def_id={}, ref_id={:?}, type_params={}, vtables={})",
|
||||||
def_id.repr(bcx.tcx()), ref_id, type_params.repr(bcx.tcx()),
|
def_id.repr(bcx.tcx()), ref_id, type_params.repr(bcx.tcx()),
|
||||||
vtables.repr(bcx.tcx()));
|
vtables.repr(bcx.tcx()));
|
||||||
trans_fn_ref_with_vtables(bcx, def_id, ref_id, is_method, type_params, vtables)
|
trans_fn_ref_with_vtables(bcx,
|
||||||
|
def_id,
|
||||||
|
ref_id,
|
||||||
|
is_method,
|
||||||
|
type_params.as_slice(),
|
||||||
|
vtables)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn trans_fn_ref_with_vtables_to_callee<'a>(bcx: &'a Block<'a>,
|
fn trans_fn_ref_with_vtables_to_callee<'a>(bcx: &'a Block<'a>,
|
||||||
|
@ -221,7 +228,8 @@ fn resolve_default_method_vtables(bcx: &Block,
|
||||||
None => vec::from_elem(num_method_vtables, @Vec::new())
|
None => vec::from_elem(num_method_vtables, @Vec::new())
|
||||||
};
|
};
|
||||||
|
|
||||||
let param_vtables = @(*trait_vtables_fixed + method_vtables);
|
let param_vtables = @(vec_ng::append((*trait_vtables_fixed).clone(),
|
||||||
|
method_vtables));
|
||||||
|
|
||||||
let self_vtables = resolve_param_vtables_under_param_substs(
|
let self_vtables = resolve_param_vtables_under_param_substs(
|
||||||
bcx.tcx(), param_substs, impl_res.self_vtables);
|
bcx.tcx(), param_substs, impl_res.self_vtables);
|
||||||
|
@ -272,7 +280,7 @@ pub fn trans_fn_ref_with_vtables(
|
||||||
|
|
||||||
let substs = ty::substs { regions: ty::ErasedRegions,
|
let substs = ty::substs { regions: ty::ErasedRegions,
|
||||||
self_ty: None,
|
self_ty: None,
|
||||||
tps: /*bad*/ type_params.to_owned() };
|
tps: /*bad*/ Vec::from_slice(type_params) };
|
||||||
|
|
||||||
// Load the info for the appropriate trait if necessary.
|
// Load the info for the appropriate trait if necessary.
|
||||||
match ty::trait_of_method(tcx, def_id) {
|
match ty::trait_of_method(tcx, def_id) {
|
||||||
|
@ -683,7 +691,11 @@ pub fn trans_call_inner<'a>(
|
||||||
}
|
}
|
||||||
|
|
||||||
// Invoke the actual rust fn and update bcx/llresult.
|
// Invoke the actual rust fn and update bcx/llresult.
|
||||||
let (llret, b) = base::invoke(bcx, llfn, llargs, attrs, call_info);
|
let (llret, b) = base::invoke(bcx,
|
||||||
|
llfn,
|
||||||
|
llargs,
|
||||||
|
attrs.as_slice(),
|
||||||
|
call_info);
|
||||||
bcx = b;
|
bcx = b;
|
||||||
llresult = llret;
|
llresult = llret;
|
||||||
|
|
||||||
|
@ -712,8 +724,12 @@ pub fn trans_call_inner<'a>(
|
||||||
ArgExprs(a) => a.iter().map(|x| expr_ty(bcx, *x)).collect(),
|
ArgExprs(a) => a.iter().map(|x| expr_ty(bcx, *x)).collect(),
|
||||||
_ => fail!("expected arg exprs.")
|
_ => fail!("expected arg exprs.")
|
||||||
};
|
};
|
||||||
bcx = foreign::trans_native_call(bcx, callee_ty,
|
bcx = foreign::trans_native_call(bcx,
|
||||||
llfn, opt_llretslot.unwrap(), llargs, arg_tys);
|
callee_ty,
|
||||||
|
llfn,
|
||||||
|
opt_llretslot.unwrap(),
|
||||||
|
llargs.as_slice(),
|
||||||
|
arg_tys);
|
||||||
}
|
}
|
||||||
|
|
||||||
// If the caller doesn't care about the result of this fn call,
|
// If the caller doesn't care about the result of this fn call,
|
||||||
|
@ -770,7 +786,7 @@ fn trans_args<'a>(cx: &'a Block<'a>,
|
||||||
assert!(variadic);
|
assert!(variadic);
|
||||||
expr_ty_adjusted(cx, *arg_expr)
|
expr_ty_adjusted(cx, *arg_expr)
|
||||||
} else {
|
} else {
|
||||||
arg_tys[i]
|
*arg_tys.get(i)
|
||||||
};
|
};
|
||||||
llargs.push(unpack_result!(bcx, {
|
llargs.push(unpack_result!(bcx, {
|
||||||
trans_arg_expr(bcx, arg_ty, *arg_expr,
|
trans_arg_expr(bcx, arg_ty, *arg_expr,
|
||||||
|
@ -783,7 +799,7 @@ fn trans_args<'a>(cx: &'a Block<'a>,
|
||||||
assert!(!variadic);
|
assert!(!variadic);
|
||||||
|
|
||||||
llargs.push(unpack_result!(bcx, {
|
llargs.push(unpack_result!(bcx, {
|
||||||
trans_arg_expr(bcx, arg_tys[0], arg_expr,
|
trans_arg_expr(bcx, *arg_tys.get(0), arg_expr,
|
||||||
arg_cleanup_scope,
|
arg_cleanup_scope,
|
||||||
DontAutorefArg)
|
DontAutorefArg)
|
||||||
}));
|
}));
|
||||||
|
@ -793,7 +809,7 @@ fn trans_args<'a>(cx: &'a Block<'a>,
|
||||||
assert_eq!(arg_tys.len(), 2);
|
assert_eq!(arg_tys.len(), 2);
|
||||||
|
|
||||||
llargs.push(unpack_result!(bcx, {
|
llargs.push(unpack_result!(bcx, {
|
||||||
trans_arg_expr(bcx, arg_tys[1], arg2_expr,
|
trans_arg_expr(bcx, *arg_tys.get(1), arg2_expr,
|
||||||
arg_cleanup_scope,
|
arg_cleanup_scope,
|
||||||
DoAutorefArg)
|
DoAutorefArg)
|
||||||
}));
|
}));
|
||||||
|
|
|
@ -349,7 +349,7 @@ impl<'a> CleanupMethods<'a> for FunctionContext<'a> {
|
||||||
assert!(self.is_valid_custom_scope(custom_scope));
|
assert!(self.is_valid_custom_scope(custom_scope));
|
||||||
|
|
||||||
let mut scopes = self.scopes.borrow_mut();
|
let mut scopes = self.scopes.borrow_mut();
|
||||||
let scope = &mut scopes.get()[custom_scope.index];
|
let scope = scopes.get().get_mut(custom_scope.index);
|
||||||
scope.cleanups.push(cleanup);
|
scope.cleanups.push(cleanup);
|
||||||
scope.clear_cached_exits();
|
scope.clear_cached_exits();
|
||||||
}
|
}
|
||||||
|
@ -433,7 +433,7 @@ impl<'a> CleanupHelperMethods<'a> for FunctionContext<'a> {
|
||||||
fn is_valid_custom_scope(&self, custom_scope: CustomScopeIndex) -> bool {
|
fn is_valid_custom_scope(&self, custom_scope: CustomScopeIndex) -> bool {
|
||||||
let scopes = self.scopes.borrow();
|
let scopes = self.scopes.borrow();
|
||||||
custom_scope.index < scopes.get().len() &&
|
custom_scope.index < scopes.get().len() &&
|
||||||
scopes.get()[custom_scope.index].kind.is_temp()
|
scopes.get().get(custom_scope.index).kind.is_temp()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn trans_scope_cleanups(&self, // cannot borrow self, will recurse
|
fn trans_scope_cleanups(&self, // cannot borrow self, will recurse
|
||||||
|
|
|
@ -27,6 +27,7 @@ use util::ppaux::Repr;
|
||||||
use util::ppaux::ty_to_str;
|
use util::ppaux::ty_to_str;
|
||||||
|
|
||||||
use arena::TypedArena;
|
use arena::TypedArena;
|
||||||
|
use std::vec_ng::Vec;
|
||||||
use syntax::ast;
|
use syntax::ast;
|
||||||
use syntax::ast_util;
|
use syntax::ast_util;
|
||||||
|
|
||||||
|
@ -139,12 +140,12 @@ pub fn mk_closure_tys(tcx: ty::ctxt,
// is the actual types that will be stored in the map, not the
// logical types as the user sees them, so by-ref upvars must be
// converted to ptrs.
let bound_tys = bound_values.map(|bv| {
let bound_tys = bound_values.iter().map(|bv| {
match bv.action {
EnvCopy | EnvMove => bv.datum.ty,
EnvRef => ty::mk_mut_ptr(tcx, bv.datum.ty)
}
});
}).collect();
let cdata_ty = ty::mk_tup(tcx, bound_tys);
debug!("cdata_ty={}", ty_to_str(tcx, cdata_ty));
return cdata_ty;
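The `bound_values.map(...)` → `bound_values.iter().map(...).collect()` change reflects the old built-in vector `map` method giving way to iterator adapters. A small sketch of the same transformation in current Rust (the types are illustrative only):

fn main() {
    let bound_values = vec![1i32, 2, 3];

    // The old owned-vector `map` became: iterate, transform, collect.
    let bound_tys: Vec<String> = bound_values
        .iter()
        .map(|v| format!("ty_{}", v))
        .collect();

    assert_eq!(bound_tys, ["ty_1", "ty_2", "ty_3"]);
}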
@ -199,7 +200,7 @@ pub fn store_environment<'a>(
let tcx = ccx.tcx;

// compute the type of the closure
let cdata_ty = mk_closure_tys(tcx, bound_values);
let cdata_ty = mk_closure_tys(tcx, bound_values.as_slice());

// cbox_ty has the form of a tuple: (a, b, c) we want a ptr to a
// tuple. This could be a ptr in uniq or a box or on stack,
@ -387,7 +388,11 @@ pub fn trans_expr_fn<'a>(
let s = tcx.map.with_path(id, |path| {
mangle_internal_name_by_path_and_seq(path, "closure")
});
let llfn = decl_internal_rust_fn(ccx, true, f.sig.inputs, f.sig.output, s);
let llfn = decl_internal_rust_fn(ccx,
true,
f.sig.inputs.as_slice(),
f.sig.output,
s);

// set an inline hint for all closures
set_inline_hint(llfn);
@ -396,11 +401,17 @@ pub fn trans_expr_fn<'a>(
let capture_map = ccx.maps.capture_map.borrow();
capture_map.get().get_copy(&id)
};
let ClosureResult {llbox, cdata_ty, bcx} = build_closure(bcx, *cap_vars.borrow(), sigil);
let ClosureResult {llbox, cdata_ty, bcx} =
build_closure(bcx, cap_vars.borrow().as_slice(), sigil);
trans_closure(ccx, decl, body, llfn,
bcx.fcx.param_substs, id,
[], ty::ty_fn_ret(fty),
|bcx| load_environment(bcx, cdata_ty, *cap_vars.borrow(), sigil));
|bcx| {
load_environment(bcx,
cdata_ty,
cap_vars.borrow().as_slice(),
sigil)
});
fill_fn_pair(bcx, dest_addr, llfn, llbox);

bcx
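The `*cap_vars.borrow()` → `cap_vars.borrow().as_slice()` rewrites above are about lending the contents of a `RefCell`-guarded `Vec` to a function that wants a slice. A present-day sketch of that shape (the callee name is made up for illustration):

use std::cell::RefCell;

// Hypothetical stand-in for the real load_environment.
fn load_environment(cap_vars: &[&str]) -> usize {
    cap_vars.len()
}

fn main() {
    let cap_vars = RefCell::new(vec!["x", "y"]);

    // Keep the borrow guard alive for as long as the slice is in use.
    let guard = cap_vars.borrow();
    let wired = load_environment(guard.as_slice());
    println!("wired {} captures", wired);
}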
@ -447,9 +458,13 @@ pub fn get_wrapper_for_bare_fn(ccx: @CrateContext,
mangle_internal_name_by_path_and_seq(path, "as_closure")
});
let llfn = if is_local {
decl_internal_rust_fn(ccx, true, f.sig.inputs, f.sig.output, name)
decl_internal_rust_fn(ccx,
true,
f.sig.inputs.as_slice(),
f.sig.output,
name)
} else {
decl_rust_fn(ccx, true, f.sig.inputs, f.sig.output, name)
decl_rust_fn(ccx, true, f.sig.inputs.as_slice(), f.sig.output, name)
};

{
@ -470,7 +485,9 @@ pub fn get_wrapper_for_bare_fn(ccx: @CrateContext,
init_function(&fcx, true, f.sig.output, None);
let bcx = fcx.entry_bcx.get().unwrap();

let args = create_datums_for_fn_args(&fcx, ty::ty_fn_args(closure_ty));
let args = create_datums_for_fn_args(&fcx,
ty::ty_fn_args(closure_ty)
.as_slice());
let mut llargs = Vec::new();
match fcx.llretptr.get() {
Some(llretptr) => {
@ -480,7 +497,7 @@ pub fn get_wrapper_for_bare_fn(ccx: @CrateContext,
}
llargs.extend(&mut args.iter().map(|arg| arg.val));

let retval = Call(bcx, fn_ptr, llargs, []);
let retval = Call(bcx, fn_ptr, llargs.as_slice(), []);
if type_is_zero_size(ccx, f.sig.output) || fcx.llretptr.get().is_some() {
RetVoid(bcx);
} else {
@ -32,10 +32,11 @@ use util::ppaux::Repr;
use util::nodemap::NodeMap;

use arena::TypedArena;
use collections::HashMap;
use std::c_str::ToCStr;
use std::cell::{Cell, RefCell};
use collections::HashMap;
use std::libc::{c_uint, c_longlong, c_ulonglong, c_char};
use std::vec_ng::Vec;
use syntax::ast::Ident;
use syntax::ast;
use syntax::ast_map::{PathElem, PathName};
@ -50,8 +51,9 @@ fn type_is_newtype_immediate(ccx: &CrateContext, ty: ty::t) -> bool {
ty::ty_struct(def_id, ref substs) => {
let fields = ty::struct_fields(ccx.tcx, def_id, substs);
fields.len() == 1 &&
fields[0].ident.name == token::special_idents::unnamed_field.name &&
type_is_immediate(ccx, fields[0].mt.ty)
fields.get(0).ident.name ==
token::special_idents::unnamed_field.name &&
type_is_immediate(ccx, fields.get(0).mt.ty)
}
_ => false
}
@ -781,7 +783,7 @@ pub fn align_to(cx: &Block, off: ValueRef, align: ValueRef) -> ValueRef {
pub fn monomorphize_type(bcx: &Block, t: ty::t) -> ty::t {
match bcx.fcx.param_substs {
Some(substs) => {
ty::subst_tps(bcx.tcx(), substs.tys, substs.self_ty, t)
ty::subst_tps(bcx.tcx(), substs.tys.as_slice(), substs.self_ty, t)
}
_ => {
assert!(!ty::type_has_params(t));
@ -824,7 +826,7 @@ pub fn node_id_type_params(bcx: &Block, id: ast::NodeId, is_method: bool) -> Vec
match bcx.fcx.param_substs {
Some(substs) => {
params.iter().map(|t| {
ty::subst_tps(tcx, substs.tys, substs.self_ty, *t)
ty::subst_tps(tcx, substs.tys.as_slice(), substs.self_ty, *t)
}).collect()
}
_ => params
@ -881,10 +883,13 @@ pub fn resolve_vtable_under_param_substs(tcx: ty::ctxt,
let tys = match param_substs {
Some(substs) => {
tys.iter().map(|t| {
ty::subst_tps(tcx, substs.tys, substs.self_ty, *t)
ty::subst_tps(tcx,
substs.tys.as_slice(),
substs.self_ty,
*t)
}).collect()
}
_ => tys.to_owned()
_ => Vec::from_slice(tys.as_slice())
};
typeck::vtable_static(
trait_id, tys,
@ -918,10 +923,10 @@ pub fn find_vtable(tcx: ty::ctxt,
typeck::param_numbered(n) => {
let tables = ps.vtables
.expect("vtables missing where they are needed");
tables[n]
*tables.get(n)
}
};
param_bounds[n_bound].clone()
param_bounds.get(n_bound).clone()
}

pub fn dummy_substs(tps: Vec<ty::t> ) -> ty::substs {
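`tys.to_owned()` on a borrowed slice used to produce an owned `~[T]`; with the switch to `Vec<T>` the diff spells it `Vec::from_slice(...)`. In today's standard library the equivalent is `to_vec()` (or `Vec::from`); a tiny sketch:

fn main() {
    let tys: &[u32] = &[1, 2, 3];

    // Copy a borrowed slice into an owned, growable vector.
    let owned: Vec<u32> = tys.to_vec();

    assert_eq!(owned, tys);
}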
@ -25,15 +25,16 @@ use middle::trans::consts;
use middle::trans::expr;
use middle::trans::inline;
use middle::trans::machine;
use middle::trans::type_::Type;
use middle::trans::type_of;
use middle::ty;
use util::ppaux::{Repr, ty_to_str};

use middle::trans::type_::Type;

use std::c_str::ToCStr;
use std::libc::c_uint;
use std::vec;
use std::vec_ng::Vec;
use std::vec_ng;
use syntax::{ast, ast_util};

pub fn const_lit(cx: &CrateContext, e: &ast::Expr, lit: ast::Lit)
@ -303,8 +304,8 @@ fn const_expr_unadjusted(cx: @CrateContext, e: &ast::Expr,
let map_list = |exprs: &[@ast::Expr]| {
exprs.iter().map(|&e| const_expr(cx, e, is_local))
.fold((Vec::new(), true),
|(L, all_inlineable), (val, inlineable)| {
(vec::append_one(L, val), all_inlineable && inlineable)
|(l, all_inlineable), (val, inlineable)| {
(vec_ng::append_one(l, val), all_inlineable && inlineable)
})
};
unsafe {
@ -533,7 +534,7 @@ fn const_expr_unadjusted(cx: @CrateContext, e: &ast::Expr,
let ety = ty::expr_ty(cx.tcx, e);
let repr = adt::represent_type(cx, ety);
let (vals, inlineable) = map_list(es.as_slice());
(adt::trans_const(cx, repr, 0, vals), inlineable)
(adt::trans_const(cx, repr, 0, vals.as_slice()), inlineable)
}
ast::ExprStruct(_, ref fs, ref base_opt) => {
let ety = ty::expr_ty(cx.tcx, e);
@ -667,7 +668,8 @@ fn const_expr_unadjusted(cx: @CrateContext, e: &ast::Expr,
let ety = ty::expr_ty(cx.tcx, e);
let repr = adt::represent_type(cx, ety);
let (arg_vals, inlineable) = map_list(args.as_slice());
(adt::trans_const(cx, repr, 0, arg_vals), inlineable)
(adt::trans_const(cx, repr, 0, arg_vals.as_slice()),
inlineable)
}
Some(ast::DefVariant(enum_did, variant_did, _)) => {
let ety = ty::expr_ty(cx.tcx, e);
@ -676,8 +678,10 @@ fn const_expr_unadjusted(cx: @CrateContext, e: &ast::Expr,
enum_did,
variant_did);
let (arg_vals, inlineable) = map_list(args.as_slice());
(adt::trans_const(cx, repr, vinfo.disr_val, arg_vals),
inlineable)
(adt::trans_const(cx,
repr,
vinfo.disr_val,
arg_vals.as_slice()), inlineable)
}
_ => cx.sess.span_bug(e.span, "expected a struct or variant def")
}
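The `map_list` closure above folds a pair of (accumulated values, all_inlineable flag) across the expressions; the rename from `L` to `l` also fixes the non-snake-case binding. A rough modern equivalent of that fold shape, with the `const_expr`-style step mocked out:

fn main() {
    let exprs = [1i32, -2, 3];

    // Each step yields a value plus a flag; the fold collects the values
    // and ANDs the flags together.
    let (vals, all_inlineable) = exprs
        .iter()
        .map(|&e| (e * 10, e > 0))
        .fold((Vec::new(), true), |(mut l, all), (val, ok)| {
            l.push(val);
            (l, all && ok)
        });

    assert_eq!(vals, [10, -20, 30]);
    assert!(!all_inlineable);
}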
@ -33,6 +33,7 @@ use std::cell::{Cell, RefCell};
use std::c_str::ToCStr;
use std::local_data;
use std::libc::c_uint;
use std::vec_ng::Vec;
use collections::{HashMap, HashSet};
use syntax::ast;
use syntax::parse::token::InternedString;
@ -343,7 +343,10 @@ pub fn trans_fail<'a>(
let v_filename = PointerCast(bcx, v_filename, Type::i8p());
let args = vec!(v_str, v_filename, C_int(ccx, v_line));
let did = langcall(bcx, Some(sp), "", FailFnLangItem);
let bcx = callee::trans_lang_call(bcx, did, args, Some(expr::Ignore)).bcx;
let bcx = callee::trans_lang_call(bcx,
did,
args.as_slice(),
Some(expr::Ignore)).bcx;
Unreachable(bcx);
return bcx;
}
@ -358,7 +361,10 @@ pub fn trans_fail_bounds_check<'a>(
let (filename, line) = filename_and_line_num_from_span(bcx, sp);
let args = vec!(filename, line, index, len);
let did = langcall(bcx, Some(sp), "", FailBoundsCheckFnLangItem);
let bcx = callee::trans_lang_call(bcx, did, args, Some(expr::Ignore)).bcx;
let bcx = callee::trans_lang_call(bcx,
did,
args.as_slice(),
Some(expr::Ignore)).bcx;
Unreachable(bcx);
return bcx;
}
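Both `trans_fail` hunks show the same mechanical fix: `vec!(...)` now builds a `Vec`, so call sites that expect a slice gain an `.as_slice()`. In current Rust the deref coercion `&args` (or `args.as_slice()`) does the same job; an illustrative sketch with an invented callee:

// Hypothetical stand-in for a callee that only reads its arguments.
fn trans_lang_call(args: &[i64]) -> usize {
    args.len()
}

fn main() {
    let args = vec![1i64, 2, 3];

    // Either spelling lends the Vec's contents as a slice.
    assert_eq!(trans_lang_call(args.as_slice()), 3);
    assert_eq!(trans_lang_call(&args), 3);
}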
@ -149,6 +149,7 @@ use std::libc::{c_uint, c_ulonglong, c_longlong};
use std::ptr;
use std::sync::atomics;
use std::vec;
use std::vec_ng::Vec;
use syntax::codemap::{Span, Pos};
use syntax::{abi, ast, codemap, ast_util, ast_map, opt_vec};
use syntax::parse::token;
@ -725,7 +726,10 @@ pub fn create_function_debug_context(cx: &CrateContext,
let return_type = match param_substs {
None => return_type,
Some(substs) => {
ty::subst_tps(cx.tcx, substs.tys, substs.self_ty, return_type)
ty::subst_tps(cx.tcx,
substs.tys.as_slice(),
substs.self_ty,
return_type)
}
};

@ -740,7 +744,10 @@ pub fn create_function_debug_context(cx: &CrateContext,
let arg_type = match param_substs {
None => arg_type,
Some(substs) => {
ty::subst_tps(cx.tcx, substs.tys, substs.self_ty, arg_type)
ty::subst_tps(cx.tcx,
substs.tys.as_slice(),
substs.self_ty,
arg_type)
}
};

@ -771,7 +778,8 @@ pub fn create_function_debug_context(cx: &CrateContext,
name_to_append_suffix_to.push_char('<');

// The list to be filled with template parameters:
let mut template_params: Vec<DIDescriptor> = vec::with_capacity(generics.ty_params.len() + 1);
let mut template_params: Vec<DIDescriptor> =
Vec::with_capacity(generics.ty_params.len() + 1);

// Handle self type
if has_self_type {
@ -814,12 +822,12 @@ pub fn create_function_debug_context(cx: &CrateContext,
let actual_types = match param_substs {
Some(param_substs) => &param_substs.tys,
None => {
return create_DIArray(DIB(cx), template_params);
return create_DIArray(DIB(cx), template_params.as_slice());
}
};

for (index, &ast::TyParam{ ident: ident, .. }) in generics.ty_params.iter().enumerate() {
let actual_type = actual_types[index];
let actual_type = *actual_types.get(index);
// Add actual type name to <...> clause of function name
let actual_type_name = ppaux::ty_to_str(cx.tcx, actual_type);
name_to_append_suffix_to.push_str(actual_type_name);
@ -850,7 +858,7 @@ pub fn create_function_debug_context(cx: &CrateContext,

name_to_append_suffix_to.push_char('>');

return create_DIArray(DIB(cx), template_params);
return create_DIArray(DIB(cx), template_params.as_slice());
}
}
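`vec::with_capacity(n)` becomes the associated constructor `Vec::with_capacity(n)`: preallocation moves from a free function onto the type. A quick sketch of the same call in today's API:

fn main() {
    let n = 4usize;

    // Reserve space up front; the length stays 0 until items are pushed.
    let mut template_params: Vec<u32> = Vec::with_capacity(n + 1);
    assert!(template_params.capacity() >= n + 1);

    for i in 0..n as u32 {
        template_params.push(i);
    }
    assert_eq!(template_params.len(), n);
}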
@ -1250,7 +1258,7 @@ impl RecursiveTypeDescription {
set_members_of_composite_type(cx,
metadata_stub,
llvm_type,
member_descriptions,
member_descriptions.as_slice(),
file_metadata,
codemap::DUMMY_SP);
return metadata_stub;
@ -1300,7 +1308,7 @@ fn prepare_tuple_metadata(cx: &CrateContext,
llvm_type: tuple_llvm_type,
file_metadata: file_metadata,
member_description_factory: TupleMD(TupleMemberDescriptionFactory {
component_types: component_types.to_owned(),
component_types: Vec::from_slice(component_types),
span: span,
})
}
@ -1331,7 +1339,7 @@ impl GeneralMemberDescriptionFactory {
let (variant_type_metadata, variant_llvm_type, member_desc_factory) =
describe_enum_variant(cx,
struct_def,
self.variants[i],
*self.variants.get(i),
Some(self.discriminant_type_metadata),
self.containing_scope,
self.file_metadata,
@ -1343,7 +1351,7 @@ impl GeneralMemberDescriptionFactory {
set_members_of_composite_type(cx,
variant_type_metadata,
variant_llvm_type,
member_descriptions,
member_descriptions.as_slice(),
self.file_metadata,
codemap::DUMMY_SP);
MemberDescription {
@ -1387,7 +1395,10 @@ fn describe_enum_variant(cx: &CrateContext,
file_metadata: DIFile,
span: Span)
-> (DICompositeType, Type, MemberDescriptionFactory) {
let variant_llvm_type = Type::struct_(struct_def.fields.map(|&t| type_of::type_of(cx, t)),
let variant_llvm_type =
Type::struct_(struct_def.fields
.map(|&t| type_of::type_of(cx, t))
.as_slice(),
struct_def.packed);
// Could some consistency checks here: size, align, field count, discr type
@ -1491,7 +1502,7 @@ fn prepare_enum_metadata(cx: &CrateContext,
loc.line as c_uint,
bytes_to_bits(discriminant_size),
bytes_to_bits(discriminant_align),
create_DIArray(DIB(cx), enumerators_metadata),
create_DIArray(DIB(cx), enumerators_metadata.as_slice()),
discriminant_base_type_metadata)
}
})
@ -1507,9 +1518,10 @@ fn prepare_enum_metadata(cx: &CrateContext,
assert!(variants.len() == 1);
let (metadata_stub,
variant_llvm_type,
member_description_factory) = describe_enum_variant(cx,
member_description_factory) =
describe_enum_variant(cx,
struct_def,
variants[0],
*variants.get(0),
None,
containing_scope,
file_metadata,
@ -1565,9 +1577,10 @@ fn prepare_enum_metadata(cx: &CrateContext,
adt::NullablePointer { nonnull: ref struct_def, nndiscr, .. } => {
let (metadata_stub,
variant_llvm_type,
member_description_factory) = describe_enum_variant(cx,
member_description_factory) =
describe_enum_variant(cx,
struct_def,
variants[nndiscr],
*variants.get(nndiscr as uint),
None,
containing_scope,
file_metadata,
@ -1679,7 +1692,7 @@ fn set_members_of_composite_type(cx: &CrateContext,
.collect();

unsafe {
let type_array = create_DIArray(DIB(cx), member_metadata);
let type_array = create_DIArray(DIB(cx), member_metadata.as_slice());
llvm::LLVMDICompositeTypeSetTypeArray(composite_type_metadata, type_array);
}
}
@ -1739,7 +1752,9 @@ fn boxed_type_metadata(cx: &CrateContext,

let box_llvm_type = Type::at_box(cx, content_llvm_type);
let member_llvm_types = box_llvm_type.field_types();
assert!(box_layout_is_correct(cx, member_llvm_types, content_llvm_type));
assert!(box_layout_is_correct(cx,
member_llvm_types.as_slice(),
content_llvm_type));

let int_type = ty::mk_int();
let nil_pointer_type = ty::mk_nil_ptr(cx.tcx);
@ -1748,31 +1763,31 @@ fn boxed_type_metadata(cx: &CrateContext,
let member_descriptions = [
MemberDescription {
name: ~"refcnt",
llvm_type: member_llvm_types[0],
llvm_type: *member_llvm_types.get(0),
type_metadata: type_metadata(cx, int_type, codemap::DUMMY_SP),
offset: ComputedMemberOffset,
},
MemberDescription {
name: ~"drop_glue",
llvm_type: member_llvm_types[1],
llvm_type: *member_llvm_types.get(1),
type_metadata: nil_pointer_type_metadata,
offset: ComputedMemberOffset,
},
MemberDescription {
name: ~"prev",
llvm_type: member_llvm_types[2],
llvm_type: *member_llvm_types.get(2),
type_metadata: nil_pointer_type_metadata,
offset: ComputedMemberOffset,
},
MemberDescription {
name: ~"next",
llvm_type: member_llvm_types[3],
llvm_type: *member_llvm_types.get(3),
type_metadata: nil_pointer_type_metadata,
offset: ComputedMemberOffset,
},
MemberDescription {
name: ~"val",
llvm_type: member_llvm_types[4],
llvm_type: *member_llvm_types.get(4),
type_metadata: content_type_metadata,
offset: ComputedMemberOffset,
}
@ -1859,19 +1874,19 @@ fn vec_metadata(cx: &CrateContext,
let member_descriptions = [
MemberDescription {
name: ~"fill",
llvm_type: member_llvm_types[0],
llvm_type: *member_llvm_types.get(0),
type_metadata: int_type_metadata,
offset: ComputedMemberOffset,
},
MemberDescription {
name: ~"alloc",
llvm_type: member_llvm_types[1],
llvm_type: *member_llvm_types.get(1),
type_metadata: int_type_metadata,
offset: ComputedMemberOffset,
},
MemberDescription {
name: ~"elements",
llvm_type: member_llvm_types[2],
llvm_type: *member_llvm_types.get(2),
type_metadata: array_type_metadata,
offset: ComputedMemberOffset,
}
@ -1904,20 +1919,22 @@ fn vec_slice_metadata(cx: &CrateContext,
let slice_type_name = ppaux::ty_to_str(cx.tcx, vec_type);

let member_llvm_types = slice_llvm_type.field_types();
assert!(slice_layout_is_correct(cx, member_llvm_types, element_type));
assert!(slice_layout_is_correct(cx,
member_llvm_types.as_slice(),
element_type));

let data_ptr_type = ty::mk_ptr(cx.tcx, ty::mt { ty: element_type, mutbl: ast::MutImmutable });

let member_descriptions = [
MemberDescription {
name: ~"data_ptr",
llvm_type: member_llvm_types[0],
llvm_type: *member_llvm_types.get(0),
type_metadata: type_metadata(cx, data_ptr_type, span),
offset: ComputedMemberOffset,
},
MemberDescription {
name: ~"length",
llvm_type: member_llvm_types[1],
llvm_type: *member_llvm_types.get(1),
type_metadata: type_metadata(cx, ty::mk_uint(), span),
offset: ComputedMemberOffset,
},
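The `vec_slice_metadata` members mirror the runtime layout of a slice: a data pointer plus a length. That layout is still observable from ordinary Rust; a small sketch (purely illustrative, not the compiler's debuginfo code):

fn main() {
    let xs = [10u8, 20, 30];
    let slice: &[u8] = &xs;

    // A slice is represented as a (data pointer, length) pair.
    let data_ptr = slice.as_ptr();
    let length = slice.len();

    assert_eq!(length, 3);
    // Reading back through the raw pointer needs unsafe.
    unsafe { assert_eq!(*data_ptr, 10) };
}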
@ -1954,7 +1971,8 @@ fn subroutine_type_metadata(cx: &CrateContext,
let loc = span_start(cx, span);
let file_metadata = file_metadata(cx, loc.file.name);

let mut signature_metadata: Vec<DIType> = vec::with_capacity(signature.inputs.len() + 1);
let mut signature_metadata: Vec<DIType> =
Vec::with_capacity(signature.inputs.len() + 1);

// return type
signature_metadata.push(match ty::get(signature.output).sty {
@ -1971,7 +1989,7 @@ fn subroutine_type_metadata(cx: &CrateContext,
llvm::LLVMDIBuilderCreateSubroutineType(
DIB(cx),
file_metadata,
create_DIArray(DIB(cx), signature_metadata))
create_DIArray(DIB(cx), signature_metadata.as_slice()))
};
}
@ -1993,7 +2011,7 @@ fn trait_metadata(cx: &CrateContext,
ident_string.get();
// Add type and region parameters
let name = ppaux::parameterized(cx.tcx, name, &substs.regions,
substs.tps, def_id, true);
substs.tps.as_slice(), def_id, true);

let (containing_scope, definition_span) = get_namespace_and_span_for_item(cx, def_id);

@ -2121,7 +2139,10 @@ fn type_metadata(cx: &CrateContext,
}
},
ty::ty_tup(ref elements) => {
prepare_tuple_metadata(cx, t, *elements, usage_site_span).finalize(cx)
prepare_tuple_metadata(cx,
t,
elements.as_slice(),
usage_site_span).finalize(cx)
}
_ => cx.sess.bug(format!("debuginfo: unexpected type in type_metadata: {:?}", sty))
};
@ -71,6 +71,7 @@ use middle::trans::machine::llsize_of;
use middle::trans::type_::Type;

use std::vec;
use std::vec_ng::Vec;
use syntax::ast;
use syntax::ast_map;
use syntax::codemap;
@ -743,7 +744,7 @@ fn trans_rvalue_dps_unadjusted<'a>(bcx: &'a Block<'a>,
let repr = adt::represent_type(bcx.ccx(), expr_ty(bcx, expr));
let numbered_fields: Vec<(uint, @ast::Expr)> =
args.iter().enumerate().map(|(i, arg)| (i, *arg)).collect();
trans_adt(bcx, repr, 0, numbered_fields, None, dest)
trans_adt(bcx, repr, 0, numbered_fields.as_slice(), None, dest)
}
ast::ExprLit(lit) => {
match lit.node {
@ -973,7 +974,7 @@ pub fn with_field_tys<R>(tcx: ty::ctxt,

match ty::get(ty).sty {
ty::ty_struct(did, ref substs) => {
op(0, struct_fields(tcx, did, substs))
op(0, struct_fields(tcx, did, substs).as_slice())
}

ty::ty_enum(_, ref substs) => {
@ -995,7 +996,9 @@ pub fn with_field_tys<R>(tcx: ty::ctxt,
let variant_info = ty::enum_variant_with_id(
tcx, enum_id, variant_id);
op(variant_info.disr_val,
struct_fields(tcx, variant_id, substs))
struct_fields(tcx,
variant_id,
substs).as_slice())
}
_ => {
tcx.sess.bug("resolve didn't map this expr to a \
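`numbered_fields` pairs each argument with its position via `enumerate`, collects into a `Vec`, and immediately lends it out as a slice. A compact sketch of that pattern (the callee is a made-up stand-in):

// Hypothetical stand-in: just count the fields it would translate.
fn trans_adt(fields: &[(usize, &str)]) -> usize {
    fields.len()
}

fn main() {
    let args = ["a", "b", "c"];

    let numbered_fields: Vec<(usize, &str)> =
        args.iter().enumerate().map(|(i, arg)| (i, *arg)).collect();

    assert_eq!(trans_adt(numbered_fields.as_slice()), 3);
}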
@ -27,6 +27,7 @@ use middle::ty::FnSig;
use middle::ty;
use std::cmp;
use std::libc::c_uint;
use std::vec_ng::Vec;
use syntax::abi::{Cdecl, Aapcs, C, AbiSet, Win64};
use syntax::abi::{RustIntrinsic, Rust, Stdcall, Fastcall, System};
use syntax::codemap::Span;
@ -196,14 +197,14 @@ pub fn trans_native_call<'a>(
ty::ty_bare_fn(ref fn_ty) => (fn_ty.abis, fn_ty.sig.clone()),
_ => ccx.sess.bug("trans_native_call called on non-function type")
};
let llsig = foreign_signature(ccx, &fn_sig, passed_arg_tys);
let llsig = foreign_signature(ccx, &fn_sig, passed_arg_tys.as_slice());
let ret_def = !return_type_is_void(bcx.ccx(), fn_sig.output);
let fn_type = cabi::compute_abi_info(ccx,
llsig.llarg_tys,
llsig.llarg_tys.as_slice(),
llsig.llret_ty,
ret_def);

let arg_tys: &[cabi::ArgType] = fn_type.arg_tys;
let arg_tys: &[cabi::ArgType] = fn_type.arg_tys.as_slice();

let mut llargs_foreign = Vec::new();

@ -228,7 +229,8 @@ pub fn trans_native_call<'a>(
let mut llarg_rust = llarg_rust;

// Does Rust pass this argument by pointer?
let rust_indirect = type_of::arg_is_indirect(ccx, passed_arg_tys[i]);
let rust_indirect = type_of::arg_is_indirect(ccx,
*passed_arg_tys.get(i));

debug!("argument {}, llarg_rust={}, rust_indirect={}, arg_ty={}",
i,
@ -239,7 +241,10 @@ pub fn trans_native_call<'a>(
// Ensure that we always have the Rust value indirectly,
// because it makes bitcasting easier.
if !rust_indirect {
let scratch = base::alloca(bcx, type_of::type_of(ccx, passed_arg_tys[i]), "__arg");
let scratch =
base::alloca(bcx,
type_of::type_of(ccx, *passed_arg_tys.get(i)),
"__arg");
Store(bcx, llarg_rust, scratch);
llarg_rust = scratch;
}
@ -295,7 +300,11 @@ pub fn trans_native_call<'a>(
None
};
let attrs = sret_attr.as_slice();
let llforeign_retval = CallWithConv(bcx, llfn, llargs_foreign, cc, attrs);
let llforeign_retval = CallWithConv(bcx,
llfn,
llargs_foreign.as_slice(),
cc,
attrs);

// If the function we just called does not use an outpointer,
// store the result into the rust outpointer. Cast the outpointer
@ -466,7 +475,11 @@ pub fn trans_rust_fn_with_foreign_abi(ccx: @CrateContext,
ccx.tcx.map.path_to_str(id),
id, t.repr(tcx));

let llfn = base::decl_internal_rust_fn(ccx, false, f.sig.inputs, f.sig.output, ps);
let llfn = base::decl_internal_rust_fn(ccx,
false,
f.sig.inputs.as_slice(),
f.sig.output,
ps);
base::set_llvm_fn_attrs(attrs, llfn);
base::trans_fn(ccx, decl, body, llfn, None, id, []);
llfn
@ -579,10 +592,10 @@ pub fn trans_rust_fn_with_foreign_abi(ccx: @CrateContext,
// Careful to adapt for cases where the native convention uses
// a pointer and Rust does not or vice versa.
for i in range(0, tys.fn_sig.inputs.len()) {
let rust_ty = tys.fn_sig.inputs[i];
let rust_ty = *tys.fn_sig.inputs.get(i);
let llrust_ty = tys.llsig.llarg_tys[i];
let llrust_ty = *tys.llsig.llarg_tys.get(i);
let rust_indirect = type_of::arg_is_indirect(ccx, rust_ty);
let llforeign_arg_ty = tys.fn_ty.arg_tys[i];
let llforeign_arg_ty = *tys.fn_ty.arg_tys.get(i);
let foreign_indirect = llforeign_arg_ty.is_indirect();

// skip padding
@ -730,7 +743,7 @@ fn foreign_signature(ccx: &CrateContext, fn_sig: &ty::FnSig, arg_tys: &[ty::t])
* values by pointer like we do.
*/

let llarg_tys = arg_tys.map(|&arg| type_of(ccx, arg));
let llarg_tys = arg_tys.iter().map(|&arg| type_of(ccx, arg)).collect();
let llret_ty = type_of::type_of(ccx, fn_sig.output);
LlvmSignature {
llarg_tys: llarg_tys,
@ -750,10 +763,10 @@ fn foreign_types_for_fn_ty(ccx: &CrateContext,
ty::ty_bare_fn(ref fn_ty) => fn_ty.sig.clone(),
_ => ccx.sess.bug("foreign_types_for_fn_ty called on non-function type")
};
let llsig = foreign_signature(ccx, &fn_sig, fn_sig.inputs);
let llsig = foreign_signature(ccx, &fn_sig, fn_sig.inputs.as_slice());
let ret_def = !return_type_is_void(ccx, fn_sig.output);
let fn_ty = cabi::compute_abi_info(ccx,
llsig.llarg_tys,
llsig.llarg_tys.as_slice(),
llsig.llret_ty,
ret_def);
debug!("foreign_types_for_fn_ty(\
@ -762,9 +775,9 @@ fn foreign_types_for_fn_ty(ccx: &CrateContext,
fn_ty={} -> {}, \
ret_def={}",
ty.repr(ccx.tcx),
ccx.tn.types_to_str(llsig.llarg_tys),
ccx.tn.types_to_str(llsig.llarg_tys.as_slice()),
ccx.tn.type_to_str(llsig.llret_ty),
ccx.tn.types_to_str(fn_ty.arg_tys.map(|t| t.ty)),
ccx.tn.types_to_str(fn_ty.arg_tys.map(|t| t.ty).as_slice()),
ccx.tn.type_to_str(fn_ty.ret_ty.ty),
ret_def);

@ -810,9 +823,9 @@ fn lltype_for_fn_from_foreign_types(tys: &ForeignTypes) -> Type {
}

if tys.fn_sig.variadic {
Type::variadic_func(llargument_tys, &llreturn_ty)
Type::variadic_func(llargument_tys.as_slice(), &llreturn_ty)
} else {
Type::func(llargument_tys, &llreturn_ty)
Type::func(llargument_tys.as_slice(), &llreturn_ty)
}
}
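The loop in the `@ -579` hunk walks parallel vectors by index, so each lookup picks up a `.get(i)`. In current Rust the same walk is often written with `zip`, which sidesteps the indexing entirely; a sketch with toy data (not the compiler's types):

fn main() {
    let rust_tys = ["int", "bool"];
    let llarg_tys = ["i64", "i1"];

    // Walk two parallel sequences in lock step without manual indexing.
    for (rust_ty, llrust_ty) in rust_tys.iter().zip(llarg_tys.iter()) {
        println!("{} lowers to {}", rust_ty, llrust_ty);
    }
}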
@ -245,7 +245,7 @@ fn trans_struct_drop<'a>(bcx: &'a Block<'a>,

// Find and call the actual destructor
let dtor_addr = get_res_dtor(bcx.ccx(), dtor_did,
class_did, substs.tps.clone());
class_did, substs.tps.as_slice());

// The second argument is the "self" argument for drop
let params = unsafe {
@ -262,7 +262,7 @@ fn trans_struct_drop<'a>(bcx: &'a Block<'a>,
// destructors if the user destructor fails.
let field_scope = bcx.fcx.push_custom_cleanup_scope();

let self_arg = PointerCast(bcx, v0, params[0]);
let self_arg = PointerCast(bcx, v0, *params.get(0));
let args = vec!(self_arg);

// Add all the fields as a value which needs to be cleaned at the end of
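Note the `substs.tps.clone()` → `substs.tps.as_slice()` change above: once the callee takes a slice, the caller can lend the data instead of copying it. A minimal sketch of the borrow-instead-of-clone refactor (both function names are invented):

// Before: the callee demanded its own copy of the type parameters.
fn get_res_dtor_owned(tps: Vec<u32>) -> usize { tps.len() }

// After: the callee only reads them, so a borrowed slice is enough.
fn get_res_dtor(tps: &[u32]) -> usize { tps.len() }

fn main() {
    let tps = vec![1u32, 2, 3];
    assert_eq!(get_res_dtor_owned(tps.clone()), 3); // extra allocation
    assert_eq!(get_res_dtor(tps.as_slice()), 3);    // no copy
}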
@ -25,6 +25,7 @@ use middle::trans::machine;
use middle::trans::machine::llsize_of;
use middle::trans::type_::Type;
use middle::ty;
use std::vec_ng::Vec;
use syntax::ast;
use syntax::ast_map;
use syntax::parse::token;
@ -212,7 +213,7 @@ pub fn trans_intrinsic(ccx: @CrateContext,
let order = if split.len() == 2 {
lib::llvm::SequentiallyConsistent
} else {
match split[2] {
match *split.get(2) {
"relaxed" => lib::llvm::Monotonic,
"acq" => lib::llvm::Acquire,
"rel" => lib::llvm::Release,
@ -221,7 +222,7 @@ pub fn trans_intrinsic(ccx: @CrateContext,
}
};

match split[1] {
match *split.get(1) {
"cxchg" => {
let old = AtomicCmpXchg(bcx, get_param(decl, first_real_arg),
get_param(decl, first_real_arg + 1u),
@ -284,7 +285,7 @@ pub fn trans_intrinsic(ccx: @CrateContext,
RetVoid(bcx);
}
"size_of" => {
let tp_ty = substs.tys[0];
let tp_ty = *substs.tys.get(0);
let lltp_ty = type_of::type_of(ccx, tp_ty);
Ret(bcx, C_uint(ccx, machine::llsize_of_real(ccx, lltp_ty) as uint));
}
@ -294,7 +295,7 @@ pub fn trans_intrinsic(ccx: @CrateContext,
// if the value is non-immediate. Note that, with
// intrinsics, there are no argument cleanups to
// concern ourselves with, so we can use an rvalue datum.
let tp_ty = substs.tys[0];
let tp_ty = *substs.tys.get(0);
let mode = appropriate_rvalue_mode(ccx, tp_ty);
let src = Datum {val: get_param(decl, first_real_arg + 1u),
ty: tp_ty,
@ -303,17 +304,17 @@ pub fn trans_intrinsic(ccx: @CrateContext,
RetVoid(bcx);
}
"min_align_of" => {
let tp_ty = substs.tys[0];
let tp_ty = *substs.tys.get(0);
let lltp_ty = type_of::type_of(ccx, tp_ty);
Ret(bcx, C_uint(ccx, machine::llalign_of_min(ccx, lltp_ty) as uint));
}
"pref_align_of"=> {
let tp_ty = substs.tys[0];
let tp_ty = *substs.tys.get(0);
let lltp_ty = type_of::type_of(ccx, tp_ty);
Ret(bcx, C_uint(ccx, machine::llalign_of_pref(ccx, lltp_ty) as uint));
}
"get_tydesc" => {
let tp_ty = substs.tys[0];
let tp_ty = *substs.tys.get(0);
let static_ti = get_tydesc(ccx, tp_ty);
glue::lazily_emit_visit_glue(ccx, static_ti);

@ -328,7 +329,7 @@ pub fn trans_intrinsic(ccx: @CrateContext,
"type_id" => {
let hash = ty::hash_crate_independent(
ccx.tcx,
substs.tys[0],
*substs.tys.get(0),
&ccx.link_meta.crate_hash);
// NB: This needs to be kept in lockstep with the TypeId struct in
// libstd/unstable/intrinsics.rs
@ -342,7 +343,7 @@ pub fn trans_intrinsic(ccx: @CrateContext,
}
}
"init" => {
let tp_ty = substs.tys[0];
let tp_ty = *substs.tys.get(0);
let lltp_ty = type_of::type_of(ccx, tp_ty);
match bcx.fcx.llretptr.get() {
Some(ptr) => { Store(bcx, C_null(lltp_ty), ptr); RetVoid(bcx); }
@ -352,7 +353,7 @@ pub fn trans_intrinsic(ccx: @CrateContext,
}
"uninit" => {
// Do nothing, this is effectively a no-op
let retty = substs.tys[0];
let retty = *substs.tys.get(0);
if type_is_immediate(ccx, retty) && !return_type_is_void(ccx, retty) {
unsafe {
Ret(bcx, lib::llvm::llvm::LLVMGetUndef(type_of(ccx, retty).to_ref()));
@ -365,7 +366,7 @@ pub fn trans_intrinsic(ccx: @CrateContext,
RetVoid(bcx);
}
"transmute" => {
let (in_type, out_type) = (substs.tys[0], substs.tys[1]);
let (in_type, out_type) = (*substs.tys.get(0), *substs.tys.get(1));
let llintype = type_of::type_of(ccx, in_type);
let llouttype = type_of::type_of(ccx, out_type);

@ -432,11 +433,11 @@ pub fn trans_intrinsic(ccx: @CrateContext,
}
}
"needs_drop" => {
let tp_ty = substs.tys[0];
let tp_ty = *substs.tys.get(0);
Ret(bcx, C_bool(ty::type_needs_drop(ccx.tcx, tp_ty)));
}
"owns_managed" => {
let tp_ty = substs.tys[0];
let tp_ty = *substs.tys.get(0);
Ret(bcx, C_bool(ty::type_contents(ccx.tcx, tp_ty).owns_managed()));
}
"visit_tydesc" => {
@ -452,9 +453,11 @@ pub fn trans_intrinsic(ccx: @CrateContext,
let lladdr = InBoundsGEP(bcx, ptr, [offset]);
Ret(bcx, lladdr);
}
"copy_nonoverlapping_memory" => copy_intrinsic(bcx, false, substs.tys[0]),
"copy_memory" => copy_intrinsic(bcx, true, substs.tys[0]),
"set_memory" => memset_intrinsic(bcx, substs.tys[0]),
"copy_nonoverlapping_memory" => {
copy_intrinsic(bcx, false, *substs.tys.get(0))
}
"copy_memory" => copy_intrinsic(bcx, true, *substs.tys.get(0)),
"set_memory" => memset_intrinsic(bcx, *substs.tys.get(0)),
"ctlz8" => count_zeros_intrinsic(bcx, "llvm.ctlz.i8"),
"ctlz16" => count_zeros_intrinsic(bcx, "llvm.ctlz.i16"),
"ctlz32" => count_zeros_intrinsic(bcx, "llvm.ctlz.i32"),
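The atomic-intrinsic dispatch above splits a name like `atomic_cxchg_acq` and matches on the pieces, which is why `split[1]` and `split[2]` indexing appears. A rough modern sketch of the same name parsing (the ordering names are the LLVM ones used in the hunk; everything else is invented):

fn main() {
    let name = "atomic_cxchg_acq";
    let split: Vec<&str> = name.split('_').collect();

    let order = if split.len() == 2 {
        "SequentiallyConsistent"
    } else {
        match split[2] {
            "relaxed" => "Monotonic",
            "acq" => "Acquire",
            "rel" => "Release",
            other => panic!("unknown ordering {}", other),
        }
    };

    assert_eq!(split[1], "cxchg");
    assert_eq!(order, "Acquire");
}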
@ -25,16 +25,16 @@ use middle::trans::expr::{SaveIn, Ignore};
use middle::trans::expr;
use middle::trans::glue;
use middle::trans::monomorphize;
use middle::trans::type_::Type;
use middle::trans::type_of::*;
use middle::ty;
use middle::typeck;
use util::common::indenter;
use util::ppaux::Repr;

use middle::trans::type_::Type;

use std::c_str::ToCStr;
use std::vec;
use std::vec_ng::Vec;
use std::vec_ng;
use syntax::parse::token;
use syntax::{ast, ast_map, visit};
@ -202,18 +202,21 @@ pub fn trans_static_method_callee(bcx: &Block,
let vtbls = ccx.maps.vtable_map.borrow().get().get_copy(&expr_id);
let vtbls = resolve_vtables_in_fn_ctxt(bcx.fcx, vtbls);

match vtbls[bound_index][0] {
match vtbls.get(bound_index).get(0) {
typeck::vtable_static(impl_did, ref rcvr_substs, rcvr_origins) => {
&typeck::vtable_static(impl_did, ref rcvr_substs, rcvr_origins) => {
assert!(rcvr_substs.iter().all(|t| !ty::type_needs_infer(*t)));

let mth_id = method_with_name(ccx, impl_did, mname);
let (callee_substs, callee_origins) =
combine_impl_and_methods_tps(
bcx, mth_id, expr_id, false,
*rcvr_substs, rcvr_origins);
rcvr_substs.as_slice(), rcvr_origins);

let llfn = trans_fn_ref_with_vtables(bcx, mth_id, expr_id,
false, callee_substs,
let llfn = trans_fn_ref_with_vtables(bcx,
mth_id,
expr_id,
false,
callee_substs.as_slice(),
Some(callee_origins));

let callee_ty = node_id_type(bcx, expr_id);
@ -268,14 +271,14 @@ fn trans_monomorphized_callee<'a>(bcx: &'a Block<'a>,
let (callee_substs, callee_origins) =
combine_impl_and_methods_tps(
bcx, mth_id, expr_id, true,
*rcvr_substs, rcvr_origins);
rcvr_substs.as_slice(), rcvr_origins);

// translate the function
let llfn = trans_fn_ref_with_vtables(bcx,
mth_id,
expr_id,
true,
callee_substs,
callee_substs.as_slice(),
Some(callee_origins));

Callee { bcx: bcx, data: Fn(llfn) }
@ -316,7 +319,7 @@ fn combine_impl_and_methods_tps(bcx: &Block,
let node_substs = node_id_type_params(bcx, expr_id, is_method);
debug!("rcvr_substs={:?}", rcvr_substs.repr(ccx.tcx));
let ty_substs
= vec_ng::append(rcvr_substs.to_owned(),
= vec_ng::append(Vec::from_slice(rcvr_substs),
node_substs.tailn(node_substs.len() - n_m_tps));
debug!("n_m_tps={:?}", n_m_tps);
debug!("node_substs={:?}", node_substs.repr(ccx.tcx));
@ -327,10 +330,10 @@ fn combine_impl_and_methods_tps(bcx: &Block,
// exist, in which case we need to make them.
let r_m_origins = match node_vtables(bcx, expr_id) {
Some(vt) => vt,
None => @vec::from_elem(node_substs.len(), @Vec::new())
None => @Vec::from_elem(node_substs.len(), @Vec::new())
};
let vtables
= @vec_ng::append(rcvr_origins.to_owned(),
= @vec_ng::append(Vec::from_slice(rcvr_origins.as_slice()),
r_m_origins.tailn(r_m_origins.len() - n_m_tps));

(ty_substs, vtables)
@ -460,7 +463,7 @@ pub fn get_vtable(bcx: &Block,
|
||||||
let _icx = push_ctxt("meth::get_vtable");
|
let _icx = push_ctxt("meth::get_vtable");
|
||||||
|
|
||||||
// Check the cache.
|
// Check the cache.
|
||||||
let hash_id = (self_ty, vtable_id(ccx, &origins[0]));
|
let hash_id = (self_ty, vtable_id(ccx, origins.get(0)));
|
||||||
{
|
{
|
||||||
let vtables = ccx.vtables.borrow();
|
let vtables = ccx.vtables.borrow();
|
||||||
match vtables.get().find(&hash_id) {
|
match vtables.get().find(&hash_id) {
|
||||||
|
@ -470,18 +473,25 @@ pub fn get_vtable(bcx: &Block,
|
||||||
}
|
}
|
||||||
|
|
||||||
// Not in the cache. Actually build it.
|
// Not in the cache. Actually build it.
|
||||||
let methods = origins.flat_map(|origin| {
|
let mut methods = Vec::new();
|
||||||
|
for origin in origins.iter() {
|
||||||
match *origin {
|
match *origin {
|
||||||
typeck::vtable_static(id, ref substs, sub_vtables) => {
|
typeck::vtable_static(id, ref substs, sub_vtables) => {
|
||||||
emit_vtable_methods(bcx, id, *substs, sub_vtables)
|
let vtable_methods = emit_vtable_methods(bcx,
|
||||||
|
id,
|
||||||
|
substs.as_slice(),
|
||||||
|
sub_vtables);
|
||||||
|
for vtable_method in vtable_methods.move_iter() {
|
||||||
|
methods.push(vtable_method)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
_ => ccx.sess.bug("get_vtable: expected a static origin"),
|
_ => ccx.sess.bug("get_vtable: expected a static origin"),
|
||||||
}
|
}
|
||||||
});
|
}
|
||||||
|
|
||||||
// Generate a destructor for the vtable.
|
// Generate a destructor for the vtable.
|
||||||
let drop_glue = glue::get_drop_glue(ccx, self_ty);
|
let drop_glue = glue::get_drop_glue(ccx, self_ty);
|
||||||
let vtable = make_vtable(ccx, drop_glue, methods);
|
let vtable = make_vtable(ccx, drop_glue, methods.as_slice());
|
||||||
|
|
||||||
let mut vtables = ccx.vtables.borrow_mut();
|
let mut vtables = ccx.vtables.borrow_mut();
|
||||||
vtables.get().insert(hash_id, vtable);
|
vtables.get().insert(hash_id, vtable);
|
||||||
|
@ -501,7 +511,7 @@ pub fn make_vtable(ccx: &CrateContext,
|
||||||
components.push(ptr)
|
components.push(ptr)
|
||||||
}
|
}
|
||||||
|
|
||||||
let tbl = C_struct(components, false);
|
let tbl = C_struct(components.as_slice(), false);
|
||||||
let sym = token::gensym("vtable");
|
let sym = token::gensym("vtable");
|
||||||
let vt_gvar = format!("vtable{}", sym).with_c_str(|buf| {
|
let vt_gvar = format!("vtable{}", sym).with_c_str(|buf| {
|
||||||
llvm::LLVMAddGlobal(ccx.llmod, val_ty(tbl).to_ref(), buf)
|
llvm::LLVMAddGlobal(ccx.llmod, val_ty(tbl).to_ref(), buf)
|
||||||
|
@ -589,7 +599,7 @@ pub fn trans_trait_cast<'a>(bcx: &'a Block<'a>,
|
||||||
*vtable_map.get().get(&id)
|
*vtable_map.get().get(&id)
|
||||||
};
|
};
|
||||||
let res = resolve_vtables_in_fn_ctxt(bcx.fcx, res);
|
let res = resolve_vtables_in_fn_ctxt(bcx.fcx, res);
|
||||||
res[0]
|
*res.get(0)
|
||||||
};
|
};
|
||||||
let vtable = get_vtable(bcx, v_ty, origins);
|
let vtable = get_vtable(bcx, v_ty, origins);
|
||||||
let llvtabledest = GEPi(bcx, lldest, [0u, abi::trt_field_vtable]);
|
let llvtabledest = GEPi(bcx, lldest, [0u, abi::trt_field_vtable]);
|
||||||
|
|
|
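The get_vtable hunk above trades a flat_map over the old @-vector for an explicit Vec::new() plus a push loop, and swaps origins[0] indexing for origins.get(0). A minimal sketch of that shape against the pre-1.0 std::vec_ng::Vec API this patch targets; the expand helper and the data are invented for illustration:

    use std::vec_ng::Vec;

    // Invented stand-in for a per-origin expansion step.
    fn expand(n: uint) -> Vec<uint> {
        Vec::from_fn(n, |i| i * n)
    }

    fn main() {
        let widths = vec!(1u, 2u, 3u);

        // Old shape: widths.flat_map(|&n| expand(n)).
        // New shape: build the Vec explicitly and move each element over.
        let mut flattened = Vec::new();
        for &n in widths.iter() {
            for item in expand(n).move_iter() {
                flattened.push(item)
            }
        }

        // Indexing goes through .get() instead of [].
        println!("first width = {}, first expanded = {}",
                 *widths.get(0), *flattened.get(0));
    }
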
@@ -23,6 +23,7 @@ use middle::ty;
 use middle::typeck;
 use util::ppaux::Repr;

+use std::vec_ng::Vec;
 use syntax::ast;
 use syntax::ast_map;
 use syntax::ast_util::local_def;
@@ -51,7 +52,7 @@ pub fn monomorphic_fn(ccx: @CrateContext,
 let mut must_cast = false;

 let psubsts = @param_substs {
-tys: real_substs.tps.to_owned(),
+tys: real_substs.tps.clone(),
 vtables: vtables,
 self_ty: real_substs.self_ty.clone(),
 self_vtables: self_vtables
@@ -124,7 +125,7 @@ pub fn monomorphic_fn(ccx: @CrateContext,

 debug!("monomorphic_fn about to subst into {}", llitem_ty.repr(ccx.tcx));
 let mono_ty = match is_static_provided {
-None => ty::subst_tps(ccx.tcx, psubsts.tys,
+None => ty::subst_tps(ccx.tcx, psubsts.tys.as_slice(),
 psubsts.self_ty, llitem_ty),
 Some(num_method_ty_params) => {
 // Static default methods are a little unfortunate, in
@@ -186,7 +187,7 @@ pub fn monomorphic_fn(ccx: @CrateContext,

 let mk_lldecl = || {
 let lldecl = decl_internal_rust_fn(ccx, false,
-f.sig.inputs,
+f.sig.inputs.as_slice(),
 f.sig.output, s);
 let mut monomorphized = ccx.monomorphized.borrow_mut();
 monomorphized.get().insert(hash_id, lldecl);

@@ -28,7 +28,8 @@ use util::ppaux::ty_to_str;
 use arena::TypedArena;
 use std::libc::c_uint;
 use std::option::{Some,None};
-use std::vec;
+use std::vec_ng::Vec;
+use std::vec_ng;
 use syntax::ast::DefId;
 use syntax::ast;
 use syntax::ast_map;
@@ -93,12 +94,12 @@ impl<'a> Reflector<'a> {
 pub fn visit(&mut self, ty_name: &str, args: &[ValueRef]) {
 let fcx = self.bcx.fcx;
 let tcx = self.bcx.tcx();
-let mth_idx = ty::method_idx(
+let mth_idx = ty::method_idx(token::str_to_ident(~"visit_" + ty_name),
-token::str_to_ident(~"visit_" + ty_name),
+self.visitor_methods.as_slice()).expect(
-*self.visitor_methods).expect(format!("couldn't find visit method \
+format!("couldn't find visit method for {}", ty_name));
-for {}", ty_name));
 let mth_ty =
-ty::mk_bare_fn(tcx, self.visitor_methods[mth_idx].fty.clone());
+ty::mk_bare_fn(tcx,
+self.visitor_methods.get(mth_idx).fty.clone());
 let v = self.visitor_val;
 debug!("passing {} args:", args.len());
 let mut bcx = self.bcx;
@@ -134,7 +135,8 @@ impl<'a> Reflector<'a> {
 match vstore {
 ty::vstore_fixed(n) => {
 let extra = vec_ng::append(vec!(self.c_uint(n)),
-self.c_size_and_align(t));
+self.c_size_and_align(t)
+.as_slice());
 (~"fixed", extra)
 }
 ty::vstore_slice(_) => (~"slice", Vec::new()),
@@ -172,18 +174,18 @@ impl<'a> Reflector<'a> {

 ty::ty_unboxed_vec(ref mt) => {
 let values = self.c_mt(mt);
-self.visit("vec", values)
+self.visit("vec", values.as_slice())
 }

 // Should rename to str_*/vec_*.
 ty::ty_str(vst) => {
 let (name, extra) = self.vstore_name_and_extra(t, vst);
-self.visit(~"estr_" + name, extra)
+self.visit(~"estr_" + name, extra.as_slice())
 }
 ty::ty_vec(ref mt, vst) => {
 let (name, extra) = self.vstore_name_and_extra(t, vst);
-let extra = extra + self.c_mt(mt);
+let extra = vec_ng::append(extra, self.c_mt(mt).as_slice());
-self.visit(~"evec_" + name, extra)
+self.visit(~"evec_" + name, extra.as_slice())
 }
 // Should remove mt from box and uniq.
 ty::ty_box(typ) => {
@@ -191,31 +193,31 @@ impl<'a> Reflector<'a> {
 ty: typ,
 mutbl: ast::MutImmutable,
 });
-self.visit("box", extra)
+self.visit("box", extra.as_slice())
 }
 ty::ty_uniq(typ) => {
 let extra = self.c_mt(&ty::mt {
 ty: typ,
 mutbl: ast::MutImmutable,
 });
-self.visit("uniq", extra)
+self.visit("uniq", extra.as_slice())
 }
 ty::ty_ptr(ref mt) => {
 let extra = self.c_mt(mt);
-self.visit("ptr", extra)
+self.visit("ptr", extra.as_slice())
 }
 ty::ty_rptr(_, ref mt) => {
 let extra = self.c_mt(mt);
-self.visit("rptr", extra)
+self.visit("rptr", extra.as_slice())
 }

 ty::ty_tup(ref tys) => {
-let extra = vec!(self.c_uint(tys.len()))
+let extra = vec_ng::append(vec!(self.c_uint(tys.len())),
-+ self.c_size_and_align(t);
+self.c_size_and_align(t).as_slice());
-self.bracketed("tup", extra, |this| {
+self.bracketed("tup", extra.as_slice(), |this| {
 for (i, t) in tys.iter().enumerate() {
 let extra = vec!(this.c_uint(i), this.c_tydesc(*t));
-this.visit("tup_field", extra);
+this.visit("tup_field", extra.as_slice());
 }
 })
 }
@@ -230,9 +232,9 @@ impl<'a> Reflector<'a> {
 self.c_uint(sigilval),
 self.c_uint(fty.sig.inputs.len()),
 self.c_uint(retval));
-self.visit("enter_fn", extra);
+self.visit("enter_fn", extra.as_slice());
 self.visit_sig(retval, &fty.sig);
-self.visit("leave_fn", extra);
+self.visit("leave_fn", extra.as_slice());
 }

 // FIXME (#2594): fetch constants out of intrinsic:: for the
@@ -245,33 +247,33 @@ impl<'a> Reflector<'a> {
 self.c_uint(sigilval),
 self.c_uint(fty.sig.inputs.len()),
 self.c_uint(retval));
-self.visit("enter_fn", extra);
+self.visit("enter_fn", extra.as_slice());
 self.visit_sig(retval, &fty.sig);
-self.visit("leave_fn", extra);
+self.visit("leave_fn", extra.as_slice());
 }

 ty::ty_struct(did, ref substs) => {
 let fields = ty::struct_fields(tcx, did, substs);
 let mut named_fields = false;
 if !fields.is_empty() {
-named_fields =
+named_fields = fields.get(0).ident.name !=
-fields[0].ident.name != special_idents::unnamed_field.name;
+special_idents::unnamed_field.name;
 }

-let extra = vec!(
+let extra = vec_ng::append(vec!(
 self.c_slice(token::intern_and_get_ident(ty_to_str(tcx,
 t))),
 self.c_bool(named_fields),
 self.c_uint(fields.len())
-) + self.c_size_and_align(t);
+), self.c_size_and_align(t).as_slice());
-self.bracketed("class", extra, |this| {
+self.bracketed("class", extra.as_slice(), |this| {
 for (i, field) in fields.iter().enumerate() {
-let extra = vec!(
+let extra = vec_ng::append(vec!(
 this.c_uint(i),
 this.c_slice(token::get_ident(field.ident)),
 this.c_bool(named_fields)
-) + this.c_mt(&field.mt);
+), this.c_mt(&field.mt).as_slice());
-this.visit("class_field", extra);
+this.visit("class_field", extra.as_slice());
 }
 })
 }
@@ -319,16 +321,20 @@ impl<'a> Reflector<'a> {
 llfdecl
 };

-let enum_args = vec!(self.c_uint(variants.len()), make_get_disr())
+let enum_args = vec_ng::append(vec!(self.c_uint(variants.len()),
-+ self.c_size_and_align(t);
+make_get_disr()),
-self.bracketed("enum", enum_args, |this| {
+self.c_size_and_align(t)
+.as_slice());
+self.bracketed("enum", enum_args.as_slice(), |this| {
 for (i, v) in variants.iter().enumerate() {
 let name = token::get_ident(v.name);
 let variant_args = vec!(this.c_uint(i),
 C_u64(v.disr_val),
 this.c_uint(v.args.len()),
 this.c_slice(name));
-this.bracketed("enum_variant", variant_args, |this| {
+this.bracketed("enum_variant",
+variant_args.as_slice(),
+|this| {
 for (j, a) in v.args.iter().enumerate() {
 let bcx = this.bcx;
 let null = C_null(llptrty);
@@ -337,7 +343,8 @@ impl<'a> Reflector<'a> {
 let field_args = vec!(this.c_uint(j),
 offset,
 this.c_tydesc(*a));
-this.visit("enum_variant_field", field_args);
+this.visit("enum_variant_field",
+field_args.as_slice());
 }
 })
 }
@@ -356,7 +363,7 @@ impl<'a> Reflector<'a> {
 ty::ty_err => self.leaf("err"),
 ty::ty_param(ref p) => {
 let extra = vec!(self.c_uint(p.idx));
-self.visit("param", extra)
+self.visit("param", extra.as_slice())
 }
 ty::ty_self(..) => self.leaf("self")
 }
@@ -368,12 +375,12 @@ impl<'a> Reflector<'a> {
 let extra = vec!(self.c_uint(i),
 self.c_uint(modeval),
 self.c_tydesc(*arg));
-self.visit("fn_input", extra);
+self.visit("fn_input", extra.as_slice());
 }
 let extra = vec!(self.c_uint(retval),
 self.c_bool(sig.variadic),
 self.c_tydesc(sig.output));
-self.visit("fn_output", extra);
+self.visit("fn_output", extra.as_slice());
 }
 }

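Nearly every reflect hunk above is one of two moves: vector concatenation with + becomes vec_ng::append(vec, tail.as_slice()), and a Vec is handed to a slice-taking callee through .as_slice(). A small illustrative sketch, not code from the patch; the sum helper and the literals are made up:

    use std::vec_ng;
    use std::vec_ng::Vec;

    // Invented callee that still takes a borrowed slice.
    fn sum(xs: &[int]) -> int {
        let mut total = 0;
        for &x in xs.iter() {
            total += x;
        }
        total
    }

    fn main() {
        let head = vec!(1, 2);
        let tail = vec!(3, 4);

        // Old shape: head + tail.  New shape: vec_ng::append(head, tail.as_slice()).
        let joined: Vec<int> = vec_ng::append(head, tail.as_slice());

        // A Vec crosses a &[T] boundary via .as_slice().
        println!("sum = {}", sum(joined.as_slice()));
    }
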
@@ -20,8 +20,9 @@ use syntax::ast;
 use syntax::abi::{Architecture, X86, X86_64, Arm, Mips};

 use std::c_str::ToCStr;
-use std::vec;
 use std::cast;
+use std::vec;
+use std::vec_ng::Vec;

 use std::libc::{c_uint};

@@ -301,8 +302,8 @@ impl Type {
 if n_elts == 0 {
 return Vec::new();
 }
-let mut elts = vec::from_elem(n_elts, 0 as TypeRef);
+let mut elts = Vec::from_elem(n_elts, 0 as TypeRef);
-llvm::LLVMGetStructElementTypes(self.to_ref(), &mut elts[0]);
+llvm::LLVMGetStructElementTypes(self.to_ref(), elts.get_mut(0));
 cast::transmute(elts)
 }
 }
@@ -314,7 +315,7 @@ impl Type {
 pub fn func_params(&self) -> Vec<Type> {
 unsafe {
 let n_args = llvm::LLVMCountParamTypes(self.to_ref()) as uint;
-let args = vec::from_elem(n_args, 0 as TypeRef);
+let args = Vec::from_elem(n_args, 0 as TypeRef);
 llvm::LLVMGetParamTypes(self.to_ref(), args.as_ptr());
 cast::transmute(args)
 }

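The two Type hunks above preallocate with Vec::from_elem instead of vec::from_elem and reach the first element through .get_mut(0) rather than &mut elts[0]. A rough sketch of those accessors on plain integers, leaving out the LLVM FFI calls the real code wraps:

    use std::vec_ng::Vec;

    fn main() {
        // Preallocate a zeroed buffer, as the converted LLVM wrappers now do.
        let mut elts: Vec<int> = Vec::from_elem(4u, 0);

        // Mutate through .get_mut(index) instead of &mut elts[index].
        *elts.get_mut(0) = 7;

        // Read back through .get(index).
        println!("first = {}, len = {}", *elts.get(0), elts.len());
    }
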
@@ -19,6 +19,7 @@ use util::ppaux::Repr;

 use middle::trans::type_::Type;

+use std::vec_ng::Vec;
 use syntax::ast;
 use syntax::opt_vec;

@@ -62,9 +63,9 @@ pub fn type_of_rust_fn(cx: &CrateContext, has_env: bool,

 // Use the output as the actual return value if it's immediate.
 if use_out_pointer || return_type_is_void(cx, output) {
-Type::func(atys, &Type::void())
+Type::func(atys.as_slice(), &Type::void())
 } else {
-Type::func(atys, &lloutputtype)
+Type::func(atys.as_slice(), &lloutputtype)
 }
 }

@@ -72,11 +73,14 @@ pub fn type_of_rust_fn(cx: &CrateContext, has_env: bool,
 pub fn type_of_fn_from_ty(cx: &CrateContext, fty: ty::t) -> Type {
 match ty::get(fty).sty {
 ty::ty_closure(ref f) => {
-type_of_rust_fn(cx, true, f.sig.inputs, f.sig.output)
+type_of_rust_fn(cx, true, f.sig.inputs.as_slice(), f.sig.output)
 }
 ty::ty_bare_fn(ref f) => {
 if f.abis.is_rust() || f.abis.is_intrinsic() {
-type_of_rust_fn(cx, false, f.sig.inputs, f.sig.output)
+type_of_rust_fn(cx,
+false,
+f.sig.inputs.as_slice(),
+f.sig.output)
 } else {
 foreign::lltype_for_foreign_fn(cx, fty)
 }
@@ -216,7 +220,7 @@ pub fn type_of(cx: &CrateContext, t: ty::t) -> Type {
 // avoids creating more than one copy of the enum when one
 // of the enum's variants refers to the enum itself.
 let repr = adt::represent_type(cx, t);
-let name = llvm_type_name(cx, an_enum, did, substs.tps);
+let name = llvm_type_name(cx, an_enum, did, substs.tps.as_slice());
 adt::incomplete_type_of(cx, repr, name)
 }
 ty::ty_box(typ) => {
@@ -277,7 +281,10 @@ pub fn type_of(cx: &CrateContext, t: ty::t) -> Type {
 // in *after* placing it into the type cache. This prevents
 // infinite recursion with recursive struct types.
 let repr = adt::represent_type(cx, t);
-let name = llvm_type_name(cx, a_struct, did, substs.tps);
+let name = llvm_type_name(cx,
+a_struct,
+did,
+substs.tps.as_slice());
 adt::incomplete_type_of(cx, repr, name)
 }
 }

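The type_of hunks all follow from fields such as f.sig.inputs and substs.tps becoming Vec<t>: each call into a helper that still expects &[t] gains an .as_slice(). A hedged, self-contained sketch of that boundary; the Signature and describe names are invented:

    use std::vec_ng::Vec;

    // Invented field that used to be ~[int] and is now Vec<int>.
    struct Signature {
        inputs: Vec<int>,
        output: int,
    }

    // Invented helper that was left taking a borrowed slice.
    fn describe(inputs: &[int], output: int) -> uint {
        inputs.len() + (output as uint)
    }

    fn main() {
        let sig = Signature { inputs: vec!(1, 2, 3), output: 4 };

        // The Vec field crosses the &[int] boundary via .as_slice().
        println!("arity-ish = {}", describe(sig.inputs.as_slice(), sig.output));
    }
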
@@ -40,7 +40,8 @@ use std::fmt;
 use std::hash::{Hash, sip};
 use std::ops;
 use std::rc::Rc;
-use std::vec;
+use std::vec_ng::Vec;
+use std::vec_ng;
 use collections::{HashMap, HashSet};
 use syntax::ast::*;
 use syntax::ast_util::{is_local, lit_is_str};
@@ -279,7 +280,7 @@ pub struct ctxt_ {
 // of this node. This only applies to nodes that refer to entities
 // parameterized by type parameters, such as generic fns, types, or
 // other items.
-node_type_substs: RefCell<NodeMap<vec!(t)>>,
+node_type_substs: RefCell<NodeMap<Vec<t>>>,

 // Maps from a method to the method "descriptor"
 methods: RefCell<DefIdMap<@Method>>,
@@ -460,7 +461,7 @@ pub struct ClosureTy {
 #[deriving(Clone, Eq, Hash)]
 pub struct FnSig {
 binder_id: ast::NodeId,
-inputs: vec!(t),
+inputs: Vec<t>,
 output: t,
 variadic: bool
 }
@@ -683,7 +684,7 @@ pub enum RegionSubsts {
 #[deriving(Clone, Eq, Hash)]
 pub struct substs {
 self_ty: Option<ty::t>,
-tps: vec!(t),
+tps: Vec<t>,
 regions: RegionSubsts,
 }

@@ -755,7 +756,7 @@ pub enum sty {
 ty_closure(ClosureTy),
 ty_trait(DefId, substs, TraitStore, ast::Mutability, BuiltinBounds),
 ty_struct(DefId, substs),
-ty_tup(vec!(t)),
+ty_tup(Vec<t>),

 ty_param(param_ty), // type parameter
 ty_self(DefId), /* special, implicit `self` type parameter;
@@ -1410,7 +1411,7 @@ pub fn mk_mut_unboxed_vec(cx: ctxt, ty: t) -> t {
 mk_t(cx, ty_unboxed_vec(mt {ty: ty, mutbl: ast::MutImmutable}))
 }

-pub fn mk_tup(cx: ctxt, ts: vec!(t)) -> t { mk_t(cx, ty_tup(ts)) }
+pub fn mk_tup(cx: ctxt, ts: Vec<t>) -> t { mk_t(cx, ty_tup(ts)) }

 pub fn mk_closure(cx: ctxt, fty: ClosureTy) -> t {
 mk_t(cx, ty_closure(fty))
@@ -1431,7 +1432,7 @@ pub fn mk_ctor_fn(cx: ctxt,
 abis: AbiSet::Rust(),
 sig: FnSig {
 binder_id: binder_id,
-inputs: input_args,
+inputs: Vec::from_slice(input_args),
 output: output,
 variadic: false
 }
@@ -1665,7 +1666,7 @@ pub fn simd_type(cx: ctxt, ty: t) -> t {
 match get(ty).sty {
 ty_struct(did, ref substs) => {
 let fields = lookup_struct_fields(cx, did);
-lookup_field_type(cx, did, fields[0].id, substs)
+lookup_field_type(cx, did, fields.get(0).id, substs)
 }
 _ => fail!("simd_type called on invalid type")
 }
@@ -1683,7 +1684,7 @@ pub fn simd_size(cx: ctxt, ty: t) -> uint {

 pub fn get_element_type(ty: t, i: uint) -> t {
 match get(ty).sty {
-ty_tup(ref ts) => return ts[i],
+ty_tup(ref ts) => return *ts.get(i),
 _ => fail!("get_element_type called on invalid type")
 }
 }
@@ -2196,7 +2197,8 @@ pub fn type_contents(cx: ctxt, ty: t) -> TypeContents {
 ty_struct(did, ref substs) => {
 let flds = struct_fields(cx, did, substs);
 let mut res =
-TypeContents::union(flds, |f| tc_mt(cx, f.mt, cache));
+TypeContents::union(flds.as_slice(),
+|f| tc_mt(cx, f.mt, cache));
 if ty::has_dtor(cx, did) {
 res = res | TC::OwnsDtor;
 }
@@ -2204,14 +2206,16 @@ pub fn type_contents(cx: ctxt, ty: t) -> TypeContents {
 }

 ty_tup(ref tys) => {
-TypeContents::union(*tys, |ty| tc_ty(cx, *ty, cache))
+TypeContents::union(tys.as_slice(),
+|ty| tc_ty(cx, *ty, cache))
 }

 ty_enum(did, ref substs) => {
 let variants = substd_enum_variants(cx, did, substs);
 let res =
-TypeContents::union(variants, |variant| {
+TypeContents::union(variants.as_slice(), |variant| {
-TypeContents::union(variant.args, |arg_ty| {
+TypeContents::union(variant.args.as_slice(),
+|arg_ty| {
 tc_ty(cx, *arg_ty, cache)
 })
 });
@@ -2231,7 +2235,7 @@ pub fn type_contents(cx: ctxt, ty: t) -> TypeContents {
 let tp_def = ty_param_defs.get().get(&p.def_id.node);
 kind_bounds_to_contents(cx,
 tp_def.bounds.builtin_bounds,
-tp_def.bounds.trait_bounds)
+tp_def.bounds.trait_bounds.as_slice())
 }

 ty_self(def_id) => {
@@ -2924,7 +2928,7 @@ pub fn replace_closure_return_type(tcx: ctxt, fn_type: t, ret_type: t) -> t {

 // Returns a vec of all the input and output types of fty.
 pub fn tys_in_fn_sig(sig: &FnSig) -> Vec<t> {
-vec::append_one(sig.inputs.map(|a| *a), sig.output)
+vec_ng::append_one(sig.inputs.map(|a| *a), sig.output)
 }

 // Type accessors for AST nodes
@@ -3211,7 +3215,7 @@ impl AutoRef {
 }

 pub struct ParamsTy {
-params: vec!(t),
+params: Vec<t>,
 ty: t
 }

@@ -3249,7 +3253,7 @@ pub fn method_call_type_param_defs(tcx: ctxt, origin: typeck::MethodOrigin)
 let trait_type_param_defs =
 lookup_trait_def(tcx, trt_id).generics.type_param_defs();
 Rc::new(vec_ng::append(
-trait_type_param_defs.to_owned(),
+Vec::from_slice(trait_type_param_defs),
 ty::trait_method(tcx,
 trt_id,
 n_mth).generics.type_param_defs()))
@@ -3430,9 +3434,11 @@ pub fn expr_kind(tcx: ctxt,
 None => fail!("no def for place"),
 };
 let def_id = ast_util::def_id_of_def(definition);
-match tcx.lang_items.items[ExchangeHeapLangItem as uint] {
+match tcx.lang_items.items.get(ExchangeHeapLangItem as uint) {
-Some(item_def_id) if def_id == item_def_id => RvalueDatumExpr,
+&Some(item_def_id) if def_id == item_def_id => {
-Some(_) | None => RvalueDpsExpr,
+RvalueDatumExpr
+}
+&Some(_) | &None => RvalueDpsExpr,
 }
 }

@@ -3829,7 +3835,7 @@ fn lookup_locally_or_in_crate_store<V:Clone>(
 }

 pub fn trait_method(cx: ctxt, trait_did: ast::DefId, idx: uint) -> @Method {
-let method_def_id = ty::trait_method_def_ids(cx, trait_did)[idx];
+let method_def_id = *ty::trait_method_def_ids(cx, trait_did).get(idx);
 ty::method(cx, method_def_id)
 }

@@ -3932,7 +3938,7 @@ pub fn ty_to_def_id(ty: t) -> Option<ast::DefId> {
 // Enum information
 #[deriving(Clone)]
 pub struct VariantInfo {
-args: vec!(t),
+args: Vec<t>,
 arg_names: Option<Vec<ast::Ident> >,
 ctor_ty: t,
 name: ast::Ident,
@@ -3953,7 +3959,11 @@ impl VariantInfo {

 match ast_variant.node.kind {
 ast::TupleVariantKind(ref args) => {
-let arg_tys = if args.len() > 0 { ty_fn_args(ctor_ty).map(|a| *a) } else { Vec::new() };
+let arg_tys = if args.len() > 0 {
+ty_fn_args(ctor_ty).map(|a| *a)
+} else {
+Vec::new()
+};

 return VariantInfo {
 args: arg_tys,
@@ -3972,13 +3982,13 @@ impl VariantInfo {
 assert!(fields.len() > 0);

 let arg_tys = ty_fn_args(ctor_ty).map(|a| *a);
-let arg_names = fields.map(|field| {
+let arg_names = fields.iter().map(|field| {
 match field.node.kind {
 NamedField(ident, _) => ident,
 UnnamedField => cx.sess.bug(
 "enum_variants: all fields in struct must have a name")
 }
-});
+}).collect();

 return VariantInfo {
 args: arg_tys,
@@ -4168,8 +4178,10 @@ pub fn enum_variant_with_id(cx: ctxt,
 let variants = enum_variants(cx, enum_id);
 let mut i = 0;
 while i < variants.len() {
-let variant = variants[i];
+let variant = *variants.get(i);
-if variant.id == variant_id { return variant; }
+if variant.id == variant_id {
+return variant
+}
 i += 1;
 }
 cx.sess.bug("enum_variant_with_id(): no variant exists with that ID");
@@ -4341,7 +4353,7 @@ pub fn lookup_struct_field(cx: ctxt,
 }

 fn struct_field_tys(fields: &[StructField]) -> Vec<field_ty> {
-fields.map(|field| {
+fields.iter().map(|field| {
 match field.node.kind {
 NamedField(ident, visibility) => {
 field_ty {
@@ -4358,7 +4370,7 @@ fn struct_field_tys(fields: &[StructField]) -> Vec<field_ty> {
 }
 }
 }
-})
+}).collect()
 }

 // Returns a list of fields corresponding to the struct's items. trans uses
@@ -4450,7 +4462,7 @@ pub fn is_binopable(cx: ctxt, ty: t, op: ast::BinOp) -> bool {
 }

 pub fn ty_params_to_tys(tcx: ty::ctxt, generics: &ast::Generics) -> Vec<t> {
-vec::from_fn(generics.ty_params.len(), |i| {
+Vec::from_fn(generics.ty_params.len(), |i| {
 let id = generics.ty_params.get(i).id;
 ty::mk_param(tcx, i, ast_util::local_def(id))
 })
@@ -4502,7 +4514,7 @@ pub fn normalize_ty(cx: ctxt, t: t) -> t {
 -> substs {
 substs { regions: ErasedRegions,
 self_ty: ty_fold::fold_opt_ty(self, substs.self_ty),
-tps: ty_fold::fold_ty_vec(self, substs.tps) }
+tps: ty_fold::fold_ty_vec(self, substs.tps.as_slice()) }
 }

 fn fold_sig(&mut self,
@@ -4510,10 +4522,12 @@ pub fn normalize_ty(cx: ctxt, t: t) -> t {
 -> ty::FnSig {
 // The binder-id is only relevant to bound regions, which
 // are erased at trans time.
-ty::FnSig { binder_id: ast::DUMMY_NODE_ID,
+ty::FnSig {
-inputs: ty_fold::fold_ty_vec(self, sig.inputs),
+binder_id: ast::DUMMY_NODE_ID,
+inputs: ty_fold::fold_ty_vec(self, sig.inputs.as_slice()),
 output: self.fold_ty(sig.output),
-variadic: sig.variadic }
+variadic: sig.variadic,
+}
 }
 }
 }
@@ -4615,14 +4629,15 @@ pub fn each_bound_trait_and_supertraits(tcx: ctxt,
 // Add the given trait ty to the hash map
 while i < trait_refs.len() {
 debug!("each_bound_trait_and_supertraits(i={:?}, trait_ref={})",
-i, trait_refs[i].repr(tcx));
+i, trait_refs.get(i).repr(tcx));

-if !f(trait_refs[i]) {
+if !f(*trait_refs.get(i)) {
 return false;
 }

 // Add supertraits to supertrait_set
-let supertrait_refs = trait_ref_supertraits(tcx, trait_refs[i]);
+let supertrait_refs = trait_ref_supertraits(tcx,
+*trait_refs.get(i));
 for &supertrait_ref in supertrait_refs.iter() {
 debug!("each_bound_trait_and_supertraits(supertrait_ref={})",
 supertrait_ref.repr(tcx));
@@ -4646,7 +4661,7 @@ pub fn count_traits_and_supertraits(tcx: ctxt,
 let mut total = 0;
 for type_param_def in type_param_defs.iter() {
 each_bound_trait_and_supertraits(
-tcx, type_param_def.bounds.trait_bounds, |_| {
+tcx, type_param_def.bounds.trait_bounds.as_slice(), |_| {
 total += 1;
 true
 });
@@ -5078,7 +5093,7 @@ pub fn construct_parameter_environment(
 let num_item_type_params = item_type_params.len();
 let num_method_type_params = method_type_params.len();
 let num_type_params = num_item_type_params + num_method_type_params;
-let type_params = vec::from_fn(num_type_params, |i| {
+let type_params = Vec::from_fn(num_type_params, |i| {
 let def_id = if i < num_item_type_params {
 item_type_params[i].def_id
 } else {
@@ -5106,7 +5121,7 @@ pub fn construct_parameter_environment(
 //

 let self_bound_substd = self_bound.map(|b| b.subst(tcx, &free_substs));
-let type_param_bounds_substd = vec::from_fn(num_type_params, |i| {
+let type_param_bounds_substd = Vec::from_fn(num_type_params, |i| {
 if i < num_item_type_params {
 (*item_type_params[i].bounds).subst(tcx, &free_substs)
 } else {

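Two fixes dominate the ty.rs hunks: types the automatic conversion mangled into vec!(t) are spelled Vec<t>, and the old slice .map(..) becomes .iter().map(..).collect() when a Vec is wanted. A minimal sketch of both; the Variant type and its data are invented:

    use std::vec_ng::Vec;

    // Fields use the Vec<T> type; the conversion's vec!(t) spelling was invalid.
    struct Variant {
        disr: uint,
        args: Vec<int>,
    }

    fn main() {
        let disrs = vec!(0u, 1u, 2u);

        // Old shape: disrs.map(|&d| ..).  New shape: iterate and collect into a Vec.
        let variants: Vec<Variant> = disrs.iter().map(|&d| {
            Variant { disr: d, args: vec!(1, 2) }
        }).collect();

        // Element access goes through .get(i) instead of variants[i].
        println!("first disr = {}, first arg count = {}",
                 variants.get(0).disr, variants.get(0).args.len());
    }
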
@@ -13,6 +13,8 @@
 use middle::ty;
 use util::ppaux::Repr;

+use std::vec_ng::Vec;
+
 pub trait TypeFolder {
 fn tcx(&self) -> ty::ctxt;

@@ -84,10 +86,8 @@ pub fn fold_opt_ty<T:TypeFolder>(this: &mut T,
 t.map(|t| this.fold_ty(t))
 }

-pub fn fold_ty_vec<T:TypeFolder>(this: &mut T,
+pub fn fold_ty_vec<T:TypeFolder>(this: &mut T, tys: &[ty::t]) -> Vec<ty::t> {
-tys: &[ty::t])
+tys.iter().map(|t| this.fold_ty(*t)).collect()
--> Vec<ty::t> {
-tys.map(|t| this.fold_ty(*t))
 }

 pub fn super_fold_ty<T:TypeFolder>(this: &mut T,
@@ -110,14 +110,14 @@ pub fn super_fold_substs<T:TypeFolder>(this: &mut T,

 ty::substs { regions: regions,
 self_ty: fold_opt_ty(this, substs.self_ty),
-tps: fold_ty_vec(this, substs.tps), }
+tps: fold_ty_vec(this, substs.tps.as_slice()), }
 }

 pub fn super_fold_sig<T:TypeFolder>(this: &mut T,
 sig: &ty::FnSig)
 -> ty::FnSig {
 ty::FnSig { binder_id: sig.binder_id,
-inputs: fold_ty_vec(this, sig.inputs),
+inputs: fold_ty_vec(this, sig.inputs.as_slice()),
 output: this.fold_ty(sig.output),
 variadic: sig.variadic }
 }
@@ -166,7 +166,7 @@ pub fn super_fold_sty<T:TypeFolder>(this: &mut T,
 bounds)
 }
 ty::ty_tup(ref ts) => {
-ty::ty_tup(fold_ty_vec(this, *ts))
+ty::ty_tup(fold_ty_vec(this, ts.as_slice()))
 }
 ty::ty_bare_fn(ref f) => {
 ty::ty_bare_fn(this.fold_bare_fn_ty(f))

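fold_ty_vec above now builds its result by iterating the input slice and collecting into a Vec rather than calling .map() on the slice. The same shape on integers, as an illustration only; the doubling is made up:

    use std::vec_ng::Vec;

    // Map a borrowed slice into a freshly allocated Vec, as fold_ty_vec now does.
    fn double_all(xs: &[int]) -> Vec<int> {
        xs.iter().map(|&x| x * 2).collect()
    }

    fn main() {
        let xs = vec!(1, 2, 3);
        let doubled = double_all(xs.as_slice());
        println!("doubled has {} elements, last = {}",
                 doubled.len(), *doubled.get(doubled.len() - 1));
    }
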
@@ -136,7 +136,7 @@ fn opt_ast_region_to_region<AC:AstConv,RS:RegionScope>(
 }

 Ok(rs) => {
-rs[0]
+*rs.get(0)
 }
 }
 }
@@ -791,7 +791,11 @@ pub fn ty_of_closure<AC:AstConv,RS:RegionScope>(
 let expected_arg_ty = expected_sig.as_ref().and_then(|e| {
 // no guarantee that the correct number of expected args
 // were supplied
-if i < e.inputs.len() {Some(e.inputs[i])} else {None}
+if i < e.inputs.len() {
+Some(*e.inputs.get(i))
+} else {
+None
+}
 });
 ty_of_arg(this, &rb, a, expected_arg_ty)
 }).collect();

@@ -319,7 +319,7 @@ pub fn check_struct_pat_fields(pcx: &pat_ctxt,
 }
 Some(&(index, ref mut used)) => {
 *used = true;
-let class_field = class_fields[index];
+let class_field = *class_fields.get(index);
 let field_type = ty::lookup_field_type(tcx,
 class_id,
 class_field.id,
@@ -585,7 +585,7 @@ pub fn check_pat(pcx: &pat_ctxt, pat: &ast::Pat, expected: ty::t) {
 match *s {
 ty::ty_tup(ref ex_elts) if e_count == ex_elts.len() => {
 for (i, elt) in elts.iter().enumerate() {
-check_pat(pcx, *elt, ex_elts[i]);
+check_pat(pcx, *elt, *ex_elts.get(i));
 }
 fcx.write_ty(pat.id, expected);
 }

@@ -100,7 +100,8 @@ use util::ppaux::Repr;
 use std::cell::RefCell;
 use collections::HashSet;
 use std::result;
-use std::vec;
+use std::vec_ng::Vec;
+use std::vec_ng;
 use syntax::ast::{DefId, SelfValue, SelfRegion};
 use syntax::ast::{SelfUniq, SelfStatic};
 use syntax::ast::{MutMutable, MutImmutable};
@@ -450,7 +451,7 @@ impl<'a> LookupContext<'a> {
 self.get_method_index(new_trait_ref, trait_ref, method_num);
 let mut m = (*m).clone();
 // We need to fix up the transformed self type.
-m.fty.sig.inputs[0] =
+*m.fty.sig.inputs.get_mut(0) =
 self.construct_transformed_self_ty_for_object(
 did, &rcvr_substs, &m);

@@ -476,7 +477,13 @@ impl<'a> LookupContext<'a> {
 param_ty);
 self.push_inherent_candidates_from_bounds(
 rcvr_ty,
-self.fcx.inh.param_env.type_param_bounds[param_ty.idx].trait_bounds,
+self.fcx
+.inh
+.param_env
+.type_param_bounds
+.get(param_ty.idx)
+.trait_bounds
+.as_slice(),
 restrict_to,
 param_numbered(param_ty.idx));
 }
@@ -541,10 +548,9 @@ impl<'a> LookupContext<'a> {
 let trait_methods = ty::trait_methods(tcx, bound_trait_ref.def_id);
 match trait_methods.iter().position(|m| {
 m.explicit_self != ast::SelfStatic &&
-m.ident.name == self.m_name })
+m.ident.name == self.m_name }) {
-{
 Some(pos) => {
-let method = trait_methods[pos];
+let method = *trait_methods.get(pos);

 match mk_cand(bound_trait_ref, method, pos, this_bound_idx) {
 Some(cand) => {
@@ -599,13 +605,16 @@ impl<'a> LookupContext<'a> {
 impl_info.methods.map(|m| m.ident).repr(self.tcx()));

 let idx = {
-match impl_info.methods.iter().position(|m| m.ident.name == self.m_name) {
+match impl_info.methods
+.iter()
+.position(|m| m.ident.name == self.m_name) {
 Some(idx) => idx,
 None => { return; } // No method with the right name.
 }
 };

-let method = ty::method(self.tcx(), impl_info.methods[idx].def_id);
+let method = ty::method(self.tcx(),
+impl_info.methods.get(idx).def_id);

 // determine the `self` of the impl with fresh
 // variables for each parameter:
@@ -899,7 +908,8 @@ impl<'a> LookupContext<'a> {
 candidates.iter().map(|c| (*c).clone()).
 filter(|c| self.is_relevant(rcvr_ty, c)).collect();

-let relevant_candidates = self.merge_candidates(relevant_candidates);
+let relevant_candidates =
+self.merge_candidates(relevant_candidates.as_slice());

 if relevant_candidates.len() == 0 {
 return None;
@@ -914,7 +924,7 @@ impl<'a> LookupContext<'a> {
 }
 }

-Some(self.confirm_candidate(rcvr_ty, &relevant_candidates[0]))
+Some(self.confirm_candidate(rcvr_ty, relevant_candidates.get(0)))
 }

 fn merge_candidates(&self, candidates: &[Candidate]) -> Vec<Candidate> {
@@ -1004,14 +1014,15 @@ impl<'a> LookupContext<'a> {
 parameters given for this method");
 self.fcx.infcx().next_ty_vars(num_method_tps)
 } else {
-self.supplied_tps.to_owned()
+Vec::from_slice(self.supplied_tps)
 }
 };

 // Construct the full set of type parameters for the method,
 // which is equal to the class tps + the method tps.
 let all_substs = substs {
-tps: vec_ng::append(candidate.rcvr_substs.tps.clone(), m_substs),
+tps: vec_ng::append(candidate.rcvr_substs.tps.clone(),
+m_substs.as_slice()),
 regions: candidate.rcvr_substs.regions.clone(),
 self_ty: candidate.rcvr_substs.self_ty,
 };
@@ -1031,7 +1042,7 @@ impl<'a> LookupContext<'a> {
 let args = fn_sig.inputs.slice_from(1).iter().map(|t| {
 t.subst(tcx, &all_substs)
 });
-Some(fn_sig.inputs[0]).move_iter().chain(args).collect()
+Some(*fn_sig.inputs.get(0)).move_iter().chain(args).collect()
 }
 _ => fn_sig.inputs.subst(tcx, &all_substs)
 };
@@ -1050,7 +1061,7 @@ impl<'a> LookupContext<'a> {
 self.fcx.infcx().next_region_var(
 infer::BoundRegionInFnCall(self.expr.span, br))
 });
-let transformed_self_ty = fn_sig.inputs[0];
+let transformed_self_ty = *fn_sig.inputs.get(0);
 let fty = ty::mk_bare_fn(tcx, ty::BareFnTy {
 sig: fn_sig,
 purity: bare_fn_ty.purity,
@@ -1118,7 +1129,7 @@ impl<'a> LookupContext<'a> {
 ty::mk_err() // error reported in `enforce_object_limitations()`
 }
 ast::SelfRegion(..) | ast::SelfUniq => {
-let transformed_self_ty = method_ty.fty.sig.inputs[0];
+let transformed_self_ty = *method_ty.fty.sig.inputs.get(0);
 match ty::get(transformed_self_ty).sty {
 ty::ty_rptr(r, mt) => { // must be SelfRegion
 ty::mk_trait(self.tcx(), trait_def_id,

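The method-lookup hunks replace index syntax on the new Vec fields: writes go through *v.get_mut(0) = .., copies through *v.get(0), and a borrowed first element is just v.get(0). A hedged sketch of the three accessors; the Sig type is invented:

    use std::vec_ng::Vec;

    // Invented stand-in for a signature whose inputs are now a Vec.
    struct Sig {
        inputs: Vec<int>,
    }

    fn main() {
        let mut sig = Sig { inputs: vec!(10, 20, 30) };

        // Write in place: previously `sig.inputs[0] = 99;`.
        *sig.inputs.get_mut(0) = 99;

        // Copy out: previously `let first = sig.inputs[0];`.
        let first = *sig.inputs.get(0);

        // Borrow without copying: previously `&sig.inputs[0]`.
        let first_ref = sig.inputs.get(0);

        println!("first = {}, via reference = {}", first, *first_ref);
    }
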
@ -118,6 +118,8 @@ use collections::HashMap;
|
||||||
use std::mem::replace;
|
use std::mem::replace;
|
||||||
use std::result;
|
use std::result;
|
||||||
use std::vec;
|
use std::vec;
|
||||||
|
use std::vec_ng::Vec;
|
||||||
|
use std::vec_ng;
|
||||||
use syntax::abi::AbiSet;
|
use syntax::abi::AbiSet;
|
||||||
use syntax::ast::{Provided, Required};
|
use syntax::ast::{Provided, Required};
|
||||||
use syntax::ast;
|
use syntax::ast;
|
||||||
|
@ -902,7 +904,7 @@ fn compare_impl_method(tcx: ty::ctxt,
|
||||||
bound_region: ty::BrNamed(l.def_id, l.ident)})).
|
bound_region: ty::BrNamed(l.def_id, l.ident)})).
|
||||||
collect();
|
collect();
|
||||||
let dummy_substs = ty::substs {
|
let dummy_substs = ty::substs {
|
||||||
tps: vec_ng::append(dummy_impl_tps, dummy_method_tps),
|
tps: vec_ng::append(dummy_impl_tps, dummy_method_tps.as_slice()),
|
||||||
regions: ty::NonerasedRegions(dummy_impl_regions),
|
regions: ty::NonerasedRegions(dummy_impl_regions),
|
||||||
self_ty: None };
|
self_ty: None };
|
||||||
|
|
||||||
|
@ -929,7 +931,7 @@ fn compare_impl_method(tcx: ty::ctxt,
|
||||||
self_ty: self_ty } = trait_substs.subst(tcx, &dummy_substs);
|
self_ty: self_ty } = trait_substs.subst(tcx, &dummy_substs);
|
||||||
let substs = substs {
|
let substs = substs {
|
||||||
regions: trait_regions,
|
regions: trait_regions,
|
||||||
tps: vec_ng::append(trait_tps, dummy_method_tps),
|
tps: vec_ng::append(trait_tps, dummy_method_tps.as_slice()),
|
||||||
self_ty: self_ty,
|
self_ty: self_ty,
|
||||||
};
|
};
|
||||||
debug!("trait_fty (pre-subst): {} substs={}",
|
debug!("trait_fty (pre-subst): {} substs={}",
|
||||||
|
@ -988,7 +990,7 @@ impl FnCtxt {
|
||||||
impl RegionScope for infer::InferCtxt {
|
impl RegionScope for infer::InferCtxt {
|
||||||
fn anon_regions(&self, span: Span, count: uint)
|
fn anon_regions(&self, span: Span, count: uint)
|
||||||
-> Result<Vec<ty::Region> , ()> {
|
-> Result<Vec<ty::Region> , ()> {
|
||||||
Ok(vec::from_fn(count, |_| {
|
Ok(Vec::from_fn(count, |_| {
|
||||||
self.next_region_var(infer::MiscVariable(span))
|
self.next_region_var(infer::MiscVariable(span))
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
|
@ -1672,7 +1674,7 @@ fn check_expr_with_unifier(fcx: @FnCtxt,
|
||||||
let args = args.slice_from(1);
|
let args = args.slice_from(1);
|
||||||
if ty::type_is_error(method_fn_ty) {
|
if ty::type_is_error(method_fn_ty) {
|
||||||
let err_inputs = err_args(args.len());
|
let err_inputs = err_args(args.len());
|
||||||
check_argument_types(fcx, sp, err_inputs, callee_expr,
|
check_argument_types(fcx, sp, err_inputs.as_slice(), callee_expr,
|
||||||
args, deref_args, false);
|
args, deref_args, false);
|
||||||
method_fn_ty
|
method_fn_ty
|
||||||
} else {
|
} else {
|
||||||
|
@ -1713,10 +1715,10 @@ fn check_expr_with_unifier(fcx: @FnCtxt,
|
||||||
let supplied_arg_count = args.len();
|
let supplied_arg_count = args.len();
|
||||||
let expected_arg_count = fn_inputs.len();
|
let expected_arg_count = fn_inputs.len();
|
||||||
let formal_tys = if expected_arg_count == supplied_arg_count {
|
let formal_tys = if expected_arg_count == supplied_arg_count {
|
||||||
fn_inputs.map(|a| *a)
|
fn_inputs.iter().map(|a| *a).collect()
|
||||||
} else if variadic {
|
} else if variadic {
|
||||||
if supplied_arg_count >= expected_arg_count {
|
if supplied_arg_count >= expected_arg_count {
|
||||||
fn_inputs.map(|a| *a)
|
fn_inputs.iter().map(|a| *a).collect()
|
||||||
} else {
|
} else {
|
||||||
let msg = format!(
|
let msg = format!(
|
||||||
"this function takes at least {nexpected, plural, =1{# parameter} \
|
"this function takes at least {nexpected, plural, =1{# parameter} \
|
||||||
|
@ -1782,7 +1784,7 @@ fn check_expr_with_unifier(fcx: @FnCtxt,
|
||||||
|
|
||||||
if is_block == check_blocks {
|
if is_block == check_blocks {
|
||||||
debug!("checking the argument");
|
debug!("checking the argument");
|
||||||
let mut formal_ty = formal_tys[i];
|
let mut formal_ty = *formal_tys.get(i);
|
||||||
|
|
||||||
match deref_args {
|
match deref_args {
|
||||||
DoDerefArgs => {
|
DoDerefArgs => {
|
||||||
|
@ -1841,7 +1843,7 @@ fn check_expr_with_unifier(fcx: @FnCtxt,
|
||||||
}
|
}
|
||||||
|
|
||||||
fn err_args(len: uint) -> Vec<ty::t> {
|
fn err_args(len: uint) -> Vec<ty::t> {
|
||||||
vec::from_fn(len, |_| ty::mk_err())
|
Vec::from_fn(len, |_| ty::mk_err())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn write_call(fcx: @FnCtxt, call_expr: &ast::Expr, output: ty::t) {
|
fn write_call(fcx: @FnCtxt, call_expr: &ast::Expr, output: ty::t) {
|
||||||
|
@ -1892,7 +1894,7 @@ fn check_expr_with_unifier(fcx: @FnCtxt,
|
||||||
});
|
});
|
||||||
|
|
||||||
// Call the generic checker.
|
// Call the generic checker.
|
||||||
check_argument_types(fcx, call_expr.span, fn_sig.inputs, f,
|
check_argument_types(fcx, call_expr.span, fn_sig.inputs.as_slice(), f,
|
||||||
args, DontDerefArgs, fn_sig.variadic);
|
args, DontDerefArgs, fn_sig.variadic);
|
||||||
|
|
||||||
write_call(fcx, call_expr, fn_sig.output);
|
write_call(fcx, call_expr, fn_sig.output);
|
||||||
|
@ -2310,7 +2312,7 @@ fn check_expr_with_unifier(fcx: @FnCtxt,
|
||||||
// field
|
// field
|
||||||
debug!("class named {}", ppaux::ty_to_str(tcx, base_t));
|
debug!("class named {}", ppaux::ty_to_str(tcx, base_t));
|
||||||
let cls_items = ty::lookup_struct_fields(tcx, base_id);
|
let cls_items = ty::lookup_struct_fields(tcx, base_id);
|
||||||
match lookup_field_ty(tcx, base_id, cls_items,
|
match lookup_field_ty(tcx, base_id, cls_items.as_slice(),
|
||||||
field, &(*substs)) {
|
field, &(*substs)) {
|
||||||
Some(field_ty) => {
|
Some(field_ty) => {
|
||||||
// (2) look up what field's type is, and return it
|
 // (2) look up what field's type is, and return it
@@ -2330,7 +2332,7 @@ fn check_expr_with_unifier(fcx: @FnCtxt,
 base,
 field,
 expr_t,
-tps,
+tps.as_slice(),
 DontDerefArgs,
 CheckTraitsAndInherentMethods,
 AutoderefReceiver) {
@@ -2484,7 +2486,7 @@ fn check_expr_with_unifier(fcx: @FnCtxt,
 class_id,
 id,
 substitutions,
-class_fields,
+class_fields.as_slice(),
 fields,
 base_expr.is_none());
 if ty::type_is_error(fcx.node_ty(id)) {
@@ -2542,7 +2544,7 @@ fn check_expr_with_unifier(fcx: @FnCtxt,
 variant_id,
 id,
 substitutions,
-variant_fields,
+variant_fields.as_slice(),
 fields,
 true);
 fcx.write_ty(id, enum_type);
@@ -2621,18 +2623,21 @@ fn check_expr_with_unifier(fcx: @FnCtxt,
 // places: the exchange heap and the managed heap.
 let definition = lookup_def(fcx, path.span, place.id);
 let def_id = ast_util::def_id_of_def(definition);
-match tcx.lang_items.items[ExchangeHeapLangItem as uint] {
-Some(item_def_id) if def_id == item_def_id => {
+match tcx.lang_items
+.items
+.get(ExchangeHeapLangItem as uint) {
+&Some(item_def_id) if def_id == item_def_id => {
 fcx.write_ty(id, ty::mk_uniq(tcx,
 fcx.expr_ty(subexpr)));
 checked = true
 }
-Some(_) | None => {}
+&Some(_) | &None => {}
 }
 if !checked {
 match tcx.lang_items
-.items[ManagedHeapLangItem as uint] {
-Some(item_def_id) if def_id == item_def_id => {
+.items
+.get(ManagedHeapLangItem as uint) {
+&Some(item_def_id) if def_id == item_def_id => {
 // Assign the magic `Gc<T>` struct.
 let gc_struct_id =
 match tcx.lang_items
@@ -2661,7 +2666,7 @@ fn check_expr_with_unifier(fcx: @FnCtxt,
 fcx.write_ty(id, sty);
 checked = true
 }
-Some(_) | None => {}
+&Some(_) | &None => {}
 }
 }
 }
@@ -2750,7 +2755,8 @@ fn check_expr_with_unifier(fcx: @FnCtxt,
 ty::ty_struct(did, ref substs) => {
 let fields = ty::struct_fields(fcx.tcx(), did, substs);
 fields.len() == 1
-&& fields[0].ident == token::special_idents::unnamed_field
+&& fields.get(0).ident ==
+token::special_idents::unnamed_field
 }
 _ => false
 };
@@ -3129,7 +3135,7 @@ fn check_expr_with_unifier(fcx: @FnCtxt,

 let elt_ts = elts.iter().enumerate().map(|(i, e)| {
 let opt_hint = match flds {
-Some(ref fs) if i < fs.len() => Some(fs[i]),
+Some(ref fs) if i < fs.len() => Some(*fs.get(i)),
 _ => None
 };
 check_expr_with_opt_hint(fcx, *e, opt_hint);
@@ -3492,7 +3498,7 @@ pub fn check_simd(tcx: ty::ctxt, sp: Span, id: ast::NodeId) {
 tcx.sess.span_err(sp, "SIMD vector cannot be empty");
 return;
 }
-let e = ty::lookup_field_type(tcx, did, fields[0].id, substs);
+let e = ty::lookup_field_type(tcx, did, fields.get(0).id, substs);
 if !fields.iter().all(
 |f| ty::lookup_field_type(tcx, did, f.id, substs) == e) {
 tcx.sess.span_err(sp, "SIMD vector should be homogeneous");
@@ -3805,7 +3811,7 @@ pub fn instantiate_path(fcx: @FnCtxt,
 .enumerate() {
 match self_parameter_index {
 Some(index) if index == i => {
-tps.push(fcx.infcx().next_ty_vars(1)[0]);
+tps.push(*fcx.infcx().next_ty_vars(1).get(0));
 pushed = true;
 }
 _ => {}
@@ -3829,7 +3835,7 @@ pub fn instantiate_path(fcx: @FnCtxt,
 for (i, default) in defaults.skip(ty_substs_len).enumerate() {
 match self_parameter_index {
 Some(index) if index == i + ty_substs_len => {
-substs.tps.push(fcx.infcx().next_ty_vars(1)[0]);
+substs.tps.push(*fcx.infcx().next_ty_vars(1).get(0));
 pushed = true;
 }
 _ => {}
@@ -3848,7 +3854,7 @@ pub fn instantiate_path(fcx: @FnCtxt,

 // If the self parameter goes at the end, insert it there.
 if !pushed && self_parameter_index.is_some() {
-substs.tps.push(fcx.infcx().next_ty_vars(1)[0])
+substs.tps.push(*fcx.infcx().next_ty_vars(1).get(0))
 }

 assert_eq!(substs.tps.len(), ty_param_count)
@@ -4028,7 +4034,7 @@ pub fn check_intrinsic_type(ccx: @CrateCtxt, it: &ast::ForeignItem) {
 assert!(split.len() >= 2, "Atomic intrinsic not correct format");

 //We only care about the operation here
-match split[1] {
+match *split.get(1) {
 "cxchg" => (1, vec!(ty::mk_mut_rptr(tcx,
 ty::ReLateBound(it.id, ty::BrAnon(0)),
 param(ccx, 0)),
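The recurring change in the hunks above is the ~[T]-to-Vec<T> fallout: indexing `v[i]` becomes `*v.get(i)` and callees that still take `&[T]` now receive `v.as_slice()`. As an illustration only (not code from this commit), a minimal sketch of that call-site shape in present-day Rust, where `Vec::get` returns an `Option<&T>` rather than panicking as the 2014 accessor did; the helper name is made up:

    // Hypothetical callee that still wants a borrowed slice, like the
    // rustc functions these hunks call with `.as_slice()`.
    fn first_two_sum(xs: &[u32]) -> u32 {
        // Modern `get` returns Option<&T>; the diff's `*v.get(i)` relied on
        // the old panicking accessor, so unwrap here for the same effect.
        *xs.get(0).unwrap() + *xs.get(1).unwrap()
    }

    fn main() {
        let tps: Vec<u32> = vec![1, 2, 3];
        // A Vec is borrowed as a slice where `&[T]` is expected.
        assert_eq!(first_two_sum(tps.as_slice()), 3);
    }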
@@ -13,7 +13,9 @@
 use middle::ty;
 use middle::ty_fold;
 use middle::ty_fold::TypeFolder;

 use collections::HashMap;
+use std::vec_ng::Vec;
 use util::ppaux::Repr;
 use util::ppaux;

@@ -26,9 +26,10 @@ use util::common::indenter;
 use util::ppaux;
 use util::ppaux::Repr;

-use std::cell::RefCell;
 use collections::HashSet;
+use std::cell::RefCell;
 use std::result;
+use std::vec_ng::Vec;
 use syntax::ast;
 use syntax::ast_util;
 use syntax::codemap::Span;
@@ -100,13 +101,13 @@ fn lookup_vtables(vcx: &VtableContext,

 // We do this backwards for reasons discussed above.
 assert_eq!(substs.tps.len(), type_param_defs.len());
-let mut result =
+let mut result: Vec<vtable_param_res> =
 substs.tps.rev_iter()
 .zip(type_param_defs.rev_iter())
 .map(|(ty, def)|
 lookup_vtables_for_param(vcx, location_info, Some(substs),
 &*def.bounds, *ty, is_early))
-.to_owned_vec();
+.collect();
 result.reverse();

 assert_eq!(substs.tps.len(), result.len());
@@ -134,7 +135,10 @@ fn lookup_vtables_for_param(vcx: &VtableContext,
 // ty is the value supplied for the type parameter A...
 let mut param_result = Vec::new();

-ty::each_bound_trait_and_supertraits(tcx, type_param_bounds.trait_bounds, |trait_ref| {
+ty::each_bound_trait_and_supertraits(tcx,
+type_param_bounds.trait_bounds
+.as_slice(),
+|trait_ref| {
 // ...and here trait_ref is each bound that was declared on A,
 // expressed in terms of the type parameters.

@@ -252,7 +256,11 @@ fn lookup_vtable(vcx: &VtableContext,
 let vtable_opt = match ty::get(ty).sty {
 ty::ty_param(param_ty {idx: n, ..}) => {
 let type_param_bounds: &[@ty::TraitRef] =
-vcx.param_env.type_param_bounds[n].trait_bounds;
+vcx.param_env
+.type_param_bounds
+.get(n)
+.trait_bounds
+.as_slice();
 lookup_vtable_from_bounds(vcx,
 location_info,
 type_param_bounds,
@@ -392,7 +400,7 @@ fn search_for_vtable(vcx: &VtableContext,
 // the type self_ty, and substs is bound to [T].
 debug!("The self ty is {} and its substs are {}",
 vcx.infcx.ty_to_str(for_ty),
-vcx.infcx.tys_to_str(substs.tps));
+vcx.infcx.tys_to_str(substs.tps.as_slice()));

 // Next, we unify trait_ref -- the type that we want to cast
 // to -- with of_trait_ref -- the trait that im implements. At
@@ -445,7 +453,7 @@ fn search_for_vtable(vcx: &VtableContext,
 debug!("The fixed-up substs are {} - \
 they will be unified with the bounds for \
 the target ty, {}",
-vcx.infcx.tys_to_str(substs_f.tps),
+vcx.infcx.tys_to_str(substs_f.tps.as_slice()),
 vcx.infcx.trait_ref_to_str(trait_ref));

 // Next, we unify the fixed-up substitutions for the impl self
@@ -465,14 +473,14 @@ fn search_for_vtable(vcx: &VtableContext,

 match found.len() {
 0 => { return None }
-1 => return Some(found[0].clone()),
+1 => return Some(found.get(0).clone()),
 _ => {
 if !is_early {
 vcx.tcx().sess.span_err(
 location_info.span,
 "multiple applicable methods in scope");
 }
-return Some(found[0].clone());
+return Some(found.get(0).clone());
 }
 }
 }
@@ -701,11 +709,15 @@ pub fn early_resolve_expr(ex: &ast::Expr, fcx: &FnCtxt, is_early: bool) {
 debug!("vtable resolution on parameter bounds for method call {}",
 ex.repr(fcx.tcx()));
 let type_param_defs = ty::method_call_type_param_defs(cx.tcx, method.origin);
-if has_trait_bounds(*type_param_defs.borrow()) {
+if has_trait_bounds(type_param_defs.borrow().as_slice()) {
 let substs = fcx.method_ty_substs(ex.id);
 let vcx = fcx.vtable_context();
-let vtbls = lookup_vtables(&vcx, &location_info_for_expr(ex),
-*type_param_defs.borrow(), &substs, is_early);
+let vtbls = lookup_vtables(&vcx,
+&location_info_for_expr(ex),
+type_param_defs.borrow()
+.as_slice(),
+&substs,
+is_early);
 if !is_early {
 insert_vtables(fcx, ex.id, vtbls);
 }
@@ -817,7 +829,7 @@ pub fn trans_resolve_method(tcx: ty::ctxt, id: ast::NodeId,
 substs: &ty::substs) -> Option<vtable_res> {
 let generics = ty::lookup_item_type(tcx, ast_util::local_def(id)).generics;
 let type_param_defs = generics.type_param_defs.borrow();
-if has_trait_bounds(*type_param_defs) {
+if has_trait_bounds(type_param_defs.as_slice()) {
 let vcx = VtableContext {
 infcx: &infer::new_infer_ctxt(tcx),
 param_env: &ty::construct_parameter_environment(tcx, None, [], [], [], id)
@@ -827,7 +839,11 @@ pub fn trans_resolve_method(tcx: ty::ctxt, id: ast::NodeId,
 span: tcx.map.span(id)
 };

-Some(lookup_vtables(&vcx, &loc_info, *type_param_defs, substs, false))
+Some(lookup_vtables(&vcx,
+&loc_info,
+type_param_defs.as_slice(),
+substs,
+false))
 } else {
 None
 }
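The `lookup_vtables` hunk swaps `.to_owned_vec()` for `.collect()` and adds an explicit `Vec<vtable_param_res>` annotation on the binding, since `collect` needs a target type. A rough present-day analogue of that shape, with an invented element type:

    fn main() {
        let tps = vec![1u32, 2, 3];
        // Collecting a mapped, reversed iterator into a Vec; the binding's
        // type annotation plays the role the diff adds for `result`.
        let mut result: Vec<u32> = tps.iter().rev().map(|t| t * 10).collect();
        result.reverse();
        assert_eq!(result, vec![10, 20, 30]);
    }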
@@ -28,6 +28,7 @@ use middle::typeck::write_ty_to_tcx
 use util::ppaux;
 use util::ppaux::Repr;

+use std::vec_ng::Vec;
 use syntax::ast;
 use syntax::codemap::Span;
 use syntax::print::pprust::pat_to_str;
@@ -54,12 +55,12 @@ fn resolve_type_vars_in_type(fcx: @FnCtxt, sp: Span, typ: ty::t)

 fn resolve_type_vars_in_types(fcx: @FnCtxt, sp: Span, tys: &[ty::t])
 -> Vec<ty::t> {
-tys.map(|t| {
+tys.iter().map(|t| {
 match resolve_type_vars_in_type(fcx, sp, *t) {
 Some(t1) => t1,
 None => ty::mk_err()
 }
-})
+}).collect()
 }

 fn resolve_method_map_entry(wbcx: &mut WbCtxt, sp: Span, id: ast::NodeId) {
@@ -122,7 +123,9 @@ fn resolve_vtable_map_entry(fcx: @FnCtxt, sp: Span, id: ast::NodeId) {
 origin: &vtable_origin) -> vtable_origin {
 match origin {
 &vtable_static(def_id, ref tys, origins) => {
-let r_tys = resolve_type_vars_in_types(fcx, sp, *tys);
+let r_tys = resolve_type_vars_in_types(fcx,
+sp,
+tys.as_slice());
 let r_origins = resolve_origins(fcx, sp, origins);
 vtable_static(def_id, r_tys, r_origins)
 }
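`resolve_type_vars_in_types` now iterates the borrowed slice and collects into the returned `Vec` instead of calling `.map()` on an owned vector. A compilable sketch of that shape, with a stand-in for `ty::mk_err()`:

    // Map each element of a borrowed slice into a new owned Vec,
    // substituting a fallback when the per-element lookup fails.
    fn resolve_all(tys: &[i64]) -> Vec<i64> {
        tys.iter().map(|&t| {
            match try_resolve(t) {
                Some(t1) => t1,
                None => -1, // stand-in for the error value
            }
        }).collect()
    }

    fn try_resolve(t: i64) -> Option<i64> {
        if t >= 0 { Some(t * 2) } else { None }
    }

    fn main() {
        assert_eq!(resolve_all(&[1, -3, 4]), vec![2, -1, 8]);
    }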
@@ -46,10 +46,11 @@ use syntax::opt_vec;
 use syntax::parse::token;
 use syntax::visit;

-use std::cell::RefCell;
 use collections::HashSet;
+use std::cell::RefCell;
 use std::rc::Rc;
-use std::vec;
+use std::vec_ng::Vec;
+use std::vec_ng;

 struct UniversalQuantificationResult {
 monotype: t,
@@ -355,7 +356,8 @@ impl CoherenceChecker {
 let new_generics = ty::Generics {
 type_param_defs:
 Rc::new(vec_ng::append(
-impl_poly_type.generics.type_param_defs().to_owned(),
+Vec::from_slice(impl_poly_type.generics
+.type_param_defs()),
 new_method_ty.generics.type_param_defs())),
 region_param_defs:
 impl_poly_type.generics.region_param_defs.clone()
@@ -722,7 +724,7 @@ impl CoherenceChecker {
 // We'll error out later. For now, just don't ICE.
 continue;
 }
-let method_def_id = impl_info.methods[0].def_id;
+let method_def_id = impl_info.methods.get(0).def_id;

 let self_type = self.get_self_type_for_implementation(*impl_info);
 match ty::get(self_type.ty).sty {
@@ -789,10 +791,10 @@ pub fn make_substs_for_receiver_types(tcx: ty::ctxt,
 num_trait_type_parameters + method.generics.type_param_defs().len();

 // the new method type will have the type parameters from the impl + method
-let combined_tps = vec::from_fn(num_method_type_parameters, |i| {
+let combined_tps = Vec::from_fn(num_method_type_parameters, |i| {
 if i < num_trait_type_parameters {
 // replace type parameters that come from trait with new value
-trait_ref.substs.tps[i]
+*trait_ref.substs.tps.get(i)
 } else {
 // replace type parameters that belong to method with another
 // type parameter, this time with the index adjusted
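Where the old code concatenated with `to_owned()` and `+`, the converted code builds a `Vec` from the first slice and appends the second via `vec_ng::append`. The closest present-day spelling, shown purely as an illustration with invented parameter lists:

    fn main() {
        let impl_params = ["A", "B"];
        let method_params = ["C"];
        // Concatenate two borrowed slices into one owned Vec, the role
        // played by `vec_ng::append(Vec::from_slice(..), ..)` in the diff.
        let mut combined: Vec<String> =
            impl_params.iter().map(|s| s.to_string()).collect();
        combined.extend(method_params.iter().map(|s| s.to_string()));
        assert_eq!(combined, vec!["A", "B", "C"]);
    }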
@@ -46,6 +46,8 @@ use util::ppaux::Repr;

 use std::rc::Rc;
 use std::vec;
+use std::vec_ng::Vec;
+use std::vec_ng;
 use syntax::abi::AbiSet;
 use syntax::ast::{RegionTyParamBound, TraitTyParamBound};
 use syntax::ast;
@@ -324,7 +326,8 @@ pub fn ensure_trait_methods(ccx: &CrateCtxt, trait_id: ast::NodeId) {
 let substs = substs {
 regions: ty::NonerasedRegions(rps_from_trait),
 self_ty: Some(self_param),
-tps: non_shifted_trait_tps + shifted_method_tps
+tps: vec_ng::append(Vec::from_slice(non_shifted_trait_tps),
+shifted_method_tps)
 };

 // create the type of `foo`, applying the substitution above
@@ -339,7 +342,8 @@ pub fn ensure_trait_methods(ccx: &CrateCtxt, trait_id: ast::NodeId) {
 let mut new_type_param_defs = Vec::new();
 let substd_type_param_defs =
 trait_ty_generics.type_param_defs.subst(tcx, &substs);
-new_type_param_defs.push_all(*substd_type_param_defs.borrow());
+new_type_param_defs.push_all(substd_type_param_defs.borrow()
+.as_slice());

 // add in the "self" type parameter
 let self_trait_def = get_trait_def(ccx, local_def(trait_id));
@@ -356,7 +360,8 @@ pub fn ensure_trait_methods(ccx: &CrateCtxt, trait_id: ast::NodeId) {

 // add in the type parameters from the method
 let substd_type_param_defs = m.generics.type_param_defs.subst(tcx, &substs);
-new_type_param_defs.push_all(*substd_type_param_defs.borrow());
+new_type_param_defs.push_all(substd_type_param_defs.borrow()
+.as_slice());

 debug!("static method {} type_param_defs={} ty={}, substs={}",
 m.def_id.repr(tcx),
@@ -495,7 +500,8 @@ fn convert_methods(ccx: &CrateCtxt,
 ty_param_bounds_and_ty {
 generics: ty::Generics {
 type_param_defs: Rc::new(vec_ng::append(
-rcvr_ty_generics.type_param_defs().to_owned(),
+Vec::from_slice(
+rcvr_ty_generics.type_param_defs()),
 m_ty_generics.type_param_defs())),
 region_param_defs: rcvr_ty_generics.region_param_defs.clone(),
 },
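The `push_all(... .as_slice())` calls above append every element of a borrowed slice to an existing `Vec`; in current Rust the same operation is `extend_from_slice`. A tiny sketch of the shape:

    fn main() {
        let mut new_type_param_defs: Vec<u8> = Vec::new();
        let substd: Vec<u8> = vec![1, 2, 3];
        // Append all elements of a borrowed slice, as push_all did in 2014.
        new_type_param_defs.extend_from_slice(substd.as_slice());
        assert_eq!(new_type_param_defs, vec![1, 2, 3]);
    }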
@@ -63,6 +63,7 @@ use util::common::indent;
 use util::ppaux::Repr;

 use std::result;
+use std::vec_ng::Vec;
 use syntax::ast::{Onceness, Purity};
 use syntax::ast;
 use syntax::opt_vec;
@@ -92,7 +93,7 @@ pub trait Combine {
 if as_.len() == bs.len() {
 result::fold_(as_.iter().zip(bs.iter())
 .map(|(a, b)| eq_tys(self, *a, *b)))
-.then(|| Ok(as_.to_owned()))
+.then(|| Ok(Vec::from_slice(as_)))
 } else {
 Err(ty::terr_ty_param_size(expected_found(self,
 as_.len(),
@@ -180,7 +181,7 @@ pub trait Combine {
 }
 }

-let tps = if_ok!(self.tps(as_.tps, bs.tps));
+let tps = if_ok!(self.tps(as_.tps.as_slice(), bs.tps.as_slice()));
 let self_ty = if_ok!(self.self_tys(as_.self_ty, bs.self_ty));
 let regions = if_ok!(relate_region_params(self,
 item_def_id,
@@ -409,7 +410,9 @@ pub fn super_fn_sigs<C:Combine>(this: &C, a: &ty::FnSig, b: &ty::FnSig) -> cres<
 return Err(ty::terr_variadic_mismatch(expected_found(this, a.variadic, b.variadic)));
 }

-let inputs = if_ok!(argvecs(this, a.inputs, b.inputs));
+let inputs = if_ok!(argvecs(this,
+a.inputs.as_slice(),
+b.inputs.as_slice()));
 let output = if_ok!(this.tys(a.output, b.output));
 Ok(FnSig {binder_id: a.binder_id,
 inputs: inputs,
@@ -155,10 +155,16 @@ impl<'f> Combine for Glb<'f> {
 fold_regions_in_sig(
 self.get_ref().infcx.tcx,
 &sig0,
-|r| generalize_region(self, snapshot,
-new_vars, sig0.binder_id,
-&a_map, a_vars, b_vars,
-r));
+|r| {
+generalize_region(self,
+snapshot,
+new_vars.as_slice(),
+sig0.binder_id,
+&a_map,
+a_vars.as_slice(),
+b_vars.as_slice(),
+r)
+});
 debug!("sig1 = {}", sig1.inf_str(self.get_ref().infcx));
 return Ok(sig1);

@@ -43,9 +43,11 @@ use middle::typeck::infer::lub::Lub;
 use middle::typeck::infer::unify::*;
 use middle::typeck::infer::sub::Sub;
 use middle::typeck::infer::to_str::InferStr;
-use collections::HashMap;
 use util::common::indenter;

+use collections::HashMap;
+use std::vec_ng::Vec;
+
 pub trait LatticeValue {
 fn sub(cf: &CombineFields, a: &Self, b: &Self) -> ures;
 fn lub(cf: &CombineFields, a: &Self, b: &Self) -> cres<Self>;
@@ -143,7 +143,7 @@ impl<'f> Combine for Lub<'f> {
 fold_regions_in_sig(
 self.get_ref().infcx.tcx,
 &sig0,
-|r| generalize_region(self, snapshot, new_vars,
+|r| generalize_region(self, snapshot, new_vars.as_slice(),
 sig0.binder_id, &a_map, r));
 return Ok(sig1);

@@ -21,6 +21,7 @@ pub use middle::typeck::infer::resolve::{resolve_ivar, resolve_all};
 pub use middle::typeck::infer::resolve::{resolve_nested_tvar};
 pub use middle::typeck::infer::resolve::{resolve_rvar};

+use collections::HashMap;
 use collections::SmallIntMap;
 use middle::ty::{TyVid, IntVid, FloatVid, RegionVid, Vid};
 use middle::ty;
@@ -37,9 +38,8 @@ use middle::typeck::infer::to_str::InferStr;
 use middle::typeck::infer::unify::{ValsAndBindings, Root};
 use middle::typeck::infer::error_reporting::ErrorReporting;
 use std::cell::{Cell, RefCell};
-use collections::HashMap;
 use std::result;
-use std::vec;
+use std::vec_ng::Vec;
 use syntax::ast::{MutImmutable, MutMutable};
 use syntax::ast;
 use syntax::codemap;
@@ -623,7 +623,7 @@ impl InferCtxt {
 }

 pub fn next_ty_vars(&self, n: uint) -> Vec<ty::t> {
-vec::from_fn(n, |_i| self.next_ty_var())
+Vec::from_fn(n, |_i| self.next_ty_var())
 }

 pub fn next_int_var_id(&self) -> IntVid {
@@ -660,7 +660,7 @@ impl InferCtxt {
 origin: RegionVariableOrigin,
 count: uint)
 -> Vec<ty::Region> {
-vec::from_fn(count, |_| self.next_region_var(origin))
+Vec::from_fn(count, |_| self.next_region_var(origin))
 }

 pub fn fresh_bound_region(&self, binder_id: ast::NodeId) -> ty::Region {
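`vec::from_fn(n, f)` built an owned vector by invoking `f` on each index, and the conversion moves this to the `Vec::from_fn` constructor. That constructor no longer exists in current Rust, so an illustrative equivalent (not the commit's API) maps an index range and collects; the `next_ty_var` stand-in below is hypothetical:

    fn next_ty_var(i: usize) -> u32 {
        // Stand-in for InferCtxt::next_ty_var, which takes no index; the
        // parameter here just makes the sketch deterministic.
        (i as u32) + 100
    }

    fn main() {
        let n: usize = 3;
        // Equivalent of Vec::from_fn(n, |i| next_ty_var(i)).
        let vars: Vec<u32> = (0..n).map(next_ty_var).collect();
        assert_eq!(vars, vec![100, 101, 102]);
    }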
@@ -27,6 +27,7 @@ use util::ppaux::{Repr};
 use std::cell::{Cell, RefCell};
 use std::uint;
 use std::vec;
+use std::vec_ng::Vec;
 use collections::{HashMap, HashSet};
 use syntax::ast;
 use syntax::opt_vec;
@@ -354,11 +355,11 @@ impl RegionVarBindings {
 None => {
 let var_origins = self.var_origins.borrow();
 self.tcx.sess.span_bug(
-var_origins.get()[rid.to_uint()].span(),
+var_origins.get().get(rid.to_uint()).span(),
 format!("attempt to resolve region variable before \
 values have been computed!"))
 }
-Some(ref values) => values[rid.to_uint()]
+Some(ref values) => *values.get(rid.to_uint())
 };

 debug!("RegionVarBindings: resolve_var({:?}={})={:?}",
@@ -457,7 +458,7 @@ impl RegionVarBindings {
 let mut result_index = 0;
 while result_index < result_set.len() {
 // nb: can't use uint::range() here because result_set grows
-let r = result_set[result_index];
+let r = *result_set.get(result_index);

 debug!("result_index={}, r={:?}", result_index, r);

@@ -466,18 +467,18 @@ impl RegionVarBindings {
 // nb: can't use uint::range() here as we move result_set
 let regs = {
 let undo_log = self.undo_log.borrow();
-match undo_log.get()[undo_index] {
-AddConstraint(ConstrainVarSubVar(ref a, ref b)) => {
+match undo_log.get().get(undo_index) {
+&AddConstraint(ConstrainVarSubVar(ref a, ref b)) => {
 Some((ReInfer(ReVar(*a)),
 ReInfer(ReVar(*b))))
 }
-AddConstraint(ConstrainRegSubVar(ref a, ref b)) => {
+&AddConstraint(ConstrainRegSubVar(ref a, ref b)) => {
 Some((*a, ReInfer(ReVar(*b))))
 }
-AddConstraint(ConstrainVarSubReg(ref a, ref b)) => {
+&AddConstraint(ConstrainVarSubReg(ref a, ref b)) => {
 Some((ReInfer(ReVar(*a)), *b))
 }
-AddConstraint(ConstrainRegSubReg(a, b)) => {
+&AddConstraint(ConstrainRegSubReg(a, b)) => {
 Some((a, b))
 }
 _ => {
@@ -563,7 +564,7 @@ impl RegionVarBindings {
 (ReInfer(ReVar(v_id)), _) | (_, ReInfer(ReVar(v_id))) => {
 let var_origins = self.var_origins.borrow();
 self.tcx.sess.span_bug(
-var_origins.get()[v_id.to_uint()].span(),
+var_origins.get().get(v_id.to_uint()).span(),
 format!("lub_concrete_regions invoked with \
 non-concrete regions: {:?}, {:?}", a, b));
 }
@@ -668,7 +669,7 @@ impl RegionVarBindings {
 (_, ReInfer(ReVar(v_id))) => {
 let var_origins = self.var_origins.borrow();
 self.tcx.sess.span_bug(
-var_origins.get()[v_id.to_uint()].span(),
+var_origins.get().get(v_id.to_uint()).span(),
 format!("glb_concrete_regions invoked with \
 non-concrete regions: {:?}, {:?}", a, b));
 }
@@ -782,14 +783,14 @@ impl RegionVarBindings {
 errors: &mut OptVec<RegionResolutionError>)
 -> Vec<VarValue> {
 let mut var_data = self.construct_var_data();
-self.expansion(var_data);
-self.contraction(var_data);
-self.collect_concrete_region_errors(errors);
-self.extract_values_and_collect_conflicts(var_data, errors)
+self.expansion(var_data.as_mut_slice());
+self.contraction(var_data.as_mut_slice());
+self.collect_concrete_region_errors(&mut *errors);
+self.extract_values_and_collect_conflicts(var_data.as_slice(), errors)
 }

 fn construct_var_data(&self) -> Vec<VarData> {
-vec::from_fn(self.num_vars(), |_| {
+Vec::from_fn(self.num_vars(), |_| {
 VarData {
 // All nodes are initially classified as contracting; during
 // the expansion phase, we will shift the classification for
@@ -1071,7 +1072,7 @@ impl RegionVarBindings {
 }
 }

-vec::from_fn(self.num_vars(), |idx| var_data[idx].value)
+Vec::from_fn(self.num_vars(), |idx| var_data[idx].value)
 }

 fn construct_graph(&self) -> RegionGraph {
@@ -1143,7 +1144,7 @@ impl RegionVarBindings {
 {
 let var_origins = self.var_origins.borrow();
 errors.push(SubSupConflict(
-var_origins.get()[node_idx.to_uint()],
+*var_origins.get().get(node_idx.to_uint()),
 lower_bound.origin,
 lower_bound.region,
 upper_bound.origin,
@@ -1156,7 +1157,7 @@ impl RegionVarBindings {

 let var_origins = self.var_origins.borrow();
 self.tcx.sess.span_bug(
-var_origins.get()[node_idx.to_uint()].span(),
+var_origins.get().get(node_idx.to_uint()).span(),
 format!("collect_error_for_expanding_node() could not find error \
 for var {:?}, lower_bounds={}, upper_bounds={}",
 node_idx,
@@ -1190,7 +1191,7 @@ impl RegionVarBindings {
 Err(_) => {
 let var_origins = self.var_origins.borrow();
 errors.push(SupSupConflict(
-var_origins.get()[node_idx.to_uint()],
+*var_origins.get().get(node_idx.to_uint()),
 upper_bound_1.origin,
 upper_bound_1.region,
 upper_bound_2.origin,
@@ -1203,7 +1204,7 @@ impl RegionVarBindings {

 let var_origins = self.var_origins.borrow();
 self.tcx.sess.span_bug(
-var_origins.get()[node_idx.to_uint()].span(),
+var_origins.get().get(node_idx.to_uint()).span(),
 format!("collect_error_for_contracting_node() could not find error \
 for var {:?}, upper_bounds={}",
 node_idx,
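In `resolve_regions` the locally built `Vec<VarData>` is handed to the mutation passes as `as_mut_slice()` and read back as a shared slice afterwards. A minimal sketch of that borrow pattern with hypothetical pass functions:

    // Hypothetical passes standing in for expansion()/contraction().
    fn expansion(var_data: &mut [i32]) {
        for v in var_data.iter_mut() { *v += 1; }
    }

    fn report(var_data: &[i32]) -> i32 {
        var_data.iter().sum()
    }

    fn main() {
        let mut var_data: Vec<i32> = vec![0, 0, 0];
        // Mutable borrow for the pass that updates values in place...
        expansion(var_data.as_mut_slice());
        // ...then a shared borrow for the pass that only reads them.
        assert_eq!(report(var_data.as_slice()), 3);
    }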
@@ -58,6 +58,7 @@ use middle::typeck::infer::unify::{Root, UnifyInferCtxtMethods};
 use util::common::{indent, indenter};
 use util::ppaux::ty_to_str;

+use std::vec_ng::Vec;
 use syntax::ast;

 pub static resolve_nested_tvar: uint = 0b0000000001;
@@ -17,6 +17,7 @@ use middle::typeck::infer::{Bounds, uok, ures};
 use middle::typeck::infer::InferCtxt;
 use middle::typeck::infer::to_str::InferStr;
 use std::cell::RefCell;
+use std::vec_ng::Vec;
 use syntax::ast;

 #[deriving(Clone)]
@@ -72,6 +72,7 @@ use util::nodemap::{DefIdMap, NodeMap};

 use std::cell::RefCell;
 use std::rc::Rc;
+use std::vec_ng::Vec;
 use collections::List;
 use syntax::codemap::Span;
 use syntax::print::pprust::*;
@@ -12,7 +12,7 @@
 use middle::ty;

 use std::cell::Cell;
-use std::vec;
+use std::vec_ng::Vec;
 use syntax::ast;
 use syntax::codemap::Span;
 use syntax::opt_vec::OptVec;
@@ -69,7 +69,7 @@ impl RegionScope for BindingRscope {
 -> Result<Vec<ty::Region> , ()> {
 let idx = self.anon_bindings.get();
 self.anon_bindings.set(idx + count);
-Ok(vec::from_fn(count, |i| ty::ReLateBound(self.binder_id,
+Ok(Vec::from_fn(count, |i| ty::ReLateBound(self.binder_id,
 ty::BrAnon(idx + i))))
 }
 }
@@ -196,8 +196,8 @@ use collections::HashMap;
 use arena;
 use arena::Arena;
 use middle::ty;
-use std::vec;
 use std::fmt;
+use std::vec_ng::Vec;
 use syntax::ast;
 use syntax::ast_util;
 use syntax::opt_vec;
@@ -561,7 +561,7 @@ impl<'a> ConstraintContext<'a> {
 // variance not yet inferred, so return a symbolic
 // variance.
 let InferredIndex(index) = self.inferred_index(param_def_id.node);
-self.terms_cx.inferred_infos[index].term
+self.terms_cx.inferred_infos.get(index).term
 } else {
 // Parameter on an item defined within another crate:
 // variance already inferred, just look it up.
@@ -749,7 +749,7 @@ impl<'a> ConstraintContext<'a> {
 let variance_decl =
 self.declared_variance(p.def_id, def_id, TypeParam, i);
 let variance_i = self.xform(variance, variance_decl);
-self.add_constraints_from_ty(substs.tps[i], variance_i);
+self.add_constraints_from_ty(*substs.tps.get(i), variance_i);
 }

 match substs.regions {
@@ -842,7 +842,7 @@ struct SolveContext<'a> {

 fn solve_constraints(constraints_cx: ConstraintContext) {
 let ConstraintContext { terms_cx, constraints, .. } = constraints_cx;
-let solutions = vec::from_elem(terms_cx.num_inferred(), ty::Bivariant);
+let solutions = Vec::from_elem(terms_cx.num_inferred(), ty::Bivariant);
 let mut solutions_cx = SolveContext {
 terms_cx: terms_cx,
 constraints: constraints,
@@ -867,18 +867,21 @@ impl<'a> SolveContext<'a> {
 let Constraint { inferred, variance: term } = *constraint;
 let InferredIndex(inferred) = inferred;
 let variance = self.evaluate(term);
-let old_value = self.solutions[inferred];
+let old_value = *self.solutions.get(inferred);
 let new_value = glb(variance, old_value);
 if old_value != new_value {
 debug!("Updating inferred {} (node {}) \
 from {:?} to {:?} due to {}",
 inferred,
-self.terms_cx.inferred_infos[inferred].param_id,
+self.terms_cx
+.inferred_infos
+.get(inferred)
+.param_id,
 old_value,
 new_value,
 term.to_str());

-self.solutions[inferred] = new_value;
+*self.solutions.get_mut(inferred) = new_value;
 changed = true;
 }
 }
@@ -901,25 +904,28 @@ impl<'a> SolveContext<'a> {
 let mut index = 0;
 let num_inferred = self.terms_cx.num_inferred();
 while index < num_inferred {
-let item_id = inferred_infos[index].item_id;
+let item_id = inferred_infos.get(index).item_id;
 let mut item_variances = ty::ItemVariances {
 self_param: None,
 type_params: opt_vec::Empty,
 region_params: opt_vec::Empty
 };
 while index < num_inferred &&
-inferred_infos[index].item_id == item_id {
-let info = &inferred_infos[index];
+inferred_infos.get(index).item_id == item_id {
+let info = inferred_infos.get(index);
 match info.kind {
 SelfParam => {
 assert!(item_variances.self_param.is_none());
-item_variances.self_param = Some(solutions[index]);
+item_variances.self_param =
+Some(*solutions.get(index));
 }
 TypeParam => {
-item_variances.type_params.push(solutions[index]);
+item_variances.type_params
+.push(*solutions.get(index));
 }
 RegionParam => {
-item_variances.region_params.push(solutions[index]);
+item_variances.region_params
+.push(*solutions.get(index));
 }
 }
 index += 1;
@@ -958,7 +964,7 @@ impl<'a> SolveContext<'a> {
 }

 InferredTerm(InferredIndex(index)) => {
-self.solutions[index]
+*self.solutions.get(index)
 }
 }
 }
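The solver loop above reads elements with `*solutions.get(i)` and writes them with `*solutions.get_mut(i) = ...` now that plain indexing on the new `Vec` is gone. A sketch of that read-modify-write loop using today's Option-returning accessors and a stand-in `glb`:

    fn glb(a: i32, b: i32) -> i32 {
        // Stand-in for the variance lattice operation: take the minimum.
        a.min(b)
    }

    fn main() {
        let mut solutions = vec![3, 5, 2];
        let updates = [(0usize, 1), (1, 7), (2, 0)];
        for &(index, variance) in updates.iter() {
            let old_value = *solutions.get(index).unwrap();
            let new_value = glb(variance, old_value);
            if old_value != new_value {
                // Mutable access to a single element, like get_mut in the diff.
                *solutions.get_mut(index).unwrap() = new_value;
            }
        }
        assert_eq!(solutions, vec![1, 5, 0]);
    }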
@@ -16,6 +16,7 @@ use syntax::visit;
 use syntax::visit::Visitor;

 use std::local_data;
+use std::vec_ng::Vec;

 use time;

@@ -22,6 +22,8 @@ use middle::ty::{ty_nil, ty_param, ty_ptr, ty_rptr, ty_self, ty_tup};
 use middle::ty::{ty_uniq, ty_trait, ty_int, ty_uint, ty_unboxed_vec, ty_infer};
 use middle::ty;
 use middle::typeck;

+use std::vec_ng::Vec;
 use syntax::abi::AbiSet;
 use syntax::ast_map;
 use syntax::codemap::{Span, Pos};
@@ -476,12 +478,17 @@ pub fn ty_to_str(cx: ctxt, typ: t) -> ~str {
 ty_self(..) => ~"Self",
 ty_enum(did, ref substs) | ty_struct(did, ref substs) => {
 let base = ty::item_path_str(cx, did);
-parameterized(cx, base, &substs.regions, substs.tps, did, false)
+parameterized(cx,
+base,
+&substs.regions,
+substs.tps.as_slice(),
+did,
+false)
 }
 ty_trait(did, ref substs, s, mutbl, ref bounds) => {
 let base = ty::item_path_str(cx, did);
 let ty = parameterized(cx, base, &substs.regions,
-substs.tps, did, true);
+substs.tps.as_slice(), did, true);
 let bound_sep = if bounds.is_empty() { "" } else { ":" };
 let bound_str = bounds.repr(cx);
 format!("{}{}{}{}{}", trait_store_to_str(cx, s), mutability_to_str(mutbl), ty,
@@ -521,7 +528,7 @@ pub fn parameterized(cx: ctxt,
 let num_defaults = if has_defaults {
 // We should have a borrowed version of substs instead of cloning.
 let mut substs = ty::substs {
-tps: tps.to_owned(),
+tps: Vec::from_slice(tps),
 regions: regions.clone(),
 self_ty: None
 };
@@ -612,7 +619,7 @@ impl<T:Repr> Repr for OptVec<T> {
 // autoderef cannot convert the &[T] handler
 impl<T:Repr> Repr for Vec<T> {
 fn repr(&self, tcx: ctxt) -> ~str {
-repr_vec(tcx, *self)
+repr_vec(tcx, self.as_slice())
 }
 }

@@ -989,10 +996,10 @@ impl UserString for ty::TraitRef {
 let mut all_tps = self.substs.tps.clone();
 for &t in self.substs.self_ty.iter() { all_tps.push(t); }
 parameterized(tcx, base, &self.substs.regions,
-all_tps, self.def_id, true)
+all_tps.as_slice(), self.def_id, true)
 } else {
 parameterized(tcx, base, &self.substs.regions,
-self.substs.tps, self.def_id, true)
+self.substs.tps.as_slice(), self.def_id, true)
 }
 }
 }
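The `Repr for Vec<T>` impl now forwards to the slice-based helper through `self.as_slice()`, since the new `Vec` no longer coerces to `&[T]` at that call site. An illustrative modern-Rust version of the same delegation, with a made-up trait in place of `Repr`:

    // A made-up stand-in for the Repr trait in the diff.
    trait Describe {
        fn describe(&self) -> String;
    }

    fn describe_slice<T: std::fmt::Debug>(xs: &[T]) -> String {
        // Slice-based helper playing the role of repr_vec.
        format!("{} items: {:?}", xs.len(), xs)
    }

    impl<T: std::fmt::Debug> Describe for Vec<T> {
        fn describe(&self) -> String {
            // Delegate to the slice helper, as the diff does with as_slice().
            describe_slice(self.as_slice())
        }
    }

    fn main() {
        let v = vec![1, 2, 3];
        assert_eq!(v.describe(), "3 items: [1, 2, 3]");
    }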
@@ -14,8 +14,8 @@

 use std::iter::range_step;
 use std::num::Zero;
-use std::vec;
 use std::vec::bytes::{MutableByteVector, copy_memory};
+use std::vec_ng::Vec;
 use serialize::hex::ToHex;

 /// Write a u32 into a vector, which must be 4 bytes long. The value is written in big-endian
@@ -254,15 +254,15 @@ pub trait Digest {
 /// Convenience function that retrieves the result of a digest as a
 /// newly allocated vec of bytes.
 fn result_bytes(&mut self) -> Vec<u8> {
-let mut buf = vec::from_elem((self.output_bits()+7)/8, 0u8);
-self.result(buf);
+let mut buf = Vec::from_elem((self.output_bits()+7)/8, 0u8);
+self.result(buf.as_mut_slice());
 buf
 }

 /// Convenience function that retrieves the result of a digest as a
 /// ~str in hexadecimal format.
 fn result_str(&mut self) -> ~str {
-self.result_bytes().to_hex()
+self.result_bytes().as_slice().to_hex()
 }
 }

@@ -527,6 +527,7 @@ mod tests {
 use super::{Digest, Sha256, FixedBuffer};
 use std::num::Bounded;
 use std::vec;
+use std::vec_ng::Vec;
 use std::rand::isaac::IsaacRng;
 use std::rand::Rng;
 use serialize::hex::FromHex;
@@ -594,7 +595,7 @@ mod tests {

 let mut sh = ~Sha256::new();

-test_hash(sh, tests);
+test_hash(sh, tests.as_slice());
 }

 /// Feed 1,000,000 'a's into the digest with varying input sizes and check that the result is
@@ -619,7 +620,12 @@ mod tests {
 let result_bytes = digest.result_bytes();

 assert_eq!(expected, result_str.as_slice());
-assert_eq!(expected.from_hex().unwrap(), result_bytes);
+
+let expected_vec: Vec<u8> = expected.from_hex()
+.unwrap()
+.move_iter()
+.collect();
+assert_eq!(expected_vec, result_bytes);
 }

 #[test]
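The test now converts the hex decoder's output into a `Vec<u8>` before comparing, which is what the `.move_iter().collect()` chain does; `move_iter` corresponds to today's `into_iter`. A small self-contained sketch of that convert-then-compare step, with a toy decoder standing in for `serialize::hex::FromHex`:

    // Toy stand-in for from_hex(): decode pairs of hex digits to bytes.
    fn from_hex(s: &str) -> Option<Vec<u8>> {
        s.as_bytes()
            .chunks(2)
            .map(|pair| {
                let hi = (pair[0] as char).to_digit(16)?;
                let lo = (pair[1] as char).to_digit(16)?;
                Some((hi * 16 + lo) as u8)
            })
            .collect()
    }

    fn main() {
        let result_bytes: Vec<u8> = vec![0xde, 0xad, 0xbe, 0xef];
        // Convert the decoder's owned output into Vec<u8> and compare,
        // mirroring the diff's move_iter().collect() step.
        let expected_vec: Vec<u8> = from_hex("deadbeef")
            .unwrap()
            .into_iter()
            .collect();
        assert_eq!(expected_vec, result_bytes);
    }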