Auto merge of #55008 - ljedrz:cleanup_rustc_driver, r=estebank
Cleanup rustc/driver

- improve/remove allocations
- simplify `profile::trace::cons*`
- don't sort `base` if it only has one element
- use `Cow<str>` where applicable
- use `unwrap_or_else` with function calls
- remove an explicit `return`, add an explicit `None`
- remove lifetimes from `const`s
- improve common patterns
- improve macro calls
- whitespace & formatting fixes
This commit is contained in: f02768b685
5 changed files with 207 additions and 245 deletions
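One of the headline items above is replacing `unwrap_or(...)` with `unwrap_or_else(|| ...)` wherever the fallback is a function call. As a minimal standalone sketch (the names here are illustrative, not taken from the diff): `unwrap_or` evaluates its argument eagerly even when the `Option` is `Some`, while `unwrap_or_else` only runs the closure on the `None` path.

```rust
// Hypothetical example; `expensive_default` is not a rustc function.
fn expensive_default() -> String {
    // Imagine this allocates or does real work.
    "fallback".repeat(1_000)
}

fn main() {
    let name: Option<String> = Some("rustc".to_string());

    // Eager: expensive_default() runs even though `name` is Some.
    let eager = name.clone().unwrap_or(expensive_default());

    // Lazy: the closure (here a plain fn item) only runs if `name` is None.
    let lazy = name.unwrap_or_else(expensive_default);

    assert_eq!(eager, lazy);
}
```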
@@ -44,7 +44,7 @@ use serialize::json;
 
 use std::any::Any;
 use std::env;
-use std::ffi::{OsStr, OsString};
+use std::ffi::OsString;
 use std::fs;
 use std::io::{self, Write};
 use std::iter;
@@ -1021,6 +1021,7 @@ where
         .cloned()
         .collect();
     missing_fragment_specifiers.sort();
+
     for span in missing_fragment_specifiers {
         let lint = lint::builtin::MISSING_FRAGMENT_SPECIFIER;
         let msg = "missing fragment specifier";
@@ -1472,7 +1473,7 @@ fn write_out_deps(sess: &Session, outputs: &OutputFilenames, out_filenames: &[Pa
             .collect();
         let mut file = fs::File::create(&deps_filename)?;
         for path in out_filenames {
-            write!(file, "{}: {}\n\n", path.display(), files.join(" "))?;
+            writeln!(file, "{}: {}\n", path.display(), files.join(" "))?;
         }
 
         // Emit a fake target for each input file to the compilation. This
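The hunk above swaps `write!` with a trailing `"\n"` for `writeln!`, which appends the newline itself. A small sketch of the equivalence, writing to a `Vec<u8>` rather than the compiler's dep-info file:

```rust
use std::io::Write;

fn main() -> std::io::Result<()> {
    let mut a = Vec::new();
    let mut b = Vec::new();

    // These two produce identical bytes: writeln! adds the final '\n'.
    write!(a, "{}: {}\n\n", "target", "dep.rs")?;
    writeln!(b, "{}: {}\n", "target", "dep.rs")?;

    assert_eq!(a, b);
    Ok(())
}
```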
@@ -1484,15 +1485,12 @@ fn write_out_deps(sess: &Session, outputs: &OutputFilenames, out_filenames: &[Pa
         Ok(())
     })();
 
-    match result {
-        Ok(()) => {}
-        Err(e) => {
-            sess.fatal(&format!(
-                "error writing dependencies to `{}`: {}",
-                deps_filename.display(),
-                e
-            ));
-        }
+    if let Err(e) = result {
+        sess.fatal(&format!(
+            "error writing dependencies to `{}`: {}",
+            deps_filename.display(),
+            e
+        ));
     }
 }
 
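When only the error arm does any work, `if let Err(e) = ...` says so directly and drops the empty `Ok(())` arm. A self-contained sketch of the same rewrite (the `save` function is made up for illustration):

```rust
fn save() -> Result<(), String> {
    Err("disk full".to_string())
}

fn main() {
    // Before: a match whose Ok arm does nothing.
    match save() {
        Ok(()) => {}
        Err(e) => eprintln!("error writing dependencies: {}", e),
    }

    // After: `if let` expresses "only the error path matters".
    if let Err(e) = save() {
        eprintln!("error writing dependencies: {}", e);
    }
}
```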
@@ -1520,6 +1518,7 @@ pub fn collect_crate_types(session: &Session, attrs: &[ast::Attribute]) -> Vec<c
                     Symbol::intern("proc-macro"),
                     Symbol::intern("bin")
                 ];
+
                 if let ast::MetaItemKind::NameValue(spanned) = a.meta().unwrap().node {
                     let span = spanned.span;
                     let lev_candidate = find_best_match_for_name(
@@ -1551,7 +1550,7 @@ pub fn collect_crate_types(session: &Session, attrs: &[ast::Attribute]) -> Vec<c
                     }
                     None
                 }
-                _ => {
+                None => {
                     session
                         .struct_span_err(a.span, "`crate_type` requires a value")
                         .note("for example: `#![crate_type=\"lib\"]`")
@@ -1581,25 +1580,26 @@ pub fn collect_crate_types(session: &Session, attrs: &[ast::Attribute]) -> Vec<c
             base.push(::rustc_codegen_utils::link::default_output_for_target(
                 session,
             ));
+        } else {
+            base.sort();
+            base.dedup();
         }
-        base.sort();
-        base.dedup();
     }
 
-    base.into_iter()
-        .filter(|crate_type| {
-            let res = !::rustc_codegen_utils::link::invalid_output_for_target(session, *crate_type);
+    base.retain(|crate_type| {
+        let res = !::rustc_codegen_utils::link::invalid_output_for_target(session, *crate_type);
 
         if !res {
             session.warn(&format!(
                 "dropping unsupported crate type `{}` for target `{}`",
                 *crate_type, session.opts.target_triple
            ));
        }
 
        res
-        })
-        .collect()
+    });
+
+    base
 }
 
 pub fn compute_crate_disambiguator(session: &Session) -> CrateDisambiguator {
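The `into_iter().filter(..).collect()` chain above rebuilds the vector, while `Vec::retain` filters in place and keeps the original allocation. A hedged sketch of the difference on an unrelated toy vector:

```rust
fn main() {
    // Hypothetical predicate; not the compiler's real target check.
    let is_supported = |t: &&str| *t != "dylib";

    // Before: consume the Vec and rebuild it, allocating a new one.
    let kinds = vec!["bin", "dylib", "rlib"];
    let rebuilt: Vec<&str> = kinds.into_iter().filter(|t| is_supported(t)).collect();

    // After: retain filters in place, so no new Vec is allocated.
    let mut kinds = vec!["bin", "dylib", "rlib"];
    kinds.retain(|t| is_supported(t));

    assert_eq!(rebuilt, kinds);
}
```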
@@ -1650,17 +1650,14 @@ pub fn build_output_filenames(
             // "-" as input file will cause the parser to read from stdin so we
             // have to make up a name
             // We want to toss everything after the final '.'
-            let dirpath = match *odir {
-                Some(ref d) => d.clone(),
-                None => PathBuf::new(),
-            };
+            let dirpath = (*odir).as_ref().cloned().unwrap_or_default();
 
             // If a crate name is present, we use it as the link name
             let stem = sess.opts
                 .crate_name
                 .clone()
                 .or_else(|| attr::find_crate_name(attrs).map(|n| n.to_string()))
-                .unwrap_or(input.filestem());
+                .unwrap_or_else(|| input.filestem());
 
             OutputFilenames {
                 out_directory: dirpath,
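The `match` on `*odir` collapses to `unwrap_or_default()` because `PathBuf::default()` is the empty path, so spelling out the `None` arm adds nothing. A minimal sketch of the same shape, outside the compiler:

```rust
use std::path::PathBuf;

fn main() {
    let odir: Option<PathBuf> = None;

    // Before: spell out both arms by hand.
    let verbose = match odir {
        Some(ref d) => d.clone(),
        None => PathBuf::new(),
    };

    // After: PathBuf implements Default, and its default is the empty path.
    let concise = odir.as_ref().cloned().unwrap_or_default();

    assert_eq!(verbose, concise);
    assert_eq!(concise, PathBuf::new());
}
```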
@@ -1693,13 +1690,11 @@ pub fn build_output_filenames(
                 sess.warn("ignoring -C extra-filename flag due to -o flag");
             }
 
-            let cur_dir = Path::new("");
-
             OutputFilenames {
-                out_directory: out_file.parent().unwrap_or(cur_dir).to_path_buf(),
+                out_directory: out_file.parent().unwrap_or_else(|| Path::new("")).to_path_buf(),
                 out_filestem: out_file
                     .file_stem()
-                    .unwrap_or(OsStr::new(""))
+                    .unwrap_or_default()
                     .to_str()
                     .unwrap()
                     .to_string(),
@@ -89,6 +89,7 @@ use rustc_codegen_utils::codegen_backend::CodegenBackend;
 use serialize::json::ToJson;
 
 use std::any::Any;
+use std::borrow::Cow;
 use std::cmp::max;
 use std::default::Default;
 use std::env::consts::{DLL_PREFIX, DLL_SUFFIX};
@@ -136,9 +137,7 @@ pub mod target_features {
                           codegen_backend: &dyn CodegenBackend) {
     let tf = Symbol::intern("target_feature");
 
-    for feat in codegen_backend.target_features(sess) {
-        cfg.insert((tf, Some(feat)));
-    }
+    cfg.extend(codegen_backend.target_features(sess).into_iter().map(|feat| (tf, Some(feat))));
 
     if sess.crt_static_feature() {
         cfg.insert((tf, Some(Symbol::intern("crt-static"))));
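The push loop over target features becomes a single `extend` call fed by a mapped iterator. A sketch of the same pattern with a plain `HashSet` standing in for the compiler's cfg set (the feature names are invented):

```rust
use std::collections::HashSet;

fn main() {
    let features = vec!["sse2", "avx"];
    let tf = "target_feature";

    // Before: insert each element in an explicit loop.
    let mut looped = HashSet::new();
    for feat in features.clone() {
        looped.insert((tf, Some(feat)));
    }

    // After: extend with a mapped iterator; same contents, no loop body.
    let mut extended = HashSet::new();
    extended.extend(features.into_iter().map(|feat| (tf, Some(feat))));

    assert_eq!(looped, extended);
}
```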
@@ -152,21 +151,14 @@ pub const EXIT_SUCCESS: isize = 0;
 /// Exit status code used for compilation failures and invalid flags.
 pub const EXIT_FAILURE: isize = 1;
 
-const BUG_REPORT_URL: &'static str = "https://github.com/rust-lang/rust/blob/master/CONTRIBUTING.\
+const BUG_REPORT_URL: &str = "https://github.com/rust-lang/rust/blob/master/CONTRIBUTING.\
                               md#bug-reports";
 
-const ICE_REPORT_COMPILER_FLAGS: &'static [&'static str] = &[
-    "Z",
-    "C",
-    "crate-type",
-];
-const ICE_REPORT_COMPILER_FLAGS_EXCLUDE: &'static [&'static str] = &[
-    "metadata",
-    "extra-filename",
-];
-const ICE_REPORT_COMPILER_FLAGS_STRIP_VALUE: &'static [&'static str] = &[
-    "incremental",
-];
+const ICE_REPORT_COMPILER_FLAGS: &[&str] = &["Z", "C", "crate-type"];
+
+const ICE_REPORT_COMPILER_FLAGS_EXCLUDE: &[&str] = &["metadata", "extra-filename"];
+
+const ICE_REPORT_COMPILER_FLAGS_STRIP_VALUE: &[&str] = &["incremental"];
 
 pub fn abort_on_err<T>(result: Result<T, CompileIncomplete>, sess: &Session) -> T {
     match result {
@@ -195,14 +187,16 @@ pub fn run<F>(run_compiler: F) -> isize
             }
             None => {
                 let emitter =
-                    errors::emitter::EmitterWriter::stderr(errors::ColorConfig::Auto,
-                                                           None,
-                                                           true,
-                                                           false);
+                    errors::emitter::EmitterWriter::stderr(
+                        errors::ColorConfig::Auto,
+                        None,
+                        true,
+                        false
+                    );
                 let handler = errors::Handler::with_emitter(true, false, Box::new(emitter));
                 handler.emit(&MultiSpan::new(),
                              "aborting due to previous error(s)",
                              errors::Level::Fatal);
                 panic::resume_unwind(Box::new(errors::FatalErrorMarker));
             }
         }
@@ -224,15 +218,10 @@ fn load_backend_from_dylib(path: &Path) -> fn() -> Box<dyn CodegenBackend> {
     // available for future dynamic libraries opened. This is currently used by
     // loading LLVM and then making its symbols available for other dynamic
     // libraries.
-    let lib = match DynamicLibrary::open_global_now(path) {
-        Ok(lib) => lib,
-        Err(err) => {
-            let err = format!("couldn't load codegen backend {:?}: {:?}",
-                              path,
-                              err);
-            early_error(ErrorOutputType::default(), &err);
-        }
-    };
+    let lib = DynamicLibrary::open_global_now(path).unwrap_or_else(|err| {
+        let err = format!("couldn't load codegen backend {:?}: {:?}", path, err);
+        early_error(ErrorOutputType::default(), &err);
+    });
     unsafe {
         match lib.symbol("__rustc_codegen_backend") {
             Ok(f) => {
@@ -328,37 +317,30 @@ fn get_codegen_sysroot(backend_name: &str) -> fn() -> Box<dyn CodegenBackend> {
     let sysroot = sysroot_candidates.iter()
         .map(|sysroot| {
             let libdir = filesearch::relative_target_lib_path(&sysroot, &target);
-            sysroot.join(libdir)
-                .with_file_name(option_env!("CFG_CODEGEN_BACKENDS_DIR")
-                .unwrap_or("codegen-backends"))
+            sysroot.join(libdir).with_file_name(
+                option_env!("CFG_CODEGEN_BACKENDS_DIR").unwrap_or("codegen-backends"))
         })
         .filter(|f| {
             info!("codegen backend candidate: {}", f.display());
            f.exists()
        })
        .next();
-    let sysroot = match sysroot {
-        Some(path) => path,
-        None => {
-            let candidates = sysroot_candidates.iter()
-                .map(|p| p.display().to_string())
-                .collect::<Vec<_>>()
-                .join("\n* ");
-            let err = format!("failed to find a `codegen-backends` folder \
-                               in the sysroot candidates:\n* {}", candidates);
-            early_error(ErrorOutputType::default(), &err);
-        }
-    };
+    let sysroot = sysroot.unwrap_or_else(|| {
+        let candidates = sysroot_candidates.iter()
+            .map(|p| p.display().to_string())
+            .collect::<Vec<_>>()
+            .join("\n* ");
+        let err = format!("failed to find a `codegen-backends` folder \
+                           in the sysroot candidates:\n* {}", candidates);
+        early_error(ErrorOutputType::default(), &err);
+    });
     info!("probing {} for a codegen backend", sysroot.display());
 
-    let d = match sysroot.read_dir() {
-        Ok(d) => d,
-        Err(e) => {
-            let err = format!("failed to load default codegen backend, couldn't \
-                               read `{}`: {}", sysroot.display(), e);
-            early_error(ErrorOutputType::default(), &err);
-        }
-    };
+    let d = sysroot.read_dir().unwrap_or_else(|e| {
+        let err = format!("failed to load default codegen backend, couldn't \
+                           read `{}`: {}", sysroot.display(), e);
+        early_error(ErrorOutputType::default(), &err);
+    });
 
     let mut file: Option<PathBuf> = None;
 
@@ -378,8 +360,8 @@ fn get_codegen_sysroot(backend_name: &str) -> fn() -> Box<dyn CodegenBackend> {
         }
         if let Some(ref prev) = file {
             let err = format!("duplicate codegen backends found\n\
                                first: {}\n\
                                second: {}\n\
             ", prev.display(), path.display());
             early_error(ErrorOutputType::default(), &err);
         }
@@ -391,7 +373,7 @@ fn get_codegen_sysroot(backend_name: &str) -> fn() -> Box<dyn CodegenBackend> {
         None => {
             let err = format!("failed to load default codegen backend for `{}`, \
                                no appropriate codegen dylib found in `{}`",
                               backend_name, sysroot.display());
             early_error(ErrorOutputType::default(), &err);
         }
     }
@@ -578,7 +560,7 @@ pub fn set_sigpipe_handler() {
     unsafe {
         // Set the SIGPIPE signal handler, so that an EPIPE
         // will cause rustc to terminate, as expected.
-        assert!(libc::signal(libc::SIGPIPE, libc::SIG_DFL) != libc::SIG_ERR);
+        assert_ne!(libc::signal(libc::SIGPIPE, libc::SIG_DFL), libc::SIG_ERR);
     }
 }
 
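`assert_ne!(a, b)` checks the same condition as `assert!(a != b)` but prints both operands when it fails, which is why the diff prefers it here and in the flowgraph code further down. A tiny sketch with made-up values:

```rust
fn main() {
    let parent = 7;
    let node_id = 3;

    // Both check the same condition...
    assert!(node_id != parent);
    // ...but on failure assert_ne! also reports the two values it compared.
    assert_ne!(node_id, parent);
}
```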
@@ -996,7 +978,7 @@ impl RustcDefaultCalls {
                        input: &Input)
                        -> Compilation {
         let r = matches.opt_strs("Z");
-        if r.contains(&("ls".to_string())) {
+        if r.iter().any(|s| *s == "ls") {
             match input {
                 &Input::File(ref ifile) => {
                     let path = &(*ifile);
@@ -1015,7 +997,7 @@ impl RustcDefaultCalls {
             return Compilation::Stop;
         }
 
-        return Compilation::Continue;
+        Compilation::Continue
     }
 
 
@@ -1028,7 +1010,7 @@ impl RustcDefaultCalls {
         use rustc::session::config::PrintRequest::*;
         // PrintRequest::NativeStaticLibs is special - printed during linking
         // (empty iterator returns true)
-        if sess.opts.prints.iter().all(|&p| p==PrintRequest::NativeStaticLibs) {
+        if sess.opts.prints.iter().all(|&p| p == PrintRequest::NativeStaticLibs) {
             return Compilation::Continue;
         }
 
@@ -1055,10 +1037,8 @@ impl RustcDefaultCalls {
                 Sysroot => println!("{}", sess.sysroot().display()),
                 TargetSpec => println!("{}", sess.target.target.to_json().pretty()),
                 FileNames | CrateName => {
-                    let input = match input {
-                        Some(input) => input,
-                        None => early_error(ErrorOutputType::default(), "no input file provided"),
-                    };
+                    let input = input.unwrap_or_else(||
+                        early_error(ErrorOutputType::default(), "no input file provided"));
                     let attrs = attrs.as_ref().unwrap();
                     let t_outputs = driver::build_output_filenames(input, odir, ofile, attrs, sess);
                     let id = rustc_codegen_utils::link::find_crate_name(Some(sess), attrs, input);
@@ -1074,18 +1054,14 @@ impl RustcDefaultCalls {
                             &id,
                             &t_outputs
                         );
-                        println!("{}",
-                                 fname.file_name()
-                                      .unwrap()
-                                      .to_string_lossy());
+                        println!("{}", fname.file_name().unwrap().to_string_lossy());
                     }
                 }
                 Cfg => {
                     let allow_unstable_cfg = UnstableFeatures::from_environment()
                         .is_nightly_build();
 
-                    let mut cfgs = Vec::new();
-                    for &(name, ref value) in sess.parse_sess.config.iter() {
+                    let mut cfgs = sess.parse_sess.config.iter().filter_map(|&(name, ref value)| {
                         let gated_cfg = GatedCfg::gate(&ast::MetaItem {
                             ident: ast::Path::from_ident(ast::Ident::with_empty_ctxt(name)),
                             node: ast::MetaItemKind::Word,
@@ -1104,16 +1080,16 @@ impl RustcDefaultCalls {
                         let value = value.as_ref().map(|s| s.as_ref());
                         if name != "target_feature" || value != Some("crt-static") {
                             if !allow_unstable_cfg && gated_cfg.is_some() {
-                                continue;
+                                return None
                             }
                         }
 
-                        cfgs.push(if let Some(value) = value {
-                            format!("{}=\"{}\"", name, value)
+                        if let Some(value) = value {
+                            Some(format!("{}=\"{}\"", name, value))
                         } else {
-                            name.to_string()
-                        });
-                    }
+                            Some(name.to_string())
+                        }
+                    }).collect::<Vec<String>>();
 
                     cfgs.sort();
                     for cfg in cfgs {
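The `Cfg` branch above turns a mutable `Vec` plus a `for` loop with `continue`/`push` into a single `filter_map(..).collect()`: returning `None` skips an entry and `Some(..)` keeps it. A self-contained sketch of that transformation on an invented cfg list:

```rust
fn main() {
    // Hypothetical (name, value) pairs, not the real parse session config.
    let config = vec![("unix", None), ("feature", Some("serde")), ("debug_assertions", None)];

    // Before: a mutable Vec plus a loop that pushes or `continue`s.
    let mut pushed = Vec::new();
    for &(name, value) in config.iter() {
        if name == "debug_assertions" {
            continue;
        }
        pushed.push(if let Some(value) = value {
            format!("{}=\"{}\"", name, value)
        } else {
            name.to_string()
        });
    }

    // After: filter_map returns None to skip and Some(..) to keep.
    let collected: Vec<String> = config
        .iter()
        .filter_map(|&(name, value)| {
            if name == "debug_assertions" {
                return None;
            }
            Some(if let Some(value) = value {
                format!("{}=\"{}\"", name, value)
            } else {
                name.to_string()
            })
        })
        .collect();

    assert_eq!(pushed, collected);
}
```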
@@ -1150,9 +1126,8 @@ fn commit_date_str() -> Option<&'static str> {
 pub fn version(binary: &str, matches: &getopts::Matches) {
     let verbose = matches.opt_present("verbose");
 
-    println!("{} {}",
-             binary,
-             option_env!("CFG_VERSION").unwrap_or("unknown version"));
+    println!("{} {}", binary, option_env!("CFG_VERSION").unwrap_or("unknown version"));
+
     if verbose {
         fn unw(x: Option<&str>) -> &str {
             x.unwrap_or("unknown")
@@ -1176,7 +1151,7 @@ fn usage(verbose: bool, include_unstable_options: bool) {
     for option in groups.iter().filter(|x| include_unstable_options || x.is_stable()) {
         (option.apply)(&mut options);
     }
-    let message = "Usage: rustc [OPTIONS] INPUT".to_string();
+    let message = "Usage: rustc [OPTIONS] INPUT";
     let nightly_help = if nightly_options::is_nightly_build() {
         "\n -Z help Print internal options for debugging rustc"
     } else {
@@ -1191,7 +1166,7 @@ fn usage(verbose: bool, include_unstable_options: bool) {
     -C help Print codegen options
     -W help \
     Print 'lint' options and default settings{}{}\n",
-             options.usage(&message),
+             options.usage(message),
              nightly_help,
              verbose_help);
 }
@@ -1273,8 +1248,6 @@ Available lint options:
 
     print_lints(builtin);
 
-
-
     let max_name_len = max("warnings".len(),
                            plugin_groups.iter()
                                .chain(&builtin_groups)
@@ -1407,10 +1380,8 @@ pub fn handle_options(args: &[String]) -> Option<getopts::Matches> {
     for option in config::rustc_optgroups() {
         (option.apply)(&mut options);
     }
-    let matches = match options.parse(args) {
-        Ok(m) => m,
-        Err(f) => early_error(ErrorOutputType::default(), &f.to_string()),
-    };
+    let matches = options.parse(args).unwrap_or_else(|f|
+        early_error(ErrorOutputType::default(), &f.to_string()));
 
     // For all options we just parsed, we check a few aspects:
     //
@@ -1452,6 +1423,7 @@ pub fn handle_options(args: &[String]) -> Option<getopts::Matches> {
     }
 
     let cg_flags = matches.opt_strs("C");
+
     if cg_flags.iter().any(|x| *x == "help") {
         describe_codegen_flags();
         return None;
@@ -1462,7 +1434,7 @@ pub fn handle_options(args: &[String]) -> Option<getopts::Matches> {
                     "the --no-stack-check flag is deprecated and does nothing");
     }
 
-    if cg_flags.contains(&"passes=list".to_string()) {
+    if cg_flags.iter().any(|x| *x == "passes=list") {
         get_codegen_sysroot("llvm")().print_passes();
         return None;
     }
@@ -1500,7 +1472,7 @@ pub fn in_named_rustc_thread<F, R>(name: String, f: F) -> Result<R, Box<dyn Any
     // Temporarily have stack size set to 16MB to deal with nom-using crates failing
     const STACK_SIZE: usize = 16 * 1024 * 1024; // 16MB
 
-    #[cfg(all(unix,not(target_os = "haiku")))]
+    #[cfg(all(unix, not(target_os = "haiku")))]
     let spawn_thread = unsafe {
         // Fetch the current resource limits
         let mut rlim = libc::rlimit {
@@ -1554,7 +1526,7 @@ pub fn in_named_rustc_thread<F, R>(name: String, f: F) -> Result<R, Box<dyn Any
         }
     };
 
-    #[cfg(not(any(windows,unix)))]
+    #[cfg(not(any(windows, unix)))]
     let spawn_thread = true;
 
     // The or condition is added from backward compatibility.
@@ -1632,7 +1604,7 @@ fn extra_compiler_flags() -> Option<(Vec<String>, bool)> {
         }
     }
 
-    if result.len() > 0 {
+    if !result.is_empty() {
         Some((result, excluded_cargo_defaults))
     } else {
         None
@@ -1680,25 +1652,25 @@ pub fn monitor<F: FnOnce() + Send + 'static>(f: F) -> Result<(), CompilationFail
                              errors::Level::Bug);
         }
 
-        let mut xs = vec![
-            "the compiler unexpectedly panicked. this is a bug.".to_string(),
-            format!("we would appreciate a bug report: {}", BUG_REPORT_URL),
+        let mut xs: Vec<Cow<'static, str>> = vec![
+            "the compiler unexpectedly panicked. this is a bug.".into(),
+            format!("we would appreciate a bug report: {}", BUG_REPORT_URL).into(),
             format!("rustc {} running on {}",
                     option_env!("CFG_VERSION").unwrap_or("unknown_version"),
-                    config::host_triple()),
+                    config::host_triple()).into(),
         ];
 
         if let Some((flags, excluded_cargo_defaults)) = extra_compiler_flags() {
-            xs.push(format!("compiler flags: {}", flags.join(" ")));
+            xs.push(format!("compiler flags: {}", flags.join(" ")).into());
 
             if excluded_cargo_defaults {
-                xs.push("some of the compiler flags provided by cargo are hidden".to_string());
+                xs.push("some of the compiler flags provided by cargo are hidden".into());
            }
        }
 
        for note in &xs {
            handler.emit(&MultiSpan::new(),
-                         &note,
+                         note,
                         errors::Level::Note);
        }
 
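This is the "use `Cow<str>` where applicable" item: a `Vec<Cow<'static, str>>` lets the static notes stay borrowed while the `format!` results stay owned, so the `.to_string()` allocations for the literals disappear. A hedged standalone sketch of the same idea (the flag values are made up):

```rust
use std::borrow::Cow;

fn main() {
    let flags = vec!["-C opt-level=3".to_string()];

    // Static notes borrow; dynamic ones allocate. Both fit in one Vec.
    let mut notes: Vec<Cow<'static, str>> = vec![
        "the compiler unexpectedly panicked. this is a bug.".into(),
        format!("compiler flags: {}", flags.join(" ")).into(),
    ];
    notes.push("some of the compiler flags provided by cargo are hidden".into());

    for note in &notes {
        println!("note: {}", note);
    }

    // The string literal stayed borrowed; only the format! result owns its data.
    assert!(matches!(notes[0], Cow::Borrowed(_)));
    assert!(matches!(notes[1], Cow::Owned(_)));
}
```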
@@ -167,10 +167,10 @@ pub fn parse_pretty(sess: &Session,
 impl PpSourceMode {
     /// Constructs a `PrinterSupport` object and passes it to `f`.
     fn call_with_pp_support<'tcx, A, F>(&self,
                                         sess: &'tcx Session,
                                         hir_map: Option<&hir_map::Map<'tcx>>,
                                         f: F)
                                         -> A
         where F: FnOnce(&dyn PrinterSupport) -> A
     {
         match *self {
@@ -198,17 +198,18 @@ impl PpSourceMode {
             _ => panic!("Should use call_with_pp_support_hir"),
         }
     }
-    fn call_with_pp_support_hir<'tcx, A, F>(&self,
-                                            sess: &'tcx Session,
-                                            cstore: &'tcx CStore,
-                                            hir_map: &hir_map::Map<'tcx>,
-                                            analysis: &ty::CrateAnalysis,
-                                            resolutions: &Resolutions,
-                                            arenas: &'tcx AllArenas<'tcx>,
-                                            output_filenames: &OutputFilenames,
-                                            id: &str,
-                                            f: F)
-                                            -> A
+    fn call_with_pp_support_hir<'tcx, A, F>(
+        &self,
+        sess: &'tcx Session,
+        cstore: &'tcx CStore,
+        hir_map: &hir_map::Map<'tcx>,
+        analysis: &ty::CrateAnalysis,
+        resolutions: &Resolutions,
+        arenas: &'tcx AllArenas<'tcx>,
+        output_filenames: &OutputFilenames,
+        id: &str,
+        f: F
+    ) -> A
         where F: FnOnce(&dyn HirPrinterSupport, &hir::Crate) -> A
     {
         match *self {
@@ -855,7 +856,7 @@ fn print_flowgraph<'a, 'tcx, W: Write>(variants: Vec<borrowck_dot::Variant>,
                 break n.body();
             }
             let parent = tcx.hir.get_parent_node(node_id);
-            assert!(node_id != parent);
+            assert_ne!(node_id, parent);
             node_id = parent;
         }
     }
@@ -952,18 +953,17 @@ pub fn print_after_parsing(sess: &Session,
         // Silently ignores an identified node.
         let out: &mut dyn Write = &mut out;
         s.call_with_pp_support(sess, None, move |annotation| {
             debug!("pretty printing source code {:?}", s);
             let sess = annotation.sess();
             pprust::print_crate(sess.source_map(),
                                 &sess.parse_sess,
                                 krate,
                                 src_name,
                                 &mut rdr,
                                 box out,
                                 annotation.pp_ann(),
                                 false)
-        })
-            .unwrap()
+        }).unwrap()
     } else {
         unreachable!();
     };
@@ -23,7 +23,7 @@ pub fn begin(sess: &Session) {
     use std::sync::mpsc::{channel};
     let (tx, rx) = channel();
     if profq_set_chan(sess, tx) {
-        thread::spawn(move||profile_queries_thread(rx));
+        thread::spawn(move || profile_queries_thread(rx));
     }
 }
 
@@ -34,11 +34,12 @@ pub fn begin(sess: &Session) {
 pub fn dump(sess: &Session, path: String) {
     use std::sync::mpsc::{channel};
     let (tx, rx) = channel();
-    let params = ProfQDumpParams{
-        path, ack:tx,
+    let params = ProfQDumpParams {
+        path,
+        ack: tx,
         // FIXME: Add another compiler flag to toggle whether this log
         // is written; false for now
-        dump_profq_msg_log:true,
+        dump_profq_msg_log: true,
     };
     profq_msg(sess, ProfileQueriesMsg::Dump(params));
     let _ = rx.recv().unwrap();
@@ -63,20 +64,20 @@ struct StackFrame {
 }
 
 fn total_duration(traces: &[trace::Rec]) -> Duration {
-    let mut sum : Duration = Duration::new(0,0);
+    let mut sum : Duration = Duration::new(0, 0);
     for t in traces.iter() { sum += t.dur_total; }
     return sum
 }
 
 // profiling thread; retains state (in local variables) and dump traces, upon request.
-fn profile_queries_thread(r:Receiver<ProfileQueriesMsg>) {
+fn profile_queries_thread(r: Receiver<ProfileQueriesMsg>) {
     use self::trace::*;
     use std::fs::File;
     use std::time::{Instant};
 
-    let mut profq_msgs : Vec<ProfileQueriesMsg> = vec![];
-    let mut frame : StackFrame = StackFrame{ parse_st:ParseState::Clear, traces:vec![] };
-    let mut stack : Vec<StackFrame> = vec![];
+    let mut profq_msgs: Vec<ProfileQueriesMsg> = vec![];
+    let mut frame: StackFrame = StackFrame { parse_st: ParseState::Clear, traces: vec![] };
+    let mut stack: Vec<StackFrame> = vec![];
     loop {
         let msg = r.recv();
         if let Err(_recv_err) = msg {
@@ -90,7 +91,7 @@ fn profile_queries_thread(r: Receiver<ProfileQueriesMsg>) {
         match msg {
             ProfileQueriesMsg::Halt => return,
             ProfileQueriesMsg::Dump(params) => {
-                assert!(stack.len() == 0);
+                assert!(stack.is_empty());
                 assert!(frame.parse_st == ParseState::Clear);
                 {
                     // write log of all messages
@@ -109,17 +110,14 @@ fn profile_queries_thread(r: Receiver<ProfileQueriesMsg>) {
                 let counts_path = format!("{}.counts.txt", params.path);
                 let mut counts_file = File::create(&counts_path).unwrap();
 
-                write!(html_file, "<html>\n").unwrap();
-                write!(html_file,
-                       "<head>\n<link rel=\"stylesheet\" type=\"text/css\" href=\"{}\">\n",
-                       "profile_queries.css").unwrap();
-                write!(html_file, "<style>\n").unwrap();
+                writeln!(html_file,
+                    "<html>\n<head>\n<link rel=\"stylesheet\" type=\"text/css\" href=\"{}\">",
+                    "profile_queries.css").unwrap();
+                writeln!(html_file, "<style>").unwrap();
                 trace::write_style(&mut html_file);
-                write!(html_file, "</style>\n").unwrap();
-                write!(html_file, "</head>\n").unwrap();
-                write!(html_file, "<body>\n").unwrap();
+                writeln!(html_file, "</style>\n</head>\n<body>").unwrap();
                 trace::write_traces(&mut html_file, &mut counts_file, &frame.traces);
-                write!(html_file, "</body>\n</html>\n").unwrap();
+                writeln!(html_file, "</body>\n</html>").unwrap();
 
                 let ack_path = format!("{}.ack", params.path);
                 let ack_file = File::create(&ack_path).unwrap();
@@ -141,10 +139,10 @@ fn profile_queries_thread(r: Receiver<ProfileQueriesMsg>) {
 
             // Parse State: Clear
             (ParseState::Clear,
-             ProfileQueriesMsg::QueryBegin(span,querymsg)) => {
+             ProfileQueriesMsg::QueryBegin(span, querymsg)) => {
                 let start = Instant::now();
                 frame.parse_st = ParseState::HaveQuery
-                    (Query{span:span, msg:querymsg}, start)
+                    (Query { span, msg: querymsg }, start)
             },
             (ParseState::Clear,
              ProfileQueriesMsg::CacheHit) => {
@@ -287,8 +285,6 @@ fn profile_queries_thread(r: Receiver<ProfileQueriesMsg>) {
                 frame = StackFrame{parse_st:ParseState::Clear, traces:vec![]};
             },
 
-            //
-            //
             // Parse errors:
 
             (ParseState::HaveQuery(q,_),
@@ -310,7 +306,6 @@ fn profile_queries_thread(r: Receiver<ProfileQueriesMsg>) {
                 unreachable!()
             },
         }
-
     }
 }
@@ -43,18 +43,18 @@ pub struct QueryMetric {
     pub dur_total: Duration,
 }
 
+fn cons(s: &str) -> String {
+    let first = s.split(|d| d == '(' || d == '{').next();
+    assert!(first.is_some() && first != Some(""));
+    first.unwrap().to_owned()
+}
+
 pub fn cons_of_query_msg(q: &trace::Query) -> String {
-    let s = format!("{:?}", q.msg);
-    let cons: Vec<&str> = s.split(|d| d == '(' || d == '{').collect();
-    assert!(cons.len() > 0 && cons[0] != "");
-    cons[0].to_string()
+    cons(&format!("{:?}", q.msg))
 }
 
 pub fn cons_of_key(k: &DepNode) -> String {
-    let s = format!("{:?}", k);
-    let cons: Vec<&str> = s.split(|d| d == '(' || d == '{').collect();
-    assert!(cons.len() > 0 && cons[0] != "");
-    cons[0].to_string()
+    cons(&format!("{:?}", k))
 }
 
 // First return value is text; second return value is a CSS class
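This is the "simplify `profile::trace::cons*`" item: both helpers now delegate to one `cons` function that keeps everything before the first `(` or `{` of a `Debug` rendering, and `split` with a `next()` call avoids collecting the pieces into a `Vec`. A sketch of the same helper against an invented `Debug` type (`DepNodeDemo` is a stand-in, not the compiler's `DepNode`):

```rust
#[derive(Debug)]
enum DepNodeDemo {
    Hir(u32),
    TypeckTables { id: u32 },
}

// Same shape as the factored-out helper: keep the text before '(' or '{'.
fn cons(s: &str) -> String {
    let first = s.split(|d| d == '(' || d == '{').next();
    assert!(first.is_some() && first != Some(""));
    first.unwrap().to_owned()
}

fn main() {
    assert_eq!(cons(&format!("{:?}", DepNodeDemo::Hir(1))), "Hir");
    // Note the trailing space: Debug prints struct variants as "TypeckTables { id: 2 }".
    assert_eq!(
        cons(&format!("{:?}", DepNodeDemo::TypeckTables { id: 2 })),
        "TypeckTables "
    );
}
```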
@@ -84,35 +84,33 @@ pub fn html_of_effect(eff: &Effect) -> (String, String) {
 // First return value is text; second return value is a CSS class
 fn html_of_duration(_start: &Instant, dur: &Duration) -> (String, String) {
     use rustc::util::common::duration_to_secs_str;
-    (duration_to_secs_str(dur.clone()),
-     String::new()
-    )
+    (duration_to_secs_str(dur.clone()), String::new())
 }
 
-fn html_of_fraction(frac: f64) -> (String, String) {
+fn html_of_fraction(frac: f64) -> (String, &'static str) {
     let css = {
-        if frac > 0.50 { "frac-50".to_string() }
-        else if frac > 0.40 { "frac-40".to_string() }
-        else if frac > 0.30 { "frac-30".to_string() }
-        else if frac > 0.20 { "frac-20".to_string() }
-        else if frac > 0.10 { "frac-10".to_string() }
-        else if frac > 0.05 { "frac-05".to_string() }
-        else if frac > 0.02 { "frac-02".to_string() }
-        else if frac > 0.01 { "frac-01".to_string() }
-        else if frac > 0.001 { "frac-001".to_string() }
-        else { "frac-0".to_string() }
+        if frac > 0.50 { "frac-50" }
+        else if frac > 0.40 { "frac-40" }
+        else if frac > 0.30 { "frac-30" }
+        else if frac > 0.20 { "frac-20" }
+        else if frac > 0.10 { "frac-10" }
+        else if frac > 0.05 { "frac-05" }
+        else if frac > 0.02 { "frac-02" }
+        else if frac > 0.01 { "frac-01" }
+        else if frac > 0.001 { "frac-001" }
+        else { "frac-0" }
     };
     let percent = frac * 100.0;
-    if percent > 0.1 { (format!("{:.1}%", percent), css) }
-    else { ("< 0.1%".to_string(), css) }
+
+    if percent > 0.1 {
+        (format!("{:.1}%", percent), css)
+    } else {
+        ("< 0.1%".to_string(), css)
+    }
 }
 
 fn total_duration(traces: &[Rec]) -> Duration {
-    let mut sum : Duration = Duration::new(0,0);
-    for t in traces.iter() {
-        sum += t.dur_total;
-    }
-    return sum
+    Duration::new(0, 0) + traces.iter().map(|t| t.dur_total).sum()
 }
 
 fn duration_div(nom: Duration, den: Duration) -> f64 {
@@ -130,64 +128,65 @@ fn write_traces_rec(file: &mut File, traces: &[Rec], total: Duration, depth: usi
         let fraction = duration_div(t.dur_total, total);
         let percent = fraction * 100.0;
         let (frc_text, frc_css_classes) = html_of_fraction(fraction);
-        write!(file, "<div class=\"trace depth-{} extent-{}{} {} {} {}\">\n",
+        writeln!(file, "<div class=\"trace depth-{} extent-{}{} {} {} {}\">",
               depth,
              t.extent.len(),
              /* Heuristic for 'important' CSS class: */
-             if t.extent.len() > 5 || percent >= 1.0 {
-                 " important" }
-             else { "" },
-             eff_css_classes,
-             dur_css_classes,
-             frc_css_classes,
+             if t.extent.len() > 5 || percent >= 1.0 { " important" } else { "" },
+             eff_css_classes,
+             dur_css_classes,
+             frc_css_classes,
        ).unwrap();
-        write!(file, "<div class=\"eff\">{}</div>\n", eff_text).unwrap();
-        write!(file, "<div class=\"dur\">{}</div>\n", dur_text).unwrap();
-        write!(file, "<div class=\"frc\">{}</div>\n", frc_text).unwrap();
+        writeln!(file, "<div class=\"eff\">{}</div>", eff_text).unwrap();
+        writeln!(file, "<div class=\"dur\">{}</div>", dur_text).unwrap();
+        writeln!(file, "<div class=\"frc\">{}</div>", frc_text).unwrap();
        write_traces_rec(file, &t.extent, total, depth + 1);
-        write!(file, "</div>\n").unwrap();
+        writeln!(file, "</div>").unwrap();
    }
 }
 
 fn compute_counts_rec(counts: &mut FxHashMap<String,QueryMetric>, traces: &[Rec]) {
+    counts.reserve(traces.len());
     for t in traces.iter() {
         match t.effect {
             Effect::TimeBegin(ref msg) => {
                 let qm = match counts.get(msg) {
-                    Some(_qm) => { panic!("TimeBegin with non-unique, repeat message") }
-                    None => QueryMetric{
+                    Some(_qm) => panic!("TimeBegin with non-unique, repeat message"),
+                    None => QueryMetric {
                        count: 1,
                        dur_self: t.dur_self,
                        dur_total: t.dur_total,
-                }};
+                    }
+                };
                counts.insert(msg.clone(), qm);
            },
            Effect::TaskBegin(ref key) => {
                let cons = cons_of_key(key);
                let qm = match counts.get(&cons) {
                    Some(qm) =>
-                        QueryMetric{
+                        QueryMetric {
                            count: qm.count + 1,
                            dur_self: qm.dur_self + t.dur_self,
                            dur_total: qm.dur_total + t.dur_total,
                        },
-                    None => QueryMetric{
+                    None => QueryMetric {
                        count: 1,
                        dur_self: t.dur_self,
                        dur_total: t.dur_total,
-                }};
+                    }
+                };
                counts.insert(cons, qm);
            },
            Effect::QueryBegin(ref qmsg, ref _cc) => {
                let qcons = cons_of_query_msg(qmsg);
                let qm = match counts.get(&qcons) {
                    Some(qm) =>
-                        QueryMetric{
+                        QueryMetric {
                            count: qm.count + 1,
                            dur_total: qm.dur_total + t.dur_total,
                            dur_self: qm.dur_self + t.dur_self
                        },
-                    None => QueryMetric{
+                    None => QueryMetric {
                        count: 1,
                        dur_total: t.dur_total,
                        dur_self: t.dur_self,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn write_counts(count_file: &mut File, counts: &mut FxHashMap<String,QueryMetric>) {
|
pub fn write_counts(count_file: &mut File, counts: &mut FxHashMap<String, QueryMetric>) {
|
||||||
use rustc::util::common::duration_to_secs_str;
|
use rustc::util::common::duration_to_secs_str;
|
||||||
use std::cmp::Reverse;
|
use std::cmp::Reverse;
|
||||||
|
|
||||||
let mut data = counts.iter().map(|(ref cons, ref qm)|
|
let mut data = counts.iter().map(|(ref cons, ref qm)|
|
||||||
(cons.clone(), qm.count.clone(), qm.dur_total.clone(), qm.dur_self.clone())
|
(cons.clone(), qm.count.clone(), qm.dur_total.clone(), qm.dur_self.clone())
|
||||||
).collect::<Vec<_>>();
|
).collect::<Vec<_>>();
|
||||||
|
|
||||||
data.sort_by_key(|k| Reverse(k.3));
|
data.sort_by_key(|k| Reverse(k.3));
|
||||||
for (cons, count, dur_total, dur_self) in data {
|
for (cons, count, dur_total, dur_self) in data {
|
||||||
write!(count_file, "{}, {}, {}, {}\n",
|
writeln!(count_file, "{}, {}, {}, {}",
|
||||||
cons, count,
|
cons, count,
|
||||||
duration_to_secs_str(dur_total),
|
duration_to_secs_str(dur_total),
|
||||||
duration_to_secs_str(dur_self)
|
duration_to_secs_str(dur_self)
|
||||||
).unwrap();
|
).unwrap();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -223,12 +223,12 @@ pub fn write_traces(html_file: &mut File, counts_file: &mut File, traces: &[Rec]
|
||||||
compute_counts_rec(&mut counts, traces);
|
compute_counts_rec(&mut counts, traces);
|
||||||
write_counts(counts_file, &mut counts);
|
write_counts(counts_file, &mut counts);
|
||||||
|
|
||||||
let total : Duration = total_duration(traces);
|
let total: Duration = total_duration(traces);
|
||||||
write_traces_rec(html_file, traces, total, 0)
|
write_traces_rec(html_file, traces, total, 0)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn write_style(html_file: &mut File) {
|
pub fn write_style(html_file: &mut File) {
|
||||||
write!(html_file,"{}", "
|
write!(html_file, "{}", "
|
||||||
body {
|
body {
|
||||||
font-family: sans-serif;
|
font-family: sans-serif;
|
||||||
background: black;
|
background: black;
|
||||||
|
|