
incr.ThinLTO: Do some cleanup and add some logging.

Michael Woerister 2018-09-03 12:42:27 +02:00
parent d5545751f9
commit 21d05f64aa
2 changed files with 57 additions and 62 deletions

Changed file 1 of 2

@@ -29,7 +29,7 @@ use {ModuleCodegen, ModuleLlvm, ModuleKind};
use libc;
use std::ffi::{CStr, CString};
-use std::fs::File;
+use std::fs::{self, File};
use std::ptr;
use std::slice;
use std::sync::Arc;
@@ -423,16 +423,10 @@ fn thin_lto(cgcx: &CodegenContext,
// because only then it will contain the ThinLTO module summary.
if let Some(ref incr_comp_session_dir) = cgcx.incr_comp_session_dir {
if cgcx.config(module.kind).emit_pre_thin_lto_bc {
-use std::io::Write;
let path = incr_comp_session_dir
.join(pre_lto_bitcode_filename(&module.name));
-let mut file = File::create(&path).unwrap_or_else(|e| {
-panic!("Failed to create pre-lto-bitcode file `{}`: {}",
-path.display(),
-e);
-});
-file.write_all(buffer.data()).unwrap_or_else(|e| {
+fs::write(&path, buffer.data()).unwrap_or_else(|e| {
panic!("Error writing pre-lto-bitcode file `{}`: {}",
path.display(),
e);
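The hunk above swaps the manual `File::create` + `write_all` pair for a single `std::fs::write` call. A minimal standalone sketch of the same pattern (the helper name `write_pre_lto_bitcode` and the `main` driver are made up for illustration; only `fs::write` itself is the API used by the commit):

use std::fs;
use std::path::Path;

// Hypothetical helper: create (or truncate) the file and write the whole
// buffer in one call, which is what `fs::write` does internally.
fn write_pre_lto_bitcode(path: &Path, data: &[u8]) {
    fs::write(path, data).unwrap_or_else(|e| {
        panic!("Error writing pre-lto-bitcode file `{}`: {}", path.display(), e)
    });
}

fn main() {
    // Sample data only: the four magic bytes that start an LLVM bitcode file.
    write_pre_lto_bitcode(Path::new("example.pre-thin-lto.bc"), b"BC\xC0\xDE");
}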
@@ -499,12 +493,22 @@ fn thin_lto(cgcx: &CodegenContext,
write::llvm_err(&diag_handler, "failed to prepare thin LTO context".to_string())
})?;
-let import_map = ThinLTOImports::from_thin_lto_data(data);
-let data = ThinData(data);
info!("thin LTO data created");
timeline.record("data");
+let import_map = if cgcx.incr_comp_session_dir.is_some() {
+ThinLTOImports::from_thin_lto_data(data)
+} else {
+// If we don't compile incrementally, we don't need to load the
+// import data from LLVM.
+assert!(green_modules.is_empty());
+ThinLTOImports::new()
+};
+info!("thin LTO import map loaded");
+timeline.record("import-map-loaded");
+let data = ThinData(data);
// Throw our data in an `Arc` as we'll be sharing it across threads. We
// also put all memory referenced by the C++ data (buffers, ids, etc)
// into the arc as well. After this we'll create a thin module
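The trailing comment above describes the sharing scheme used for the ThinLTO data: wrap it in an `Arc` once and give every worker thread a cheap clone of the handle rather than a copy of the data. A tiny self-contained sketch of that idea, with a plain `Vec<String>` standing in for the real ThinLTO buffers:

use std::sync::Arc;
use std::thread;

fn main() {
    // Stand-in for the ThinLTO data shared across codegen workers.
    let shared = Arc::new(vec!["module_a".to_string(), "module_b".to_string()]);

    let workers: Vec<_> = (0..2)
        .map(|id| {
            // Each worker gets its own reference-counted handle.
            let shared = Arc::clone(&shared);
            thread::spawn(move || println!("worker {} sees {} modules", id, shared.len()))
        })
        .collect();

    for worker in workers {
        worker.join().unwrap();
    }
}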
@@ -519,25 +523,27 @@ fn thin_lto(cgcx: &CodegenContext,
let mut copy_jobs = vec![];
let mut opt_jobs = vec![];
info!("checking which modules can be-reused and which have to be re-optimized.");
for (module_index, module_name) in shared.module_names.iter().enumerate() {
let module_name = module_name_to_str(module_name);
// If the module hasn't changed and none of the modules it imports
// from has changed, we can re-use the post-ThinLTO version of the
// module.
if green_modules.contains_key(module_name) {
-let mut imports_all_green = true;
-for imported_module in import_map.modules_imported_by(module_name) {
-if !green_modules.contains_key(imported_module) {
-imports_all_green = false;
-break
-}
-}
+let imports_all_green = import_map.modules_imported_by(module_name)
+.iter()
+.all(|imported_module| green_modules.contains_key(imported_module));
if imports_all_green {
let work_product = green_modules[module_name].clone();
copy_jobs.push(work_product);
info!(" - {}: re-used", module_name);
continue
}
}
info!(" - {}: re-compiled", module_name);
opt_jobs.push(LtoModuleCodegen::Thin(ThinModule {
shared: shared.clone(),
idx: module_index,
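Taken together, the hunk above decides, per module, whether the cached post-ThinLTO artifact can be copied forward or the module has to be re-optimized: the module itself and every module it imports from must be green. A self-contained sketch of that decision, with plain standard-library maps and strings standing in for `FxHashMap`, the cached work products, and `LtoModuleCodegen` (so this is illustrative only, not the rustc implementation):

use std::collections::HashMap;

fn partition_modules<'a>(
    module_names: &'a [String],
    green_modules: &HashMap<String, String>,    // module name -> cached work product
    import_map: &HashMap<String, Vec<String>>,  // module name -> names it imports from
) -> (Vec<String>, Vec<&'a str>) {
    let mut copy_jobs = vec![];  // reuse the cached post-ThinLTO artifact
    let mut opt_jobs = vec![];   // re-run ThinLTO optimization

    for module_name in module_names {
        let imports = import_map
            .get(module_name)
            .map(|v| &v[..])
            .unwrap_or(&[]);

        // A module is reusable only if it is green itself *and* every module
        // it imports from is green as well.
        let imports_all_green = imports
            .iter()
            .all(|imported| green_modules.contains_key(imported));

        if green_modules.contains_key(module_name) && imports_all_green {
            copy_jobs.push(green_modules[module_name].clone());
        } else {
            opt_jobs.push(module_name.as_str());
        }
    }

    (copy_jobs, opt_jobs)
}

fn main() {
    let names = vec!["module_a".to_string(), "module_b".to_string()];

    let mut green_modules = HashMap::new();
    green_modules.insert("module_a".to_string(), "cached-artifact-a".to_string());

    let mut import_map = HashMap::new();
    import_map.insert("module_a".to_string(), vec!["module_b".to_string()]);

    let (copy_jobs, opt_jobs) = partition_modules(&names, &green_modules, &import_map);

    // module_a is green but imports from module_b, which is not green,
    // so both modules end up being re-optimized.
    assert!(copy_jobs.is_empty());
    assert_eq!(opt_jobs, vec!["module_a", "module_b"]);
}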
@@ -872,7 +878,13 @@ pub struct ThinLTOImports {
}
impl ThinLTOImports {
-pub fn modules_imported_by(&self, llvm_module_name: &str) -> &[String] {
+fn new() -> ThinLTOImports {
+ThinLTOImports {
+imports: FxHashMap(),
+}
+}
+fn modules_imported_by(&self, llvm_module_name: &str) -> &[String] {
self.imports.get(llvm_module_name).map(|v| &v[..]).unwrap_or(&[])
}

Changed file 2 of 2

@@ -1311,36 +1311,31 @@ fn execute_work_item(cgcx: &CodegenContext,
timeline: &mut Timeline)
-> Result<WorkItemResult, FatalError>
{
+let module_config = cgcx.config(work_item.module_kind());
match work_item {
-work_item @ WorkItem::Optimize(_) => {
-execute_optimize_work_item(cgcx, work_item, timeline)
+WorkItem::Optimize(module) => {
+execute_optimize_work_item(cgcx, module, module_config, timeline)
}
-work_item @ WorkItem::CopyPostLtoArtifacts(_) => {
-execute_copy_from_cache_work_item(cgcx, work_item, timeline)
+WorkItem::CopyPostLtoArtifacts(module) => {
+execute_copy_from_cache_work_item(cgcx, module, module_config, timeline)
}
-work_item @ WorkItem::LTO(_) => {
-execute_lto_work_item(cgcx, work_item, timeline)
+WorkItem::LTO(module) => {
+execute_lto_work_item(cgcx, module, module_config, timeline)
}
}
}
fn execute_optimize_work_item(cgcx: &CodegenContext,
-work_item: WorkItem,
+module: ModuleCodegen,
+module_config: &ModuleConfig,
timeline: &mut Timeline)
-> Result<WorkItemResult, FatalError>
{
-let config = cgcx.config(work_item.module_kind());
-let module = if let WorkItem::Optimize(module) = work_item {
-module
-} else {
-bug!("execute_optimize_work_item() called with non-WorkItem::Optimize");
-};
let diag_handler = cgcx.create_diag_handler();
unsafe {
-optimize(cgcx, &diag_handler, &module, config, timeline)?;
+optimize(cgcx, &diag_handler, &module, module_config, timeline)?;
}
let linker_does_lto = cgcx.opts.debugging_opts.cross_lang_lto.enabled();
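The first hunk of this file replaces the `work_item @ WorkItem::...` arms, where each helper re-matched the enum and `bug!`-ed on the wrong kind, with arms that destructure the payload directly and pass it down together with the shared module config. A small standalone sketch of that shape, using simplified stand-in types rather than the rustc definitions:

// Simplified stand-ins for the real work-item payloads and config.
struct ModuleConfig { emit_obj: bool }

enum WorkItem {
    Optimize(String),
    CopyPostLtoArtifacts(String),
    Lto(String),
}

// The enum is matched exactly once; each arm hands its payload straight to the
// matching helper, so no helper ever needs to re-match or bail out on a wrong kind.
fn execute_work_item(item: WorkItem, config: &ModuleConfig) {
    match item {
        WorkItem::Optimize(module) => optimize(&module, config),
        WorkItem::CopyPostLtoArtifacts(module) => copy_cached(&module),
        WorkItem::Lto(module) => run_lto(&module),
    }
}

fn optimize(module: &str, config: &ModuleConfig) {
    println!("optimizing {} (emit_obj = {})", module, config.emit_obj);
}

fn copy_cached(module: &str) {
    println!("copying cached artifacts for {}", module);
}

fn run_lto(module: &str) {
    println!("running LTO on {}", module);
}

fn main() {
    let config = ModuleConfig { emit_obj: true };
    execute_work_item(WorkItem::Optimize("crate_a".to_string()), &config);
    execute_work_item(WorkItem::CopyPostLtoArtifacts("crate_b".to_string()), &config);
    execute_work_item(WorkItem::Lto("crate_c".to_string()), &config);
}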
@@ -1394,25 +1389,18 @@ fn execute_optimize_work_item(cgcx: &CodegenContext,
Ok(WorkItemResult::NeedsLTO(module))
} else {
let module = unsafe {
-codegen(cgcx, &diag_handler, module, config, timeline)?
+codegen(cgcx, &diag_handler, module, module_config, timeline)?
};
Ok(WorkItemResult::Compiled(module))
}
}
fn execute_copy_from_cache_work_item(cgcx: &CodegenContext,
-work_item: WorkItem,
+module: CachedModuleCodegen,
+module_config: &ModuleConfig,
_: &mut Timeline)
-> Result<WorkItemResult, FatalError>
{
-let config = cgcx.config(work_item.module_kind());
-let module = if let WorkItem::CopyPostLtoArtifacts(module) = work_item {
-module
-} else {
-bug!("execute_copy_from_cache_work_item() called with wrong WorkItem kind.")
-};
let incr_comp_session_dir = cgcx.incr_comp_session_dir
.as_ref()
.unwrap();
@@ -1459,9 +1447,9 @@ fn execute_copy_from_cache_work_item(cgcx: &CodegenContext,
}
}
-assert_eq!(object.is_some(), config.emit_obj);
-assert_eq!(bytecode.is_some(), config.emit_bc);
-assert_eq!(bytecode_compressed.is_some(), config.emit_bc_compressed);
+assert_eq!(object.is_some(), module_config.emit_obj);
+assert_eq!(bytecode.is_some(), module_config.emit_bc);
+assert_eq!(bytecode_compressed.is_some(), module_config.emit_bc_compressed);
Ok(WorkItemResult::Compiled(CompiledModule {
name: module.name,
@@ -1473,23 +1461,18 @@ fn execute_copy_from_cache_work_item(cgcx: &CodegenContext,
}
fn execute_lto_work_item(cgcx: &CodegenContext,
-work_item: WorkItem,
+mut module: lto::LtoModuleCodegen,
+module_config: &ModuleConfig,
timeline: &mut Timeline)
-> Result<WorkItemResult, FatalError>
{
-let config = cgcx.config(work_item.module_kind());
-if let WorkItem::LTO(mut lto) = work_item {
let diag_handler = cgcx.create_diag_handler();
unsafe {
-let module = lto.optimize(cgcx, timeline)?;
-let module = codegen(cgcx, &diag_handler, module, config, timeline)?;
+let module = module.optimize(cgcx, timeline)?;
+let module = codegen(cgcx, &diag_handler, module, module_config, timeline)?;
Ok(WorkItemResult::Compiled(module))
}
-} else {
-bug!("execute_lto_work_item() called with wrong WorkItem kind.")
-}
}
enum Message {