Support local ThinLTO with incremental compilation.

parent 72c1993b8e
commit 64a738d8ce

10 changed files with 634 additions and 266 deletions
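In short: the patch teaches rustc's incremental compilation to cooperate with the "local" ThinLTO that is enabled automatically for optimized builds. Each codegen unit's post-optimization, pre-ThinLTO bitcode is saved into the incremental cache as a new kind of work product; the ThinLTO import map is persisted in the session directory and merged across sessions; and a cached module can now be reused at several levels: its final post-LTO artifacts can be copied straight to the output, its pre-LTO bitcode can be reloaded and run through ThinLTO again, or it can be loaded "import only" so other modules can still import from it. Explicitly requested LTO (`-C lto=...`) remains rejected in combination with `-C incremental`.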
@@ -878,11 +878,12 @@ pub struct WorkProduct {
     pub saved_files: Vec<(WorkProductFileKind, String)>,
 }
 
-#[derive(Clone, Copy, Debug, RustcEncodable, RustcDecodable)]
+#[derive(Clone, Copy, Debug, RustcEncodable, RustcDecodable, PartialEq)]
 pub enum WorkProductFileKind {
     Object,
     Bytecode,
     BytecodeCompressed,
+    PreThinLtoBytecode,
 }
 
 pub(super) struct CurrentDepGraph {
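The new `PartialEq` derive is what lets later parts of this patch compare file kinds directly instead of pattern-matching. A minimal sketch of the kind of lookup this enables (the helper name here is illustrative; the patch's real version is `pre_lto_bitcode_filename`, further down in `write.rs`):

    // Sketch only: find the saved pre-ThinLTO bitcode among a work product's
    // files. The `kind == ...` comparison needs `WorkProductFileKind: PartialEq`.
    fn find_pre_thin_lto_file(saved_files: &[(WorkProductFileKind, String)])
                              -> Option<&String> {
        saved_files.iter()
                   .find(|&&(kind, _)| kind == WorkProductFileKind::PreThinLtoBytecode)
                   .map(|&(_, ref filename)| filename)
    }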
@@ -68,7 +68,7 @@ pub enum OptLevel {
     SizeMin, // -Oz
 }
 
-#[derive(Clone, Copy, PartialEq, Hash)]
+#[derive(Clone, Copy, PartialEq, Hash, Debug)]
 pub enum Lto {
     /// Don't do any LTO whatsoever
     No,
@@ -580,11 +580,6 @@ impl Session {
             return config::Lto::No;
         }
 
-        // Right now ThinLTO isn't compatible with incremental compilation.
-        if self.opts.incremental.is_some() {
-            return config::Lto::No;
-        }
-
         // Now we're in "defaults" territory. By default we enable ThinLTO for
         // optimized compiles (anything greater than O0).
         match self.opts.optimize {
@@ -1177,8 +1172,18 @@ pub fn build_session_(
 // commandline argument, you can do so here.
 fn validate_commandline_args_with_session_available(sess: &Session) {
 
-    if sess.lto() != Lto::No && sess.opts.incremental.is_some() {
-        sess.err("can't perform LTO when compiling incrementally");
+    if sess.opts.incremental.is_some() {
+        match sess.lto() {
+            Lto::Yes |
+            Lto::Thin |
+            Lto::Fat => {
+                sess.err("can't perform LTO when compiling incrementally");
+            }
+            Lto::ThinLocal |
+            Lto::No => {
+                // This is fine
+            }
+        }
     }
 
     // Since we don't know if code in an rlib will be linked to statically or
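Note how the two session-level changes divide the work: `Session::lto()` no longer forces `Lto::No` for incremental builds, while the validation above still rejects every explicitly requested LTO mode (`Yes`, `Thin`, `Fat`) together with `-C incremental`. Only `Lto::ThinLocal`, the ThinLTO that rustc picks on its own for optimized multi-CGU compiles, may now coexist with incremental compilation, and that is precisely the combination the rest of the patch supports.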
@@ -14,6 +14,7 @@ cc = "1.0.1"
 num_cpus = "1.0"
 rustc-demangle = "0.1.4"
 rustc_llvm = { path = "../librustc_llvm" }
+memmap = "0.6"
 
 [features]
 # This is used to convince Cargo to separately cache builds of `rustc_codegen_llvm`
@@ -16,13 +16,14 @@ use errors::{FatalError, Handler};
 use llvm::archive_ro::ArchiveRO;
 use llvm::{True, False};
 use llvm;
+use memmap;
 use rustc::hir::def_id::LOCAL_CRATE;
 use rustc::middle::exported_symbols::SymbolExportLevel;
 use rustc::session::config::{self, Lto};
 use rustc::util::common::time_ext;
-use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::fx::{FxHashMap, FxHashSet};
 use time_graph::Timeline;
-use {ModuleCodegen, ModuleLlvm, ModuleKind, ModuleSource};
+use {ModuleCodegen, ModuleLlvm, ModuleKind};
 
 use libc;
 
@@ -82,8 +83,8 @@ impl LtoModuleCodegen {
                 let module = module.take().unwrap();
                 {
                     let config = cgcx.config(module.kind);
-                    let llmod = module.llvm().unwrap().llmod();
-                    let tm = &*module.llvm().unwrap().tm;
+                    let llmod = module.module_llvm.llmod();
+                    let tm = &*module.module_llvm.tm;
                     run_pass_manager(cgcx, tm, llmod, config, false);
                     timeline.record("fat-done");
                 }
@@ -106,6 +107,7 @@ impl LtoModuleCodegen {
 
 pub(crate) fn run(cgcx: &CodegenContext,
                   modules: Vec<ModuleCodegen>,
+                  import_only_modules: Vec<(SerializedModule, CString)>,
                   timeline: &mut Timeline)
     -> Result<Vec<LtoModuleCodegen>, FatalError>
 {
@@ -194,11 +196,19 @@ pub(crate) fn run(cgcx: &CodegenContext,
         }
     }
 
-    let arr = symbol_white_list.iter().map(|c| c.as_ptr()).collect::<Vec<_>>();
+    let symbol_white_list = symbol_white_list.iter()
+        .map(|c| c.as_ptr())
+        .collect::<Vec<_>>();
     match cgcx.lto {
         Lto::Yes | // `-C lto` == fat LTO by default
         Lto::Fat => {
-            fat_lto(cgcx, &diag_handler, modules, upstream_modules, &arr, timeline)
+            assert!(import_only_modules.is_empty());
+            fat_lto(cgcx,
+                    &diag_handler,
+                    modules,
+                    upstream_modules,
+                    &symbol_white_list,
+                    timeline)
         }
         Lto::Thin |
         Lto::ThinLocal => {
@@ -206,7 +216,13 @@ pub(crate) fn run(cgcx: &CodegenContext,
                 unreachable!("We should never reach this case if the LTO step \
                              is deferred to the linker");
             }
-            thin_lto(cgcx, &diag_handler, modules, upstream_modules, &arr, timeline)
+            thin_lto(cgcx,
+                     &diag_handler,
+                     modules,
+                     upstream_modules,
+                     import_only_modules,
+                     &symbol_white_list,
+                     timeline)
         }
         Lto::No => unreachable!(),
     }
@@ -236,7 +252,7 @@ fn fat_lto(cgcx: &CodegenContext,
         .filter(|&(_, module)| module.kind == ModuleKind::Regular)
         .map(|(i, module)| {
             let cost = unsafe {
-                llvm::LLVMRustModuleCost(module.llvm().unwrap().llmod())
+                llvm::LLVMRustModuleCost(module.module_llvm.llmod())
             };
             (cost, i)
         })
@@ -246,7 +262,7 @@ fn fat_lto(cgcx: &CodegenContext,
     let mut serialized_bitcode = Vec::new();
     {
         let (llcx, llmod) = {
-            let llvm = module.llvm().expect("can't lto pre-codegened modules");
+            let llvm = &module.module_llvm;
             (&llvm.llcx, llvm.llmod())
         };
         info!("using {:?} as a base module", module.name);
@@ -262,8 +278,7 @@ fn fat_lto(cgcx: &CodegenContext,
     // way we know of to do that is to serialize them to a string and them parse
     // them later. Not great but hey, that's why it's "fat" LTO, right?
     for module in modules {
-        let llvm = module.llvm().expect("can't lto pre-codegened modules");
-        let buffer = ModuleBuffer::new(llvm.llmod());
+        let buffer = ModuleBuffer::new(module.module_llvm.llmod());
         let llmod_id = CString::new(&module.name[..]).unwrap();
         serialized_modules.push((SerializedModule::Local(buffer), llmod_id));
     }
@@ -373,6 +388,7 @@ fn thin_lto(cgcx: &CodegenContext,
             diag_handler: &Handler,
             modules: Vec<ModuleCodegen>,
             serialized_modules: Vec<(SerializedModule, CString)>,
+            import_only_modules: Vec<(SerializedModule, CString)>,
             symbol_white_list: &[*const libc::c_char],
             timeline: &mut Timeline)
     -> Result<Vec<LtoModuleCodegen>, FatalError>
@@ -393,9 +409,8 @@ fn thin_lto(cgcx: &CodegenContext,
         // analysis!
         for (i, module) in modules.iter().enumerate() {
             info!("local module: {} - {}", i, module.name);
-            let llvm = module.llvm().expect("can't lto precodegened module");
             let name = CString::new(module.name.clone()).unwrap();
-            let buffer = ThinBuffer::new(llvm.llmod());
+            let buffer = ThinBuffer::new(module.module_llvm.llmod());
             thin_modules.push(llvm::ThinLTOModule {
                 identifier: name.as_ptr(),
                 data: buffer.data().as_ptr(),
@@ -434,6 +449,22 @@ fn thin_lto(cgcx: &CodegenContext,
             module_names.push(name);
         }
 
+        // All the modules collected up to this point we actually want to
+        // optimize. The `import_only_modules` below need to be in the list of
+        // available modules but we don't need to run optimizations for them
+        // since we already have their optimized version cached.
+        let modules_to_optimize = module_names.len();
+        for (module, name) in import_only_modules {
+            info!("foreign module {:?}", name);
+            thin_modules.push(llvm::ThinLTOModule {
+                identifier: name.as_ptr(),
+                data: module.data().as_ptr(),
+                len: module.data().len(),
+            });
+            serialized.push(module);
+            module_names.push(name);
+        }
+
         // Delegate to the C++ bindings to create some data here. Once this is a
         // tried-and-true interface we may wish to try to upstream some of this
         // to LLVM itself, right now we reimplement a lot of what they do
@@ -450,7 +481,21 @@ fn thin_lto(cgcx: &CodegenContext,
         // Save the ThinLTO import information for incremental compilation.
         if let Some(ref incr_comp_session_dir) = cgcx.incr_comp_session_dir {
             let path = incr_comp_session_dir.join(THIN_LTO_IMPORTS_INCR_COMP_FILE_NAME);
-            let imports = ThinLTOImports::from_thin_lto_data(data);
+
+            // The import information from the current compilation session. It
+            // does not contain info about modules that have been loaded from
+            // the cache instead of having been recompiled...
+            let current_imports = ThinLTOImports::from_thin_lto_data(data);
+
+            // ... so we load this additional information from the previous
+            // cache file if necessary.
+            let imports = if path.exists() {
+                let prev_imports = ThinLTOImports::load_from_file(&path).unwrap();
+                prev_imports.update(current_imports, &module_names)
+            } else {
+                current_imports
+            };
+
             if let Err(err) = imports.save_to_file(&path) {
                 let msg = format!("Error while writing ThinLTO import data: {}",
                                   err);
@@ -472,7 +517,7 @@ fn thin_lto(cgcx: &CodegenContext,
             serialized_modules: serialized,
             module_names,
         });
-        Ok((0..shared.module_names.len()).map(|i| {
+        Ok((0..modules_to_optimize).map(|i| {
             LtoModuleCodegen::Thin(ThinModule {
                 shared: shared.clone(),
                 idx: i,
@@ -546,6 +591,7 @@ fn run_pass_manager(cgcx: &CodegenContext,
 pub enum SerializedModule {
     Local(ModuleBuffer),
     FromRlib(Vec<u8>),
+    FromUncompressedFile(memmap::Mmap, File),
 }
 
 impl SerializedModule {
@@ -553,6 +599,7 @@ impl SerializedModule {
         match *self {
             SerializedModule::Local(ref m) => m.data(),
             SerializedModule::FromRlib(ref m) => m,
+            SerializedModule::FromUncompressedFile(ref m, _) => m,
         }
     }
 }
@@ -682,16 +729,16 @@ impl ThinModule {
                 write::llvm_err(&diag_handler, msg)
             })? as *const _;
         let module = ModuleCodegen {
-            source: ModuleSource::Codegened(ModuleLlvm {
+            module_llvm: ModuleLlvm {
                 llmod_raw,
                 llcx,
                 tm,
-            }),
+            },
             name: self.name().to_string(),
             kind: ModuleKind::Regular,
         };
         {
-            let llmod = module.llvm().unwrap().llmod();
+            let llmod = module.module_llvm.llmod();
             cgcx.save_temp_bitcode(&module, "thin-lto-input");
 
             // Before we do much else find the "main" `DICompileUnit` that we'll be
@@ -787,7 +834,7 @@ impl ThinModule {
         // little differently.
         info!("running thin lto passes over {}", module.name);
         let config = cgcx.config(module.kind);
-        run_pass_manager(cgcx, module.llvm().unwrap().tm, llmod, config, true);
+        run_pass_manager(cgcx, module.module_llvm.tm, llmod, config, true);
         cgcx.save_temp_bitcode(&module, "thin-lto-after-pm");
         timeline.record("thin-done");
     }
@@ -809,6 +856,26 @@ impl ThinLTOImports {
         }
     }
 
+    pub fn modules_imported_by(&self, llvm_module_name: &str) -> &[String] {
+        self.imports.get(llvm_module_name).map(|v| &v[..]).unwrap_or(&[])
+    }
+
+    pub fn update(mut self, new: ThinLTOImports, module_names: &[CString]) -> ThinLTOImports {
+        let module_names: FxHashSet<_> = module_names.iter().map(|name| {
+            name.clone().into_string().unwrap()
+        }).collect();
+
+        // Remove all modules that don't exist anymore.
+        self.imports.retain(|k, _| module_names.contains(k));
+
+        // Overwrite old values
+        for (importing_module, imported_modules) in new.imports {
+            self.imports.insert(importing_module, imported_modules);
+        }
+
+        self
+    }
+
     /// Load the ThinLTO import map from ThinLTOData.
     unsafe fn from_thin_lto_data(data: *const llvm::ThinLTOData) -> ThinLTOImports {
         fn module_name_to_str(c_str: &CStr) -> &str {
@@ -832,6 +899,7 @@ impl ThinLTOImports {
             if !map.imports.contains_key(importing_module_name) {
                 map.imports.insert(importing_module_name.to_owned(), vec![]);
             }
+
             map.imports
                 .get_mut(importing_module_name)
                 .unwrap()
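The `update()` method above is the heart of keeping the import map valid across sessions: the map written by the previous session also covers modules that were loaded from cache this time (and therefore produced no fresh import data), so the old map is merged with the new one. A self-contained sketch of the same merge semantics, using plain `std` collections instead of `FxHashMap`/`FxHashSet` (illustrative, not the patch's code):

    use std::collections::{HashMap, HashSet};

    // prev:  import map loaded from the previous session's cache file
    // fresh: import map for the modules actually re-optimized this session
    // alive: names of all modules that still exist in this session
    fn merge(mut prev: HashMap<String, Vec<String>>,
             fresh: HashMap<String, Vec<String>>,
             alive: &HashSet<String>) -> HashMap<String, Vec<String>> {
        prev.retain(|module, _| alive.contains(module)); // drop deleted modules
        prev.extend(fresh);                              // recompiled modules win
        prev
    }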
@@ -10,14 +10,16 @@
 
 use attributes;
 use back::bytecode::{self, RLIB_BYTECODE_EXTENSION};
-use back::lto::{self, ModuleBuffer, ThinBuffer};
+use back::lto::{self, ModuleBuffer, ThinBuffer, SerializedModule};
 use back::link::{self, get_linker, remove};
 use back::command::Command;
 use back::linker::LinkerInfo;
 use back::symbol_export::ExportedSymbols;
 use base;
 use consts;
-use rustc_incremental::{copy_cgu_workproducts_to_incr_comp_cache_dir, in_incr_comp_dir};
+use memmap;
+use rustc_incremental::{copy_cgu_workproducts_to_incr_comp_cache_dir,
+                        in_incr_comp_dir, in_incr_comp_dir_sess};
 use rustc::dep_graph::{WorkProduct, WorkProductId, WorkProductFileKind};
 use rustc::middle::cstore::EncodedMetadata;
 use rustc::session::config::{self, OutputFilenames, OutputType, Passes, Sanitizer, Lto};
@@ -26,7 +28,8 @@ use rustc::util::nodemap::FxHashMap;
 use time_graph::{self, TimeGraph, Timeline};
 use llvm::{self, DiagnosticInfo, PassManager, SMDiagnostic};
 use llvm_util;
-use {CodegenResults, ModuleSource, ModuleCodegen, CompiledModule, ModuleKind};
+use {CodegenResults, ModuleCodegen, CompiledModule, ModuleKind, ModuleLlvm,
+     CachedModuleCodegen};
 use CrateInfo;
 use rustc::hir::def_id::{CrateNum, LOCAL_CRATE};
 use rustc::ty::TyCtxt;
@@ -84,6 +87,8 @@ pub const TLS_MODEL_ARGS : [(&'static str, llvm::ThreadLocalMode); 4] = [
     ("local-exec", llvm::ThreadLocalMode::LocalExec),
 ];
 
+const PRE_THIN_LTO_BC_EXT: &str = "pre-thin-lto.bc";
+
 pub fn llvm_err(handler: &errors::Handler, msg: String) -> FatalError {
     match llvm::last_error() {
         Some(err) => handler.fatal(&format!("{}: {}", msg, err)),
@@ -224,6 +229,7 @@ pub struct ModuleConfig {
 
     // Flags indicating which outputs to produce.
     emit_no_opt_bc: bool,
+    emit_pre_thin_lto_bc: bool,
     emit_bc: bool,
     emit_bc_compressed: bool,
     emit_lto_bc: bool,
@@ -260,6 +266,7 @@ impl ModuleConfig {
             pgo_use: String::new(),
 
             emit_no_opt_bc: false,
+            emit_pre_thin_lto_bc: false,
             emit_bc: false,
             emit_bc_compressed: false,
             emit_lto_bc: false,
@@ -392,7 +399,7 @@ impl CodegenContext {
             let cgu = Some(&module.name[..]);
             let path = self.output_filenames.temp_path_ext(&ext, cgu);
             let cstr = path2cstr(&path);
-            let llmod = module.llvm().unwrap().llmod();
+            let llmod = module.module_llvm.llmod();
             llvm::LLVMWriteBitcodeToFile(llmod, cstr.as_ptr());
         }
     }
@@ -495,13 +502,9 @@ unsafe fn optimize(cgcx: &CodegenContext,
                    timeline: &mut Timeline)
                    -> Result<(), FatalError>
 {
-    let (llmod, llcx, tm) = match module.source {
-        ModuleSource::Codegened(ref llvm) => (llvm.llmod(), &*llvm.llcx, &*llvm.tm),
-        ModuleSource::Preexisting(_) => {
-            bug!("optimize_and_codegen: called with ModuleSource::Preexisting")
-        }
-    };
+    let llmod = module.module_llvm.llmod();
+    let llcx = &*module.module_llvm.llcx;
+    let tm = &*module.module_llvm.tm;
 
     let _handlers = DiagnosticHandlers::new(cgcx, diag_handler, llcx);
 
     let module_name = module.name.clone();
@@ -622,12 +625,20 @@ unsafe fn optimize(cgcx: &CodegenContext,
         // Deallocate managers that we're now done with
         llvm::LLVMDisposePassManager(fpm);
         llvm::LLVMDisposePassManager(mpm);
+
+        if config.emit_pre_thin_lto_bc {
+            let out = cgcx.output_filenames.temp_path_ext(PRE_THIN_LTO_BC_EXT,
+                                                          module_name);
+            let out = path2cstr(&out);
+            llvm::LLVMWriteBitcodeToFile(llmod, out.as_ptr());
+        }
     }
     Ok(())
 }
 
 fn generate_lto_work(cgcx: &CodegenContext,
-                     modules: Vec<ModuleCodegen>)
+                     modules: Vec<ModuleCodegen>,
+                     import_only_modules: Vec<(SerializedModule, CString)>)
     -> Vec<(WorkItem, u64)>
 {
     let mut timeline = cgcx.time_graph.as_ref().map(|tg| {
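With `emit_pre_thin_lto_bc` set, each codegen unit's bitcode is written out right after its initial optimization passes, i.e. in exactly the state ThinLTO would consume, using the new `PRE_THIN_LTO_BC_EXT` extension (so a CGU named `foo` presumably ends up as `foo.pre-thin-lto.bc`; the precise naming is delegated to `OutputFilenames::temp_path_ext`). This is the file that `copy_all_cgu_workproducts_to_incr_comp_cache_dir` below picks up and records as a `PreThinLtoBytecode` work product.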
@@ -635,7 +646,7 @@ fn generate_lto_work(cgcx: &CodegenContext,
             CODEGEN_WORK_PACKAGE_KIND,
             "generate lto")
     }).unwrap_or(Timeline::noop());
-    let lto_modules = lto::run(cgcx, modules, &mut timeline)
+    let lto_modules = lto::run(cgcx, modules, import_only_modules, &mut timeline)
         .unwrap_or_else(|e| e.raise());
 
     lto_modules.into_iter().map(|module| {
@@ -653,12 +664,9 @@ unsafe fn codegen(cgcx: &CodegenContext,
 {
     timeline.record("codegen");
     {
-        let (llmod, llcx, tm) = match module.source {
-            ModuleSource::Codegened(ref llvm) => (llvm.llmod(), &*llvm.llcx, &*llvm.tm),
-            ModuleSource::Preexisting(_) => {
-                bug!("codegen: called with ModuleSource::Preexisting")
-            }
-        };
+        let llmod = module.module_llvm.llmod();
+        let llcx = &*module.module_llvm.llcx;
+        let tm = &*module.module_llvm.tm;
         let module_name = module.name.clone();
         let module_name = Some(&module_name[..]);
         let handlers = DiagnosticHandlers::new(cgcx, diag_handler, llcx);
@@ -912,6 +920,20 @@ fn need_crate_bitcode_for_rlib(sess: &Session) -> bool {
         sess.opts.output_types.contains_key(&OutputType::Exe)
 }
 
+fn need_pre_thin_lto_bitcode_for_incr_comp(sess: &Session) -> bool {
+    if sess.opts.incremental.is_none() {
+        return false
+    }
+
+    match sess.lto() {
+        Lto::Yes |
+        Lto::Fat |
+        Lto::No => false,
+        Lto::Thin |
+        Lto::ThinLocal => true,
+    }
+}
+
 pub fn start_async_codegen(tcx: TyCtxt,
                            time_graph: Option<TimeGraph>,
                            metadata: EncodedMetadata,
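The helper spells out when saving the extra bitcode pays off: only when an incremental session is combined with a thin LTO mode (`Thin` or `ThinLocal`). Under `Lto::No` there is nothing to re-run ThinLTO on, and `Yes`/`Fat` are rejected for incremental builds by the validation change earlier in this patch, so emitting pre-ThinLTO bitcode in those configurations would be wasted work.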
@@ -970,6 +992,7 @@ pub fn start_async_codegen(tcx: TyCtxt,
     // Save all versions of the bytecode if we're saving our temporaries.
     if sess.opts.cg.save_temps {
         modules_config.emit_no_opt_bc = true;
+        modules_config.emit_pre_thin_lto_bc = true;
         modules_config.emit_bc = true;
         modules_config.emit_lto_bc = true;
         metadata_config.emit_bc = true;
@@ -984,6 +1007,9 @@ pub fn start_async_codegen(tcx: TyCtxt,
         allocator_config.emit_bc_compressed = true;
     }
 
+    modules_config.emit_pre_thin_lto_bc =
+        need_pre_thin_lto_bitcode_for_incr_comp(sess);
+
     modules_config.no_integrated_as = tcx.sess.opts.cg.no_integrated_as ||
         tcx.sess.target.target.options.no_integrated_as;
 
@@ -1056,7 +1082,8 @@ pub fn start_async_codegen(tcx: TyCtxt,
 
 fn copy_all_cgu_workproducts_to_incr_comp_cache_dir(
     sess: &Session,
-    compiled_modules: &CompiledModules
+    compiled_modules: &CompiledModules,
+    output_filenames: &OutputFilenames,
 ) -> FxHashMap<WorkProductId, WorkProduct> {
     let mut work_products = FxHashMap::default();
 
@@ -1064,7 +1091,7 @@ fn copy_all_cgu_workproducts_to_incr_comp_cache_dir(
         return work_products;
     }
 
-    for module in compiled_modules.modules.iter() {
+    for module in compiled_modules.modules.iter().filter(|m| m.kind == ModuleKind::Regular) {
         let mut files = vec![];
 
         if let Some(ref path) = module.object {
@@ -1077,6 +1104,13 @@ fn copy_all_cgu_workproducts_to_incr_comp_cache_dir(
             files.push((WorkProductFileKind::BytecodeCompressed, path.clone()));
         }
 
+        let pre_thin_lto_bytecode_path =
+            output_filenames.temp_path_ext(PRE_THIN_LTO_BC_EXT, Some(&module.name));
+
+        if pre_thin_lto_bytecode_path.exists() {
+            files.push((WorkProductFileKind::PreThinLtoBytecode, pre_thin_lto_bytecode_path));
+        }
+
         if let Some((id, product)) =
             copy_cgu_workproducts_to_incr_comp_cache_dir(sess, &module.name, &files) {
             work_products.insert(id, product);
@@ -1236,21 +1270,34 @@ fn produce_final_output_artifacts(sess: &Session,
     // These are used in linking steps and will be cleaned up afterward.
 }
 
-pub(crate) fn dump_incremental_data(codegen_results: &CodegenResults) {
-    println!("[incremental] Re-using {} out of {} modules",
-             codegen_results.modules.iter().filter(|m| m.pre_existing).count(),
-             codegen_results.modules.len());
+pub(crate) fn dump_incremental_data(_codegen_results: &CodegenResults) {
+    // FIXME(mw): This does not work at the moment because the situation has
+    //            become more complicated due to incremental LTO. Now a CGU
+    //            can have more than two caching states.
+    // println!("[incremental] Re-using {} out of {} modules",
+    //          codegen_results.modules.iter().filter(|m| m.pre_existing).count(),
+    //          codegen_results.modules.len());
 }
 
 enum WorkItem {
+    /// Optimize a newly codegened, totally unoptimized module.
     Optimize(ModuleCodegen),
+    /// Copy the post-LTO artifacts from the incremental cache to the output
+    /// directory.
+    CopyPostLtoArtifacts(CachedModuleCodegen),
+    /// Load the pre-LTO version of a module from the incremental cache, so it
+    /// can be run through LTO again.
+    LoadPreLtoModule(CachedModuleCodegen),
+    /// Perform (Thin)LTO on the given module.
     LTO(lto::LtoModuleCodegen),
 }
 
 impl WorkItem {
-    fn kind(&self) -> ModuleKind {
+    fn module_kind(&self) -> ModuleKind {
         match *self {
             WorkItem::Optimize(ref m) => m.kind,
+            WorkItem::CopyPostLtoArtifacts(_) |
+            WorkItem::LoadPreLtoModule(_) |
             WorkItem::LTO(_) => ModuleKind::Regular,
         }
     }
@@ -1258,6 +1305,8 @@ impl WorkItem {
     fn name(&self) -> String {
         match *self {
             WorkItem::Optimize(ref m) => format!("optimize: {}", m.name),
+            WorkItem::LoadPreLtoModule(ref m) => format!("load pre-lto module: {}", m.name),
+            WorkItem::CopyPostLtoArtifacts(ref m) => format!("copy post LTO artifacts: {}", m.name),
             WorkItem::LTO(ref m) => format!("lto: {}", m.name()),
         }
     }
@@ -1273,141 +1322,254 @@ fn execute_work_item(cgcx: &CodegenContext,
                      timeline: &mut Timeline)
     -> Result<WorkItemResult, FatalError>
 {
-    let diag_handler = cgcx.create_diag_handler();
-    let config = cgcx.config(work_item.kind());
-    let module = match work_item {
-        WorkItem::Optimize(module) => module,
-        WorkItem::LTO(mut lto) => {
-            unsafe {
-                let module = lto.optimize(cgcx, timeline)?;
-                let module = codegen(cgcx, &diag_handler, module, config, timeline)?;
-                return Ok(WorkItemResult::Compiled(module))
-            }
-        }
-    };
-    let module_name = module.name.clone();
-
-    let pre_existing = match module.source {
-        ModuleSource::Codegened(_) => None,
-        ModuleSource::Preexisting(ref wp) => Some(wp.clone()),
-    };
-
-    if let Some(wp) = pre_existing {
-        let incr_comp_session_dir = cgcx.incr_comp_session_dir
-                                        .as_ref()
-                                        .unwrap();
-        let name = &module.name;
-        let mut object = None;
-        let mut bytecode = None;
-        let mut bytecode_compressed = None;
-        for (kind, saved_file) in wp.saved_files {
-            let obj_out = match kind {
-                WorkProductFileKind::Object => {
-                    let path = cgcx.output_filenames.temp_path(OutputType::Object, Some(name));
-                    object = Some(path.clone());
-                    path
-                }
-                WorkProductFileKind::Bytecode => {
-                    let path = cgcx.output_filenames.temp_path(OutputType::Bitcode, Some(name));
-                    bytecode = Some(path.clone());
-                    path
-                }
-                WorkProductFileKind::BytecodeCompressed => {
-                    let path = cgcx.output_filenames.temp_path(OutputType::Bitcode, Some(name))
-                        .with_extension(RLIB_BYTECODE_EXTENSION);
-                    bytecode_compressed = Some(path.clone());
-                    path
-                }
-            };
-            let source_file = in_incr_comp_dir(&incr_comp_session_dir,
-                                               &saved_file);
-            debug!("copying pre-existing module `{}` from {:?} to {}",
-                   module.name,
-                   source_file,
-                   obj_out.display());
-            match link_or_copy(&source_file, &obj_out) {
-                Ok(_) => { }
-                Err(err) => {
-                    diag_handler.err(&format!("unable to copy {} to {}: {}",
-                                              source_file.display(),
-                                              obj_out.display(),
-                                              err));
-                }
-            }
-        }
-        assert_eq!(object.is_some(), config.emit_obj);
-        assert_eq!(bytecode.is_some(), config.emit_bc);
-        assert_eq!(bytecode_compressed.is_some(), config.emit_bc_compressed);
-
-        Ok(WorkItemResult::Compiled(CompiledModule {
-            name: module_name,
-            kind: ModuleKind::Regular,
-            pre_existing: true,
-            object,
-            bytecode,
-            bytecode_compressed,
-        }))
-    } else {
-        debug!("llvm-optimizing {:?}", module_name);
-
-        unsafe {
-            optimize(cgcx, &diag_handler, &module, config, timeline)?;
-
-            let linker_does_lto = cgcx.opts.debugging_opts.cross_lang_lto.enabled();
-
-            // After we've done the initial round of optimizations we need to
-            // decide whether to synchronously codegen this module or ship it
-            // back to the coordinator thread for further LTO processing (which
-            // has to wait for all the initial modules to be optimized).
-            //
-            // Here we dispatch based on the `cgcx.lto` and kind of module we're
-            // codegenning...
-            let needs_lto = match cgcx.lto {
-                Lto::No => false,
-
-                // If the linker does LTO, we don't have to do it. Note that we
-                // keep doing full LTO, if it is requested, as not to break the
-                // assumption that the output will be a single module.
-                Lto::Thin | Lto::ThinLocal if linker_does_lto => false,
-
-                // Here we've got a full crate graph LTO requested. We ignore
-                // this, however, if the crate type is only an rlib as there's
-                // no full crate graph to process, that'll happen later.
-                //
-                // This use case currently comes up primarily for targets that
-                // require LTO so the request for LTO is always unconditionally
-                // passed down to the backend, but we don't actually want to do
-                // anything about it yet until we've got a final product.
-                Lto::Yes | Lto::Fat | Lto::Thin => {
-                    cgcx.crate_types.len() != 1 ||
-                        cgcx.crate_types[0] != config::CrateType::Rlib
-                }
-
-                // When we're automatically doing ThinLTO for multi-codegen-unit
-                // builds we don't actually want to LTO the allocator modules if
-                // it shows up. This is due to various linker shenanigans that
-                // we'll encounter later.
-                //
-                // Additionally here's where we also factor in the current LLVM
-                // version. If it doesn't support ThinLTO we skip this.
-                Lto::ThinLocal => {
-                    module.kind != ModuleKind::Allocator &&
-                        llvm::LLVMRustThinLTOAvailable()
-                }
-            };
-
-            // Metadata modules never participate in LTO regardless of the lto
-            // settings.
-            let needs_lto = needs_lto && module.kind != ModuleKind::Metadata;
-
-            if needs_lto {
-                Ok(WorkItemResult::NeedsLTO(module))
-            } else {
-                let module = codegen(cgcx, &diag_handler, module, config, timeline)?;
-                Ok(WorkItemResult::Compiled(module))
-            }
-        }
-    }
+    match work_item {
+        work_item @ WorkItem::Optimize(_) => {
+            execute_optimize_work_item(cgcx, work_item, timeline)
+        }
+        work_item @ WorkItem::LoadPreLtoModule(_) => {
+            execute_load_pre_lto_mod_work_item(cgcx, work_item, timeline)
+        }
+        work_item @ WorkItem::CopyPostLtoArtifacts(_) => {
+            execute_copy_from_cache_work_item(cgcx, work_item, timeline)
+        }
+        work_item @ WorkItem::LTO(_) => {
+            execute_lto_work_item(cgcx, work_item, timeline)
+        }
+    }
+}
+
+fn execute_optimize_work_item(cgcx: &CodegenContext,
+                              work_item: WorkItem,
+                              timeline: &mut Timeline)
+    -> Result<WorkItemResult, FatalError>
+{
+    let config = cgcx.config(work_item.module_kind());
+
+    let module = if let WorkItem::Optimize(module) = work_item {
+        module
+    } else {
+        bug!("execute_optimize_work_item() called with non-WorkItem::Optimize");
+    };
+
+    let diag_handler = cgcx.create_diag_handler();
+
+    unsafe {
+        optimize(cgcx, &diag_handler, &module, config, timeline)?;
+    }
+
+    let linker_does_lto = cgcx.opts.debugging_opts.cross_lang_lto.enabled();
+
+    // After we've done the initial round of optimizations we need to
+    // decide whether to synchronously codegen this module or ship it
+    // back to the coordinator thread for further LTO processing (which
+    // has to wait for all the initial modules to be optimized).
+    //
+    // Here we dispatch based on the `cgcx.lto` and kind of module we're
+    // codegenning...
+    let needs_lto = match cgcx.lto {
+        Lto::No => false,
+
+        // If the linker does LTO, we don't have to do it. Note that we
+        // keep doing full LTO, if it is requested, as not to break the
+        // assumption that the output will be a single module.
+        Lto::Thin | Lto::ThinLocal if linker_does_lto => false,
+
+        // Here we've got a full crate graph LTO requested. We ignore
+        // this, however, if the crate type is only an rlib as there's
+        // no full crate graph to process, that'll happen later.
+        //
+        // This use case currently comes up primarily for targets that
+        // require LTO so the request for LTO is always unconditionally
+        // passed down to the backend, but we don't actually want to do
+        // anything about it yet until we've got a final product.
+        Lto::Yes | Lto::Fat | Lto::Thin => {
+            cgcx.crate_types.len() != 1 ||
+                cgcx.crate_types[0] != config::CrateType::Rlib
+        }
+
+        // When we're automatically doing ThinLTO for multi-codegen-unit
+        // builds we don't actually want to LTO the allocator modules if
+        // it shows up. This is due to various linker shenanigans that
+        // we'll encounter later.
+        //
+        // Additionally here's where we also factor in the current LLVM
+        // version. If it doesn't support ThinLTO we skip this.
+        Lto::ThinLocal => {
+            module.kind != ModuleKind::Allocator &&
+                unsafe { llvm::LLVMRustThinLTOAvailable() }
+        }
+    };
+
+    // Metadata modules never participate in LTO regardless of the lto
+    // settings.
+    let needs_lto = needs_lto && module.kind != ModuleKind::Metadata;
+
+    if needs_lto {
+        Ok(WorkItemResult::NeedsLTO(module))
+    } else {
+        let module = unsafe {
+            codegen(cgcx, &diag_handler, module, config, timeline)?
+        };
+        Ok(WorkItemResult::Compiled(module))
+    }
+}
+
+fn execute_copy_from_cache_work_item(cgcx: &CodegenContext,
+                                     work_item: WorkItem,
+                                     _: &mut Timeline)
+    -> Result<WorkItemResult, FatalError>
+{
+    let config = cgcx.config(work_item.module_kind());
+
+    let module = if let WorkItem::CopyPostLtoArtifacts(module) = work_item {
+        module
+    } else {
+        bug!("execute_copy_from_cache_work_item() called with wrong WorkItem kind.")
+    };
+
+    let incr_comp_session_dir = cgcx.incr_comp_session_dir
+                                    .as_ref()
+                                    .unwrap();
+    let mut object = None;
+    let mut bytecode = None;
+    let mut bytecode_compressed = None;
+    for (kind, saved_file) in &module.source.saved_files {
+        let obj_out = match kind {
+            WorkProductFileKind::Object => {
+                let path = cgcx.output_filenames.temp_path(OutputType::Object,
+                                                           Some(&module.name));
+                object = Some(path.clone());
+                path
+            }
+            WorkProductFileKind::Bytecode => {
+                let path = cgcx.output_filenames.temp_path(OutputType::Bitcode,
+                                                           Some(&module.name));
+                bytecode = Some(path.clone());
+                path
+            }
+            WorkProductFileKind::BytecodeCompressed => {
+                let path = cgcx.output_filenames.temp_path(OutputType::Bitcode,
+                                                           Some(&module.name))
+                    .with_extension(RLIB_BYTECODE_EXTENSION);
+                bytecode_compressed = Some(path.clone());
+                path
+            }
+            WorkProductFileKind::PreThinLtoBytecode => {
+                continue;
+            }
+        };
+        let source_file = in_incr_comp_dir(&incr_comp_session_dir,
+                                           &saved_file);
+        debug!("copying pre-existing module `{}` from {:?} to {}",
+               module.name,
+               source_file,
+               obj_out.display());
+        match link_or_copy(&source_file, &obj_out) {
+            Ok(_) => { }
+            Err(err) => {
+                let diag_handler = cgcx.create_diag_handler();
+                diag_handler.err(&format!("unable to copy {} to {}: {}",
+                                          source_file.display(),
+                                          obj_out.display(),
+                                          err));
+            }
+        }
+    }
+
+    assert_eq!(object.is_some(), config.emit_obj);
+    assert_eq!(bytecode.is_some(), config.emit_bc);
+    assert_eq!(bytecode_compressed.is_some(), config.emit_bc_compressed);
+
+    Ok(WorkItemResult::Compiled(CompiledModule {
+        name: module.name,
+        kind: ModuleKind::Regular,
+        object,
+        bytecode,
+        bytecode_compressed,
+    }))
+}
+
+fn execute_lto_work_item(cgcx: &CodegenContext,
+                         work_item: WorkItem,
+                         timeline: &mut Timeline)
+    -> Result<WorkItemResult, FatalError>
+{
+    let config = cgcx.config(work_item.module_kind());
+
+    if let WorkItem::LTO(mut lto) = work_item {
+        let diag_handler = cgcx.create_diag_handler();
+
+        unsafe {
+            let module = lto.optimize(cgcx, timeline)?;
+            let module = codegen(cgcx, &diag_handler, module, config, timeline)?;
+            Ok(WorkItemResult::Compiled(module))
+        }
+    } else {
+        bug!("execute_lto_work_item() called with wrong WorkItem kind.")
+    }
+}
+
+fn execute_load_pre_lto_mod_work_item(cgcx: &CodegenContext,
+                                      work_item: WorkItem,
+                                      _: &mut Timeline)
+    -> Result<WorkItemResult, FatalError>
+{
+    let module = if let WorkItem::LoadPreLtoModule(module) = work_item {
+        module
+    } else {
+        bug!("execute_load_pre_lto_mod_work_item() called with wrong WorkItem kind.")
+    };
+
+    let work_product = module.source.clone();
+    let incr_comp_session_dir = cgcx.incr_comp_session_dir
+                                    .as_ref()
+                                    .unwrap();
+
+    let filename = pre_lto_bitcode_filename(&work_product);
+    let bc_path = in_incr_comp_dir(&incr_comp_session_dir, &filename);
+
+    let file = fs::File::open(&bc_path).unwrap_or_else(|e| {
+        panic!("failed to open bitcode file `{}`: {}",
+               bc_path.display(),
+               e);
+    });
+
+    let module_llvm = unsafe {
+        let data = ::memmap::Mmap::map(&file).unwrap_or_else(|e| {
+            panic!("failed to create mmap for bitcode file `{}`: {}",
+                   bc_path.display(),
+                   e);
+        });
+
+        let llcx = llvm::LLVMRustContextCreate(cgcx.fewer_names);
+        let mod_name_c = SmallCStr::new(&module.name);
+        let llmod_raw = match llvm::LLVMRustParseBitcodeForThinLTO(
+            llcx,
+            data.as_ptr(),
+            data.len(),
+            mod_name_c.as_ptr(),
+        ) {
+            Some(m) => m as *const _,
+            None => {
+                panic!("failed to parse bitcode for thin LTO module `{}`",
+                       module.name);
+            }
+        };
+
+        let tm = (cgcx.tm_factory)().unwrap();
+
+        ModuleLlvm {
+            llmod_raw,
+            llcx,
+            tm,
+        }
+    };
+
+    Ok(WorkItemResult::NeedsLTO(ModuleCodegen {
+        name: module.name.to_string(),
+        module_llvm,
+        kind: ModuleKind::Regular,
+    }))
 }
 
 enum Message {
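With the refactoring in place there are two distinct fast paths for a cached codegen unit: `CopyPostLtoArtifacts` applies when the CGU and everything it imports from are unchanged, so the final object and bytecode files are simply linked or copied out of the cache; `LoadPreLtoModule` applies when the CGU itself is unchanged but one of its ThinLTO imports was recompiled: its saved pre-ThinLTO bitcode is mapped back into memory, re-parsed into an LLVM module, and returned as `NeedsLTO` so it goes through ThinLTO (and codegen) again.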
@@ -1424,6 +1586,10 @@ enum Message {
         llvm_work_item: WorkItem,
         cost: u64,
     },
+    AddImportOnlyModule {
+        module_data: SerializedModule,
+        module_name: CString,
+    },
     CodegenComplete,
     CodegenItem,
 }
@@ -1703,6 +1869,7 @@ fn start_executing_work(tcx: TyCtxt,
         let mut compiled_metadata_module = None;
         let mut compiled_allocator_module = None;
         let mut needs_lto = Vec::new();
+        let mut lto_import_only_modules = Vec::new();
         let mut started_lto = false;
 
         // This flag tracks whether all items have gone through codegens
@@ -1749,7 +1916,7 @@ fn start_executing_work(tcx: TyCtxt,
                         worker: get_worker_id(&mut free_worker_ids),
                         .. cgcx.clone()
                     };
-                    maybe_start_llvm_timer(cgcx.config(item.kind()),
+                    maybe_start_llvm_timer(cgcx.config(item.module_kind()),
                                            &mut llvm_start_time);
                     main_thread_worker_state = MainThreadWorkerState::LLVMing;
                     spawn_work(cgcx, item);
@@ -1768,7 +1935,9 @@ fn start_executing_work(tcx: TyCtxt,
                     assert!(needs_lto.len() > 0);
                     started_lto = true;
                     let modules = mem::replace(&mut needs_lto, Vec::new());
-                    for (work, cost) in generate_lto_work(&cgcx, modules) {
+                    let import_only_modules =
+                        mem::replace(&mut lto_import_only_modules, Vec::new());
+                    for (work, cost) in generate_lto_work(&cgcx, modules, import_only_modules) {
                         let insertion_index = work_items
                             .binary_search_by_key(&cost, |&(_, cost)| cost)
                             .unwrap_or_else(|e| e);
@@ -1789,7 +1958,7 @@ fn start_executing_work(tcx: TyCtxt,
                         worker: get_worker_id(&mut free_worker_ids),
                         .. cgcx.clone()
                     };
-                    maybe_start_llvm_timer(cgcx.config(item.kind()),
+                    maybe_start_llvm_timer(cgcx.config(item.module_kind()),
                                            &mut llvm_start_time);
                     main_thread_worker_state = MainThreadWorkerState::LLVMing;
                     spawn_work(cgcx, item);
@@ -1820,7 +1989,7 @@ fn start_executing_work(tcx: TyCtxt,
             while work_items.len() > 0 && running < tokens.len() {
                 let (item, _) = work_items.pop().unwrap();
 
-                maybe_start_llvm_timer(cgcx.config(item.kind()),
+                maybe_start_llvm_timer(cgcx.config(item.module_kind()),
                                        &mut llvm_start_time);
 
                 let cgcx = CodegenContext {
@@ -1932,10 +2101,14 @@ fn start_executing_work(tcx: TyCtxt,
                     } else {
                         running -= 1;
                     }
 
                     free_worker_ids.push(worker_id);
                     needs_lto.push(result);
                 }
+                Message::AddImportOnlyModule { module_data, module_name } => {
+                    assert!(!started_lto);
+                    assert!(!codegen_done);
+                    lto_import_only_modules.push((module_data, module_name));
+                }
                 Message::Done { result: Err(()), worker_id: _ } => {
                     shared_emitter.fatal("aborting due to worker thread failure");
                     // Exit the coordinator thread
@@ -2308,9 +2481,10 @@ impl OngoingCodegen {
             time_graph.dump(&format!("{}-timings", self.crate_name));
         }
 
-        let work_products = copy_all_cgu_workproducts_to_incr_comp_cache_dir(sess,
-                                                                             &compiled_modules);
+        let work_products =
+            copy_all_cgu_workproducts_to_incr_comp_cache_dir(sess,
+                                                             &compiled_modules,
+                                                             &self.output_filenames);
         produce_final_output_artifacts(sess,
                                        &compiled_modules,
                                        &self.output_filenames);
@@ -2371,8 +2545,8 @@ impl OngoingCodegen {
 }
 
 pub(crate) fn submit_codegened_module_to_llvm(tcx: TyCtxt,
                                               module: ModuleCodegen,
                                               cost: u64) {
     let llvm_work_item = WorkItem::Optimize(module);
     drop(tcx.tx_to_llvm_workers.lock().send(Box::new(Message::CodegenDone {
         llvm_work_item,
@@ -2380,6 +2554,61 @@ pub(crate) fn submit_codegened_module_to_llvm(tcx: TyCtxt,
     })));
 }
 
+pub(crate) fn submit_post_lto_module_to_llvm(tcx: TyCtxt,
+                                             module: CachedModuleCodegen) {
+    let llvm_work_item = WorkItem::CopyPostLtoArtifacts(module);
+    drop(tcx.tx_to_llvm_workers.lock().send(Box::new(Message::CodegenDone {
+        llvm_work_item,
+        cost: 0,
+    })));
+}
+
+pub(crate) fn submit_pre_lto_module_to_llvm(tcx: TyCtxt,
+                                            module: CachedModuleCodegen) {
+    let llvm_work_item = WorkItem::LoadPreLtoModule(module);
+
+    drop(tcx.tx_to_llvm_workers.lock().send(Box::new(Message::CodegenDone {
+        llvm_work_item,
+        // We don't know the size of the module, but just loading will have smaller
+        // cost than optimizing.
+        cost: 10,
+    })));
+}
+
+pub(crate) fn submit_import_only_module_to_llvm(tcx: TyCtxt,
+                                                module: CachedModuleCodegen) {
+    let filename = pre_lto_bitcode_filename(&module.source);
+    let bc_path = in_incr_comp_dir_sess(tcx.sess, &filename);
+    let file = fs::File::open(&bc_path).unwrap_or_else(|e| {
+        panic!("failed to open bitcode file `{}`: {}", bc_path.display(), e)
+    });
+
+    let mmap = unsafe {
+        memmap::Mmap::map(&file).unwrap_or_else(|e| {
+            panic!("failed to mmap bitcode file `{}`: {}", bc_path.display(), e)
+        })
+    };
+
+    // Schedule the module to be loaded
+    drop(tcx.tx_to_llvm_workers.lock().send(Box::new(Message::AddImportOnlyModule {
+        module_data: SerializedModule::FromUncompressedFile(mmap, file),
+        module_name: CString::new(module.name.clone()).unwrap(),
+    })));
+
+    // Note: We also schedule for the cached files to be copied to the output
+    // directory
+    submit_post_lto_module_to_llvm(tcx, module);
+}
+
+fn pre_lto_bitcode_filename(wp: &WorkProduct) -> String {
+    wp.saved_files
+        .iter()
+        .find(|&&(kind, _)| kind == WorkProductFileKind::PreThinLtoBytecode)
+        .map(|&(_, ref filename)| filename.clone())
+        .unwrap_or_else(|| panic!("Couldn't find pre-thin-lto bytecode for `{}`",
+                                  wp.cgu_name))
+}
+
 fn msvc_imps_needed(tcx: TyCtxt) -> bool {
     // This should never be true (because it's not supported). If it is true,
     // something is wrong with commandline arg validation.
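`submit_import_only_module_to_llvm` covers the remaining reuse level: a module whose own artifacts are fully reusable but that other, re-LTOed modules import from. Its cached pre-ThinLTO bitcode is mmapped (`SerializedModule::FromUncompressedFile` deliberately keeps the `File` alongside the `memmap::Mmap`, so the mapping's backing file stays open) and registered via `Message::AddImportOnlyModule`, making it available to ThinLTO without being re-optimized itself; its post-LTO artifacts are still copied out by the `submit_post_lto_module_to_llvm` call at the end.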
@@ -24,9 +24,9 @@
 //! int) and rec(x=int, y=int, z=int) will have the same llvm::Type.
 
 use super::ModuleLlvm;
-use super::ModuleSource;
 use super::ModuleCodegen;
 use super::ModuleKind;
+use super::CachedModuleCodegen;
 
 use abi;
 use back::lto;
@@ -41,7 +41,6 @@ use rustc::middle::cstore::{EncodedMetadata};
 use rustc::ty::{self, Ty, TyCtxt};
 use rustc::ty::layout::{self, Align, TyLayout, LayoutOf};
 use rustc::ty::query::Providers;
-use rustc::dep_graph::{DepNode, DepConstructor};
 use rustc::middle::cstore::{self, LinkagePreference};
 use rustc::middle::exported_symbols;
 use rustc::util::common::{time, print_time_passes_entry};
@@ -699,6 +698,79 @@ pub fn iter_globals(llmod: &'ll llvm::Module) -> ValueIter<'ll> {
     }
 }

+#[derive(Debug, PartialEq)]
+enum CguReUsable {
+    No,
+    PreThinLto,
+    PostThinLto,
+    PostThinLtoButImportedFrom,
+}
+
+fn determine_cgu_reuse<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+                                 codegen_units: &[Arc<CodegenUnit<'tcx>>])
+                                 -> FxHashMap<InternedString, CguReUsable> {
+    if !tcx.dep_graph.is_fully_enabled() {
+        return codegen_units.iter()
+                            .map(|cgu| (cgu.name().clone(), CguReUsable::No))
+                            .collect();
+    }
+
+    let thin_lto_imports = load_thin_lto_imports(tcx.sess);
+
+    let mut reusable_cgus = FxHashMap();
+    let mut green_cgus = FxHashMap();
+    let mut need_for_importing = FxHashSet();
+
+    for cgu in codegen_units {
+        let work_product_id = &cgu.work_product_id();
+        if tcx.dep_graph.previous_work_product(work_product_id).is_none() {
+            // We don't have anything cached for this CGU. This can happen
+            // if the CGU did not exist in the previous session.
+            reusable_cgus.insert(cgu.name().clone(), CguReUsable::No);
+            continue
+        };
+        // Try to mark the CGU as green
+        let dep_node = cgu.codegen_dep_node(tcx);
+        assert!(!tcx.dep_graph.dep_node_exists(&dep_node),
+                "CompileCodegenUnit dep-node for CGU `{}` already exists before marking.",
+                cgu.name());
+
+        if tcx.dep_graph.try_mark_green(tcx, &dep_node).is_some() {
+            // We can re-use either the pre- or the post-thinlto state
+            green_cgus.insert(cgu.name().to_string(), cgu);
+        } else {
+            // We definitely cannot re-use this CGU
+            reusable_cgus.insert(cgu.name().clone(), CguReUsable::No);

+            let imported_cgus = thin_lto_imports.modules_imported_by(&cgu.name().as_str());
+            need_for_importing.extend(imported_cgus.iter().cloned());
+        }
+    }
+
+    // Now we know all CGUs that have not changed themselves. Next we need to
+    // check if anything they imported via ThinLTO has changed.
+    for (cgu_name, cgu) in &green_cgus {
+        let imported_cgus = thin_lto_imports.modules_imported_by(cgu_name);
+        let all_imports_green = imported_cgus.iter().all(|imported_cgu| {
+            green_cgus.contains_key(&imported_cgu[..])
+        });
+        if all_imports_green {
+            reusable_cgus.insert(cgu.name().clone(), CguReUsable::PostThinLto);
+        } else {
+            reusable_cgus.insert(cgu.name().clone(), CguReUsable::PreThinLto);
+            need_for_importing.extend(imported_cgus.iter().cloned());
+        }
+    }
+
+    for (name, state) in reusable_cgus.iter_mut() {
+        if *state == CguReUsable::PostThinLto && need_for_importing.contains(&name.as_str()[..]) {
+            *state = CguReUsable::PostThinLtoButImportedFrom;
+        }
+    }
+
+    reusable_cgus
+}
+
 pub fn codegen_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                rx: mpsc::Receiver<Box<dyn Any + Send>>)
                                -> OngoingCodegen {
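
A compressed model of what `determine_cgu_reuse` computes, with plain std types standing in for the dep-graph and import-map queries (the names `green`, `imports`, and `classify` are assumptions for illustration): a CGU whose own dep-node is green can be re-used post-ThinLTO only if every module it imported from in the previous ThinLTO run is green too, and a green CGU that a dirty or pre-ThinLTO neighbor imports from must keep its pre-ThinLTO bytecode available.

    use std::collections::{HashMap, HashSet};

    #[derive(Debug, PartialEq)]
    enum Reuse { No, PreThinLto, PostThinLto, PostThinLtoButImportedFrom }

    fn classify(green: &HashSet<String>,
                imports: &HashMap<String, Vec<String>>,
                all_cgus: &[String]) -> HashMap<String, Reuse> {
        let mut out = HashMap::new();
        let mut needed_for_import = HashSet::new();
        for cgu in all_cgus {
            let imported = imports.get(cgu).map(|v| &v[..]).unwrap_or(&[]);
            if !green.contains(cgu) {
                // Dirty CGU: recompile, and keep its imports importable.
                out.insert(cgu.clone(), Reuse::No);
                needed_for_import.extend(imported.iter().cloned());
            } else if imported.iter().all(|i| green.contains(i)) {
                // Green, and everything it imported is green too.
                out.insert(cgu.clone(), Reuse::PostThinLto);
            } else {
                // Green itself, but a ThinLTO input changed: redo ThinLTO.
                out.insert(cgu.clone(), Reuse::PreThinLto);
                needed_for_import.extend(imported.iter().cloned());
            }
        }
        // A post-ThinLTO CGU that others still import from must also be
        // reloadable as an import-only module.
        for (name, state) in out.iter_mut() {
            if *state == Reuse::PostThinLto && needed_for_import.contains(name) {
                *state = Reuse::PostThinLtoButImportedFrom;
            }
        }
        out
    }
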
@@ -735,7 +807,7 @@ pub fn codegen_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,

     let metadata_module = ModuleCodegen {
         name: metadata_cgu_name,
-        source: ModuleSource::Codegened(metadata_llvm_module),
+        module_llvm: metadata_llvm_module,
         kind: ModuleKind::Metadata,
     };

@@ -824,7 +896,7 @@ pub fn codegen_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,

         Some(ModuleCodegen {
             name: llmod_id,
-            source: ModuleSource::Codegened(modules),
+            module_llvm: modules,
             kind: ModuleKind::Allocator,
         })
     } else {
@@ -848,52 +920,53 @@ pub fn codegen_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     let mut total_codegen_time = Duration::new(0, 0);
     let mut all_stats = Stats::default();

+    let cgu_reuse = determine_cgu_reuse(tcx, &codegen_units);
+
     for cgu in codegen_units.into_iter() {
         ongoing_codegen.wait_for_signal_to_codegen_item();
         ongoing_codegen.check_for_errors(tcx.sess);

-        // First, if incremental compilation is enabled, we try to re-use the
-        // codegen unit from the cache.
-        if tcx.dep_graph.is_fully_enabled() {
-            let cgu_id = cgu.work_product_id();
-
-            // Check whether there is a previous work-product we can
-            // re-use. Not only must the file exist, and the inputs not
-            // be dirty, but the hash of the symbols we will generate must
-            // be the same.
-            if let Some(buf) = tcx.dep_graph.previous_work_product(&cgu_id) {
-                let dep_node = &DepNode::new(tcx,
-                    DepConstructor::CompileCodegenUnit(cgu.name().clone()));
-
-                // We try to mark the DepNode::CompileCodegenUnit green. If we
-                // succeed it means that none of the dependencies has changed
-                // and we can safely re-use.
-                if let Some(dep_node_index) = tcx.dep_graph.try_mark_green(tcx, dep_node) {
-                    let module = ModuleCodegen {
-                        name: cgu.name().to_string(),
-                        source: ModuleSource::Preexisting(buf),
-                        kind: ModuleKind::Regular,
-                    };
-                    tcx.dep_graph.mark_loaded_from_cache(dep_node_index, true);
-                    write::submit_codegened_module_to_llvm(tcx, module, 0);
-                    // Continue to next cgu, this one is done.
-                    continue
-                }
-            } else {
-                // This can happen if files were deleted from the cache
-                // directory for some reason. We just re-compile then.
-            }
-        }
-
-        let _timing_guard = time_graph.as_ref().map(|time_graph| {
-            time_graph.start(write::CODEGEN_WORKER_TIMELINE,
-                             write::CODEGEN_WORK_PACKAGE_KIND,
-                             &format!("codegen {}", cgu.name()))
-        });
-        let start_time = Instant::now();
-        all_stats.extend(compile_codegen_unit(tcx, *cgu.name()));
-        total_codegen_time += start_time.elapsed();
-        ongoing_codegen.check_for_errors(tcx.sess);
+        let loaded_from_cache = match cgu_reuse[cgu.name()] {
+            CguReUsable::No => {
+                let _timing_guard = time_graph.as_ref().map(|time_graph| {
+                    time_graph.start(write::CODEGEN_WORKER_TIMELINE,
+                                     write::CODEGEN_WORK_PACKAGE_KIND,
+                                     &format!("codegen {}", cgu.name()))
+                });
+                let start_time = Instant::now();
+                let stats = compile_codegen_unit(tcx, *cgu.name());
+                all_stats.extend(stats);
+                total_codegen_time += start_time.elapsed();
+                false
+            }
+            CguReUsable::PreThinLto => {
+                write::submit_pre_lto_module_to_llvm(tcx, CachedModuleCodegen {
+                    name: cgu.name().to_string(),
+                    source: cgu.work_product(tcx),
+                });
+                true
+            }
+            CguReUsable::PostThinLtoButImportedFrom => {
+                write::submit_import_only_module_to_llvm(tcx, CachedModuleCodegen {
+                    name: cgu.name().to_string(),
+                    source: cgu.work_product(tcx),
+                });
+                true
+            }
+            CguReUsable::PostThinLto => {
+                write::submit_post_lto_module_to_llvm(tcx, CachedModuleCodegen {
+                    name: cgu.name().to_string(),
+                    source: cgu.work_product(tcx),
+                });
+                true
+            }
+        };
+
+        if tcx.dep_graph.is_fully_enabled() {
+            let dep_node = cgu.codegen_dep_node(tcx);
+            let dep_node_index = tcx.dep_graph.dep_node_index_of(&dep_node);
+            tcx.dep_graph.mark_loaded_from_cache(dep_node_index, loaded_from_cache);
+        }
     }

     ongoing_codegen.codegen_finished(tcx);
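
In outline, only the `No` arm of the match above re-runs Rust-to-LLVM-IR codegen; the three cached arms re-enter the backend pipeline from files saved in the incr. comp. cache and report themselves as loaded from cache. A minimal stand-alone illustration (the enum mirrors the patch, the helper name is an assumption):

    #[derive(Debug, PartialEq)]
    enum CguReUsable { No, PreThinLto, PostThinLto, PostThinLtoButImportedFrom }

    // Whether the dep-graph should record this CGU as loaded from cache,
    // mirroring the `loaded_from_cache` values in the match above.
    fn loaded_from_cache(reuse: &CguReUsable) -> bool {
        match *reuse {
            CguReUsable::No => false, // full codegen, then optimization
            CguReUsable::PreThinLto
            | CguReUsable::PostThinLto
            | CguReUsable::PostThinLtoButImportedFrom => true,
        }
    }
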
@@ -1176,12 +1249,6 @@ fn compile_codegen_unit<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     write::submit_codegened_module_to_llvm(tcx,
                                            module,
                                            cost);
-
-    if tcx.dep_graph.is_fully_enabled() {
-        let dep_node_index = tcx.dep_graph.dep_node_index_of(&dep_node);
-        tcx.dep_graph.mark_loaded_from_cache(dep_node_index, false);
-    }
-
     return stats;

 fn module_codegen<'a, 'tcx>(
@@ -1246,7 +1313,7 @@ fn compile_codegen_unit<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,

     (stats, ModuleCodegen {
         name: cgu_name.to_string(),
-        source: ModuleSource::Codegened(llvm_module),
+        module_llvm: llvm_module,
         kind: ModuleKind::Regular,
     })
 }
@@ -1324,8 +1391,11 @@ pub fn visibility_to_llvm(linkage: Visibility) -> llvm::Visibility {
     }
 }

-#[allow(unused)]
 fn load_thin_lto_imports(sess: &Session) -> lto::ThinLTOImports {
+    if sess.opts.incremental.is_none() {
+        return lto::ThinLTOImports::new();
+    }
+
     let path = rustc_incremental::in_incr_comp_dir_sess(
         sess,
         lto::THIN_LTO_IMPORTS_INCR_COMP_FILE_NAME
@@ -66,13 +66,13 @@ extern crate rustc_errors as errors;
 extern crate serialize;
 extern crate cc; // Used to locate MSVC
 extern crate tempfile;
+extern crate memmap;

 use back::bytecode::RLIB_BYTECODE_EXTENSION;

 pub use llvm_util::target_features;

 use std::any::Any;
-use std::path::PathBuf;
+use std::path::{PathBuf};
 use std::sync::mpsc;
 use rustc_data_structures::sync::Lrc;
@@ -273,10 +273,15 @@ struct ModuleCodegen {
     /// as the crate name and disambiguator.
     /// We currently generate these names via CodegenUnit::build_cgu_name().
     name: String,
-    source: ModuleSource,
+    module_llvm: ModuleLlvm,
     kind: ModuleKind,
 }

+struct CachedModuleCodegen {
+    name: String,
+    source: WorkProduct,
+}
+
 #[derive(Copy, Clone, Debug, PartialEq)]
 enum ModuleKind {
     Regular,
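
Design note: with the `ModuleSource` enum removed (see the hunk further below), "freshly codegened" and "loaded from cache" become two distinct types rather than two variants, so each code path states in its signature which case it handles. A toy sketch of the shape, with stand-in field types in place of the real compiler internals:

    struct ModuleLlvm;   // stand-in: live LLVM module owned by this session
    struct WorkProduct;  // stand-in: files saved by a previous session

    struct ModuleCodegen { name: String, module_llvm: ModuleLlvm }
    struct CachedModuleCodegen { name: String, source: WorkProduct }

    // Each path can now only be called with the case it actually handles.
    fn optimize_fresh(_module: ModuleCodegen) { /* run LLVM passes */ }
    fn reuse_cached(_module: CachedModuleCodegen) { /* copy cached files */ }
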
@@ -285,22 +290,11 @@ enum ModuleKind {
 }

 impl ModuleCodegen {
-    fn llvm(&self) -> Option<&ModuleLlvm> {
-        match self.source {
-            ModuleSource::Codegened(ref llvm) => Some(llvm),
-            ModuleSource::Preexisting(_) => None,
-        }
-    }
-
     fn into_compiled_module(self,
                             emit_obj: bool,
                             emit_bc: bool,
                             emit_bc_compressed: bool,
                             outputs: &OutputFilenames) -> CompiledModule {
-        let pre_existing = match self.source {
-            ModuleSource::Preexisting(_) => true,
-            ModuleSource::Codegened(_) => false,
-        };
         let object = if emit_obj {
             Some(outputs.temp_path(OutputType::Object, Some(&self.name)))
         } else {
@@ -321,7 +315,6 @@ impl ModuleCodegen {
         CompiledModule {
             name: self.name.clone(),
             kind: self.kind,
-            pre_existing,
             object,
             bytecode,
             bytecode_compressed,
@@ -333,20 +326,11 @@ impl ModuleCodegen {
 struct CompiledModule {
     name: String,
     kind: ModuleKind,
-    pre_existing: bool,
     object: Option<PathBuf>,
     bytecode: Option<PathBuf>,
     bytecode_compressed: Option<PathBuf>,
 }

-enum ModuleSource {
-    /// Copy the `.o` files or whatever from the incr. comp. directory.
-    Preexisting(WorkProduct),
-
-    /// Rebuild from this LLVM module.
-    Codegened(ModuleLlvm),
-}
-
 struct ModuleLlvm {
     llcx: &'static mut llvm::Context,
     llmod_raw: *const llvm::Module,
@@ -36,6 +36,7 @@ pub fn copy_cgu_workproducts_to_incr_comp_cache_dir(
             WorkProductFileKind::Object => "o",
             WorkProductFileKind::Bytecode => "bc",
             WorkProductFileKind::BytecodeCompressed => "bc.z",
+            WorkProductFileKind::PreThinLtoBytecode => "pre-thinlto.bc",
         };
         let file_name = format!("{}.{}", cgu_name, extension);
         let path_in_incr_dir = in_incr_comp_dir_sess(sess, &file_name);
@@ -103,7 +103,7 @@
 //! inlining, even when they are not marked #[inline].

 use monomorphize::collector::InliningMap;
-use rustc::dep_graph::{WorkProductId, DepNode, DepConstructor};
+use rustc::dep_graph::{WorkProductId, WorkProduct, DepNode, DepConstructor};
 use rustc::hir::CodegenFnAttrFlags;
 use rustc::hir::def_id::{DefId, LOCAL_CRATE, CRATE_DEF_INDEX};
 use rustc::hir::map::DefPathData;
@@ -150,6 +150,15 @@ pub trait CodegenUnitExt<'tcx> {
         WorkProductId::from_cgu_name(&self.name().as_str())
     }

+    fn work_product(&self, tcx: TyCtxt) -> WorkProduct {
+        let work_product_id = self.work_product_id();
+        tcx.dep_graph
+           .previous_work_product(&work_product_id)
+           .unwrap_or_else(|| {
+                panic!("Could not find work-product for CGU `{}`", self.name())
+            })
+    }
+
     fn items_in_deterministic_order<'a>(&self,
                                         tcx: TyCtxt<'a, 'tcx, 'tcx>)
                                         -> Vec<(MonoItem<'tcx>,
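
The new `work_product` helper centralizes a lookup that the cached arms in base.rs now share; in isolation it behaves roughly like the sketch below, with a plain HashMap standing in for the dep-graph's previous-session work products (the surrounding type names are assumptions):

    use std::collections::HashMap;

    #[derive(Clone)]
    struct WorkProduct { saved_files: Vec<(String, String)> } // (kind, file name)

    struct PreviousWorkProducts { by_cgu: HashMap<String, WorkProduct> }

    impl PreviousWorkProducts {
        // Panics if the cache has no entry, matching the patch's unwrap_or_else.
        fn work_product(&self, cgu_name: &str) -> WorkProduct {
            self.by_cgu.get(cgu_name).cloned().unwrap_or_else(|| {
                panic!("Could not find work-product for CGU `{}`", cgu_name)
            })
        }
    }
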