Add -Z combine_cgu flag

Introduce a compiler option to let rustc combine all regular CGUs into a single one at the end of compilation. Part of issue #64191.
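With the flag enabled, each regular CGU is still optimized in parallel, but instead of being emitted as its own object file it is handed back to the coordinator, which links all regular modules into one and runs codegen on the combined module exactly once; metadata and allocator modules are compiled as before. Like any -Z option this requires a nightly compiler. Assuming the flag keeps the spelling of the new debugging option, enabling it would look roughly like (illustrative invocation, not taken from the commit):

rustc -Zcombine_cgu -Ccodegen-units=16 main.rs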
This commit is contained in:
parent
e82584a77d
commit
c81b43d8ac
6 changed files with 90 additions and 15 deletions
@@ -702,6 +702,7 @@ impl<B: WriteBackendMethods> WorkItem<B> {
 enum WorkItemResult<B: WriteBackendMethods> {
     Compiled(CompiledModule),
+    NeedsLink(ModuleCodegen<B::Module>),
     NeedsFatLTO(FatLTOInput<B>),
     NeedsThinLTO(String, B::ThinBuffer),
 }
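The new NeedsLink variant is the hook for the rest of the change: it carries a backend module that has been optimized but deliberately not codegen'd to an object file, so the coordinator can merge it with the other regular CGUs later. A minimal standalone sketch of the distinction, using placeholder types rather than rustc's backend abstractions:

// Placeholder types, not rustc's: an emitted object versus an in-memory module.
struct CompiledObject { path: std::path::PathBuf }
struct BackendModule { name: String }

enum WorkOutcome {
    Compiled(CompiledObject),  // object file already written, ready for the linker
    NeedsLink(BackendModule),  // held in memory until all regular CGUs are merged
}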
@@ -801,11 +802,8 @@ fn execute_optimize_work_item<B: ExtraBackendMethods>(
         None
     };
 
-    Ok(match lto_type {
-        ComputedLtoType::No => {
-            let module = unsafe { B::codegen(cgcx, &diag_handler, module, module_config)? };
-            WorkItemResult::Compiled(module)
-        }
+    match lto_type {
+        ComputedLtoType::No => finish_intra_module_work(cgcx, module, module_config),
         ComputedLtoType::Thin => {
             let (name, thin_buffer) = B::prepare_thin(module);
             if let Some(path) = bitcode {
@@ -813,7 +811,7 @@ fn execute_optimize_work_item<B: ExtraBackendMethods>(
                     panic!("Error writing pre-lto-bitcode file `{}`: {}", path.display(), e);
                 });
             }
-            WorkItemResult::NeedsThinLTO(name, thin_buffer)
+            Ok(WorkItemResult::NeedsThinLTO(name, thin_buffer))
         }
         ComputedLtoType::Fat => match bitcode {
             Some(path) => {
@@ -821,11 +819,11 @@ fn execute_optimize_work_item<B: ExtraBackendMethods>(
                 fs::write(&path, buffer.data()).unwrap_or_else(|e| {
                     panic!("Error writing pre-lto-bitcode file `{}`: {}", path.display(), e);
                 });
-                WorkItemResult::NeedsFatLTO(FatLTOInput::Serialized { name, buffer })
+                Ok(WorkItemResult::NeedsFatLTO(FatLTOInput::Serialized { name, buffer }))
             }
-            None => WorkItemResult::NeedsFatLTO(FatLTOInput::InMemory(module)),
+            None => Ok(WorkItemResult::NeedsFatLTO(FatLTOInput::InMemory(module))),
         },
-    })
+    }
 }
 
 fn execute_copy_from_cache_work_item<B: ExtraBackendMethods>(
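The hunks above reshape execute_optimize_work_item: rather than building a WorkItemResult in each arm and wrapping the whole match in Ok(...), every arm now returns a Result directly, and the no-LTO case is routed through the new finish_intra_module_work helper so the combine_cgu decision lives in one place. A toy illustration of that control-flow shape, with made-up types that only mirror the structure:

// Made-up types; only the dispatch shape mirrors the diff above.
enum Lto { No, Thin, Fat }
enum Outcome { Compiled, NeedsThinLto, NeedsFatLto, NeedsLink }

fn execute_optimize(lto: Lto) -> Result<Outcome, String> {
    match lto {
        // Delegate instead of codegenning inline, so the "emit now or link
        // later" decision is made in a single helper.
        Lto::No => finish_intra_module_work(),
        Lto::Thin => Ok(Outcome::NeedsThinLto),
        Lto::Fat => Ok(Outcome::NeedsFatLto),
    }
}

fn finish_intra_module_work() -> Result<Outcome, String> {
    Ok(Outcome::NeedsLink) // or Outcome::Compiled, depending on the flag
}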
@@ -870,13 +868,26 @@ fn execute_lto_work_item<B: ExtraBackendMethods>(
     cgcx: &CodegenContext<B>,
     mut module: lto::LtoModuleCodegen<B>,
     module_config: &ModuleConfig,
 ) -> Result<WorkItemResult<B>, FatalError> {
+    let module = unsafe { module.optimize(cgcx)? };
+    finish_intra_module_work(cgcx, module, module_config)
+}
+
+fn finish_intra_module_work<B: ExtraBackendMethods>(
+    cgcx: &CodegenContext<B>,
+    module: ModuleCodegen<B::Module>,
+    module_config: &ModuleConfig,
+) -> Result<WorkItemResult<B>, FatalError> {
     let diag_handler = cgcx.create_diag_handler();
 
-    unsafe {
-        let module = module.optimize(cgcx)?;
-        let module = B::codegen(cgcx, &diag_handler, module, module_config)?;
+    if !cgcx.opts.debugging_opts.combine_cgu
+        || module.kind == ModuleKind::Metadata
+        || module.kind == ModuleKind::Allocator
+    {
+        let module = unsafe { B::codegen(cgcx, &diag_handler, module, module_config)? };
         Ok(WorkItemResult::Compiled(module))
-    }
+    } else {
+        Ok(WorkItemResult::NeedsLink(module))
+    }
 }
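finish_intra_module_work is where the flag actually takes effect: a module that has finished its own optimization is codegen'd to an object right away if combine_cgu is off or if it is the metadata or allocator module, and is otherwise returned as NeedsLink for the final merge. A standalone, simplified sketch of that decision, using toy types in place of rustc's:

// Toy stand-ins for rustc's types; only the decision logic mirrors the diff.
#[derive(PartialEq)]
enum ModuleKind { Regular, Metadata, Allocator }

struct Module { name: String, kind: ModuleKind }
struct Object { name: String }

enum Outcome {
    Compiled(Object),   // emitted to an object file right away
    NeedsLink(Module),  // kept in memory for the final merge
}

// Simplified analogue of finish_intra_module_work.
fn finish(combine_cgu: bool, module: Module) -> Outcome {
    if !combine_cgu
        || module.kind == ModuleKind::Metadata
        || module.kind == ModuleKind::Allocator
    {
        // Stand-in for B::codegen: turn the module into an object now.
        Outcome::Compiled(Object { name: format!("{}.o", module.name) })
    } else {
        Outcome::NeedsLink(module)
    }
}

Keeping metadata and allocator modules out of the merge means only the per-CGU code is affected by the flag.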
@@ -891,6 +902,10 @@ pub enum Message<B: WriteBackendMethods> {
         thin_buffer: B::ThinBuffer,
         worker_id: usize,
     },
+    NeedsLink {
+        module: ModuleCodegen<B::Module>,
+        worker_id: usize,
+    },
     Done {
         result: Result<CompiledModule, Option<WorkerFatalError>>,
         worker_id: usize,
@@ -1178,6 +1193,7 @@ fn start_executing_work<B: ExtraBackendMethods>(
         let mut compiled_modules = vec![];
         let mut compiled_metadata_module = None;
         let mut compiled_allocator_module = None;
+        let mut needs_link = Vec::new();
         let mut needs_fat_lto = Vec::new();
         let mut needs_thin_lto = Vec::new();
         let mut lto_import_only_modules = Vec::new();
@@ -1434,6 +1450,10 @@ fn start_executing_work<B: ExtraBackendMethods>(
                         }
                     }
                 }
+                Message::NeedsLink { module, worker_id } => {
+                    free_worker(worker_id);
+                    needs_link.push(module);
+                }
                 Message::NeedsFatLTO { result, worker_id } => {
                     assert!(!started_lto);
                     free_worker(worker_id);
@@ -1462,6 +1482,18 @@ fn start_executing_work<B: ExtraBackendMethods>(
             }
         }
 
+        let needs_link = mem::take(&mut needs_link);
+        if !needs_link.is_empty() {
+            assert!(compiled_modules.is_empty());
+            let diag_handler = cgcx.create_diag_handler();
+            let module = B::run_link(&cgcx, &diag_handler, needs_link).map_err(|_| ())?;
+            let module = unsafe {
+                B::codegen(&cgcx, &diag_handler, module, cgcx.config(ModuleKind::Regular))
+                    .map_err(|_| ())?
+            };
+            compiled_modules.push(module);
+        }
+
         // Drop to print timings
         drop(llvm_start_time);
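On the coordinator side, workers report NeedsLink over the same message channel as every other result; the modules are collected into needs_link, and once all work items have finished they are merged with run_link and codegen'd once as a regular module. A self-contained sketch of that collect-then-merge pattern using std::sync::mpsc and toy data (none of this is rustc's real machinery):

use std::sync::mpsc;
use std::thread;

// Toy stand-ins for the coordinator protocol; not rustc's actual types.
enum Msg {
    Done { object: String },
    NeedsLink { module: String },
}

fn main() {
    let (tx, rx) = mpsc::channel();

    // Pretend workers: worker 0 is a metadata-style module that is compiled
    // directly; the rest are regular CGUs that ask to be linked together.
    for worker_id in 0..4usize {
        let tx = tx.clone();
        thread::spawn(move || {
            let msg = if worker_id == 0 {
                Msg::Done { object: format!("metadata-{worker_id}.o") }
            } else {
                Msg::NeedsLink { module: format!("cgu-{worker_id}") }
            };
            tx.send(msg).unwrap();
        });
    }
    drop(tx); // the loop below ends once every worker's sender is dropped

    let mut compiled_modules = Vec::new();
    let mut needs_link = Vec::new();
    for msg in rx {
        match msg {
            Msg::Done { object } => compiled_modules.push(object),
            Msg::NeedsLink { module } => needs_link.push(module),
        }
    }

    // Mirror of the coordinator's final step: merge every held-back module
    // (a stand-in for B::run_link) and "codegen" the combined result once.
    if !needs_link.is_empty() {
        let combined = needs_link.join("+");
        compiled_modules.push(format!("{combined}.o"));
    }
    println!("compiled: {compiled_modules:?}");
}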
@@ -1521,6 +1553,9 @@ fn spawn_work<B: ExtraBackendMethods>(cgcx: CodegenContext<B>, work: WorkItem<B>
             Some(Ok(WorkItemResult::Compiled(m))) => {
                 Message::Done::<B> { result: Ok(m), worker_id }
             }
+            Some(Ok(WorkItemResult::NeedsLink(m))) => {
+                Message::NeedsLink::<B> { module: m, worker_id }
+            }
             Some(Ok(WorkItemResult::NeedsFatLTO(m))) => {
                 Message::NeedsFatLTO::<B> { result: m, worker_id }
            }
@@ -13,6 +13,12 @@ pub trait WriteBackendMethods: 'static + Sized + Clone {
     type ThinData: Send + Sync;
     type ThinBuffer: ThinBufferMethods;
 
+    /// Merge all modules into main_module and returning it
+    fn run_link(
+        cgcx: &CodegenContext<Self>,
+        diag_handler: &Handler,
+        modules: Vec<ModuleCodegen<Self::Module>>,
+    ) -> Result<ModuleCodegen<Self::Module>, FatalError>;
     /// Performs fat LTO by merging all modules into a single one and returning it
     /// for further optimization.
     fn run_fat_lto(
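run_link is the backend hook that performs the merge; for the LLVM backend this presumably boils down to linking the in-memory LLVM modules together before the single codegen pass. A standalone sketch of the trait shape with toy types (these are not rustc's real signatures, which also thread a CodegenContext and a diagnostic Handler through the call):

// Toy types; none of these are rustc's real signatures.
struct Module { name: String, ir: String }

trait WriteBackend: Sized {
    /// Analogue of run_link: merge all modules into the first one and return it.
    fn run_link(modules: Vec<Module>) -> Result<Module, String>;
}

struct ToyBackend;

impl WriteBackend for ToyBackend {
    fn run_link(modules: Vec<Module>) -> Result<Module, String> {
        let mut iter = modules.into_iter();
        // Treat the first module as the "main" one and fold the rest into it.
        let mut main = iter.next().ok_or_else(|| "no modules to link".to_string())?;
        for m in iter {
            main.ir.push('\n');
            main.ir.push_str(&m.ir);
        }
        Ok(main)
    }
}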