
rustc_codegen_ssa: only create backend BasicBlocks as-needed.

Eduard-Mihai Burtescu 2021-05-06 17:37:19 +03:00
parent 7dc9ff5c62
commit 402e9efc56
7 changed files with 50 additions and 51 deletions
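In short: instead of eagerly creating one backend basic block per MIR basic block up front (and then deleting the ones codegen never visited), `FunctionCx` now keeps an `Option` slot per MIR block and fills it the first time the block is actually needed. Below is a minimal, self-contained sketch of that get-or-create pattern, not part of this commit's diff; `BlockId`, `Block`, and `Cache` are illustrative stand-ins, not rustc_codegen_ssa types.

```rust
// Illustrative sketch only; these names are stand-ins, not rustc_codegen_ssa APIs.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct BlockId(usize);

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct Block(usize);

struct Cache {
    // One slot per MIR block, analogous to
    // `cached_llbbs: IndexVec<mir::BasicBlock, Option<Bx::BasicBlock>>`.
    cached: Vec<Option<Block>>,
    created: usize,
}

impl Cache {
    fn new(num_blocks: usize) -> Self {
        Cache { cached: vec![None; num_blocks], created: 0 }
    }

    // Get the backend block for `bb`, creating and caching it on first use.
    fn block(&mut self, bb: BlockId) -> Block {
        if let Some(b) = self.cached[bb.0] {
            return b;
        }
        let b = Block(self.created);
        self.created += 1;
        self.cached[bb.0] = Some(b);
        b
    }
}

fn main() {
    let mut cache = Cache::new(4);
    let b2 = cache.block(BlockId(2));
    assert_eq!(cache.block(BlockId(2)), b2); // second lookup hits the cache
    assert_eq!(cache.created, 1); // blocks 0, 1 and 3 were never created
}
```

The same idea shows up in the diff as the `cached_llbbs: IndexVec<mir::BasicBlock, Option<Bx::BasicBlock>>` field plus the new `FunctionCx::llbb` accessor.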


@@ -1148,10 +1148,6 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
         self.cx
     }
 
-    unsafe fn delete_basic_block(&mut self, bb: &'ll BasicBlock) {
-        llvm::LLVMDeleteBasicBlock(bb);
-    }
-
     fn do_not_inline(&mut self, llret: &'ll Value) {
         llvm::Attribute::NoInline.apply_callsite(llvm::AttributePlace::Function, llret);
     }


@@ -1079,7 +1079,6 @@ extern "C" {
         Fn: &'a Value,
         Name: *const c_char,
     ) -> &'a BasicBlock;
-    pub fn LLVMDeleteBasicBlock(BB: &BasicBlock);
 
     // Operations on instructions
     pub fn LLVMIsAInstruction(Val: &Value) -> Option<&Value>;


@@ -68,7 +68,7 @@ impl<'a, 'tcx> TerminatorCodegenHelper<'tcx> {
         target: mir::BasicBlock,
     ) -> (Bx::BasicBlock, bool) {
         let span = self.terminator.source_info.span;
-        let lltarget = fx.blocks[target];
+        let lltarget = fx.llbb(target);
         let target_funclet = fx.cleanup_kinds[target].funclet_bb(target);
         match (self.funclet_bb, target_funclet) {
             (None, None) => (lltarget, false),
@@ -133,13 +133,13 @@ impl<'a, 'tcx> TerminatorCodegenHelper<'tcx> {
         // If there is a cleanup block and the function we're calling can unwind, then
         // do an invoke, otherwise do a call.
         if let Some(cleanup) = cleanup.filter(|_| fn_abi.can_unwind) {
-            let ret_bx = if let Some((_, target)) = destination {
-                fx.blocks[target]
+            let ret_llbb = if let Some((_, target)) = destination {
+                fx.llbb(target)
             } else {
                 fx.unreachable_block()
             };
             let invokeret =
-                bx.invoke(fn_ptr, &llargs, ret_bx, self.llblock(fx, cleanup), self.funclet(fx));
+                bx.invoke(fn_ptr, &llargs, ret_llbb, self.llblock(fx, cleanup), self.funclet(fx));
             bx.apply_attrs_callsite(&fn_abi, invokeret);
 
             if let Some((ret_dest, target)) = destination {
@@ -1205,7 +1205,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
 
     // FIXME(eddyb) rename this to `eh_pad_for_uncached`.
     fn landing_pad_for_uncached(&mut self, bb: mir::BasicBlock) -> Bx::BasicBlock {
-        let llbb = self.blocks[bb];
+        let llbb = self.llbb(bb);
         if base::wants_msvc_seh(self.cx.sess()) {
             let funclet;
             let ret_llbb;
@@ -1293,9 +1293,23 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
         Bx::new_block(self.cx, self.llfn, name)
     }
 
-    pub fn build_block(&self, bb: mir::BasicBlock) -> Bx {
+    /// Get the backend `BasicBlock` for a MIR `BasicBlock`, either already
+    /// cached in `self.cached_llbbs`, or created on demand (and cached).
+    // FIXME(eddyb) rename `llbb` and other `ll`-prefixed things to use a
+    // more backend-agnostic prefix such as `cg` (i.e. this would be `cgbb`).
+    pub fn llbb(&mut self, bb: mir::BasicBlock) -> Bx::BasicBlock {
+        self.cached_llbbs[bb].unwrap_or_else(|| {
+            // FIXME(eddyb) only name the block if `fewer_names` is `false`.
+            // FIXME(eddyb) create the block directly, without a builder.
+            let llbb = self.new_block(&format!("{:?}", bb)).llbb();
+            self.cached_llbbs[bb] = Some(llbb);
+            llbb
+        })
+    }
+
+    pub fn build_block(&mut self, bb: mir::BasicBlock) -> Bx {
         let mut bx = Bx::with_cx(self.cx);
-        bx.position_at_end(self.blocks[bb]);
+        bx.position_at_end(self.llbb(bb));
         bx
     }
 
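A note on the `unwrap_or_else` shape in the new `llbb` above: it borrow-checks because the cached value is a `Copy` handle (for the LLVM backend, `Bx::BasicBlock` is `&'ll BasicBlock`), so indexing copies the `Option` out before the closure takes its mutable borrow of `self` to fill the slot; this is also why `llbb` and `build_block` now take `&mut self`. Here is a small self-contained sketch of the same shape, with `Bb` and `Fx` as made-up stand-ins rather than rustc types:

```rust
// Stand-alone illustration of the get-or-create shape used by `llbb`;
// `Bb` and `Fx` are made-up types, not rustc_codegen_ssa ones.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Bb(u32); // a cheap, `Copy` handle, like a backend basic-block reference

struct Fx {
    cache: Vec<Option<Bb>>,
    next: u32,
}

impl Fx {
    fn llbb(&mut self, i: usize) -> Bb {
        // Indexing copies the `Option<Bb>` out (it is `Copy`), ending that
        // borrow, so the closure below may mutably borrow `self` to fill the slot.
        self.cache[i].unwrap_or_else(|| {
            let bb = Bb(self.next);
            self.next += 1;
            self.cache[i] = Some(bb);
            bb
        })
    }
}

fn main() {
    let mut fx = Fx { cache: vec![None; 3], next: 0 };
    let a = fx.llbb(1);
    assert_eq!(fx.llbb(1), a); // second lookup hits the cache
    assert_eq!(fx.next, 1); // only one block was actually created
}
```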


@@ -40,8 +40,11 @@ pub struct FunctionCx<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> {
     /// then later loaded when generating the DIVERGE_BLOCK.
     personality_slot: Option<PlaceRef<'tcx, Bx::Value>>,
 
-    /// A `Block` for each MIR `BasicBlock`
-    blocks: IndexVec<mir::BasicBlock, Bx::BasicBlock>,
+    /// A backend `BasicBlock` for each MIR `BasicBlock`, created lazily
+    /// as-needed (e.g. RPO reaching it or another block branching to it).
+    // FIXME(eddyb) rename `llbbs` and other `ll`-prefixed things to use a
+    // more backend-agnostic prefix such as `cg` (i.e. this would be `cgbbs`).
+    cached_llbbs: IndexVec<mir::BasicBlock, Option<Bx::BasicBlock>>,
 
     /// The funclet status of each basic block
     cleanup_kinds: IndexVec<mir::BasicBlock, analyze::CleanupKind>,
@@ -151,17 +154,17 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
     // Allocate a `Block` for every basic block, except
     // the start block, if nothing loops back to it.
     let reentrant_start_block = !mir.predecessors()[mir::START_BLOCK].is_empty();
-    let block_bxs: IndexVec<mir::BasicBlock, Bx::BasicBlock> = mir
-        .basic_blocks()
-        .indices()
-        .map(|bb| {
-            if bb == mir::START_BLOCK && !reentrant_start_block {
-                bx.llbb()
-            } else {
-                bx.build_sibling_block(&format!("{:?}", bb)).llbb()
-            }
-        })
-        .collect();
+    let cached_llbbs: IndexVec<mir::BasicBlock, Option<Bx::BasicBlock>> =
+        mir.basic_blocks()
+            .indices()
+            .map(|bb| {
+                if bb == mir::START_BLOCK && !reentrant_start_block {
+                    Some(bx.llbb())
+                } else {
+                    None
+                }
+            })
+            .collect();
 
     let mut fx = FunctionCx {
         instance,
@@ -170,7 +173,7 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
         fn_abi,
         cx,
         personality_slot: None,
-        blocks: block_bxs,
+        cached_llbbs,
         unreachable_block: None,
         cleanup_kinds,
         landing_pads: IndexVec::from_elem(None, mir.basic_blocks()),
@@ -245,29 +248,14 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
 
     // Branch to the START block, if it's not the entry block.
     if reentrant_start_block {
-        bx.br(fx.blocks[mir::START_BLOCK]);
+        bx.br(fx.llbb(mir::START_BLOCK));
     }
 
-    let rpo = traversal::reverse_postorder(&mir);
-    let mut visited = BitSet::new_empty(mir.basic_blocks().len());
-
     // Codegen the body of each block using reverse postorder
-    for (bb, _) in rpo {
-        visited.insert(bb.index());
+    // FIXME(eddyb) reuse RPO iterator between `analysis` and this.
+    for (bb, _) in traversal::reverse_postorder(&mir) {
        fx.codegen_block(bb);
     }
-
-    // Remove blocks that haven't been visited, or have no
-    // predecessors.
-    for bb in mir.basic_blocks().indices() {
-        // Unreachable block
-        if !visited.contains(bb.index()) {
-            debug!("codegen_mir: block {:?} was not visited", bb);
-            unsafe {
-                bx.delete_basic_block(fx.blocks[bb]);
-            }
-        }
-    }
 }
 
 /// Produces, for each argument, a `Value` pointing at the
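The driver side of this, modeled in isolation: `codegen_mir` pre-seeds only the slot for the start block (when nothing branches back to it, it can reuse the function's entry block) and then walks the body in reverse postorder; any block the walk or a branch actually needs is created on demand, and blocks that are unreachable in MIR are simply never created, which is why the delete-unvisited-blocks pass above could be dropped. The sketch below is a simplified model under those assumptions (plain `Vec`s instead of `IndexVec`, integer stand-ins for backend blocks), not the rustc code:

```rust
// Simplified model of the lazy scheme in `codegen_mir`; not rustc code.
// Block 0 is the entry; `succs[b]` lists the successors of block `b`.
fn reverse_postorder(succs: &[Vec<usize>]) -> Vec<usize> {
    fn dfs(b: usize, succs: &[Vec<usize>], seen: &mut [bool], post: &mut Vec<usize>) {
        if seen[b] {
            return;
        }
        seen[b] = true;
        for &s in &succs[b] {
            dfs(s, succs, seen, post);
        }
        post.push(b);
    }
    let mut seen = vec![false; succs.len()];
    let mut post = Vec::new();
    dfs(0, succs, &mut seen, &mut post);
    post.reverse();
    post
}

fn main() {
    // Block 3 is unreachable from the entry block.
    let succs: Vec<Vec<usize>> = vec![vec![1, 2], vec![2], vec![], vec![2]];

    // One `Option` slot per block; only the entry is pre-seeded, mirroring
    // `Some(bx.llbb())` for a non-reentrant START_BLOCK.
    let mut cached: Vec<Option<usize>> = vec![None; succs.len()];
    let mut created = 0usize;
    cached[0] = Some(created);
    created += 1;

    // "Codegen" in reverse postorder, materializing each visited block and
    // each branch target on first use.
    for bb in reverse_postorder(&succs) {
        let _llbb = *cached[bb].get_or_insert_with(|| {
            let b = created;
            created += 1;
            b
        });
        for &t in &succs[bb] {
            let _target = *cached[t].get_or_insert_with(|| {
                let b = created;
                created += 1;
                b
            });
        }
    }

    // The unreachable block 3 never got a backend block, so there is
    // nothing left to delete afterwards.
    assert!(cached[3].is_none());
    assert_eq!(created, 3);
}
```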


@@ -291,6 +291,5 @@ pub trait BuilderMethods<'a, 'tcx>:
     ) -> Self::Value;
     fn zext(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
 
-    unsafe fn delete_basic_block(&mut self, bb: Self::BasicBlock);
     fn do_not_inline(&mut self, llret: Self::Value);
 }


@@ -23,13 +23,16 @@ pub fn droppy() {
 // FIXME(eddyb) the `void @` forces a match on the instruction, instead of the
 // comment, that's `; call core::ptr::drop_in_place::<drop::SomeUniqueName>`
 // for the `v0` mangling, should switch to matching on that once `legacy` is gone.
+// CHECK-NOT: call void @{{.*}}drop_in_place{{.*}}SomeUniqueName
+// CHECK: invoke void @{{.*}}drop_in_place{{.*}}SomeUniqueName
+// CHECK: invoke void @{{.*}}drop_in_place{{.*}}SomeUniqueName
 // CHECK-NOT: invoke void @{{.*}}drop_in_place{{.*}}SomeUniqueName
 // CHECK: call void @{{.*}}drop_in_place{{.*}}SomeUniqueName
 // CHECK: call void @{{.*}}drop_in_place{{.*}}SomeUniqueName
+// CHECK-NOT: call void @{{.*}}drop_in_place{{.*}}SomeUniqueName
+// CHECK: invoke void @{{.*}}drop_in_place{{.*}}SomeUniqueName
+// CHECK-NOT: invoke void @{{.*}}drop_in_place{{.*}}SomeUniqueName
 // CHECK: call void @{{.*}}drop_in_place{{.*}}SomeUniqueName
-// CHECK-NOT: call void @{{.*}}drop_in_place{{.*}}SomeUniqueName
-// CHECK: invoke void @{{.*}}drop_in_place{{.*}}SomeUniqueName
-// CHECK-NOT: invoke void @{{.*}}drop_in_place{{.*}}SomeUniqueName
 // CHECK: call void @{{.*}}drop_in_place{{.*}}SomeUniqueName
 // CHECK-NOT: {{(call|invoke) void @.*}}drop_in_place{{.*}}SomeUniqueName
 // The next line checks for the } that ends the function definition


@@ -14,13 +14,13 @@ pub fn exhaustive_match(e: E) -> u8 {
 // CHECK-NEXT: i[[TY:[0-9]+]] [[DISCR:[0-9]+]], label %[[A:[a-zA-Z0-9_]+]]
 // CHECK-NEXT: i[[TY:[0-9]+]] [[DISCR:[0-9]+]], label %[[B:[a-zA-Z0-9_]+]]
 // CHECK-NEXT: ]
-// CHECK: [[B]]:
-// CHECK-NEXT: store i8 1, i8* %1, align 1
-// CHECK-NEXT: br label %[[EXIT:[a-zA-Z0-9_]+]]
 // CHECK: [[OTHERWISE]]:
 // CHECK-NEXT: unreachable
 // CHECK: [[A]]:
 // CHECK-NEXT: store i8 0, i8* %1, align 1
+// CHECK-NEXT: br label %[[EXIT:[a-zA-Z0-9_]+]]
+// CHECK: [[B]]:
+// CHECK-NEXT: store i8 1, i8* %1, align 1
 // CHECK-NEXT: br label %[[EXIT:[a-zA-Z0-9_]+]]
 match e {
     E::A => 0,