Auto merge of #136318 - matthiaskrgr:rollup-a159mzo, r=matthiaskrgr

Rollup of 9 pull requests

Successful merges:

 - #135026 (Cast global variables to default address space)
 - #135475 (uefi: Implement path)
 - #135852 (Add `AsyncFn*` to `core` prelude)
 - #136004 (tests: Skip const OOM tests on aarch64-unknown-linux-gnu)
 - #136157 (override build profile for bootstrap tests)
 - #136180 (Introduce a wrapper for "typed valtrees" and properly check the type before extracting the value)
 - #136256 (Add release notes for 1.84.1)
 - #136271 (Remove minor future footgun in `impl Debug for MaybeUninit`)
 - #136288 (Improve documentation for file locking)

r? `@ghost`
`@rustbot` modify labels: rollup
bors 2025-01-30 23:11:38 +00:00
commit 6c1d960d88
69 changed files with 700 additions and 293 deletions
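
Most of the diff below comes from #135026. On targets whose data layout puts global variables in a non-default address space (AMDGPU, for example), the backend now keeps the bare global for its own bookkeeping and hands out a pointer cast to the default address space wherever a plain pointer is expected. A minimal, self-contained sketch of that split, with illustrative types rather than the rustc API:

```rust
use std::collections::HashMap;

// Illustrative stand-ins: a "global" is just an id plus the address space it
// was created in; the real code deals in LLVM values.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct Global {
    id: u32,
    addrspace: u32,
}

#[derive(Default)]
struct Cx {
    // Plays the role of `const_globals`: the cache stores the *raw* global so
    // later passes can still modify it (set an initializer, attach debuginfo).
    const_globals: HashMap<u64, Global>,
    next_id: u32,
    // The address space globals are created in (0 on most targets).
    globals_addrspace: u32,
}

impl Cx {
    // Analogue of `static_addr_of_impl`: create (or reuse) the bare global.
    fn static_addr_of_impl(&mut self, cv: u64) -> Global {
        if let Some(&gv) = self.const_globals.get(&cv) {
            return gv;
        }
        let gv = Global { id: self.next_id, addrspace: self.globals_addrspace };
        self.next_id += 1;
        self.const_globals.insert(cv, gv);
        gv
    }

    // Analogue of the public `static_addr_of`: always yield a pointer in the
    // default address space, casting only when needed (the role
    // `const_pointercast` plays in the real code).
    fn static_addr_of(&mut self, cv: u64) -> Global {
        let gv = self.static_addr_of_impl(cv);
        if gv.addrspace != 0 { Global { addrspace: 0, ..gv } } else { gv }
    }
}

fn main() {
    let mut cx = Cx { globals_addrspace: 1, ..Cx::default() };
    let public = cx.static_addr_of(42);
    let raw = cx.static_addr_of_impl(42);
    assert_eq!(public.id, raw.id); // same underlying global
    assert_eq!(raw.addrspace, 1); // stored as created
    assert_eq!(public.addrspace, 0); // exposed in the default address space
}
```

The same idea shows up in the debuginfo and vtable hunks further down: anything that needs to modify the global itself has to look back through the cast.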

@@ -1325,7 +1325,9 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
impl<'ll> StaticBuilderMethods for Builder<'_, 'll, '_> {
fn get_static(&mut self, def_id: DefId) -> &'ll Value {
// Forward to the `get_static` method of `CodegenCx`
self.cx().get_static(def_id)
let s = self.cx().get_static(def_id);
// Cast to default address space if globals are in a different addrspace
self.cx().const_pointercast(s, self.type_ptr())
}
}

@@ -225,6 +225,8 @@ impl<'ll, 'tcx> ConstCodegenMethods<'tcx> for CodegenCx<'ll, 'tcx> {
llvm::LLVMSetUnnamedAddress(g, llvm::UnnamedAddr::Global);
}
llvm::set_linkage(g, llvm::Linkage::InternalLinkage);
// Cast to default address space if globals are in a different addrspace
let g = self.const_pointercast(g, self.type_ptr());
(s.to_owned(), g)
})
.1;
@@ -289,7 +291,7 @@ impl<'ll, 'tcx> ConstCodegenMethods<'tcx> for CodegenCx<'ll, 'tcx> {
let alloc = alloc.inner();
let value = match alloc.mutability {
Mutability::Mut => self.static_addr_of_mut(init, alloc.align, None),
_ => self.static_addr_of(init, alloc.align, None),
_ => self.static_addr_of_impl(init, alloc.align, None),
};
if !self.sess().fewer_names() && llvm::get_value_name(value).is_empty()
{
@@ -315,7 +317,7 @@ impl<'ll, 'tcx> ConstCodegenMethods<'tcx> for CodegenCx<'ll, 'tcx> {
.global_alloc(self.tcx.vtable_allocation((ty, dyn_ty.principal())))
.unwrap_memory();
let init = const_alloc_to_llvm(self, alloc, /*static*/ false);
let value = self.static_addr_of(init, alloc.inner().align, None);
let value = self.static_addr_of_impl(init, alloc.inner().align, None);
(value, AddressSpace::DATA)
}
GlobalAlloc::Static(def_id) => {
@@ -327,7 +329,8 @@ impl<'ll, 'tcx> ConstCodegenMethods<'tcx> for CodegenCx<'ll, 'tcx> {
let llval = unsafe {
llvm::LLVMConstInBoundsGEP2(
self.type_i8(),
self.const_bitcast(base_addr, self.type_ptr_ext(base_addr_space)),
// Cast to the required address space if necessary
self.const_pointercast(base_addr, self.type_ptr_ext(base_addr_space)),
&self.const_usize(offset.bytes()),
1,
)

@@ -210,6 +210,14 @@ impl<'ll> CodegenCx<'ll, '_> {
unsafe { llvm::LLVMConstBitCast(val, ty) }
}
pub(crate) fn const_pointercast(&self, val: &'ll Value, ty: &'ll Type) -> &'ll Value {
unsafe { llvm::LLVMConstPointerCast(val, ty) }
}
/// Create a global variable.
///
/// The returned global variable is a pointer in the default address space for globals.
/// Fails if a symbol with the given name already exists.
pub(crate) fn static_addr_of_mut(
&self,
cv: &'ll Value,
@@ -233,6 +241,34 @@ impl<'ll> CodegenCx<'ll, '_> {
gv
}
/// Create a global constant.
///
/// The returned global variable is a pointer in the default address space for globals.
pub(crate) fn static_addr_of_impl(
&self,
cv: &'ll Value,
align: Align,
kind: Option<&str>,
) -> &'ll Value {
if let Some(&gv) = self.const_globals.borrow().get(&cv) {
unsafe {
// Upgrade the alignment in cases where the same constant is used with different
// alignment requirements
let llalign = align.bytes() as u32;
if llalign > llvm::LLVMGetAlignment(gv) {
llvm::LLVMSetAlignment(gv, llalign);
}
}
return gv;
}
let gv = self.static_addr_of_mut(cv, align, kind);
unsafe {
llvm::LLVMSetGlobalConstant(gv, True);
}
self.const_globals.borrow_mut().insert(cv, gv);
gv
}
#[instrument(level = "debug", skip(self))]
pub(crate) fn get_static(&self, def_id: DefId) -> &'ll Value {
let instance = Instance::mono(self.tcx, def_id);
@@ -505,24 +541,15 @@ impl<'ll> CodegenCx<'ll, '_> {
}
impl<'ll> StaticCodegenMethods for CodegenCx<'ll, '_> {
/// Get a pointer to a global variable.
///
/// The pointer will always be in the default address space. If global variables default to a
/// different address space, an addrspacecast is inserted.
fn static_addr_of(&self, cv: &'ll Value, align: Align, kind: Option<&str>) -> &'ll Value {
if let Some(&gv) = self.const_globals.borrow().get(&cv) {
unsafe {
// Upgrade the alignment in cases where the same constant is used with different
// alignment requirements
let llalign = align.bytes() as u32;
if llalign > llvm::LLVMGetAlignment(gv) {
llvm::LLVMSetAlignment(gv, llalign);
}
}
return gv;
}
let gv = self.static_addr_of_mut(cv, align, kind);
unsafe {
llvm::LLVMSetGlobalConstant(gv, True);
}
self.const_globals.borrow_mut().insert(cv, gv);
gv
let gv = self.static_addr_of_impl(cv, align, kind);
// static_addr_of_impl returns the bare global variable, which might not be in the default
// address space. Cast to the default address space if necessary.
self.const_pointercast(gv, self.type_ptr())
}
fn codegen_static(&self, def_id: DefId) {

@@ -1487,6 +1487,26 @@ fn build_vtable_type_di_node<'ll, 'tcx>(
.di_node
}
/// Get the global variable for the vtable.
///
/// When using global variables, we may have created an addrspacecast to get a pointer to the
/// default address space if global variables are created in a different address space.
/// For modifying the vtable, we need the real global variable. This function accepts either a
/// global variable (which is simply returned), or an addrspacecast constant expression.
/// If the given value is an addrspacecast, the cast is removed and the global variable behind
/// the cast is returned.
fn find_vtable_behind_cast<'ll>(vtable: &'ll Value) -> &'ll Value {
// The vtable is a global variable, which may be behind an addrspacecast.
unsafe {
if let Some(c) = llvm::LLVMIsAConstantExpr(vtable) {
if llvm::LLVMGetConstOpcode(c) == llvm::Opcode::AddrSpaceCast {
return llvm::LLVMGetOperand(c, 0).unwrap();
}
}
}
vtable
}
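
A rough picture of what the doc comment above describes, with illustrative types rather than LLVM's API: on such targets the value reaching the debuginfo code is a constant cast wrapping the vtable global, and anything that wants to touch the global itself has to peel that wrapper first (a single level, as in `find_vtable_behind_cast`).

```rust
// Illustrative model only: a constant is either a named global or an
// address-space cast wrapping one.
#[derive(Debug, PartialEq)]
enum Const {
    Global(&'static str),
    AddrSpaceCast(Box<Const>),
}

// Counterpart of `find_vtable_behind_cast`: return the global itself, looking
// through a single cast if there is one.
fn find_global_behind_cast(c: &Const) -> &Const {
    match c {
        Const::AddrSpaceCast(inner) => inner.as_ref(),
        other => other,
    }
}

fn main() {
    let vtable = Const::AddrSpaceCast(Box::new(Const::Global("vtable.Example")));
    assert_eq!(find_global_behind_cast(&vtable), &Const::Global("vtable.Example"));
}
```
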
pub(crate) fn apply_vcall_visibility_metadata<'ll, 'tcx>(
cx: &CodegenCx<'ll, 'tcx>,
ty: Ty<'tcx>,
@@ -1507,6 +1527,8 @@ pub(crate) fn apply_vcall_visibility_metadata<'ll, 'tcx>(
let Some(trait_ref) = trait_ref else { return };
// Unwrap potential addrspacecast
let vtable = find_vtable_behind_cast(vtable);
let trait_ref_self = trait_ref.with_self_ty(cx.tcx, ty);
let trait_ref_self = cx.tcx.erase_regions(trait_ref_self);
let trait_def_id = trait_ref_self.def_id();
@@ -1580,6 +1602,9 @@ pub(crate) fn create_vtable_di_node<'ll, 'tcx>(
return;
}
// Unwrap potential addrspacecast
let vtable = find_vtable_behind_cast(vtable);
// When full debuginfo is enabled, we want to try and prevent vtables from being
// merged. Otherwise debuggers will have a hard time mapping from dyn pointer
// to concrete type.

@@ -1329,7 +1329,7 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
}
if name == sym::simd_shuffle_generic {
let idx = fn_args[2].expect_const().try_to_valtree().unwrap().0.unwrap_branch();
let idx = fn_args[2].expect_const().to_value().valtree.unwrap_branch();
let n = idx.len() as u64;
let (out_len, out_ty) = require_simd!(ret_ty, SimdReturn);
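
The one-line change above is from #136180, which wraps a constant's value tree together with the type it was evaluated at, so helpers can check the type before pulling a value out. A loose sketch of the idea (illustrative types, not the compiler's):

```rust
// Illustrative types only: a value tree paired with its type.
#[derive(Debug, Clone, PartialEq)]
enum ValTree {
    Leaf(u128),
    Branch(Vec<ValTree>),
}

#[allow(dead_code)]
#[derive(Debug, Clone, Copy, PartialEq)]
enum Ty {
    Uint,
    Array,
}

struct TypedValue {
    ty: Ty,
    valtree: ValTree,
}

impl TypedValue {
    // Callers that expect an aggregate (like the shuffle-index array above) go
    // through an accessor that checks the type instead of unwrapping blindly.
    fn unwrap_branch(&self) -> &[ValTree] {
        assert!(matches!(self.ty, Ty::Array), "expected an aggregate type, got {:?}", self.ty);
        match &self.valtree {
            ValTree::Branch(elems) => elems.as_slice(),
            ValTree::Leaf(_) => unreachable!("type and value tree disagree"),
        }
    }
}

fn main() {
    let idx = TypedValue {
        ty: Ty::Array,
        valtree: ValTree::Branch(vec![ValTree::Leaf(0), ValTree::Leaf(2), ValTree::Leaf(1)]),
    };
    assert_eq!(idx.unwrap_branch().len(), 3);
}
```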

@@ -661,6 +661,79 @@ pub enum MemoryEffects {
InaccessibleMemOnly,
}
/// LLVMOpcode
#[derive(Copy, Clone, PartialEq, Eq)]
#[repr(C)]
pub enum Opcode {
Ret = 1,
Br = 2,
Switch = 3,
IndirectBr = 4,
Invoke = 5,
Unreachable = 7,
CallBr = 67,
FNeg = 66,
Add = 8,
FAdd = 9,
Sub = 10,
FSub = 11,
Mul = 12,
FMul = 13,
UDiv = 14,
SDiv = 15,
FDiv = 16,
URem = 17,
SRem = 18,
FRem = 19,
Shl = 20,
LShr = 21,
AShr = 22,
And = 23,
Or = 24,
Xor = 25,
Alloca = 26,
Load = 27,
Store = 28,
GetElementPtr = 29,
Trunc = 30,
ZExt = 31,
SExt = 32,
FPToUI = 33,
FPToSI = 34,
UIToFP = 35,
SIToFP = 36,
FPTrunc = 37,
FPExt = 38,
PtrToInt = 39,
IntToPtr = 40,
BitCast = 41,
AddrSpaceCast = 60,
ICmp = 42,
FCmp = 43,
PHI = 44,
Call = 45,
Select = 46,
UserOp1 = 47,
UserOp2 = 48,
VAArg = 49,
ExtractElement = 50,
InsertElement = 51,
ShuffleVector = 52,
ExtractValue = 53,
InsertValue = 54,
Freeze = 68,
Fence = 55,
AtomicCmpXchg = 56,
AtomicRMW = 57,
Resume = 58,
LandingPad = 59,
CleanupRet = 61,
CatchRet = 62,
CatchPad = 63,
CleanupPad = 64,
CatchSwitch = 65,
}
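
The discriminants above mirror LLVM-C's `LLVMOpcode`, including the values that look out of order (`AddrSpaceCast = 60`, `FNeg = 66`, `CallBr = 67`, `Freeze = 68`); here they only matter because `LLVMGetConstOpcode` returns them across the FFI boundary. A hypothetical sanity check, not part of this change, could pin the values this module relies on against llvm-c/Core.h:

```rust
// Hypothetical test, not part of the change above.
#[cfg(test)]
mod opcode_discriminants {
    use super::Opcode;

    #[test]
    fn match_llvm_c_core_header() {
        assert_eq!(Opcode::BitCast as u32, 41);
        assert_eq!(Opcode::AddrSpaceCast as u32, 60);
        assert_eq!(Opcode::CallBr as u32, 67);
        assert_eq!(Opcode::Freeze as u32, 68);
    }
}
```
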
unsafe extern "C" {
type Opaque;
}
@@ -991,7 +1064,10 @@ unsafe extern "C" {
pub fn LLVMConstPtrToInt<'a>(ConstantVal: &'a Value, ToType: &'a Type) -> &'a Value;
pub fn LLVMConstIntToPtr<'a>(ConstantVal: &'a Value, ToType: &'a Type) -> &'a Value;
pub fn LLVMConstBitCast<'a>(ConstantVal: &'a Value, ToType: &'a Type) -> &'a Value;
pub fn LLVMConstPointerCast<'a>(ConstantVal: &'a Value, ToType: &'a Type) -> &'a Value;
pub fn LLVMGetAggregateElement(ConstantVal: &Value, Idx: c_uint) -> Option<&Value>;
pub fn LLVMGetConstOpcode(ConstantVal: &Value) -> Opcode;
pub fn LLVMIsAConstantExpr(Val: &Value) -> Option<&Value>;
// Operations on global variables, functions, and aliases (globals)
pub fn LLVMIsDeclaration(Global: &Value) -> Bool;
@@ -1048,6 +1124,7 @@ unsafe extern "C" {
// Operations on instructions
pub fn LLVMIsAInstruction(Val: &Value) -> Option<&Value>;
pub fn LLVMGetFirstBasicBlock(Fn: &Value) -> &BasicBlock;
pub fn LLVMGetOperand(Val: &Value, Index: c_uint) -> Option<&Value>;
// Operations on call sites
pub fn LLVMSetInstructionCallConv(Instr: &Value, CC: c_uint);