
Cast pointers to usize before passing them to atomic operations, since some platforms do not support atomic operations on pointers.

commit 392ea29757
Author: oli
Date:   2020-11-28 18:12:45 +00:00

2 changed files with 42 additions and 12 deletions
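
The intrinsics patched below are what ultimately back the stable `AtomicPtr` API. As a reference point, here is a minimal sketch, using only the standard library and not part of this commit, that exercises all three affected paths (compare-exchange, load, store):

use std::sync::atomic::{AtomicPtr, Ordering};

fn main() {
    let mut a = 1i32;
    let mut b = 2i32;
    let p = AtomicPtr::new(&mut a);

    // Pointer compare-and-swap (the "cxchg"/"cxchgweak" path below).
    let prev = p.compare_exchange(&mut a, &mut b, Ordering::SeqCst, Ordering::SeqCst);
    assert_eq!(prev, Ok(&mut a as *mut i32));

    // Pointer load and store (the "load" and "store" paths below).
    assert_eq!(p.load(Ordering::SeqCst), &mut b as *mut i32);
    p.store(&mut a, Ordering::SeqCst);
}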


@@ -439,14 +439,18 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             let ty = substs.type_at(0);
             if int_type_width_signed(ty, bx.tcx()).is_some() || ty.is_unsafe_ptr() {
                 let weak = split[1] == "cxchgweak";
-                let pair = bx.atomic_cmpxchg(
-                    args[0].immediate(),
-                    args[1].immediate(),
-                    args[2].immediate(),
-                    order,
-                    failorder,
-                    weak,
-                );
+                let mut dst = args[0].immediate();
+                let mut cmp = args[1].immediate();
+                let mut src = args[2].immediate();
+                if ty.is_unsafe_ptr() {
+                    // Some platforms do not support atomic operations on pointers,
+                    // so we cast to integer first.
+                    let ptr_llty = bx.type_ptr_to(bx.type_isize());
+                    dst = bx.pointercast(dst, ptr_llty);
+                    cmp = bx.ptrtoint(cmp, bx.type_isize());
+                    src = bx.ptrtoint(src, bx.type_isize());
+                }
+                let pair = bx.atomic_cmpxchg(dst, cmp, src, order, failorder, weak);
                 let val = bx.extract_value(pair, 0);
                 let success = bx.extract_value(pair, 1);
                 let val = bx.from_immediate(val);
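
Note the asymmetry in the casts above: `dst` is the *address* of the atomic slot, so it keeps pointer type and only changes pointee type (`pointercast`), while `cmp` and `src` are the pointer *values* being compared and stored, so they become integers (`ptrtoint`). A user-level analogue of the same shape, with illustrative names that are not from the compiler:

use core::sync::atomic::{AtomicUsize, Ordering};

// Sketch only: assumes `dst` is valid, aligned, and safe to access atomically.
unsafe fn cmpxchg_ptr_via_usize(
    dst: *mut *mut u8, // address of the atomic slot
    cmp: *mut u8,      // expected pointer value
    src: *mut u8,      // replacement pointer value
) -> (*mut u8, bool) {
    let slot = &*(dst as *const AtomicUsize); // "pointercast": retype the slot
    match slot.compare_exchange(cmp as usize, src as usize, Ordering::SeqCst, Ordering::SeqCst) {
        Ok(prev) => (prev as *mut u8, true),   // like extract_value(pair, 0) and (pair, 1)
        Err(prev) => (prev as *mut u8, false),
    }
}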
@@ -465,8 +469,22 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
         "load" => {
             let ty = substs.type_at(0);
             if int_type_width_signed(ty, bx.tcx()).is_some() || ty.is_unsafe_ptr() {
-                let size = bx.layout_of(ty).size;
-                bx.atomic_load(args[0].immediate(), order, size)
+                let layout = bx.layout_of(ty);
+                let size = layout.size;
+                let mut source = args[0].immediate();
+                if ty.is_unsafe_ptr() {
+                    // Some platforms do not support atomic operations on pointers,
+                    // so we cast to integer first...
+                    let ptr_llty = bx.type_ptr_to(bx.type_isize());
+                    source = bx.pointercast(source, ptr_llty);
+                }
+                let result = bx.atomic_load(source, order, size);
+                if ty.is_unsafe_ptr() {
+                    // ... and then cast the result back to a pointer
+                    bx.inttoptr(result, bx.backend_type(layout))
+                } else {
+                    result
+                }
             } else {
                 return invalid_monomorphization(ty);
             }
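
The load path casts on both sides of the operation: the source slot is retyped before the atomic load, and the loaded integer is turned back into a pointer afterwards via `inttoptr`, with `bx.backend_type(layout)` supplying the pointer type. The same shape in plain Rust (illustrative naming, validity of `src` assumed):

use core::sync::atomic::{AtomicUsize, Ordering};

unsafe fn atomic_load_ptr(src: *mut *mut u8) -> *mut u8 {
    let slot = &*(src as *const AtomicUsize); // cast the slot, not the value
    slot.load(Ordering::SeqCst) as *mut u8    // integer result back to pointer
}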
@@ -476,7 +494,16 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             let ty = substs.type_at(0);
             if int_type_width_signed(ty, bx.tcx()).is_some() || ty.is_unsafe_ptr() {
                 let size = bx.layout_of(ty).size;
-                bx.atomic_store(args[1].immediate(), args[0].immediate(), order, size);
+                let mut val = args[1].immediate();
+                let mut ptr = args[0].immediate();
+                if ty.is_unsafe_ptr() {
+                    // Some platforms do not support atomic operations on pointers,
+                    // so we cast to integer first.
+                    let ptr_llty = bx.type_ptr_to(bx.type_isize());
+                    ptr = bx.pointercast(ptr, ptr_llty);
+                    val = bx.ptrtoint(val, bx.type_isize());
+                }
+                bx.atomic_store(val, ptr, order, size);
                 return;
             } else {
                 return invalid_monomorphization(ty);
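
The store path is the mirror image of the load: only the incoming pointer value needs `ptrtoint`, and no result flows back out that would need casting. Again as a plain-Rust sketch (illustrative naming, validity of `dst` assumed):

use core::sync::atomic::{AtomicUsize, Ordering};

unsafe fn atomic_store_ptr(dst: *mut *mut u8, val: *mut u8) {
    let slot = &*(dst as *const AtomicUsize);   // retype the destination slot
    slot.store(val as usize, Ordering::SeqCst); // pointer value stored as integer
}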


@@ -1201,7 +1201,10 @@ impl<T> AtomicPtr<T> {
             }
         }
         #[cfg(not(bootstrap))]
-        // SAFETY: data races are prevented by atomic intrinsics.
+        // SAFETY: This intrinsic is unsafe because it operates on a raw pointer
+        // but we know for sure that the pointer is valid (we just got it from
+        // an `UnsafeCell` that we have by reference) and the atomic operation
+        // itself allows us to safely mutate the `UnsafeCell` contents.
         unsafe {
             atomic_compare_exchange_weak(self.p.get(), current, new, success, failure)
         }
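
For callers nothing changes: `compare_exchange_weak` keeps its documented contract, including the possibility of spurious failure, so the usual retry loop applies. A standard usage sketch (the function name is illustrative):

use std::sync::atomic::{AtomicPtr, Ordering};

fn publish(p: &AtomicPtr<u8>, new: *mut u8) -> *mut u8 {
    let mut cur = p.load(Ordering::Relaxed);
    loop {
        // A weak CAS may fail spuriously; retry with the observed value.
        match p.compare_exchange_weak(cur, new, Ordering::AcqRel, Ordering::Relaxed) {
            Ok(prev) => return prev,
            Err(actual) => cur = actual,
        }
    }
}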