diff --git a/compiler/rustc_codegen_ssa/src/mir/intrinsic.rs b/compiler/rustc_codegen_ssa/src/mir/intrinsic.rs
index 94340e92048..3026eadefe3 100644
--- a/compiler/rustc_codegen_ssa/src/mir/intrinsic.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/intrinsic.rs
@@ -437,7 +437,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                 match split[1] {
                     "cxchg" | "cxchgweak" => {
                         let ty = substs.type_at(0);
-                        if int_type_width_signed(ty, bx.tcx()).is_some() {
+                        if int_type_width_signed(ty, bx.tcx()).is_some() || ty.is_unsafe_ptr() {
                             let weak = split[1] == "cxchgweak";
                             let pair = bx.atomic_cmpxchg(
                                 args[0].immediate(),
@@ -464,7 +464,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {

                     "load" => {
                         let ty = substs.type_at(0);
-                        if int_type_width_signed(ty, bx.tcx()).is_some() {
+                        if int_type_width_signed(ty, bx.tcx()).is_some() || ty.is_unsafe_ptr() {
                             let size = bx.layout_of(ty).size;
                             bx.atomic_load(args[0].immediate(), order, size)
                         } else {
@@ -474,7 +474,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {

                     "store" => {
                         let ty = substs.type_at(0);
-                        if int_type_width_signed(ty, bx.tcx()).is_some() {
+                        if int_type_width_signed(ty, bx.tcx()).is_some() || ty.is_unsafe_ptr() {
                             let size = bx.layout_of(ty).size;
                             bx.atomic_store(args[1].immediate(), args[0].immediate(), order, size);
                             return;
diff --git a/library/core/src/sync/atomic.rs b/library/core/src/sync/atomic.rs
index 9d204599057..c167a9d8e5b 100644
--- a/library/core/src/sync/atomic.rs
+++ b/library/core/src/sync/atomic.rs
@@ -966,8 +966,16 @@ impl<T> AtomicPtr<T> {
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn load(&self, order: Ordering) -> *mut T {
+        #[cfg(not(bootstrap))]
         // SAFETY: data races are prevented by atomic intrinsics.
-        unsafe { atomic_load(self.p.get() as *mut usize, order) as *mut T }
+        unsafe {
+            atomic_load(self.p.get(), order)
+        }
+        #[cfg(bootstrap)]
+        // SAFETY: data races are prevented by atomic intrinsics.
+        unsafe {
+            atomic_load(self.p.get() as *mut usize, order) as *mut T
+        }
     }

     /// Stores a value into the pointer.
@@ -994,6 +1002,12 @@ impl<T> AtomicPtr<T> {
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn store(&self, ptr: *mut T, order: Ordering) {
+        #[cfg(not(bootstrap))]
+        // SAFETY: data races are prevented by atomic intrinsics.
+        unsafe {
+            atomic_store(self.p.get(), ptr, order);
+        }
+        #[cfg(bootstrap)]
         // SAFETY: data races are prevented by atomic intrinsics.
         unsafe {
             atomic_store(self.p.get() as *mut usize, ptr as usize, order);
@@ -1105,6 +1119,7 @@ impl<T> AtomicPtr<T> {
         success: Ordering,
         failure: Ordering,
     ) -> Result<*mut T, *mut T> {
+        #[cfg(bootstrap)]
         // SAFETY: data races are prevented by atomic intrinsics.
         unsafe {
             let res = atomic_compare_exchange(
@@ -1119,6 +1134,11 @@ impl<T> AtomicPtr<T> {
                 Err(x) => Err(x as *mut T),
             }
         }
+        #[cfg(not(bootstrap))]
+        // SAFETY: data races are prevented by atomic intrinsics.
+        unsafe {
+            atomic_compare_exchange(self.p.get(), current, new, success, failure)
+        }
     }

     /// Stores a value into the pointer if the current value is the same as the `current` value.
@@ -1165,6 +1185,7 @@ impl<T> AtomicPtr<T> {
         success: Ordering,
         failure: Ordering,
     ) -> Result<*mut T, *mut T> {
+        #[cfg(bootstrap)]
         // SAFETY: data races are prevented by atomic intrinsics.
         unsafe {
             let res = atomic_compare_exchange_weak(
@@ -1179,6 +1200,11 @@ impl<T> AtomicPtr<T> {
                 Err(x) => Err(x as *mut T),
             }
         }
+        #[cfg(not(bootstrap))]
+        // SAFETY: data races are prevented by atomic intrinsics.
+        unsafe {
+            atomic_compare_exchange_weak(self.p.get(), current, new, success, failure)
+        }
     }

     /// Fetches the value, and applies a function to it that returns an optional
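Not part of the patch, for illustration only: a minimal sketch (with hypothetical variable names) exercising the stable `AtomicPtr` API whose implementation the hunks above switch from `usize` round-trips to pointer-typed atomic intrinsics. The observable behavior of `load`, `store`, and `compare_exchange` is unchanged; only the lowering differs.

// Illustration only (not part of the patch); names like `atom`, `p_first` are hypothetical.
use std::sync::atomic::{AtomicPtr, Ordering};

fn main() {
    let mut first = 42i32;
    let mut second = 7i32;
    let p_first: *mut i32 = &mut first;
    let p_second: *mut i32 = &mut second;

    let atom = AtomicPtr::new(p_first);

    // `load` returns the currently stored pointer.
    assert_eq!(atom.load(Ordering::SeqCst), p_first);

    // `compare_exchange` swaps in `p_second` only if the stored pointer is still
    // `p_first`, returning the previous value on success.
    let prev = atom.compare_exchange(p_first, p_second, Ordering::SeqCst, Ordering::Relaxed);
    assert_eq!(prev, Ok(p_first));

    // `store` unconditionally replaces the stored pointer.
    atom.store(p_first, Ordering::SeqCst);
    assert_eq!(atom.load(Ordering::SeqCst), p_first);
}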