
Remove alloc::Opaque and use *mut u8 as pointer type for GlobalAlloc

Author: Mike Hommey, 2018-05-31 15:57:43 +09:00 (committed by Simon Sapin)
parent 0b7c9e756e
commit f6ab74b8e7
17 changed files with 130 additions and 175 deletions
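For implementors of GlobalAlloc, the visible effect of this change is that every method now traffics in plain *mut u8 rather than *mut Opaque, so custom allocators no longer cast back and forth. A minimal sketch of an implementor against the new signatures; the TracingAlloc name and the delegation to System are illustrative assumptions, not part of this commit:

    use std::alloc::{GlobalAlloc, Layout, System};

    // Hypothetical allocator, shown only to illustrate the new *mut u8 signatures.
    struct TracingAlloc;

    unsafe impl GlobalAlloc for TracingAlloc {
        // Formerly `-> *mut Opaque`; the raw byte pointer is now returned directly.
        unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
            System.alloc(layout)
        }

        // Formerly took `*mut Opaque` and had to cast back to `*mut u8`.
        unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
            System.dealloc(ptr, layout)
        }
    }

    // Registering the allocator routes every default heap allocation through it.
    #[global_allocator]
    static GLOBAL: TracingAlloc = TracingAlloc;

    fn main() {
        let v = vec![1u8, 2, 3];
        assert_eq!(v.len(), 3);
    }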


@@ -51,52 +51,49 @@ pub const Heap: Global = Global;

 unsafe impl GlobalAlloc for Global {
     #[inline]
-    unsafe fn alloc(&self, layout: Layout) -> *mut Opaque {
-        let ptr = __rust_alloc(layout.size(), layout.align());
-        ptr as *mut Opaque
+    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
+        __rust_alloc(layout.size(), layout.align())
     }

     #[inline]
-    unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout) {
-        __rust_dealloc(ptr as *mut u8, layout.size(), layout.align())
+    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
+        __rust_dealloc(ptr, layout.size(), layout.align())
     }

     #[inline]
-    unsafe fn realloc(&self, ptr: *mut Opaque, layout: Layout, new_size: usize) -> *mut Opaque {
-        let ptr = __rust_realloc(ptr as *mut u8, layout.size(), layout.align(), new_size);
-        ptr as *mut Opaque
+    unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
+        __rust_realloc(ptr, layout.size(), layout.align(), new_size)
     }

     #[inline]
-    unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut Opaque {
-        let ptr = __rust_alloc_zeroed(layout.size(), layout.align());
-        ptr as *mut Opaque
+    unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 {
+        __rust_alloc_zeroed(layout.size(), layout.align())
     }
 }

 unsafe impl Alloc for Global {
     #[inline]
-    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
+    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
         NonNull::new(GlobalAlloc::alloc(self, layout)).ok_or(AllocErr)
     }

     #[inline]
-    unsafe fn dealloc(&mut self, ptr: NonNull<Opaque>, layout: Layout) {
+    unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
         GlobalAlloc::dealloc(self, ptr.as_ptr(), layout)
     }

     #[inline]
     unsafe fn realloc(&mut self,
-                      ptr: NonNull<Opaque>,
+                      ptr: NonNull<u8>,
                       layout: Layout,
                       new_size: usize)
-                      -> Result<NonNull<Opaque>, AllocErr>
+                      -> Result<NonNull<u8>, AllocErr>
     {
         NonNull::new(GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size)).ok_or(AllocErr)
     }

     #[inline]
-    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
+    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
         NonNull::new(GlobalAlloc::alloc_zeroed(self, layout)).ok_or(AllocErr)
     }
 }

@@ -113,7 +110,7 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
         let layout = Layout::from_size_align_unchecked(size, align);
         let ptr = Global.alloc(layout);
         if !ptr.is_null() {
-            ptr as *mut u8
+            ptr
         } else {
             oom(layout)
         }

@@ -129,7 +126,7 @@ pub(crate) unsafe fn box_free<T: ?Sized>(ptr: Unique<T>) {
     // We do not allocate for Box<T> when T is ZST, so deallocation is also not necessary.
     if size != 0 {
         let layout = Layout::from_size_align_unchecked(size, align);
-        Global.dealloc(ptr as *mut Opaque, layout);
+        Global.dealloc(ptr as *mut u8, layout);
     }
 }
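The Alloc impl for Global above adapts the raw-pointer API into Result<NonNull<u8>, AllocErr> via NonNull::new(...).ok_or(AllocErr): a null return becomes Err(AllocErr), anything else is wrapped. The same pattern in isolation; the check helper is hypothetical:

    use std::ptr::NonNull;

    #[derive(Debug)]
    struct AllocErr;

    // Hypothetical helper: a null raw pointer becomes Err(AllocErr);
    // any other pointer is wrapped as NonNull<u8>.
    fn check(ptr: *mut u8) -> Result<NonNull<u8>, AllocErr> {
        NonNull::new(ptr).ok_or(AllocErr)
    }

    fn main() {
        assert!(check(std::ptr::null_mut()).is_err());
        let mut byte = 0u8;
        assert!(check(&mut byte as *mut u8).is_ok());
    }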


@@ -519,7 +519,7 @@ impl<T: ?Sized> Arc<T> {
         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
-            Global.dealloc(self.ptr.as_opaque(), Layout::for_value(self.ptr.as_ref()))
+            Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()))
         }
     }

@@ -639,7 +639,7 @@ impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
                 let slice = from_raw_parts_mut(self.elems, self.n_elems);
                 ptr::drop_in_place(slice);

-                Global.dealloc(self.mem.as_opaque(), self.layout.clone());
+                Global.dealloc(self.mem.cast(), self.layout.clone());
             }
         }
     }

@@ -1196,7 +1196,7 @@ impl<T: ?Sized> Drop for Weak<T> {
         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
             unsafe {
-                Global.dealloc(self.ptr.as_opaque(), Layout::for_value(self.ptr.as_ref()))
+                Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()))
             }
         }
     }
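Here, and in the collection code below, the unstable NonNull::as_opaque helper is replaced by the general-purpose NonNull::cast, which reinterprets a NonNull<T> as a NonNull<U> without changing the address; the u8 target is inferred from dealloc's parameter type. A minimal standalone illustration:

    use std::ptr::NonNull;

    fn main() {
        let node: NonNull<u64> = NonNull::dangling();
        // `cast` changes only the pointee type; the address is untouched,
        // which is all the old `as_opaque` did as well.
        let bytes: NonNull<u8> = node.cast();
        assert_eq!(bytes.as_ptr() as usize, node.as_ptr() as usize);
    }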


@@ -287,7 +287,7 @@ impl<K, V> Root<K, V> {
         self.as_mut().as_leaf_mut().parent = ptr::null();

         unsafe {
-            Global.dealloc(NonNull::from(top).as_opaque(), Layout::new::<InternalNode<K, V>>());
+            Global.dealloc(NonNull::from(top).cast(), Layout::new::<InternalNode<K, V>>());
         }
     }
 }

@@ -478,7 +478,7 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
         debug_assert!(!self.is_shared_root());
         let node = self.node;
         let ret = self.ascend().ok();
-        Global.dealloc(node.as_opaque(), Layout::new::<LeafNode<K, V>>());
+        Global.dealloc(node.cast(), Layout::new::<LeafNode<K, V>>());
         ret
     }
 }

@@ -499,7 +499,7 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
     > {
         let node = self.node;
         let ret = self.ascend().ok();
-        Global.dealloc(node.as_opaque(), Layout::new::<InternalNode<K, V>>());
+        Global.dealloc(node.cast(), Layout::new::<InternalNode<K, V>>());
         ret
     }
 }

@@ -1321,12 +1321,12 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
             }

             Global.dealloc(
-                right_node.node.as_opaque(),
+                right_node.node.cast(),
                 Layout::new::<InternalNode<K, V>>(),
             );
         } else {
             Global.dealloc(
-                right_node.node.as_opaque(),
+                right_node.node.cast(),
                 Layout::new::<LeafNode<K, V>>(),
             );
         }


@@ -10,7 +10,7 @@

 #![allow(deprecated)]

-pub use alloc::{Layout, AllocErr, CannotReallocInPlace, Opaque};
+pub use alloc::{Layout, AllocErr, CannotReallocInPlace};

 use core::alloc::Alloc as CoreAlloc;
 use core::ptr::NonNull;

@@ -54,7 +54,7 @@ unsafe impl<T> Alloc for T where T: CoreAlloc {
     }

     unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
-        let ptr = NonNull::new_unchecked(ptr as *mut Opaque);
+        let ptr = NonNull::new_unchecked(ptr);
         CoreAlloc::dealloc(self, ptr, layout)
     }

@@ -70,7 +70,7 @@ unsafe impl<T> Alloc for T where T: CoreAlloc {
                       ptr: *mut u8,
                       layout: Layout,
                       new_layout: Layout) -> Result<*mut u8, AllocErr> {
-        let ptr = NonNull::new_unchecked(ptr as *mut Opaque);
+        let ptr = NonNull::new_unchecked(ptr);
         CoreAlloc::realloc(self, ptr, layout, new_layout.size()).map(|ptr| ptr.cast().as_ptr())
     }

@@ -87,7 +87,7 @@ unsafe impl<T> Alloc for T where T: CoreAlloc {
                              ptr: *mut u8,
                              layout: Layout,
                              new_layout: Layout) -> Result<Excess, AllocErr> {
-        let ptr = NonNull::new_unchecked(ptr as *mut Opaque);
+        let ptr = NonNull::new_unchecked(ptr);
         CoreAlloc::realloc_excess(self, ptr, layout, new_layout.size())
             .map(|e| Excess(e.0 .cast().as_ptr(), e.1))
     }

@@ -96,7 +96,7 @@ unsafe impl<T> Alloc for T where T: CoreAlloc {
                              ptr: *mut u8,
                              layout: Layout,
                              new_layout: Layout) -> Result<(), CannotReallocInPlace> {
-        let ptr = NonNull::new_unchecked(ptr as *mut Opaque);
+        let ptr = NonNull::new_unchecked(ptr);
         CoreAlloc::grow_in_place(self, ptr, layout, new_layout.size())
     }

@@ -104,7 +104,7 @@ unsafe impl<T> Alloc for T where T: CoreAlloc {
                                ptr: *mut u8,
                                layout: Layout,
                                new_layout: Layout) -> Result<(), CannotReallocInPlace> {
-        let ptr = NonNull::new_unchecked(ptr as *mut Opaque);
+        let ptr = NonNull::new_unchecked(ptr);
         CoreAlloc::shrink_in_place(self, ptr, layout, new_layout.size())
     }
 }
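This deprecated heap compatibility shim keeps its *mut u8 surface, so after the change it only needs NonNull::new_unchecked, with no intermediate *mut Opaque cast, before delegating to the core trait. new_unchecked skips the null check, which is why it is itself unsafe; a small sketch of that contract:

    use std::ptr::NonNull;

    fn main() {
        let mut byte = 0u8;
        let raw: *mut u8 = &mut byte;
        // Safety: `raw` comes from a reference, so it is guaranteed non-null.
        let ptr = unsafe { NonNull::new_unchecked(raw) };
        assert_eq!(ptr.as_ptr(), raw);
    }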


@@ -93,7 +93,7 @@ impl<T, A: Alloc> RawVec<T, A> {
             // handles ZSTs and `cap = 0` alike
             let ptr = if alloc_size == 0 {
-                NonNull::<T>::dangling().as_opaque()
+                NonNull::<T>::dangling().cast()
             } else {
                 let align = mem::align_of::<T>();
                 let layout = Layout::from_size_align(alloc_size, align).unwrap();

@@ -314,7 +314,7 @@ impl<T, A: Alloc> RawVec<T, A> {
                     let new_cap = 2 * self.cap;
                     let new_size = new_cap * elem_size;
                     alloc_guard(new_size).unwrap_or_else(|_| capacity_overflow());
-                    let ptr_res = self.a.realloc(NonNull::from(self.ptr).as_opaque(),
+                    let ptr_res = self.a.realloc(NonNull::from(self.ptr).cast(),
                                                  cur,
                                                  new_size);
                     match ptr_res {

@@ -373,7 +373,7 @@ impl<T, A: Alloc> RawVec<T, A> {
                 let new_cap = 2 * self.cap;
                 let new_size = new_cap * elem_size;
                 alloc_guard(new_size).unwrap_or_else(|_| capacity_overflow());
-                match self.a.grow_in_place(NonNull::from(self.ptr).as_opaque(), old_layout, new_size) {
+                match self.a.grow_in_place(NonNull::from(self.ptr).cast(), old_layout, new_size) {
                     Ok(_) => {
                         // We can't directly divide `size`.
                         self.cap = new_cap;

@@ -546,7 +546,7 @@ impl<T, A: Alloc> RawVec<T, A> {
             // FIXME: may crash and burn on over-reserve
             alloc_guard(new_layout.size()).unwrap_or_else(|_| capacity_overflow());
             match self.a.grow_in_place(
-                NonNull::from(self.ptr).as_opaque(), old_layout, new_layout.size(),
+                NonNull::from(self.ptr).cast(), old_layout, new_layout.size(),
             ) {
                 Ok(_) => {
                     self.cap = new_cap;

@@ -607,7 +607,7 @@ impl<T, A: Alloc> RawVec<T, A> {
                 let new_size = elem_size * amount;
                 let align = mem::align_of::<T>();
                 let old_layout = Layout::from_size_align_unchecked(old_size, align);
-                match self.a.realloc(NonNull::from(self.ptr).as_opaque(),
+                match self.a.realloc(NonNull::from(self.ptr).cast(),
                                      old_layout,
                                      new_size) {
                    Ok(p) => self.ptr = p.cast().into(),

@@ -667,7 +667,7 @@ impl<T, A: Alloc> RawVec<T, A> {
             let res = match self.current_layout() {
                 Some(layout) => {
                     debug_assert!(new_layout.align() == layout.align());
-                    self.a.realloc(NonNull::from(self.ptr).as_opaque(), layout, new_layout.size())
+                    self.a.realloc(NonNull::from(self.ptr).cast(), layout, new_layout.size())
                 }
                 None => self.a.alloc(new_layout),
             };

@@ -710,7 +710,7 @@ impl<T, A: Alloc> RawVec<T, A> {
         let elem_size = mem::size_of::<T>();
         if elem_size != 0 {
             if let Some(layout) = self.current_layout() {
-                self.a.dealloc(NonNull::from(self.ptr).as_opaque(), layout);
+                self.a.dealloc(NonNull::from(self.ptr).cast(), layout);
             }
         }
     }

@@ -753,7 +753,6 @@ fn capacity_overflow() -> ! {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use alloc::Opaque;

     #[test]
     fn allocator_param() {

@@ -773,7 +772,7 @@ mod tests {
        // before allocation attempts start failing.
        struct BoundedAlloc { fuel: usize }
        unsafe impl Alloc for BoundedAlloc {
-           unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
+           unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
                let size = layout.size();
                if size > self.fuel {
                    return Err(AllocErr);

@@ -783,7 +782,7 @@
                    err @ Err(_) => err,
                }
            }
-           unsafe fn dealloc(&mut self, ptr: NonNull<Opaque>, layout: Layout) {
+           unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
                Global.dealloc(ptr, layout)
            }
        }
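Note RawVec's zero-size path in the first hunk above: when alloc_size == 0 it never calls the allocator and instead manufactures a dangling, well-aligned pointer, now cast to the NonNull<u8> currency of the allocator traits. A sketch of that convention; the zst_ptr helper is hypothetical:

    use std::mem;
    use std::ptr::NonNull;

    // Hypothetical helper mirroring RawVec's zero-size case: no memory is
    // requested; a dangling but properly aligned pointer stands in for it.
    fn zst_ptr<T>() -> NonNull<u8> {
        NonNull::<T>::dangling().cast()
    }

    fn main() {
        // A dangling NonNull's address equals the type's alignment.
        assert_eq!(zst_ptr::<u64>().as_ptr() as usize, mem::align_of::<u64>());
    }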


@@ -259,7 +259,7 @@
 use core::ops::CoerceUnsized;
 use core::ptr::{self, NonNull};
 use core::convert::From;

-use alloc::{Global, Alloc, Layout, Opaque, box_free, oom};
+use alloc::{Global, Alloc, Layout, box_free, oom};
 use string::String;
 use vec::Vec;

@@ -732,7 +732,7 @@ impl<T: Clone> RcFromSlice<T> for Rc<[T]> {
         // In the event of a panic, elements that have been written
         // into the new RcBox will be dropped, then the memory freed.
         struct Guard<T> {
-            mem: NonNull<Opaque>,
+            mem: NonNull<u8>,
             elems: *mut T,
             layout: Layout,
             n_elems: usize,

@@ -755,7 +755,7 @@ impl<T: Clone> RcFromSlice<T> for Rc<[T]> {
             let v_ptr = v as *const [T];
             let ptr = Self::allocate_for_ptr(v_ptr);

-            let mem = ptr as *mut _ as *mut Opaque;
+            let mem = ptr as *mut _ as *mut u8;
             let layout = Layout::for_value(&*ptr);

             // Pointer to first element

@@ -839,7 +839,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
                 self.dec_weak();

                 if self.weak() == 0 {
-                    Global.dealloc(self.ptr.as_opaque(), Layout::for_value(self.ptr.as_ref()));
+                    Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()));
                 }
             }
         }

@@ -1263,7 +1263,7 @@ impl<T: ?Sized> Drop for Weak<T> {
             // the weak count starts at 1, and will only go to zero if all
             // the strong pointers have disappeared.
             if self.weak() == 0 {
-                Global.dealloc(self.ptr.as_opaque(), Layout::for_value(self.ptr.as_ref()));
+                Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()));
             }
         }
     }