parent fd242ee64c
commit fddf51ee0b
18 changed files with 136 additions and 129 deletions
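These hunks swap the raw `*mut u8` pointers that `RawVec` passed to its allocator for type-erased `NonNull<Void>` pointers, converting with `NonNull::from(self.ptr).as_void()` on the way in and with `ptr.cast().into()` on the way back. A minimal illustrative sketch of that conversion idiom (not part of the commit), using stable `NonNull::cast` to `u8` in place of the internal `as_void()`/`Void` API:

use std::ptr::NonNull;

fn main() {
    // A typed, well-aligned dangling pointer, as RawVec uses for ZSTs and `cap = 0`.
    let typed: NonNull<u64> = NonNull::dangling();

    // Erase the element type before handing the pointer to an allocator-style
    // API (the commit uses `.as_void()` to produce `NonNull<Void>`; `.cast()`
    // to `u8` shows the same idea with stable APIs).
    let erased: NonNull<u8> = typed.cast();

    // Recover the element type from the allocator's return value, mirroring
    // the commit's `ptr.cast().into()` (which additionally converts into `Unique<T>`).
    let recovered: NonNull<u64> = erased.cast();
    assert_eq!(typed, recovered);
}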
@@ -12,7 +12,7 @@ use alloc::{Alloc, Layout, Global};
 use core::cmp;
 use core::mem;
 use core::ops::Drop;
-use core::ptr::{self, Unique};
+use core::ptr::{self, NonNull, Unique};
 use core::slice;
 use super::boxed::Box;
 use super::allocator::CollectionAllocErr;
@@ -90,7 +90,7 @@ impl<T, A: Alloc> RawVec<T, A> {
 
             // handles ZSTs and `cap = 0` alike
             let ptr = if alloc_size == 0 {
-                mem::align_of::<T>() as *mut u8
+                NonNull::<T>::dangling().as_void()
             } else {
                 let align = mem::align_of::<T>();
                 let result = if zeroed {
@@ -105,7 +105,7 @@ impl<T, A: Alloc> RawVec<T, A> {
             };
 
             RawVec {
-                ptr: Unique::new_unchecked(ptr as *mut _),
+                ptr: ptr.cast().into(),
                 cap,
                 a,
             }
@@ -310,11 +310,11 @@ impl<T, A: Alloc> RawVec<T, A> {
                     let new_cap = 2 * self.cap;
                     let new_size = new_cap * elem_size;
                     alloc_guard(new_size).expect("capacity overflow");
-                    let ptr_res = self.a.realloc(self.ptr.as_ptr() as *mut u8,
+                    let ptr_res = self.a.realloc(NonNull::from(self.ptr).as_void(),
                                                  cur,
                                                  new_size);
                     match ptr_res {
-                        Ok(ptr) => (new_cap, Unique::new_unchecked(ptr as *mut T)),
+                        Ok(ptr) => (new_cap, ptr.cast().into()),
                         Err(_) => self.a.oom(),
                     }
                 }
@@ -369,8 +369,7 @@ impl<T, A: Alloc> RawVec<T, A> {
             let new_cap = 2 * self.cap;
             let new_size = new_cap * elem_size;
             alloc_guard(new_size).expect("capacity overflow");
-            let ptr = self.ptr() as *mut _;
-            match self.a.grow_in_place(ptr, old_layout, new_size) {
+            match self.a.grow_in_place(NonNull::from(self.ptr).as_void(), old_layout, new_size) {
                 Ok(_) => {
                     // We can't directly divide `size`.
                     self.cap = new_cap;
@@ -427,13 +426,12 @@ impl<T, A: Alloc> RawVec<T, A> {
             let res = match self.current_layout() {
                 Some(layout) => {
                     debug_assert!(new_layout.align() == layout.align());
-                    let old_ptr = self.ptr.as_ptr() as *mut u8;
-                    self.a.realloc(old_ptr, layout, new_layout.size())
+                    self.a.realloc(NonNull::from(self.ptr).as_void(), layout, new_layout.size())
                 }
                 None => self.a.alloc(new_layout),
             };
 
-            self.ptr = Unique::new_unchecked(res? as *mut T);
+            self.ptr = res?.cast().into();
             self.cap = new_cap;
 
             Ok(())
@@ -537,13 +535,12 @@ impl<T, A: Alloc> RawVec<T, A> {
             let res = match self.current_layout() {
                 Some(layout) => {
                     debug_assert!(new_layout.align() == layout.align());
-                    let old_ptr = self.ptr.as_ptr() as *mut u8;
-                    self.a.realloc(old_ptr, layout, new_layout.size())
+                    self.a.realloc(NonNull::from(self.ptr).as_void(), layout, new_layout.size())
                 }
                 None => self.a.alloc(new_layout),
             };
 
-            self.ptr = Unique::new_unchecked(res? as *mut T);
+            self.ptr = res?.cast().into();
             self.cap = new_cap;
 
             Ok(())
@@ -600,11 +597,12 @@ impl<T, A: Alloc> RawVec<T, A> {
             // (regardless of whether `self.cap - used_cap` wrapped).
             // Therefore we can safely call grow_in_place.
 
-            let ptr = self.ptr() as *mut _;
             let new_layout = Layout::new::<T>().repeat(new_cap).unwrap().0;
             // FIXME: may crash and burn on over-reserve
             alloc_guard(new_layout.size()).expect("capacity overflow");
-            match self.a.grow_in_place(ptr, old_layout, new_layout.size()) {
+            match self.a.grow_in_place(
+                NonNull::from(self.ptr).as_void(), old_layout, new_layout.size(),
+            ) {
                 Ok(_) => {
                     self.cap = new_cap;
                     true
@@ -664,10 +662,10 @@ impl<T, A: Alloc> RawVec<T, A> {
                 let new_size = elem_size * amount;
                 let align = mem::align_of::<T>();
                 let old_layout = Layout::from_size_align_unchecked(old_size, align);
-                match self.a.realloc(self.ptr.as_ptr() as *mut u8,
+                match self.a.realloc(NonNull::from(self.ptr).as_void(),
                                      old_layout,
                                      new_size) {
-                    Ok(p) => self.ptr = Unique::new_unchecked(p as *mut T),
+                    Ok(p) => self.ptr = p.cast().into(),
                     Err(_) => self.a.oom(),
                 }
             }
@@ -700,8 +698,7 @@ impl<T, A: Alloc> RawVec<T, A> {
         let elem_size = mem::size_of::<T>();
         if elem_size != 0 {
             if let Some(layout) = self.current_layout() {
-                let ptr = self.ptr() as *mut u8;
-                self.a.dealloc(ptr, layout);
+                self.a.dealloc(NonNull::from(self.ptr).as_void(), layout);
             }
         }
     }
@@ -737,6 +734,7 @@ fn alloc_guard(alloc_size: usize) -> Result<(), CollectionAllocErr> {
 #[cfg(test)]
 mod tests {
     use super::*;
+    use alloc::Void;
 
     #[test]
     fn allocator_param() {
@@ -756,7 +754,7 @@ mod tests {
         // before allocation attempts start failing.
         struct BoundedAlloc { fuel: usize }
         unsafe impl Alloc for BoundedAlloc {
-            unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+            unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
                 let size = layout.size();
                 if size > self.fuel {
                     return Err(AllocErr);
@@ -766,7 +764,7 @@ mod tests {
                     err @ Err(_) => err,
                 }
             }
-            unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
+            unsafe fn dealloc(&mut self, ptr: NonNull<Void>, layout: Layout) {
                 Global.dealloc(ptr, layout)
            }
        }