fix sized deallocation for TypedArena<T>

Daniel Micay 2014-09-05 09:08:30 -04:00
parent 2fdad65a05
commit c76e3caf10
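
The bug being fixed: `allocate` is told a size and alignment up front, and a sized allocator may rely on being handed the same size and alignment back at free time. The old code freed chunks through `Box<TypedArenaChunk<T>>`, which can only report the header's size, not the size of the elements allocated after it. A minimal sketch of that contract, written against today's `std::alloc` API rather than the pre-1.0 `std::rt::heap` API the diff itself uses:

use std::alloc::{alloc, dealloc, Layout};

fn main() {
    unsafe {
        // The allocator learns the size and alignment at allocation time...
        let layout = Layout::from_size_align(64, 8).unwrap();
        let p = alloc(layout);
        assert!(!p.is_null());
        // ...and must be handed the same size and alignment on free.
        // Freeing through a Box of just the header would report too small
        // a size, which is what this commit fixes by calling deallocate
        // with a recomputed exact chunk size.
        dealloc(p, layout);
    }
}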

@@ -39,7 +39,7 @@ use std::mem;
 use std::num;
 use std::ptr;
 use std::rc::Rc;
-use std::rt::heap::allocate;
+use std::rt::heap::{allocate, deallocate};
 
 // The way arena uses arrays is really deeply awful. The arrays are
 // allocated, and have capacities reserved, but the fill for the array
@@ -50,6 +50,7 @@ struct Chunk {
     fill: Cell<uint>,
     is_copy: Cell<bool>,
 }
+
 impl Chunk {
     fn capacity(&self) -> uint {
         self.data.borrow().capacity()
@@ -357,13 +358,12 @@ pub struct TypedArena<T> {
     end: Cell<*const T>,
 
     /// A pointer to the first arena segment.
-    first: RefCell<TypedArenaChunkRef<T>>,
+    first: RefCell<*mut TypedArenaChunk<T>>,
 }
 
-type TypedArenaChunkRef<T> = Option<Box<TypedArenaChunk<T>>>;
 struct TypedArenaChunk<T> {
     /// Pointer to the next arena segment.
-    next: TypedArenaChunkRef<T>,
+    next: *mut TypedArenaChunk<T>,
 
     /// The number of elements that this chunk can hold.
     capacity: uint,
@@ -371,24 +371,24 @@ struct TypedArenaChunk<T> {
     // Objects follow here, suitably aligned.
 }
 
+fn calculate_size<T>(capacity: uint) -> uint {
+    let mut size = mem::size_of::<TypedArenaChunk<T>>();
+    size = round_up(size, mem::min_align_of::<T>());
+    let elem_size = mem::size_of::<T>();
+    let elems_size = elem_size.checked_mul(&capacity).unwrap();
+    size = size.checked_add(&elems_size).unwrap();
+    size
+}
+
 impl<T> TypedArenaChunk<T> {
     #[inline]
-    fn new(next: Option<Box<TypedArenaChunk<T>>>, capacity: uint)
-           -> Box<TypedArenaChunk<T>> {
-        let mut size = mem::size_of::<TypedArenaChunk<T>>();
-        size = round_up(size, mem::min_align_of::<T>());
-        let elem_size = mem::size_of::<T>();
-        let elems_size = elem_size.checked_mul(&capacity).unwrap();
-        size = size.checked_add(&elems_size).unwrap();
-
-        let mut chunk = unsafe {
-            let chunk = allocate(size, mem::min_align_of::<TypedArenaChunk<T>>());
-            let mut chunk: Box<TypedArenaChunk<T>> = mem::transmute(chunk);
-            ptr::write(&mut chunk.next, next);
-            chunk
-        };
-
-        chunk.capacity = capacity;
+    unsafe fn new(next: *mut TypedArenaChunk<T>, capacity: uint)
+                  -> *mut TypedArenaChunk<T> {
+        let size = calculate_size::<T>(capacity);
+        let chunk = allocate(size, mem::min_align_of::<TypedArenaChunk<T>>())
+                    as *mut TypedArenaChunk<T>;
+        (*chunk).next = next;
+        (*chunk).capacity = capacity;
         chunk
     }
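
For comparison, `calculate_size` above is the hand-rolled version of what `std::alloc::Layout` expresses today: round the header up to the element alignment, then append `capacity` elements. A sketch under that assumption, with a hypothetical `ChunkHeader` standing in for `TypedArenaChunk<T>`:

use std::alloc::Layout;

// Hypothetical stand-in for TypedArenaChunk<T>'s header fields.
#[allow(dead_code)]
struct ChunkHeader {
    next: *mut ChunkHeader,
    capacity: usize,
}

// Header rounded up to the elements' alignment, plus capacity elements:
// the same arithmetic calculate_size does with round_up and checked_mul.
fn chunk_layout<T>(capacity: usize) -> Layout {
    let header = Layout::new::<ChunkHeader>();
    let elems = Layout::array::<T>(capacity).unwrap();
    header.extend(elems).unwrap().0
}

fn main() {
    let l = chunk_layout::<u64>(8);
    // Room for the header plus eight u64s.
    assert!(l.size() >= std::mem::size_of::<ChunkHeader>() + 8 * 8);
    println!("size = {}, align = {}", l.size(), l.align());
}
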
@@ -406,14 +406,13 @@ impl<T> TypedArenaChunk<T> {
         }
 
         // Destroy the next chunk.
-        let next_opt = mem::replace(&mut self.next, None);
-        match next_opt {
-            None => {}
-            Some(mut next) => {
-                // We assume that the next chunk is completely filled.
-                let capacity = next.capacity;
-                next.destroy(capacity)
-            }
+        let next = self.next;
+        let size = calculate_size::<T>(self.capacity);
+        deallocate(self as *mut TypedArenaChunk<T> as *mut u8, size,
+                   mem::min_align_of::<TypedArenaChunk<T>>());
+        if next.is_not_null() {
+            let capacity = (*next).capacity;
+            (*next).destroy(capacity);
         }
     }
@@ -448,11 +447,13 @@ impl<T> TypedArena<T> {
     /// objects.
     #[inline]
     pub fn with_capacity(capacity: uint) -> TypedArena<T> {
-        let chunk = TypedArenaChunk::<T>::new(None, capacity);
-        TypedArena {
-            ptr: Cell::new(chunk.start() as *const T),
-            end: Cell::new(chunk.end() as *const T),
-            first: RefCell::new(Some(chunk)),
+        unsafe {
+            let chunk = TypedArenaChunk::<T>::new(ptr::mut_null(), capacity);
+            TypedArena {
+                ptr: Cell::new((*chunk).start() as *const T),
+                end: Cell::new((*chunk).end() as *const T),
+                first: RefCell::new(chunk),
+            }
         }
     }
@@ -476,26 +477,28 @@ impl<T> TypedArena<T> {
     /// Grows the arena.
     #[inline(never)]
     fn grow(&self) {
-        let chunk = self.first.borrow_mut().take().unwrap();
-        let new_capacity = chunk.capacity.checked_mul(&2).unwrap();
-        let chunk = TypedArenaChunk::<T>::new(Some(chunk), new_capacity);
-        self.ptr.set(chunk.start() as *const T);
-        self.end.set(chunk.end() as *const T);
-        *self.first.borrow_mut() = Some(chunk)
+        unsafe {
+            let chunk = *self.first.borrow_mut();
+            let new_capacity = (*chunk).capacity.checked_mul(&2).unwrap();
+            let chunk = TypedArenaChunk::<T>::new(chunk, new_capacity);
+            self.ptr.set((*chunk).start() as *const T);
+            self.end.set((*chunk).end() as *const T);
+            *self.first.borrow_mut() = chunk
+        }
     }
 }
 
 #[unsafe_destructor]
 impl<T> Drop for TypedArena<T> {
     fn drop(&mut self) {
-        // Determine how much was filled.
-        let start = self.first.borrow().as_ref().unwrap().start() as uint;
-        let end = self.ptr.get() as uint;
-        let diff = (end - start) / mem::size_of::<T>();
-
-        // Pass that to the `destroy` method.
-        unsafe {
-            self.first.borrow_mut().as_mut().unwrap().destroy(diff)
+        unsafe {
+            // Determine how much was filled.
+            let start = self.first.borrow().as_ref().unwrap().start() as uint;
+            let end = self.ptr.get() as uint;
+            let diff = (end - start) / mem::size_of::<T>();
+
+            // Pass that to the `destroy` method.
+            (**self.first.borrow_mut()).destroy(diff)
         }
     }
 }
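
Read as a whole, the patch turns the chunk list into raw pointers that the arena allocates and frees by hand, always recomputing a chunk's exact size before `deallocate` and reading `next` before freeing `self`. A self-contained sketch of that pattern in today's Rust (hypothetical names, `u64` elements assumed, not the commit's code):

use std::alloc::{alloc, dealloc, Layout};
use std::ptr;

// Hypothetical chunk header; elements would follow it in the allocation.
struct Chunk {
    next: *mut Chunk,
    capacity: usize,
}

fn layout(capacity: usize) -> Layout {
    let header = Layout::new::<Chunk>();
    let elems = Layout::array::<u64>(capacity).unwrap();
    header.extend(elems).unwrap().0
}

unsafe fn new_chunk(next: *mut Chunk, capacity: usize) -> *mut Chunk {
    let chunk = alloc(layout(capacity)) as *mut Chunk;
    assert!(!chunk.is_null());
    (*chunk).next = next;
    (*chunk).capacity = capacity;
    chunk
}

// Free the list iteratively: grab `next` first, then free the current chunk
// with its recomputed exact layout, the sized-deallocation fix in miniature.
unsafe fn destroy(mut chunk: *mut Chunk) {
    while !chunk.is_null() {
        let next = (*chunk).next;
        dealloc(chunk as *mut u8, layout((*chunk).capacity));
        chunk = next;
    }
}

fn main() {
    unsafe {
        let head = new_chunk(new_chunk(ptr::null_mut(), 8), 16);
        destroy(head);
    }
}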