initial port of the exchange allocator to jemalloc

In stage0, all allocations are 8-byte aligned. Passing the size and alignment to free is not yet implemented everywhere (0 size and 8 align are used as placeholders); fixing this is part of #13994. Closes #13616.
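For context: the contract this commit starts threading through the tree is the one modern Rust later stabilized in std::alloc, where deallocation must receive the same size and alignment that allocation was given. The sketch below is not part of the commit; it is a minimal illustration of that sized-deallocation contract using today's stable API.

    use std::alloc::{alloc, dealloc, Layout};

    fn main() {
        // The allocator learns the size *and* alignment up front.
        let layout = Layout::from_size_align(64, 8).expect("bad layout");
        unsafe {
            let ptr = alloc(layout);
            assert!(!ptr.is_null(), "allocation failed");
            // The same layout must come back at free time. Knowing the size
            // here is what lets jemalloc skip the metadata lookup a plain
            // free() has to do; the `0, 8` placeholders in this commit mark
            // the call sites #13994 still needs to fix.
            dealloc(ptr, layout);
        }
    }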
This commit is contained in:
parent aaf6e06b01
commit 138437956c

11 changed files with 219 additions and 151 deletions
@@ -37,7 +37,7 @@ use std::mem::min_align_of;
 use std::num;
 use std::ptr::read;
 use std::rc::Rc;
-use std::rt::global_heap;
+use std::rt::heap::exchange_malloc;
 
 // The way arena uses arrays is really deeply awful. The arrays are
 // allocated, and have capacities reserved, but the fill for the array
@@ -365,7 +365,7 @@ impl<T> TypedArenaChunk<T> {
         size = size.checked_add(&elems_size).unwrap();
 
         let mut chunk = unsafe {
-            let chunk = global_heap::exchange_malloc(size);
+            let chunk = exchange_malloc(size);
             let mut chunk: Box<TypedArenaChunk<T>> = cast::transmute(chunk);
             mem::move_val_init(&mut chunk.next, next);
             chunk
@@ -386,7 +386,7 @@ impl<T> TypedArenaChunk<T> {
         size = size.checked_add(&elems_size).unwrap();
 
         let mut chunk = unsafe {
-            let chunk = global_heap::exchange_malloc(size, min_align_of::<TypedArenaChunk<T>>());
+            let chunk = exchange_malloc(size, min_align_of::<TypedArenaChunk<T>>());
             let mut chunk: Box<TypedArenaChunk<T>> = cast::transmute(chunk);
             mem::move_val_init(&mut chunk.next, next);
             chunk
@@ -33,7 +33,7 @@ extern {
     fn rust_malloc(size: uint) -> *u8;
     #[cfg(not(stage0))]
     fn rust_malloc(size: uint, align: uint) -> *u8;
-    fn rust_free(ptr: *u8);
+    fn rust_free(ptr: *u8, size: uint, align: uint);
 }
 
 #[cfg(stage0)]
@@ -51,6 +51,7 @@ unsafe fn alloc(cap: uint) -> *mut Vec<()> {
 #[cfg(not(stage0))]
 unsafe fn alloc(cap: uint) -> *mut Vec<()> {
     let cap = cap.checked_add(&mem::size_of::<Vec<()>>()).unwrap();
+    // this should use the real alignment, but the new representation will take care of that
     let ret = rust_malloc(cap, 8) as *mut Vec<()>;
     if ret.is_null() {
         intrinsics::abort();
@@ -118,7 +119,8 @@ impl FromIterator<char> for ~str {
                 ptr::copy_nonoverlapping_memory(&mut (*ptr2).data,
                                                 &(*ptr).data,
                                                 len);
-                rust_free(ptr as *u8);
+                // FIXME: #13994: port to the sized deallocation API when available
+                rust_free(ptr as *u8, 0, 8);
                 cast::forget(ret);
                 ret = cast::transmute(ptr2);
                 ptr = ptr2;
@@ -188,7 +190,7 @@ impl<A: Clone> Clone for ~[A] {
                     for j in range(0, *i as int) {
                         ptr::read(&*p.offset(j));
                     }
-                    rust_free(ret as *u8);
+                    rust_free(ret as *u8, 0, 8);
                 });
             cast::transmute(ret)
         }
@@ -110,6 +110,7 @@
 // Don't link to std. We are std.
 #![no_std]
 
+#![allow(deprecated)]
 #![deny(missing_doc)]
 
 // When testing libstd, bring in libuv as the I/O backend so tests can print
@@ -32,7 +32,8 @@ use ops::{Deref, Drop};
 use option::{Option, Some, None};
 use ptr;
 use ptr::RawPtr;
-use rt::global_heap::exchange_free;
+use mem::{min_align_of, size_of};
+use rt::heap::exchange_free;
 
 struct RcBox<T> {
     value: T,
@@ -104,7 +105,8 @@ impl<T> Drop for Rc<T> {
                     self.dec_weak();
 
                     if self.weak() == 0 {
-                        exchange_free(self.ptr as *u8)
+                        exchange_free(self.ptr as *mut u8, size_of::<RcBox<T>>(),
+                                      min_align_of::<RcBox<T>>())
                     }
                 }
             }
@@ -177,7 +179,8 @@ impl<T> Drop for Weak<T> {
                 // the weak count starts at 1, and will only go to
                 // zero if all the strong pointers have disappeared.
                 if self.weak() == 0 {
-                    exchange_free(self.ptr as *mut u8, size_of::<RcBox<T>>(),
+                    exchange_free(self.ptr as *mut u8, size_of::<RcBox<T>>(),
+                                  min_align_of::<RcBox<T>>())
                 }
             }
         }
@@ -14,23 +14,6 @@
 use libc::{c_void, size_t, free, malloc, realloc};
 use ptr::{RawPtr, mut_null};
 use intrinsics::abort;
-use raw;
-use mem::size_of;
-
-#[inline]
-pub fn get_box_size(body_size: uint, body_align: uint) -> uint {
-    let header_size = size_of::<raw::Box<()>>();
-    let total_size = align_to(header_size, body_align) + body_size;
-    total_size
-}
-
-// Rounds |size| to the nearest |alignment|. Invariant: |alignment| is a power
-// of two.
-#[inline]
-fn align_to(size: uint, align: uint) -> uint {
-    assert!(align != 0);
-    (size + align - 1) & !(align - 1)
-}
 
 /// A wrapper around libc::malloc, aborting on out-of-memory
 #[inline]
@@ -66,117 +49,3 @@ pub unsafe fn realloc_raw(ptr: *mut u8, size: uint) -> *mut u8 {
         p as *mut u8
     }
 }
-
-/// The allocator for unique pointers without contained managed pointers.
-#[cfg(not(test), stage0)]
-#[lang="exchange_malloc"]
-#[inline]
-pub unsafe fn exchange_malloc(size: uint) -> *mut u8 {
-    // The compiler never calls `exchange_free` on Box<ZeroSizeType>, so
-    // zero-size allocations can point to this `static`. It would be incorrect
-    // to use a null pointer, due to enums assuming types like unique pointers
-    // are never null.
-    static EMPTY: () = ();
-
-    if size == 0 {
-        &EMPTY as *() as *mut u8
-    } else {
-        malloc_raw(size)
-    }
-}
-
-/// The allocator for unique pointers without contained managed pointers.
-#[cfg(not(test), not(stage0))]
-#[lang="exchange_malloc"]
-#[inline]
-pub unsafe fn exchange_malloc(size: uint, _align: uint) -> *mut u8 {
-    // The compiler never calls `exchange_free` on ~ZeroSizeType, so zero-size
-    // allocations can point to this `static`. It would be incorrect to use a null
-    // pointer, due to enums assuming types like unique pointers are never null.
-    static EMPTY: () = ();
-
-    if size == 0 {
-        &EMPTY as *() as *mut u8
-    } else {
-        malloc_raw(size)
-    }
-}
-
-// FIXME: #7496
-#[cfg(not(test))]
-#[lang="closure_exchange_malloc"]
-#[inline]
-pub unsafe fn closure_exchange_malloc_(drop_glue: fn(*mut u8), size: uint, align: uint) -> *u8 {
-    closure_exchange_malloc(drop_glue, size, align)
-}
-
-#[inline]
-pub unsafe fn closure_exchange_malloc(drop_glue: fn(*mut u8), size: uint, align: uint) -> *u8 {
-    let total_size = get_box_size(size, align);
-    let p = malloc_raw(total_size);
-
-    let alloc = p as *mut raw::Box<()>;
-    (*alloc).drop_glue = drop_glue;
-
-    alloc as *u8
-}
-
-// NB: Calls to free CANNOT be allowed to fail, as throwing an exception from
-// inside a landing pad may corrupt the state of the exception handler.
-#[cfg(not(test))]
-#[lang="exchange_free"]
-#[inline]
-pub unsafe fn exchange_free_(ptr: *u8) {
-    exchange_free(ptr)
-}
-
-#[inline]
-pub unsafe fn exchange_free(ptr: *u8) {
-    free(ptr as *mut c_void);
-}
-
-// hack for libcore
-#[no_mangle]
-#[doc(hidden)]
-#[deprecated]
-#[cfg(stage0)]
-pub extern "C" fn rust_malloc(size: uint) -> *mut u8 {
-    unsafe { exchange_malloc(size) }
-}
-
-// hack for libcore
-#[no_mangle]
-#[doc(hidden)]
-#[deprecated]
-#[cfg(not(stage0))]
-pub extern "C" fn rust_malloc(size: uint, align: uint) -> *mut u8 {
-    unsafe { exchange_malloc(size, align) }
-}
-
-// hack for libcore
-#[no_mangle]
-#[doc(hidden)]
-#[deprecated]
-pub extern "C" fn rust_free(ptr: *u8) {
-    unsafe { exchange_free(ptr) }
-}
-
-#[cfg(test)]
-mod bench {
-    extern crate test;
-    use self::test::Bencher;
-
-    #[bench]
-    fn alloc_owned_small(b: &mut Bencher) {
-        b.iter(|| {
-            box 10
-        })
-    }
-
-    #[bench]
-    fn alloc_owned_big(b: &mut Bencher) {
-        b.iter(|| {
-            box [10, ..1000]
-        })
-    }
-}
@@ -98,3 +98,125 @@ pub unsafe fn deallocate(ptr: *mut u8, size: uint, align: uint) {
 pub fn usable_size(size: uint, align: uint) -> uint {
     unsafe { je_nallocx(size as size_t, mallocx_align(align)) as uint }
 }
+
+/// The allocator for unique pointers.
+#[cfg(stage0)]
+#[lang="exchange_malloc"]
+#[inline(always)]
+pub unsafe fn exchange_malloc_(size: uint) -> *mut u8 {
+    exchange_malloc(size)
+}
+
+/// The allocator for unique pointers.
+#[cfg(not(test), not(stage0))]
+#[lang="exchange_malloc"]
+#[inline(always)]
+pub unsafe fn exchange_malloc_(size: uint, align: uint) -> *mut u8 {
+    exchange_malloc(size, align)
+}
+
+/// The allocator for unique pointers.
+#[cfg(stage0)]
+#[inline]
+pub unsafe fn exchange_malloc(size: uint) -> *mut u8 {
+    // The compiler never calls `exchange_free` on ~ZeroSizeType, so zero-size
+    // allocations can point to this `static`. It would be incorrect to use a null
+    // pointer, due to enums assuming types like unique pointers are never null.
+    static EMPTY: () = ();
+
+    if size == 0 {
+        &EMPTY as *() as *mut u8
+    } else {
+        allocate(size, 8)
+    }
+}
+
+/// The allocator for unique pointers.
+#[cfg(not(stage0))]
+#[inline]
+pub unsafe fn exchange_malloc(size: uint, align: uint) -> *mut u8 {
+    // The compiler never calls `exchange_free` on ~ZeroSizeType, so zero-size
+    // allocations can point to this `static`. It would be incorrect to use a null
+    // pointer, due to enums assuming types like unique pointers are never null.
+    static EMPTY: () = ();
+
+    if size == 0 {
+        &EMPTY as *() as *mut u8
+    } else {
+        allocate(size, align)
+    }
+}
+
+#[cfg(not(test))]
+#[lang="exchange_free"]
+#[inline]
+// FIXME: #13994 (rustc should pass align and size here)
+pub unsafe fn exchange_free_(ptr: *mut u8) {
+    exchange_free(ptr, 0, 8)
+}
+
+#[inline]
+pub unsafe fn exchange_free(ptr: *mut u8, size: uint, align: uint) {
+    deallocate(ptr, size, align);
+}
+
+// FIXME: #7496
+#[cfg(not(test))]
+#[lang="closure_exchange_malloc"]
+#[inline]
+unsafe fn closure_exchange_malloc(drop_glue: fn(*mut u8), size: uint, align: uint) -> *mut u8 {
+    let total_size = ::rt::util::get_box_size(size, align);
+    let p = allocate(total_size, 8);
+
+    let alloc = p as *mut ::raw::Box<()>;
+    (*alloc).drop_glue = drop_glue;
+
+    alloc as *mut u8
+}
+
+// hack for libcore
+#[no_mangle]
+#[doc(hidden)]
+#[deprecated]
+#[cfg(stage0, not(test))]
+pub extern "C" fn rust_malloc(size: uint) -> *mut u8 {
+    unsafe { exchange_malloc(size) }
+}
+
+// hack for libcore
+#[no_mangle]
+#[doc(hidden)]
+#[deprecated]
+#[cfg(not(stage0), not(test))]
+pub extern "C" fn rust_malloc(size: uint, align: uint) -> *mut u8 {
+    unsafe { exchange_malloc(size, align) }
+}
+
+// hack for libcore
+#[no_mangle]
+#[doc(hidden)]
+#[deprecated]
+#[cfg(not(test))]
+pub extern "C" fn rust_free(ptr: *mut u8, size: uint, align: uint) {
+    unsafe { exchange_free(ptr, size, align) }
+}
+
+#[cfg(test)]
+mod bench {
+    extern crate test;
+    use self::test::Bencher;
+
+    #[bench]
+    fn alloc_owned_small(b: &mut Bencher) {
+        b.iter(|| {
+            box 10
+        })
+    }
+
+    #[bench]
+    fn alloc_owned_big(b: &mut Bencher) {
+        b.iter(|| {
+            box [10, ..1000]
+        })
+    }
+}
@@ -12,6 +12,7 @@
 
 use cast;
 use iter::Iterator;
+use libc::{c_void, free};
 use mem;
 use ops::Drop;
 use option::{Option, None, Some};
@@ -58,7 +59,7 @@ impl LocalHeap {
 
     #[inline]
     pub fn alloc(&mut self, drop_glue: fn(*mut u8), size: uint, align: uint) -> *mut Box {
-        let total_size = global_heap::get_box_size(size, align);
+        let total_size = ::rt::util::get_box_size(size, align);
        let alloc = self.memory_region.malloc(total_size);
         {
             // Make sure that we can't use `mybox` outside of this scope
@@ -226,7 +227,7 @@ impl MemoryRegion {
         self.release(cast::transmute(alloc));
         rtassert!(self.live_allocations > 0);
         self.live_allocations -= 1;
-        global_heap::exchange_free(alloc as *u8)
+        free(alloc as *mut c_void)
     }
 }
 
@@ -26,6 +26,23 @@ use slice::ImmutableVector;
 // FIXME: Once the runtime matures remove the `true` below to turn off rtassert, etc.
 pub static ENFORCE_SANITY: bool = true || !cfg!(rtopt) || cfg!(rtdebug) || cfg!(rtassert);
 
+#[deprecated]
+#[doc(hidden)]
+#[inline]
+pub fn get_box_size(body_size: uint, body_align: uint) -> uint {
+    let header_size = ::mem::size_of::<::raw::Box<()>>();
+    let total_size = align_to(header_size, body_align) + body_size;
+    total_size
+}
+
+// Rounds |size| to the nearest |alignment|. Invariant: |alignment| is a power
+// of two.
+#[inline]
+fn align_to(size: uint, align: uint) -> uint {
+    assert!(align != 0);
+    (size + align - 1) & !(align - 1)
+}
+
 /// Get the number of cores available
 pub fn num_cpus() -> uint {
     unsafe {
@@ -110,7 +110,7 @@ use ops::Drop;
 use option::{None, Option, Some};
 use ptr::RawPtr;
 use ptr;
-use rt::global_heap::{exchange_free};
+use rt::heap::{exchange_malloc, exchange_free};
 use unstable::finally::try_finally;
 use vec::Vec;
 
@@ -292,9 +292,9 @@ pub trait CloneableVector<T> {
 impl<'a, T: Clone> CloneableVector<T> for &'a [T] {
     /// Returns a copy of `v`.
     #[inline]
+    #[cfg(stage0)]
     fn to_owned(&self) -> ~[T] {
         use RawVec = core::raw::Vec;
-        use rt::global_heap::{malloc_raw, exchange_free};
         use num::{CheckedAdd, CheckedMul};
         use option::Expect;
 
@@ -305,7 +305,8 @@ impl<'a, T: Clone> CloneableVector<T> for &'a [T] {
         let size = size.expect("overflow in to_owned()");
 
         unsafe {
-            let ret = malloc_raw(size) as *mut RawVec<()>;
+            // this should pass the real required alignment
+            let ret = exchange_malloc(size) as *mut RawVec<()>;
 
             (*ret).fill = len * mem::nonzero_size_of::<T>();
             (*ret).alloc = len * mem::nonzero_size_of::<T>();
@@ -329,7 +330,55 @@ impl<'a, T: Clone> CloneableVector<T> for &'a [T] {
                     for j in range(0, *i as int) {
                         ptr::read(&*p.offset(j));
                     }
-                    exchange_free(ret as *u8);
+                    // FIXME: #13994 (should pass align and size here)
+                    exchange_free(ret as *mut u8, 0, 8);
+                });
+            cast::transmute(ret)
+        }
+    }
+
+    /// Returns a copy of `v`.
+    #[inline]
+    #[cfg(not(stage0))]
+    fn to_owned(&self) -> ~[T] {
+        use RawVec = core::raw::Vec;
+        use num::{CheckedAdd, CheckedMul};
+        use option::Expect;
+
+        let len = self.len();
+        let data_size = len.checked_mul(&mem::size_of::<T>());
+        let data_size = data_size.expect("overflow in to_owned()");
+        let size = mem::size_of::<RawVec<()>>().checked_add(&data_size);
+        let size = size.expect("overflow in to_owned()");
+
+        unsafe {
+            // this should pass the real required alignment
+            let ret = exchange_malloc(size, 8) as *mut RawVec<()>;
+
+            (*ret).fill = len * mem::nonzero_size_of::<T>();
+            (*ret).alloc = len * mem::nonzero_size_of::<T>();
+
+            // Be careful with the following loop. We want it to be optimized
+            // to a memcpy (or something similarly fast) when T is Copy. LLVM
+            // is easily confused, so any extra operations during the loop can
+            // prevent this optimization.
+            let mut i = 0;
+            let p = &mut (*ret).data as *mut _ as *mut T;
+            try_finally(
+                &mut i, (),
+                |i, ()| while *i < len {
+                    mem::move_val_init(
+                        &mut(*p.offset(*i as int)),
+                        self.unsafe_ref(*i).clone());
+                    *i += 1;
+                },
+                |i| if *i < len {
+                    // we must be failing, clean up after ourselves
+                    for j in range(0, *i as int) {
+                        ptr::read(&*p.offset(j));
+                    }
+                    // FIXME: #13994 (should pass align and size here)
+                    exchange_free(ret as *mut u8, 0, 8);
                 });
             cast::transmute(ret)
         }
@@ -768,7 +817,8 @@ impl<T> Drop for MoveItems<T> {
         // destroy the remaining elements
         for _x in *self {}
         unsafe {
-            exchange_free(self.allocation as *u8)
+            // FIXME: #13994 (should pass align and size here)
+            exchange_free(self.allocation, 0, 8)
         }
     }
 }
@@ -1512,7 +1512,7 @@ impl<T> FromVec<T> for ~[T] {
         let vp = v.as_mut_ptr();
 
         unsafe {
-            let ret = malloc_raw(size) as *mut RawVec<()>;
+            let ret = allocate(size, 8) as *mut RawVec<()>;
 
             (*ret).fill = len * mem::nonzero_size_of::<T>();
             (*ret).alloc = len * mem::nonzero_size_of::<T>();
@@ -15,8 +15,9 @@
 
 use std::cast;
 use std::ptr;
-use std::rt::global_heap;
+use std::rt::heap::exchange_free;
 use std::sync::atomics;
+use std::mem::{min_align_of, size_of};
 
 /// An atomically reference counted wrapper for shared state.
 ///
@@ -190,7 +191,8 @@ impl<T: Share + Send> Drop for Arc<T> {
 
         if self.inner().weak.fetch_sub(1, atomics::Release) == 1 {
             atomics::fence(atomics::Acquire);
-            unsafe { global_heap::exchange_free(self.x as *u8) }
+            unsafe { exchange_free(self.x as *mut u8, size_of::<ArcInner<T>>(),
+                                   min_align_of::<ArcInner<T>>()) }
         }
     }
 }
@@ -240,7 +242,8 @@ impl<T: Share + Send> Drop for Weak<T> {
         // the memory orderings
         if self.inner().weak.fetch_sub(1, atomics::Release) == 1 {
             atomics::fence(atomics::Acquire);
-            unsafe { global_heap::exchange_free(self.x as *u8) }
+            unsafe { exchange_free(self.x as *mut u8, size_of::<ArcInner<T>>(),
+                                   min_align_of::<ArcInner<T>>()) }
        }
    }
 }