
Remove in-place allocation and revert to separate methods for zeroed allocations

Fix docs
Tim Diekmann 2020-07-28 12:41:18 +02:00
parent 1f5d69dacc
commit 076ef66ba2
14 changed files with 397 additions and 354 deletions
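The shape of the change, as a hedged sketch (nightly-only `allocator_api` as it stood around this commit, not the current stable API; the 32-byte layout and the grow to 64 bytes are arbitrary example values): the `init: AllocInit` and `placement: ReallocPlacement` parameters disappear from `AllocRef`, and zeroed allocation and zeroed growth become the dedicated `alloc_zeroed` and `grow_zeroed` methods.

// Illustrative before/after call sites only; compiles against a mid-2020 nightly.
#![feature(allocator_api)]

use std::alloc::{handle_alloc_error, AllocRef, Global, Layout};

fn main() {
    let layout = Layout::from_size_align(32, 8).unwrap();

    // Before: Global.alloc(layout, AllocInit::Zeroed)
    let memory = Global.alloc_zeroed(layout).unwrap_or_else(|_| handle_alloc_error(layout));

    // Before: Global.grow(ptr, layout, 64, ReallocPlacement::MayMove, AllocInit::Zeroed)
    let grown = unsafe {
        Global.grow_zeroed(memory.ptr, layout, 64).unwrap_or_else(|_| handle_alloc_error(layout))
    };

    // Deallocate with the layout describing the grown block.
    unsafe { Global.dealloc(grown.ptr, Layout::from_size_align(64, 8).unwrap()) };
}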


@@ -164,16 +164,26 @@ pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 {
 #[unstable(feature = "allocator_api", issue = "32838")]
 unsafe impl AllocRef for Global {
     #[inline]
-    fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result<MemoryBlock, AllocErr> {
+    fn alloc(&mut self, layout: Layout) -> Result<MemoryBlock, AllocErr> {
         unsafe {
             let size = layout.size();
             if size == 0 {
                 Ok(MemoryBlock { ptr: layout.dangling(), size: 0 })
             } else {
-                let raw_ptr = match init {
-                    AllocInit::Uninitialized => alloc(layout),
-                    AllocInit::Zeroed => alloc_zeroed(layout),
-                };
+                let raw_ptr = alloc(layout);
+                let ptr = NonNull::new(raw_ptr).ok_or(AllocErr)?;
+                Ok(MemoryBlock { ptr, size })
+            }
+        }
+    }
+
+    fn alloc_zeroed(&mut self, layout: Layout) -> Result<MemoryBlock, AllocErr> {
+        unsafe {
+            let size = layout.size();
+            if size == 0 {
+                Ok(MemoryBlock { ptr: layout.dangling(), size: 0 })
+            } else {
+                let raw_ptr = alloc_zeroed(layout);
                 let ptr = NonNull::new(raw_ptr).ok_or(AllocErr)?;
                 Ok(MemoryBlock { ptr, size })
             }

@@ -193,8 +203,6 @@ unsafe impl AllocRef for Global {
         ptr: NonNull<u8>,
         layout: Layout,
         new_size: usize,
-        placement: ReallocPlacement,
-        init: AllocInit,
     ) -> Result<MemoryBlock, AllocErr> {
         let size = layout.size();
         debug_assert!(

@@ -206,28 +214,51 @@ unsafe impl AllocRef for Global {
             return Ok(MemoryBlock { ptr, size });
         }

-        match placement {
-            ReallocPlacement::InPlace => Err(AllocErr),
-            ReallocPlacement::MayMove if layout.size() == 0 => {
-                let new_layout =
-                    unsafe { Layout::from_size_align_unchecked(new_size, layout.align()) };
-                self.alloc(new_layout, init)
-            }
-            ReallocPlacement::MayMove => {
+        if layout.size() == 0 {
+            let new_layout = unsafe { Layout::from_size_align_unchecked(new_size, layout.align()) };
+            self.alloc(new_layout)
+        } else {
             // `realloc` probably checks for `new_size > size` or something similar.
             let ptr = unsafe {
                 intrinsics::assume(new_size > size);
                 realloc(ptr.as_ptr(), layout, new_size)
             };
-            let memory =
-                MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size };
+            Ok(MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size })
+        }
+    }
+
+    unsafe fn grow_zeroed(
+        &mut self,
+        ptr: NonNull<u8>,
+        layout: Layout,
+        new_size: usize,
+    ) -> Result<MemoryBlock, AllocErr> {
+        let size = layout.size();
+        debug_assert!(
+            new_size >= size,
+            "`new_size` must be greater than or equal to `memory.size()`"
+        );
+
+        if size == new_size {
+            return Ok(MemoryBlock { ptr, size });
+        }
+
+        if layout.size() == 0 {
+            let new_layout = unsafe { Layout::from_size_align_unchecked(new_size, layout.align()) };
+            self.alloc(new_layout)
+        } else {
+            // `realloc` probably checks for `new_size > size` or something similar.
+            let ptr = unsafe {
+                intrinsics::assume(new_size > size);
+                realloc(ptr.as_ptr(), layout, new_size)
+            };
+            let memory = MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size };
             unsafe {
-                init.init_offset(memory, size);
+                memory.ptr.as_ptr().add(size).write_bytes(0, memory.size - size);
             }
             Ok(memory)
         }
     }
-    }

     #[inline]
     unsafe fn shrink(
@@ -235,7 +266,6 @@ unsafe impl AllocRef for Global {
         ptr: NonNull<u8>,
         layout: Layout,
         new_size: usize,
-        placement: ReallocPlacement,
     ) -> Result<MemoryBlock, AllocErr> {
         let size = layout.size();
         debug_assert!(

@@ -247,15 +277,12 @@ unsafe impl AllocRef for Global {
             return Ok(MemoryBlock { ptr, size });
         }

-        match placement {
-            ReallocPlacement::InPlace => Err(AllocErr),
-            ReallocPlacement::MayMove if new_size == 0 => {
+        if new_size == 0 {
             unsafe {
                 self.dealloc(ptr, layout);
             }
             Ok(MemoryBlock { ptr: layout.dangling(), size: 0 })
-            }
-            ReallocPlacement::MayMove => {
+        } else {
             // `realloc` probably checks for `new_size < size` or something similar.
             let ptr = unsafe {
                 intrinsics::assume(new_size < size);

@@ -265,7 +292,6 @@ unsafe impl AllocRef for Global {
         }
     }
 }
-}

 /// The allocator for unique pointers.
 // This function must not unwind. If it does, MIR codegen will fail.
@@ -274,7 +300,7 @@ unsafe impl AllocRef for Global {
 #[inline]
 unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
     let layout = unsafe { Layout::from_size_align_unchecked(size, align) };
-    match Global.alloc(layout, AllocInit::Uninitialized) {
+    match Global.alloc(layout) {
         Ok(memory) => memory.ptr.as_ptr(),
         Err(_) => handle_alloc_error(layout),
     }


@@ -8,9 +8,8 @@ use test::Bencher;
 fn allocate_zeroed() {
     unsafe {
         let layout = Layout::from_size_align(1024, 1).unwrap();
-        let memory = Global
-            .alloc(layout.clone(), AllocInit::Zeroed)
-            .unwrap_or_else(|_| handle_alloc_error(layout));
+        let memory =
+            Global.alloc_zeroed(layout.clone()).unwrap_or_else(|_| handle_alloc_error(layout));

         let mut i = memory.ptr.cast::<u8>().as_ptr();
         let end = i.add(layout.size());


@@ -146,7 +146,7 @@ use core::pin::Pin;
 use core::ptr::{self, NonNull, Unique};
 use core::task::{Context, Poll};

-use crate::alloc::{self, AllocInit, AllocRef, Global};
+use crate::alloc::{self, AllocRef, Global};
 use crate::borrow::Cow;
 use crate::raw_vec::RawVec;
 use crate::str::from_boxed_utf8_unchecked;

@@ -197,11 +197,8 @@ impl<T> Box<T> {
     #[unstable(feature = "new_uninit", issue = "63291")]
     pub fn new_uninit() -> Box<mem::MaybeUninit<T>> {
         let layout = alloc::Layout::new::<mem::MaybeUninit<T>>();
-        let ptr = Global
-            .alloc(layout, AllocInit::Uninitialized)
-            .unwrap_or_else(|_| alloc::handle_alloc_error(layout))
-            .ptr
-            .cast();
+        let ptr =
+            Global.alloc(layout).unwrap_or_else(|_| alloc::handle_alloc_error(layout)).ptr.cast();
         unsafe { Box::from_raw(ptr.as_ptr()) }
     }

@@ -227,7 +224,7 @@ impl<T> Box<T> {
     pub fn new_zeroed() -> Box<mem::MaybeUninit<T>> {
         let layout = alloc::Layout::new::<mem::MaybeUninit<T>>();
         let ptr = Global
-            .alloc(layout, AllocInit::Zeroed)
+            .alloc_zeroed(layout)
             .unwrap_or_else(|_| alloc::handle_alloc_error(layout))
             .ptr
             .cast();


@@ -8,18 +8,20 @@ use core::ops::Drop;
 use core::ptr::{NonNull, Unique};
 use core::slice;

-use crate::alloc::{
-    handle_alloc_error,
-    AllocInit::{self, *},
-    AllocRef, Global, Layout,
-    ReallocPlacement::{self, *},
-};
+use crate::alloc::{handle_alloc_error, AllocRef, Global, Layout};
 use crate::boxed::Box;
 use crate::collections::TryReserveError::{self, *};

 #[cfg(test)]
 mod tests;

+enum AllocInit {
+    /// The contents of the new memory are uninitialized.
+    Uninitialized,
+    /// The new memory is guaranteed to be zeroed.
+    Zeroed,
+}
+
 /// A low-level utility for more ergonomically allocating, reallocating, and deallocating
 /// a buffer of memory on the heap without having to worry about all the corner cases
 /// involved. This type is excellent for building your own data structures like Vec and VecDeque.

@@ -156,14 +158,14 @@ impl<T, A: AllocRef> RawVec<T, A> {
     /// allocator for the returned `RawVec`.
     #[inline]
     pub fn with_capacity_in(capacity: usize, alloc: A) -> Self {
-        Self::allocate_in(capacity, Uninitialized, alloc)
+        Self::allocate_in(capacity, AllocInit::Uninitialized, alloc)
     }

     /// Like `with_capacity_zeroed`, but parameterized over the choice
     /// of allocator for the returned `RawVec`.
     #[inline]
     pub fn with_capacity_zeroed_in(capacity: usize, alloc: A) -> Self {
-        Self::allocate_in(capacity, Zeroed, alloc)
+        Self::allocate_in(capacity, AllocInit::Zeroed, alloc)
     }

     fn allocate_in(capacity: usize, init: AllocInit, mut alloc: A) -> Self {

@@ -180,7 +182,11 @@ impl<T, A: AllocRef> RawVec<T, A> {
             Ok(_) => {}
             Err(_) => capacity_overflow(),
         }
-        let memory = match alloc.alloc(layout, init) {
+        let result = match init {
+            AllocInit::Uninitialized => alloc.alloc(layout),
+            AllocInit::Zeroed => alloc.alloc_zeroed(layout),
+        };
+        let memory = match result {
             Ok(memory) => memory,
             Err(_) => handle_alloc_error(layout),
         };

@@ -358,7 +364,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
     ///
     /// Aborts on OOM.
     pub fn shrink_to_fit(&mut self, amount: usize) {
-        match self.shrink(amount, MayMove) {
+        match self.shrink(amount) {
             Err(CapacityOverflow) => capacity_overflow(),
             Err(AllocError { layout, .. }) => handle_alloc_error(layout),
             Ok(()) => { /* yay */ }

@@ -450,22 +456,16 @@ impl<T, A: AllocRef> RawVec<T, A> {
         Ok(())
     }

-    fn shrink(
-        &mut self,
-        amount: usize,
-        placement: ReallocPlacement,
-    ) -> Result<(), TryReserveError> {
+    fn shrink(&mut self, amount: usize) -> Result<(), TryReserveError> {
         assert!(amount <= self.capacity(), "Tried to shrink to a larger capacity");

         let (ptr, layout) = if let Some(mem) = self.current_memory() { mem } else { return Ok(()) };
         let new_size = amount * mem::size_of::<T>();

         let memory = unsafe {
-            self.alloc.shrink(ptr, layout, new_size, placement).map_err(|_| {
-                TryReserveError::AllocError {
+            self.alloc.shrink(ptr, layout, new_size).map_err(|_| TryReserveError::AllocError {
                 layout: Layout::from_size_align_unchecked(new_size, layout.align()),
                 non_exhaustive: (),
-                }
             })?
         };
         self.set_memory(memory);

@@ -492,9 +492,9 @@ where
     let memory = if let Some((ptr, old_layout)) = current_memory {
         debug_assert_eq!(old_layout.align(), new_layout.align());
-        unsafe { alloc.grow(ptr, old_layout, new_layout.size(), MayMove, Uninitialized) }
+        unsafe { alloc.grow(ptr, old_layout, new_layout.size()) }
     } else {
-        alloc.alloc(new_layout, Uninitialized)
+        alloc.alloc(new_layout)
     }
     .map_err(|_| AllocError { layout: new_layout, non_exhaustive: () })?;


@@ -20,12 +20,12 @@ fn allocator_param() {
         fuel: usize,
     }
     unsafe impl AllocRef for BoundedAlloc {
-        fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result<MemoryBlock, AllocErr> {
+        fn alloc(&mut self, layout: Layout) -> Result<MemoryBlock, AllocErr> {
            let size = layout.size();
            if size > self.fuel {
                return Err(AllocErr);
            }
-           match Global.alloc(layout, init) {
+           match Global.alloc(layout) {
                ok @ Ok(_) => {
                    self.fuel -= size;
                    ok
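Side note on this test (not part of the commit): the default `alloc_zeroed` added later in this diff routes through `self.alloc` before zeroing, so `BoundedAlloc`'s fuel accounting also covers zeroed requests with no extra code. A wrapper that would rather let the underlying allocator do the zeroing could override the method along these lines (hypothetical sketch reusing the fields shown above; the error arm is assumed, since the hunk is truncated):

        // Hypothetical override; mirrors the structure of `alloc` above.
        fn alloc_zeroed(&mut self, layout: Layout) -> Result<MemoryBlock, AllocErr> {
            let size = layout.size();
            if size > self.fuel {
                return Err(AllocErr);
            }
            match Global.alloc_zeroed(layout) {
                // Deduct fuel only for allocations that actually succeed.
                ok @ Ok(_) => {
                    self.fuel -= size;
                    ok
                }
                err @ Err(_) => err,
            }
        }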


@@ -250,7 +250,7 @@ use core::pin::Pin;
 use core::ptr::{self, NonNull};
 use core::slice::from_raw_parts_mut;

-use crate::alloc::{box_free, handle_alloc_error, AllocInit, AllocRef, Global, Layout};
+use crate::alloc::{box_free, handle_alloc_error, AllocRef, Global, Layout};
 use crate::borrow::{Cow, ToOwned};
 use crate::string::String;
 use crate::vec::Vec;

@@ -951,9 +951,7 @@ impl<T: ?Sized> Rc<T> {
         let layout = Layout::new::<RcBox<()>>().extend(value_layout).unwrap().0.pad_to_align();

         // Allocate for the layout.
-        let mem = Global
-            .alloc(layout, AllocInit::Uninitialized)
-            .unwrap_or_else(|_| handle_alloc_error(layout));
+        let mem = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout));

         // Initialize the RcBox
         let inner = mem_to_rcbox(mem.ptr.as_ptr());


@@ -23,7 +23,7 @@ use core::slice::from_raw_parts_mut;
 use core::sync::atomic;
 use core::sync::atomic::Ordering::{Acquire, Relaxed, Release, SeqCst};

-use crate::alloc::{box_free, handle_alloc_error, AllocInit, AllocRef, Global, Layout};
+use crate::alloc::{box_free, handle_alloc_error, AllocRef, Global, Layout};
 use crate::borrow::{Cow, ToOwned};
 use crate::boxed::Box;
 use crate::rc::is_dangling;

@@ -906,9 +906,7 @@ impl<T: ?Sized> Arc<T> {
         // reference (see #54908).
         let layout = Layout::new::<ArcInner<()>>().extend(value_layout).unwrap().0.pad_to_align();

-        let mem = Global
-            .alloc(layout, AllocInit::Uninitialized)
-            .unwrap_or_else(|_| handle_alloc_error(layout));
+        let mem = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout));

         // Initialize the ArcInner
         let inner = mem_to_arcinner(mem.ptr.as_ptr());


@@ -1,4 +1,4 @@
-use std::alloc::{AllocInit, AllocRef, Global, Layout, System};
+use std::alloc::{AllocRef, Global, Layout, System};

 /// Issue #45955 and #62251.
 #[test]

@@ -20,13 +20,7 @@ fn check_overalign_requests<T: AllocRef>(mut allocator: T) {
     unsafe {
         let pointers: Vec<_> = (0..iterations)
             .map(|_| {
-                allocator
-                    .alloc(
-                        Layout::from_size_align(size, align).unwrap(),
-                        AllocInit::Uninitialized,
-                    )
-                    .unwrap()
-                    .ptr
+                allocator.alloc(Layout::from_size_align(size, align).unwrap()).unwrap().ptr
             })
             .collect();
         for &ptr in &pointers {


@@ -29,66 +29,6 @@ impl fmt::Display for AllocErr {
     }
 }

-/// A desired initial state for allocated memory.
-#[derive(Debug, Copy, Clone, PartialEq, Eq)]
-#[unstable(feature = "allocator_api", issue = "32838")]
-pub enum AllocInit {
-    /// The contents of the new memory are uninitialized.
-    Uninitialized,
-    /// The new memory is guaranteed to be zeroed.
-    Zeroed,
-}
-
-impl AllocInit {
-    /// Initialize the specified memory block.
-    ///
-    /// This behaves like calling [`AllocInit::init_offset(memory, 0)`][off].
-    ///
-    /// [off]: AllocInit::init_offset
-    ///
-    /// # Safety
-    ///
-    /// * `memory.ptr` must be [valid] for writes of `memory.size` bytes.
-    ///
-    /// [valid]: ../../core/ptr/index.html#safety
-    #[inline]
-    #[unstable(feature = "allocator_api", issue = "32838")]
-    pub unsafe fn init(self, memory: MemoryBlock) {
-        // SAFETY: the safety contract for `init_offset` must be
-        // upheld by the caller.
-        unsafe { self.init_offset(memory, 0) }
-    }
-
-    /// Initialize the memory block like specified by `init` at the specified `offset`.
-    ///
-    /// This is a no-op for [`AllocInit::Uninitialized`][] and writes zeroes for
-    /// [`AllocInit::Zeroed`][] at `ptr + offset` until `ptr + layout.size()`.
-    ///
-    /// # Safety
-    ///
-    /// * `memory.ptr` must be [valid] for writes of `memory.size` bytes.
-    /// * `offset` must be smaller than or equal to `memory.size`
-    ///
-    /// [valid]: ../../core/ptr/index.html#safety
-    #[inline]
-    #[unstable(feature = "allocator_api", issue = "32838")]
-    pub unsafe fn init_offset(self, memory: MemoryBlock, offset: usize) {
-        debug_assert!(
-            offset <= memory.size,
-            "`offset` must be smaller than or equal to `memory.size`"
-        );
-        match self {
-            AllocInit::Uninitialized => (),
-            AllocInit::Zeroed => {
-                // SAFETY: the caller must guarantee that `offset` is smaller than or equal to `memory.size`,
-                // so the memory from `memory.ptr + offset` of length `memory.size - offset`
-                // is guaranteed to be contaned in `memory` and thus valid for writes.
-                unsafe { memory.ptr.as_ptr().add(offset).write_bytes(0, memory.size - offset) }
-            }
-        }
-    }
-}
-
 /// Represents a block of allocated memory returned by an allocator.
 #[derive(Debug, Copy, Clone)]
 #[unstable(feature = "allocator_api", issue = "32838")]

@@ -97,24 +37,6 @@ pub struct MemoryBlock {
     pub size: usize,
 }

-/// A placement constraint when growing or shrinking an existing allocation.
-#[derive(Debug, Copy, Clone, PartialEq, Eq)]
-#[unstable(feature = "allocator_api", issue = "32838")]
-pub enum ReallocPlacement {
-    /// The allocator is allowed to move the allocation to a different memory address.
-    // FIXME(wg-allocators#46): Add a section to the module documentation "What is a legal
-    // allocator" and link it at "valid location".
-    ///
-    /// If the allocation _does_ move, it's the responsibility of the allocator
-    /// to also move the data from the previous location to the new location.
-    MayMove,
-    /// The address of the new memory must not change.
-    ///
-    /// If the allocation would have to be moved to a new location to fit, the
-    /// reallocation request will fail.
-    InPlace,
-}
-
 /// An implementation of `AllocRef` can allocate, grow, shrink, and deallocate arbitrary blocks of
 /// data described via [`Layout`][].
 ///

@@ -177,10 +99,8 @@ pub unsafe trait AllocRef {
     ///
     /// On success, returns a [`MemoryBlock`][] meeting the size and alignment guarantees of `layout`.
     ///
-    /// The returned block may have a larger size than specified by `layout.size()` and is
-    /// initialized as specified by [`init`], all the way up to the returned size of the block.
-    ///
-    /// [`init`]: AllocInit
+    /// The returned block may have a larger size than specified by `layout.size()`, and may or may
+    /// not have its contents initialized.
     ///
     /// # Errors
     ///

@@ -195,7 +115,29 @@ pub unsafe trait AllocRef {
     /// call the [`handle_alloc_error`] function, rather than directly invoking `panic!` or similar.
     ///
     /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
-    fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result<MemoryBlock, AllocErr>;
+    fn alloc(&mut self, layout: Layout) -> Result<MemoryBlock, AllocErr>;
+
+    /// Behaves like `alloc`, but also ensures that the contents are set to zero before being returned.
+    ///
+    /// # Errors
+    ///
+    /// Returning `Err` indicates that either memory is exhausted or `layout` does not meet
+    /// allocator's size or alignment constraints.
+    ///
+    /// Implementations are encouraged to return `Err` on memory exhaustion rather than panicking or
+    /// aborting, but this is not a strict requirement. (Specifically: it is *legal* to implement
+    /// this trait atop an underlying native allocation library that aborts on memory exhaustion.)
+    ///
+    /// Clients wishing to abort computation in response to an allocation error are encouraged to
+    /// call the [`handle_alloc_error`] function, rather than directly invoking `panic!` or similar.
+    ///
+    /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
+    fn alloc_zeroed(&mut self, layout: Layout) -> Result<MemoryBlock, AllocErr> {
+        let memory = self.alloc(layout)?;
+        // SAFETY: `alloc` returns a valid memory block
+        unsafe { memory.ptr.as_ptr().write_bytes(0, memory.size) }
+        Ok(memory)
+    }

     /// Deallocates the memory referenced by `ptr`.
     ///

@@ -213,31 +155,11 @@ pub unsafe trait AllocRef {
     /// Returns a new [`MemoryBlock`][] containing a pointer and the actual size of the allocated
     /// memory. The pointer is suitable for holding data described by a new layout with `layout`s
     /// alignment and a size given by `new_size`. To accomplish this, the allocator may extend the
-    /// allocation referenced by `ptr` to fit the new layout. If the [`placement`] is
-    /// [`InPlace`], the returned pointer is guaranteed to be the same as the passed `ptr`.
-    ///
-    /// If [`MayMove`] is used then ownership of the memory block referenced by `ptr`
-    /// is transferred to this allocator. The memory may or may not be freed, and should be
-    /// considered unusable (unless of course it is transferred back to the caller again via the
-    /// return value of this method).
+    /// allocation referenced by `ptr` to fit the new layout.
     ///
     /// If this method returns `Err`, then ownership of the memory block has not been transferred to
     /// this allocator, and the contents of the memory block are unaltered.
     ///
-    /// The memory block will contain the following contents after a successful call to `grow`:
-    ///   * Bytes `0..layout.size()` are preserved from the original allocation.
-    ///   * Bytes `layout.size()..old_size` will either be preserved or initialized according to
-    ///     [`init`], depending on the allocator implementation. `old_size` refers to the size of
-    ///     the `MemoryBlock` prior to the `grow` call, which may be larger than the size
-    ///     that was originally requested when it was allocated.
-    ///   * Bytes `old_size..new_size` are initialized according to [`init`]. `new_size` refers to
-    ///     the size of the `MemoryBlock` returned by the `grow` call.
-    ///
-    /// [`InPlace`]: ReallocPlacement::InPlace
-    /// [`MayMove`]: ReallocPlacement::MayMove
-    /// [`placement`]: ReallocPlacement
-    /// [`init`]: AllocInit
-    ///
     /// # Safety
     ///
     /// * `ptr` must denote a block of memory [*currently allocated*] via this allocator,

@@ -270,12 +192,7 @@ pub unsafe trait AllocRef {
         ptr: NonNull<u8>,
         layout: Layout,
         new_size: usize,
-        placement: ReallocPlacement,
-        init: AllocInit,
     ) -> Result<MemoryBlock, AllocErr> {
-        match placement {
-            ReallocPlacement::InPlace => Err(AllocErr),
-            ReallocPlacement::MayMove => {
         let size = layout.size();
         debug_assert!(
             new_size >= size,

@@ -289,9 +206,10 @@ pub unsafe trait AllocRef {
         let new_layout =
             // SAFETY: the caller must ensure that the `new_size` does not overflow.
             // `layout.align()` comes from a `Layout` and is thus guaranteed to be valid for a Layout.
-            // The caller must ensure that `new_size` is greater than zero.
+            // The caller must ensure that `new_size` is greater than or equal to zero. If it's equal
+            // to zero, it's catched beforehand.
             unsafe { Layout::from_size_align_unchecked(new_size, layout.align()) };
-        let new_memory = self.alloc(new_layout, init)?;
+        let new_memory = self.alloc(new_layout)?;

         // SAFETY: because `new_size` must be greater than or equal to `size`, both the old and new
         // memory allocation are valid for reads and writes for `size` bytes. Also, because the old

@@ -304,6 +222,75 @@ pub unsafe trait AllocRef {
             Ok(new_memory)
         }
     }

+    /// Behaves like `grow`, but also ensures that the new contents are set to zero before being
+    /// returned.
+    ///
+    /// The memory block will contain the following contents after a successful call to `grow`:
+    ///   * Bytes `0..layout.size()` are preserved from the original allocation.
+    ///   * Bytes `layout.size()..new_size` are zeroed. `new_size` refers to
+    ///     the size of the `MemoryBlock` returned by the `grow` call.
+    ///
+    /// # Safety
+    ///
+    /// * `ptr` must denote a block of memory [*currently allocated*] via this allocator,
+    /// * `layout` must [*fit*] that block of memory (The `new_size` argument need not fit it.),
+    // We can't require that `new_size` is strictly greater than `memory.size` because of ZSTs.
+    // An alternative would be
+    // * `new_size must be strictly greater than `memory.size` or both are zero
+    /// * `new_size` must be greater than or equal to `layout.size()`, and
+    /// * `new_size`, when rounded up to the nearest multiple of `layout.align()`, must not overflow
+    ///   (i.e., the rounded value must be less than or equal to `usize::MAX`).
+    ///
+    /// [*currently allocated*]: #currently-allocated-memory
+    /// [*fit*]: #memory-fitting
+    ///
+    /// # Errors
+    ///
+    /// Returns `Err` if the new layout does not meet the allocator's size and alignment
+    /// constraints of the allocator, or if growing otherwise fails.
+    ///
+    /// Implementations are encouraged to return `Err` on memory exhaustion rather than panicking or
+    /// aborting, but this is not a strict requirement. (Specifically: it is *legal* to implement
+    /// this trait atop an underlying native allocation library that aborts on memory exhaustion.)
+    ///
+    /// Clients wishing to abort computation in response to an allocation error are encouraged to
+    /// call the [`handle_alloc_error`] function, rather than directly invoking `panic!` or similar.
+    ///
+    /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
+    unsafe fn grow_zeroed(
+        &mut self,
+        ptr: NonNull<u8>,
+        layout: Layout,
+        new_size: usize,
+    ) -> Result<MemoryBlock, AllocErr> {
+        let size = layout.size();
+        debug_assert!(
+            new_size >= size,
+            "`new_size` must be greater than or equal to `layout.size()`"
+        );
+
+        if new_size == size {
+            return Ok(MemoryBlock { ptr, size });
+        }
+
+        let new_layout =
+            // SAFETY: the caller must ensure that the `new_size` does not overflow.
+            // `layout.align()` comes from a `Layout` and is thus guaranteed to be valid for a Layout.
+            // The caller must ensure that `new_size` is greater than or equal to zero. If it's equal
+            // to zero, it's catched beforehand.
+            unsafe { Layout::from_size_align_unchecked(new_size, layout.align()) };
+        let new_memory = self.alloc_zeroed(new_layout)?;

+        // SAFETY: because `new_size` must be greater than or equal to `size`, both the old and new
+        // memory allocation are valid for reads and writes for `size` bytes. Also, because the old
+        // allocation wasn't yet deallocated, it cannot overlap `new_memory`. Thus, the call to
+        // `copy_nonoverlapping` is safe.
+        // The safety contract for `dealloc` must be upheld by the caller.
+        unsafe {
+            ptr::copy_nonoverlapping(ptr.as_ptr(), new_memory.ptr.as_ptr(), size);
+            self.dealloc(ptr, layout);
+            Ok(new_memory)
+        }
+    }

@@ -312,8 +299,7 @@ pub unsafe trait AllocRef {
     /// Returns a new [`MemoryBlock`][] containing a pointer and the actual size of the allocated
     /// memory. The pointer is suitable for holding data described by a new layout with `layout`s
     /// alignment and a size given by `new_size`. To accomplish this, the allocator may shrink the
-    /// allocation referenced by `ptr` to fit the new layout. If the [`placement`] is
-    /// [`InPlace`], the returned pointer is guaranteed to be the same as the passed `ptr`.
+    /// allocation referenced by `ptr` to fit the new layout.
     ///
     /// If this returns `Ok`, then ownership of the memory block referenced by `ptr` has been
     /// transferred to this allocator. The memory may or may not have been freed, and should be

@@ -323,11 +309,6 @@ pub unsafe trait AllocRef {
     /// If this method returns `Err`, then ownership of the memory block has not been transferred to
     /// this allocator, and the contents of the memory block are unaltered.
     ///
-    /// The behavior of how the allocator tries to shrink the memory is specified by [`placement`].
-    ///
-    /// [`InPlace`]: ReallocPlacement::InPlace
-    /// [`placement`]: ReallocPlacement
-    ///
     /// # Safety
     ///
     /// * `ptr` must denote a block of memory [*currently allocated*] via this allocator,

@@ -358,11 +339,7 @@ pub unsafe trait AllocRef {
         ptr: NonNull<u8>,
         layout: Layout,
         new_size: usize,
-        placement: ReallocPlacement,
     ) -> Result<MemoryBlock, AllocErr> {
-        match placement {
-            ReallocPlacement::InPlace => Err(AllocErr),
-            ReallocPlacement::MayMove => {
         let size = layout.size();
         debug_assert!(
             new_size <= size,

@@ -378,7 +355,7 @@ pub unsafe trait AllocRef {
             // `layout.align()` comes from a `Layout` and is thus guaranteed to be valid for a Layout.
             // The caller must ensure that `new_size` is greater than zero.
             unsafe { Layout::from_size_align_unchecked(new_size, layout.align()) };
-        let new_memory = self.alloc(new_layout, AllocInit::Uninitialized)?;
+        let new_memory = self.alloc(new_layout)?;

         // SAFETY: because `new_size` must be lower than or equal to `size`, both the old and new
         // memory allocation are valid for reads and writes for `new_size` bytes. Also, because the

@@ -391,8 +368,6 @@ pub unsafe trait AllocRef {
             Ok(new_memory)
         }
     }
-        }
-    }

     /// Creates a "by reference" adaptor for this instance of `AllocRef`.
     ///

@@ -409,8 +384,13 @@ where
     A: AllocRef + ?Sized,
 {
     #[inline]
-    fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result<MemoryBlock, AllocErr> {
-        (**self).alloc(layout, init)
+    fn alloc(&mut self, layout: Layout) -> Result<MemoryBlock, AllocErr> {
+        (**self).alloc(layout)
+    }
+
+    #[inline]
+    fn alloc_zeroed(&mut self, layout: Layout) -> Result<MemoryBlock, AllocErr> {
+        (**self).alloc_zeroed(layout)
     }

     #[inline]

@@ -425,11 +405,20 @@ where
         ptr: NonNull<u8>,
         layout: Layout,
         new_size: usize,
-        placement: ReallocPlacement,
-        init: AllocInit,
     ) -> Result<MemoryBlock, AllocErr> {
         // SAFETY: the safety contract must be upheld by the caller
-        unsafe { (**self).grow(ptr, layout, new_size, placement, init) }
+        unsafe { (**self).grow(ptr, layout, new_size) }
+    }
+
+    #[inline]
+    unsafe fn grow_zeroed(
+        &mut self,
+        ptr: NonNull<u8>,
+        layout: Layout,
+        new_size: usize,
+    ) -> Result<MemoryBlock, AllocErr> {
+        // SAFETY: the safety contract must be upheld by the caller
+        unsafe { (**self).grow_zeroed(ptr, layout, new_size) }
     }

     #[inline]

@@ -438,9 +427,8 @@ where
         ptr: NonNull<u8>,
         layout: Layout,
         new_size: usize,
-        placement: ReallocPlacement,
     ) -> Result<MemoryBlock, AllocErr> {
         // SAFETY: the safety contract must be upheld by the caller
-        unsafe { (**self).shrink(ptr, layout, new_size, placement) }
+        unsafe { (**self).shrink(ptr, layout, new_size) }
     }
 }
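To make the revised trait surface concrete, here is a minimal implementor sketch against the post-commit API (nightly `allocator_api` from mid-2020; `CountingAlloc` is a made-up type, not part of the commit). Only `alloc` and `dealloc` are written by hand; `alloc_zeroed`, `grow`, `grow_zeroed`, and `shrink` fall back to the default methods shown above.

// Illustrative only: a wrapper around `System` that counts live allocations.
#![feature(allocator_api)]

use std::alloc::{AllocErr, AllocRef, Layout, MemoryBlock, System};
use std::ptr::NonNull;

struct CountingAlloc {
    live_allocations: usize,
}

unsafe impl AllocRef for CountingAlloc {
    fn alloc(&mut self, layout: Layout) -> Result<MemoryBlock, AllocErr> {
        let memory = System.alloc(layout)?;
        self.live_allocations += 1;
        Ok(memory)
    }

    unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
        self.live_allocations -= 1;
        System.dealloc(ptr, layout)
    }
}

The point of the revert is visible here: zero-initialization is no longer an argument threaded through every method, so a simple implementor has nothing extra to forward.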


@@ -140,16 +140,27 @@ pub struct System;
 #[unstable(feature = "allocator_api", issue = "32838")]
 unsafe impl AllocRef for System {
     #[inline]
-    fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result<MemoryBlock, AllocErr> {
+    fn alloc(&mut self, layout: Layout) -> Result<MemoryBlock, AllocErr> {
         unsafe {
             let size = layout.size();
             if size == 0 {
                 Ok(MemoryBlock { ptr: layout.dangling(), size: 0 })
             } else {
-                let raw_ptr = match init {
-                    AllocInit::Uninitialized => GlobalAlloc::alloc(self, layout),
-                    AllocInit::Zeroed => GlobalAlloc::alloc_zeroed(self, layout),
-                };
+                let raw_ptr = GlobalAlloc::alloc(self, layout);
+                let ptr = NonNull::new(raw_ptr).ok_or(AllocErr)?;
+                Ok(MemoryBlock { ptr, size })
+            }
+        }
+    }
+
+    #[inline]
+    fn alloc_zeroed(&mut self, layout: Layout) -> Result<MemoryBlock, AllocErr> {
+        unsafe {
+            let size = layout.size();
+            if size == 0 {
+                Ok(MemoryBlock { ptr: layout.dangling(), size: 0 })
+            } else {
+                let raw_ptr = GlobalAlloc::alloc_zeroed(self, layout);
                 let ptr = NonNull::new(raw_ptr).ok_or(AllocErr)?;
                 Ok(MemoryBlock { ptr, size })
             }

@@ -171,8 +182,6 @@ unsafe impl AllocRef for System {
         ptr: NonNull<u8>,
         layout: Layout,
         new_size: usize,
-        placement: ReallocPlacement,
-        init: AllocInit,
     ) -> Result<MemoryBlock, AllocErr> {
         let size = layout.size();
         debug_assert!(

@@ -184,9 +193,7 @@ unsafe impl AllocRef for System {
             return Ok(MemoryBlock { ptr, size });
         }

-        match placement {
-            ReallocPlacement::InPlace => Err(AllocErr),
-            ReallocPlacement::MayMove if layout.size() == 0 => {
+        if layout.size() == 0 {
             let new_layout =
                 // SAFETY: The new size and layout alignement guarantees
                 // are transfered to the caller (they come from parameters).

@@ -194,9 +201,57 @@ unsafe impl AllocRef for System {
                 // See the preconditions for `Layout::from_size_align` to
                 // see what must be checked.
                 unsafe { Layout::from_size_align_unchecked(new_size, layout.align()) };
-                self.alloc(new_layout, init)
+            self.alloc(new_layout)
+        } else {
+            // SAFETY:
+            //
+            // The safety guarantees are explained in the documentation
+            // for the `GlobalAlloc` trait and its `dealloc` method.
+            //
+            // `realloc` probably checks for `new_size > size` or something
+            // similar.
+            //
+            // For the guarantees about `init_offset`, see its documentation:
+            // `ptr` is assumed valid (and checked for non-NUL) and
+            // `memory.size` is set to `new_size` so the offset being `size`
+            // is valid.
+            unsafe {
+                intrinsics::assume(new_size > size);
+                let ptr = GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size);
+                let memory =
+                    MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size };
+                Ok(memory)
             }
-            ReallocPlacement::MayMove => {
+        }
+    }
+
+    #[inline]
+    unsafe fn grow_zeroed(
+        &mut self,
+        ptr: NonNull<u8>,
+        layout: Layout,
+        new_size: usize,
+    ) -> Result<MemoryBlock, AllocErr> {
+        let size = layout.size();
+        debug_assert!(
+            new_size >= size,
+            "`new_size` must be greater than or equal to `memory.size()`"
+        );
+
+        if size == new_size {
+            return Ok(MemoryBlock { ptr, size });
+        }
+
+        if layout.size() == 0 {
+            let new_layout =
+                // SAFETY: The new size and layout alignement guarantees
+                // are transfered to the caller (they come from parameters).
+                //
+                // See the preconditions for `Layout::from_size_align` to
+                // see what must be checked.
+                unsafe { Layout::from_size_align_unchecked(new_size, layout.align()) };
+            self.alloc_zeroed(new_layout)
+        } else {
             // SAFETY:
             //
             // The safety guarantees are explained in the documentation

@@ -214,13 +269,12 @@ unsafe impl AllocRef for System {
                 let ptr = GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size);
                 let memory =
                     MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size };
-                init.init_offset(memory, size);
+                memory.ptr.as_ptr().add(size).write_bytes(0, memory.size - size);
                 memory
             };
             Ok(memory)
         }
     }
-    }

     #[inline]
     unsafe fn shrink(

@@ -228,7 +282,6 @@ unsafe impl AllocRef for System {
         ptr: NonNull<u8>,
         layout: Layout,
         new_size: usize,
-        placement: ReallocPlacement,
     ) -> Result<MemoryBlock, AllocErr> {
         let size = layout.size();
         debug_assert!(

@@ -240,16 +293,13 @@ unsafe impl AllocRef for System {
             return Ok(MemoryBlock { ptr, size });
         }

-        match placement {
-            ReallocPlacement::InPlace => Err(AllocErr),
-            ReallocPlacement::MayMove if new_size == 0 => {
+        if new_size == 0 {
             // SAFETY: see `GlobalAlloc::dealloc` for the guarantees that
             // must be respected. `ptr` and `layout` are parameters and so
             // those guarantees must be checked by the caller.
             unsafe { self.dealloc(ptr, layout) };
             Ok(MemoryBlock { ptr: layout.dangling(), size: 0 })
-            }
-            ReallocPlacement::MayMove => {
+        } else {
             // SAFETY:
             //
             // See `GlobalAlloc::realloc` for more informations about the

@@ -268,7 +318,6 @@ unsafe impl AllocRef for System {
         }
     }
 }
-}

 static HOOK: AtomicPtr<()> = AtomicPtr::new(ptr::null_mut());

 /// Registers a custom allocation error hook, replacing any that was previously registered.
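An illustrative exercise of the documented `grow_zeroed` behavior for `System` (hedged sketch against the mid-2020 nightly API; the byte pattern and sizes are arbitrary): bytes below the old `layout.size()` are preserved, bytes above it read as zero.

// Illustrative only: checks the preserve-then-zero contract of `grow_zeroed`.
#![feature(allocator_api)]

use std::alloc::{handle_alloc_error, AllocRef, Layout, System};

fn main() {
    let old_layout = Layout::from_size_align(4, 1).unwrap();
    let new_layout = Layout::from_size_align(8, 1).unwrap();

    let memory = System.alloc(old_layout).unwrap_or_else(|_| handle_alloc_error(old_layout));
    unsafe {
        // Fill the original four bytes with a recognizable pattern.
        memory.ptr.as_ptr().write_bytes(0xAB, old_layout.size());

        // Bytes 0..4 must be preserved, bytes 4..8 must read as zero.
        let grown = System
            .grow_zeroed(memory.ptr, old_layout, new_layout.size())
            .unwrap_or_else(|_| handle_alloc_error(new_layout));
        assert_eq!(*grown.ptr.as_ptr(), 0xAB);
        assert_eq!(*grown.ptr.as_ptr().add(4), 0);

        System.dealloc(grown.ptr, new_layout);
    }
}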


@@ -7,7 +7,7 @@
 extern crate helper;

-use std::alloc::{self, AllocInit, AllocRef, Global, Layout, System};
+use std::alloc::{self, AllocRef, Global, Layout, System};
 use std::sync::atomic::{AtomicUsize, Ordering};

 static HITS: AtomicUsize = AtomicUsize::new(0);

@@ -37,7 +37,7 @@ fn main() {
     unsafe {
         let layout = Layout::from_size_align(4, 2).unwrap();

-        let memory = Global.alloc(layout.clone(), AllocInit::Uninitialized).unwrap();
+        let memory = Global.alloc(layout.clone()).unwrap();
         helper::work_with(&memory.ptr);
         assert_eq!(HITS.load(Ordering::SeqCst), n + 1);
         Global.dealloc(memory.ptr, layout);

@@ -49,7 +49,7 @@ fn main() {
         drop(s);
         assert_eq!(HITS.load(Ordering::SeqCst), n + 4);

-        let memory = System.alloc(layout.clone(), AllocInit::Uninitialized).unwrap();
+        let memory = System.alloc(layout.clone()).unwrap();
         assert_eq!(HITS.load(Ordering::SeqCst), n + 4);
         helper::work_with(&memory.ptr);
         System.dealloc(memory.ptr, layout);


@@ -9,7 +9,7 @@
 extern crate custom;
 extern crate helper;

-use std::alloc::{AllocInit, AllocRef, Global, Layout, System};
+use std::alloc::{AllocRef, Global, Layout, System};
 use std::sync::atomic::{AtomicUsize, Ordering};

 #[global_allocator]

@@ -20,13 +20,13 @@ fn main() {
     let n = GLOBAL.0.load(Ordering::SeqCst);
     let layout = Layout::from_size_align(4, 2).unwrap();

-    let memory = Global.alloc(layout.clone(), AllocInit::Uninitialized).unwrap();
+    let memory = Global.alloc(layout.clone()).unwrap();
     helper::work_with(&memory.ptr);
     assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 1);
     Global.dealloc(memory.ptr, layout);
     assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2);

-    let memory = System.alloc(layout.clone(), AllocInit::Uninitialized).unwrap();
+    let memory = System.alloc(layout.clone()).unwrap();
     assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2);
     helper::work_with(&memory.ptr);
     System.dealloc(memory.ptr, layout);


@@ -6,7 +6,7 @@
 #![feature(allocator_api)]

-use std::alloc::{handle_alloc_error, AllocInit, AllocRef, Global, Layout, ReallocPlacement};
+use std::alloc::{handle_alloc_error, AllocRef, Global, Layout};
 use std::ptr::{self, NonNull};

 fn main() {

@@ -41,9 +41,7 @@ unsafe fn test_triangle() -> bool {
             println!("allocate({:?})", layout);
         }

-        let memory = Global
-            .alloc(layout, AllocInit::Uninitialized)
-            .unwrap_or_else(|_| handle_alloc_error(layout));
+        let memory = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout));

         if PRINT {
             println!("allocate({:?}) = {:?}", layout, memory.ptr);

@@ -70,11 +68,9 @@ unsafe fn test_triangle() -> bool {
                 NonNull::new_unchecked(ptr),
                 old,
                 new.size(),
-                ReallocPlacement::MayMove,
-                AllocInit::Uninitialized,
             )
         } else {
-            Global.shrink(NonNull::new_unchecked(ptr), old, new.size(), ReallocPlacement::MayMove)
+            Global.shrink(NonNull::new_unchecked(ptr), old, new.size())
         };

         let memory = memory.unwrap_or_else(|_| {


@@ -4,7 +4,7 @@
 // pretty-expanded FIXME #23616

 #![feature(allocator_api)]

-use std::alloc::{handle_alloc_error, AllocInit, AllocRef, Global, Layout};
+use std::alloc::{handle_alloc_error, AllocRef, Global, Layout};
 use std::ptr::NonNull;

 struct arena(());

@@ -25,9 +25,7 @@ struct Ccx {
 fn alloc(_bcx: &arena) -> &Bcx<'_> {
     unsafe {
         let layout = Layout::new::<Bcx>();
-        let memory = Global
-            .alloc(layout, AllocInit::Uninitialized)
-            .unwrap_or_else(|_| handle_alloc_error(layout));
+        let memory = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout));
         &*(memory.ptr.as_ptr() as *const _)
     }
 }