Add core::marker::PhantomData.
Port `core::ptr::Unique` to have `PhantomData`. Add `PhantomData` to `TypedArena` and `Vec` as well. As a drive-by, switch `ptr::Unique` from a tuple-struct to a struct with fields.
parent e02b6d1748
commit f90c3864b6

9 changed files with 106 additions and 26 deletions
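Every change in the patch follows the same pattern: a container that owns its contents through a raw pointer gains a zero-sized `PhantomData<T>` field so the compiler knows that dropping the container may also drop values of type `T`. A minimal sketch of that pattern in today's Rust (the `RawBox` type is illustrative and not part of the patch):

```rust
use std::marker::PhantomData;

// Illustrative only: a container that owns its element through a raw
// pointer. Without the PhantomData<T> field the compiler cannot see
// that dropping RawBox<T> may also drop a T.
struct RawBox<T> {
    ptr: *mut T,
    _own: PhantomData<T>,
}

impl<T> RawBox<T> {
    fn new(value: T) -> RawBox<T> {
        RawBox {
            ptr: Box::into_raw(Box::new(value)),
            _own: PhantomData,
        }
    }
}

impl<T> Drop for RawBox<T> {
    fn drop(&mut self) {
        // Reconstruct the Box so the T is dropped and the allocation freed.
        unsafe { drop(Box::from_raw(self.ptr)) };
    }
}

fn main() {
    let b = RawBox::new(String::from("owned through a raw pointer"));
    drop(b); // the String is dropped here
}
```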
@@ -42,6 +42,7 @@ use std::cell::{Cell, RefCell};
 use std::cmp;
 use std::intrinsics::{TyDesc, get_tydesc};
 use std::intrinsics;
+use std::marker;
 use std::mem;
 use std::num::{Int, UnsignedInt};
 use std::ptr;
@@ -365,6 +366,10 @@ pub struct TypedArena<T> {
 
     /// A pointer to the first arena segment.
     first: RefCell<*mut TypedArenaChunk<T>>,
+
+    /// Marker indicating that dropping the arena causes its owned
+    /// instances of `T` to be dropped.
+    _own: marker::PhantomData<T>,
 }
 
 struct TypedArenaChunk<T> {
@@ -460,6 +465,7 @@ impl<T> TypedArena<T> {
             ptr: Cell::new((*chunk).start() as *const T),
             end: Cell::new((*chunk).end() as *const T),
            first: RefCell::new(chunk),
+            _own: marker::PhantomData,
            }
        }
    }
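The `_own: marker::PhantomData<T>` field added to `TypedArena<T>` above is zero-sized, so the arena's layout does not grow; only the ownership information seen by the compiler changes. A quick check, assuming current Rust and a stripped-down `MiniArena` stand-in for the real struct:

```rust
use std::cell::{Cell, RefCell};
use std::marker::PhantomData;
use std::mem::size_of;

// A simplified stand-in for the TypedArena fields touched by the patch;
// TypedArenaChunk is replaced by a unit type since only sizes matter here.
#[allow(dead_code)]
struct MiniArena<T> {
    ptr: Cell<*const T>,
    end: Cell<*const T>,
    first: RefCell<*mut ()>,
    _own: PhantomData<T>,
}

fn main() {
    // PhantomData is a zero-sized type with alignment 1, so adding the
    // marker field does not add any bytes to the arena.
    assert_eq!(size_of::<PhantomData<String>>(), 0);
    println!("MiniArena<String>: {} bytes", size_of::<MiniArena<String>>());
}
```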
@@ -278,7 +278,7 @@ impl<T> Drop for RawItems<T> {
 #[unsafe_destructor]
 impl<K, V> Drop for Node<K, V> {
     fn drop(&mut self) {
-        if self.keys.0.is_null() {
+        if self.keys.ptr.is_null() {
            // We have already cleaned up this node.
            return;
        }
@@ -292,7 +292,7 @@ impl<K, V> Drop for Node<K, V> {
            self.destroy();
        }
 
-        self.keys.0 = ptr::null_mut();
+        self.keys.ptr = ptr::null_mut();
    }
 }
 
@@ -337,18 +337,18 @@ impl<K, V> Node<K, V> {
    unsafe fn destroy(&mut self) {
        let (alignment, size) =
                calculate_allocation_generic::<K, V>(self.capacity(), self.is_leaf());
-        heap::deallocate(self.keys.0 as *mut u8, size, alignment);
+        heap::deallocate(self.keys.ptr as *mut u8, size, alignment);
    }
 
    #[inline]
    pub fn as_slices<'a>(&'a self) -> (&'a [K], &'a [V]) {
        unsafe {(
            mem::transmute(raw::Slice {
-                data: self.keys.0,
+                data: self.keys.ptr,
                len: self.len()
            }),
            mem::transmute(raw::Slice {
-                data: self.vals.0,
+                data: self.vals.ptr,
                len: self.len()
            })
        )}
@@ -368,7 +368,7 @@ impl<K, V> Node<K, V> {
        } else {
            unsafe {
                mem::transmute(raw::Slice {
-                    data: self.edges.0,
+                    data: self.edges.ptr,
                    len: self.len() + 1
                })
            }
@@ -586,7 +586,7 @@ impl <K, V> Node<K, V> {
 
    /// If the node has any children
    pub fn is_leaf(&self) -> bool {
-        self.edges.0.is_null()
+        self.edges.ptr.is_null()
    }
 
    /// if the node has too few elements
@@ -1064,7 +1064,7 @@ impl<K, V> Node<K, V> {
            vals: RawItems::from_slice(self.vals()),
            edges: RawItems::from_slice(self.edges()),
 
-            ptr: self.keys.0 as *mut u8,
+            ptr: self.keys.ptr as *mut u8,
            capacity: self.capacity(),
            is_leaf: self.is_leaf()
        },
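The B-tree hunks above are mechanical fallout of the `Unique` change: with `Unique` turned from a tuple struct into a struct with a named field, every `.0` access becomes `.ptr`. The difference in miniature (`TupleUnique` and `FieldUnique` are made-up names for the sketch):

```rust
// Tuple struct: the field is accessed positionally as .0.
struct TupleUnique<T>(*mut T);

// Named-field struct, as Unique becomes in this commit: accessed as .ptr.
struct FieldUnique<T> {
    ptr: *mut T,
}

fn main() {
    let mut x = 7u32;
    let a = TupleUnique(&mut x as *mut u32);
    let b = FieldUnique { ptr: &mut x as *mut u32 };
    // The call sites in the B-tree node code change exactly like this.
    assert_eq!(a.0, b.ptr);
}
```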
@@ -57,7 +57,7 @@ use core::default::Default;
 use core::fmt;
 use core::hash::{self, Hash};
 use core::iter::{repeat, FromIterator, IntoIterator};
-use core::marker::{ContravariantLifetime, InvariantType};
+use core::marker::{self, ContravariantLifetime, InvariantType};
 use core::mem;
 use core::nonzero::NonZero;
 use core::num::{Int, UnsignedInt};
@@ -140,6 +140,7 @@ pub struct Vec<T> {
    ptr: NonZero<*mut T>,
    len: usize,
    cap: usize,
+    _own: marker::PhantomData<T>,
 }
 
 unsafe impl<T: Send> Send for Vec<T> { }
@@ -166,7 +167,7 @@ impl<T> Vec<T> {
        // non-null value which is fine since we never call deallocate on the ptr
        // if cap is 0. The reason for this is because the pointer of a slice
        // being NULL would break the null pointer optimization for enums.
-        Vec { ptr: unsafe { NonZero::new(EMPTY as *mut T) }, len: 0, cap: 0 }
+        unsafe { Vec::from_raw_parts(EMPTY as *mut T, 0, 0) }
    }
 
    /// Constructs a new, empty `Vec<T>` with the specified capacity.
@@ -198,7 +199,7 @@ impl<T> Vec<T> {
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn with_capacity(capacity: usize) -> Vec<T> {
        if mem::size_of::<T>() == 0 {
-            Vec { ptr: unsafe { NonZero::new(EMPTY as *mut T) }, len: 0, cap: usize::MAX }
+            unsafe { Vec::from_raw_parts(EMPTY as *mut T, 0, usize::MAX) }
        } else if capacity == 0 {
            Vec::new()
        } else {
@@ -206,7 +207,7 @@ impl<T> Vec<T> {
                .expect("capacity overflow");
            let ptr = unsafe { allocate(size, mem::min_align_of::<T>()) };
            if ptr.is_null() { ::alloc::oom() }
-            Vec { ptr: unsafe { NonZero::new(ptr as *mut T) }, len: 0, cap: capacity }
+            unsafe { Vec::from_raw_parts(ptr as *mut T, 0, capacity) }
        }
    }
 
@@ -247,7 +248,12 @@ impl<T> Vec<T> {
    #[stable(feature = "rust1", since = "1.0.0")]
    pub unsafe fn from_raw_parts(ptr: *mut T, length: usize,
                                 capacity: usize) -> Vec<T> {
-        Vec { ptr: NonZero::new(ptr), len: length, cap: capacity }
+        Vec {
+            ptr: NonZero::new(ptr),
+            len: length,
+            cap: capacity,
+            _own: marker::PhantomData,
+        }
    }
 
    /// Creates a vector by copying the elements from a raw pointer.
@@ -1626,7 +1632,7 @@ impl<T> IntoIter<T> {
            for _x in self.by_ref() { }
            let IntoIter { allocation, cap, ptr: _ptr, end: _end } = self;
            mem::forget(self);
-            Vec { ptr: NonZero::new(allocation), cap: cap, len: 0 }
+            Vec::from_raw_parts(allocation, 0, cap)
        }
    }
 }
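In `Vec`, the new `_own` field means every struct literal must name one more field, so `new`, `with_capacity`, and the `IntoIter` conversion above are rerouted through `from_raw_parts`, the one constructor that spells out every field. The same refactor on a toy vector (a sketch only; `MiniVec` and the dangling-pointer placeholder are assumptions, not the real `EMPTY` sentinel):

```rust
use std::marker::PhantomData;

#[allow(dead_code)]
struct MiniVec<T> {
    ptr: *mut T,
    len: usize,
    cap: usize,
    _own: PhantomData<T>,
}

impl<T> MiniVec<T> {
    // The single constructor that names every field, mirroring
    // Vec::from_raw_parts in the patch.
    unsafe fn from_raw_parts(ptr: *mut T, len: usize, cap: usize) -> MiniVec<T> {
        MiniVec { ptr, len, cap, _own: PhantomData }
    }

    // Other constructors delegate instead of repeating the struct literal,
    // so adding the marker field touches only one spot.
    fn new() -> MiniVec<T> {
        unsafe { MiniVec::from_raw_parts(std::ptr::NonNull::dangling().as_ptr(), 0, 0) }
    }
}

fn main() {
    let v: MiniVec<u64> = MiniVec::new();
    assert_eq!(v.len, 0);
    assert_eq!(v.cap, 0);
}
```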
@@ -202,6 +202,24 @@ pub unsafe trait Sync {
    // Empty
 }
 
+/// A marker type that indicates to the compiler that the instances
+/// of the type itself owns instances of the type parameter `T`.
+///
+/// This is used to indicate that one or more instances of the type
+/// `T` could be dropped when instances of the type itself is dropped,
+/// though that may not be apparent from the other structure of the
+/// type itself. For example, the type may hold a `*mut T`, which the
+/// compiler does not automatically treat as owned.
+#[unstable(feature = "core",
+           reason = "Newly added to deal with scoping and destructor changes")]
+#[lang="phantom_data"]
+#[derive(PartialEq, Eq, PartialOrd, Ord)]
+pub struct PhantomData<T: ?Sized>;
+
+impl<T: ?Sized> Copy for PhantomData<T> {}
+impl<T: ?Sized> Clone for PhantomData<T> {
+    fn clone(&self) -> PhantomData<T> { *self }
+}
+
 /// A marker type whose type parameter `T` is considered to be
 /// covariant with respect to the type itself. This is (typically)
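`PhantomData` itself, defined above, has a single value and, thanks to the derive plus the hand-written `Copy`/`Clone` impls, behaves like a unit value even when `T` is not `Copy`, `Clone`, or comparable. A quick check in current Rust:

```rust
use std::marker::PhantomData;

fn main() {
    // One value per type; Copy and Clone hold even though String is not Copy.
    let a: PhantomData<String> = PhantomData;
    let b = a;         // copied
    let c = a.clone(); // cloned; `a` is still usable because it is Copy

    // PartialEq/Eq/PartialOrd/Ord come from the derive in the patch.
    assert_eq!(b, c);
    assert!(!(b < c));
}
```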
@@ -92,7 +92,7 @@ use mem;
 use clone::Clone;
 use intrinsics;
 use option::Option::{self, Some, None};
-use marker::{Send, Sized, Sync};
+use marker::{self, Send, Sized, Sync};
 
 use cmp::{PartialEq, Eq, Ord, PartialOrd};
 use cmp::Ordering::{self, Less, Equal, Greater};
@@ -522,7 +522,11 @@ impl<T> PartialOrd for *mut T {
 /// Useful for building abstractions like `Vec<T>` or `Box<T>`, which
 /// internally use raw pointers to manage the memory that they own.
 #[unstable(feature = "core", reason = "recently added to this module")]
-pub struct Unique<T: ?Sized>(pub *mut T);
+pub struct Unique<T: ?Sized> {
+    /// The wrapped `*mut T`.
+    pub ptr: *mut T,
+    _own: marker::PhantomData<T>,
+}
 
 /// `Unique` pointers are `Send` if `T` is `Send` because the data they
 /// reference is unaliased. Note that this aliasing invariant is
@@ -550,6 +554,13 @@ impl<T> Unique<T> {
    #[unstable(feature = "core",
               reason = "recently added to this module")]
    pub unsafe fn offset(self, offset: int) -> *mut T {
-        self.0.offset(offset)
+        self.ptr.offset(offset)
    }
 }
+
+/// Creates a `Unique` wrapped around `ptr`, taking ownership of the
+/// data referenced by `ptr`.
+#[allow(non_snake_case)]
+pub fn Unique<T: ?Sized>(ptr: *mut T) -> Unique<T> {
+    Unique { ptr: ptr, _own: marker::PhantomData }
+}
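`Unique` is unstable and internal to the standard library (and its shape has changed again since), so the sketch below mirrors the new layout with a user-defined type: a named `ptr` field, a `PhantomData` ownership marker, and a function-style constructor that keeps `Unique(ptr)` call sites, like the test in the next hunk, compiling. `MyUnique` is an illustrative name, not the real type:

```rust
use std::marker::PhantomData;

// A user-space mirror of Unique as it looks after this commit.
pub struct MyUnique<T: ?Sized> {
    pub ptr: *mut T,
    _own: PhantomData<T>,
}

// Function-style constructor standing in for the old tuple-struct
// constructor, just like the new `pub fn Unique` in the patch.
#[allow(non_snake_case)]
pub fn MyUnique<T: ?Sized>(ptr: *mut T) -> MyUnique<T> {
    MyUnique { ptr, _own: PhantomData }
}

fn main() {
    // Same shape as the updated test_unsized_unique test below.
    let xs: &mut [i32] = &mut [1, 2, 3];
    let u = MyUnique(xs as *mut [i32]);
    let ys = unsafe { &mut *u.ptr };
    let zs: &mut [i32] = &mut [1, 2, 3];
    assert!(ys == zs);
}
```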
@@ -172,7 +172,7 @@ fn test_set_memory() {
 fn test_unsized_unique() {
    let xs: &mut [_] = &mut [1, 2, 3];
    let ptr = Unique(xs as *mut [_]);
-    let ys = unsafe { &mut *ptr.0 };
+    let ys = unsafe { &mut *ptr.ptr };
    let zs: &mut [_] = &mut [1, 2, 3];
    assert!(ys == zs);
 }

@@ -45,13 +45,13 @@ pub struct Bytes {
 impl Deref for Bytes {
    type Target = [u8];
    fn deref(&self) -> &[u8] {
-        unsafe { slice::from_raw_parts_mut(self.ptr.0, self.len) }
+        unsafe { slice::from_raw_parts_mut(self.ptr.ptr, self.len) }
    }
 }
 
 impl Drop for Bytes {
    fn drop(&mut self) {
-        unsafe { libc::free(self.ptr.0 as *mut _); }
+        unsafe { libc::free(self.ptr.ptr as *mut _); }
    }
 }
 

@@ -312,6 +312,8 @@ lets_do_this! {
    ExchangeHeapLangItem, "exchange_heap", exchange_heap;
    OwnedBoxLangItem, "owned_box", owned_box;
 
+    PhantomDataItem, "phantom_data", phantom_data;
+
    CovariantTypeItem, "covariant_type", covariant_type;
    ContravariantTypeItem, "contravariant_type", contravariant_type;
    InvariantTypeItem, "invariant_type", invariant_type;

@@ -72,6 +72,8 @@ use std::cell::{Cell, RefCell};
 use std::cmp;
 use std::fmt;
 use std::hash::{Hash, Writer, SipHasher, Hasher};
+#[cfg(stage0)]
+use std::marker;
 use std::mem;
 use std::ops;
 use std::rc::Rc;
@@ -931,6 +933,26 @@ pub struct TyS<'tcx> {
 
    // the maximal depth of any bound regions appearing in this type.
    region_depth: u32,
+
+    // force the lifetime to be invariant to work-around
+    // region-inference issues with a covariant lifetime.
+    #[cfg(stage0)]
+    marker: ShowInvariantLifetime<'tcx>,
+}
+
+#[cfg(stage0)]
+struct ShowInvariantLifetime<'a>(marker::InvariantLifetime<'a>);
+#[cfg(stage0)]
+impl<'a> ShowInvariantLifetime<'a> {
+    fn new() -> ShowInvariantLifetime<'a> {
+        ShowInvariantLifetime(marker::InvariantLifetime)
+    }
+}
+#[cfg(stage0)]
+impl<'a> fmt::Debug for ShowInvariantLifetime<'a> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, "InvariantLifetime")
+    }
 }
 
 impl fmt::Debug for TypeFlags {
@@ -939,9 +961,18 @@ impl fmt::Debug for TypeFlags {
    }
 }
 
+#[cfg(stage0)]
+impl<'tcx> PartialEq for TyS<'tcx> {
+    fn eq<'a,'b>(&'a self, other: &'b TyS<'tcx>) -> bool {
+        let other: &'a TyS<'tcx> = unsafe { mem::transmute(other) };
+        (self as *const _) == (other as *const _)
+    }
+}
+#[cfg(not(stage0))]
 impl<'tcx> PartialEq for TyS<'tcx> {
    fn eq(&self, other: &TyS<'tcx>) -> bool {
-        (self as *const _) == (other as *const _)
+        // (self as *const _) == (other as *const _)
+        (self as *const TyS<'tcx>) == (other as *const TyS<'tcx>)
    }
 }
 impl<'tcx> Eq for TyS<'tcx> {}
@@ -2475,11 +2506,17 @@ fn intern_ty<'tcx>(type_arena: &'tcx TypedArena<TyS<'tcx>>,
 
    let flags = FlagComputation::for_sty(&st);
 
-    let ty = type_arena.alloc(TyS {
-        sty: st,
-        flags: flags.flags,
-        region_depth: flags.depth,
-    });
+    let ty = match () {
+        #[cfg(stage0)]
+        () => type_arena.alloc(TyS { sty: st,
+                                     flags: flags.flags,
+                                     region_depth: flags.depth,
+                                     marker: ShowInvariantLifetime::new(), }),
+        #[cfg(not(stage0))]
+        () => type_arena.alloc(TyS { sty: st,
+                                     flags: flags.flags,
+                                     region_depth: flags.depth, }),
+    };
 
    debug!("Interned type: {:?} Pointer: {:?}",
           ty, ty as *const _);
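The odd `let ty = match () { ... }` shape in `intern_ty` exists because `#[cfg]` cannot be attached to a bare expression, but it can be attached to match arms; a `match ()` with one arm per configuration therefore picks between the two struct literals at compile time. A stand-alone illustration of the trick, using `debug_assertions` in place of the `stage0` flag:

```rust
// #[cfg] cannot be applied directly to an expression, but it can be
// applied to match arms, so `match ()` selects one expression per build
// configuration -- the same trick intern_ty uses for stage0.
fn build_label() -> &'static str {
    match () {
        #[cfg(debug_assertions)]
        () => "debug build",
        #[cfg(not(debug_assertions))]
        () => "release build",
    }
}

fn main() {
    println!("{}", build_label());
}
```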