remove reference counting headers from ~

Unique pointers and unique vectors currently carry a reference counting
header whenever the contained type owns a managed pointer.

This `{ ref_count, type_desc, prev, next }` header is not necessary and
not a sensible foundation for tracing. It adds needless complexity to
library code and is responsible for breakage in the places where the
managed-pointer branch has been left out.
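
For context, a rough sketch of the header in question, based on the
`unstable::raw::Box` type touched in the diff below (field types are
approximate, not an authoritative definition):

    // Sketch: the header a ~T or ~[T] allocation carried whenever T owned
    // a managed pointer. The payload followed the header in the same
    // allocation, so reaching the body needed a branch plus a GEP past it.
    struct Box<T> {
        ref_count: uint,    // forced to -2 so the annihilator skips unique boxes
        type_desc: *TyDesc, // glue for dropping the payload
        prev: *mut Box<T>,  // intrusive links in the task-local
        next: *mut Box<T>,  //   allocation list
        data: T,            // the actual payload
    }

After this change, `~T` always points directly at a bare `T`, and `~[T]`
at a bare `Vec<T>` holding only `fill` and `alloc`, as in the new
`with_capacity` below.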

The `borrow_offset` field can now be removed from `TyDesc` along with
the associated handling in the compiler.
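
A sketch of that follow-up cleanup (the surrounding `TyDesc` fields are an
assumption about the era's `unstable::intrinsics::TyDesc`, shown only to
place the field; only `borrow_offset` itself is confirmed by this commit):

    // Hypothetical before/after: with no header ever present on unique
    // allocations, a type descriptor no longer needs to record how far
    // the value body sits from the start of the allocation.
    pub struct TyDesc {
        size: uint,          // size of the described type
        align: uint,         // its alignment
        // ... glue function pointers elided ...
        borrow_offset: uint, // offset to the box body; removable after this commit
    }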

Closes #9510
Closes #11533
Author: Daniel Micay
Date:   2014-01-14 02:46:58 -05:00
parent 9075025c7b
commit 0e885e42b1

22 changed files with 152 additions and 148 deletions


@@ -1584,14 +1584,9 @@ fn compile_submatch_continue<'r,
     }
     if any_uniq_pat(m, col) {
-        let pat_ty = node_id_type(bcx, pat_id);
         let llbox = Load(bcx, val);
-        let unboxed = match ty::get(pat_ty).sty {
-            ty::ty_uniq(..) if !ty::type_contents(bcx.tcx(), pat_ty).owns_managed() => llbox,
-            _ => GEPi(bcx, llbox, [0u, abi::box_field_body])
-        };
         compile_submatch(bcx, enter_uniq(bcx, dm, m, col, val),
-                         vec::append(~[unboxed], vals_left), chk);
+                         vec::append(~[llbox], vals_left), chk);
         return;
     }
@@ -2231,13 +2226,8 @@ fn bind_irrefutable_pat<'a>(
             }
         }
         ast::PatUniq(inner) => {
-            let pat_ty = node_id_type(bcx, pat.id);
             let llbox = Load(bcx, val);
-            let unboxed = match ty::get(pat_ty).sty {
-                ty::ty_uniq(..) if !ty::type_contents(bcx.tcx(), pat_ty).owns_managed() => llbox,
-                _ => GEPi(bcx, llbox, [0u, abi::box_field_body])
-            };
-            bcx = bind_irrefutable_pat(bcx, inner, unboxed, binding_mode);
+            bcx = bind_irrefutable_pat(bcx, inner, llbox, binding_mode);
         }
         ast::PatRegion(inner) => {
             let loaded_val = Load(bcx, val);


@@ -358,7 +358,7 @@ pub fn malloc_raw_dyn<'a>(
     } else {
         // we treat ~fn, @fn and @[] as @ here, which isn't ideal
         let langcall = match heap {
-            heap_managed | heap_managed_unique => {
+            heap_managed => {
                 require_alloc_fn(bcx, t, MallocFnLangItem)
             }
             heap_exchange_closure => {
@@ -382,9 +382,7 @@ pub fn malloc_raw_dyn<'a>(
                         langcall,
                         [tydesc, size],
                         None);
-        let r = rslt(r.bcx, PointerCast(r.bcx, r.val, llty));
-        maybe_set_managed_unique_rc(r.bcx, r.val, heap);
-        r
+        rslt(r.bcx, PointerCast(r.bcx, r.val, llty))
     }
 }
@@ -431,27 +429,6 @@ pub fn malloc_general<'a>(bcx: &'a Block, t: ty::t, heap: heap)
     malloc_general_dyn(bcx, t, heap, llsize_of(bcx.ccx(), ty))
 }

-pub fn heap_for_unique(bcx: &Block, t: ty::t) -> heap {
-    if ty::type_contents(bcx.tcx(), t).owns_managed() {
-        heap_managed_unique
-    } else {
-        heap_exchange
-    }
-}
-
-pub fn maybe_set_managed_unique_rc(bcx: &Block, bx: ValueRef, heap: heap) {
-    assert!(heap != heap_exchange);
-    if heap == heap_managed_unique {
-        // In cases where we are looking at a unique-typed allocation in the
-        // managed heap (thus have refcount 1 from the managed allocator),
-        // such as a ~(@foo) or such. These need to have their refcount forced
-        // to -2 so the annihilator ignores them.
-        let rc = GEPi(bcx, bx, [0u, abi::box_field_refcnt]);
-        let rc_val = C_int(bcx.ccx(), -2);
-        Store(bcx, rc_val, rc);
-    }
-}
-
 // Type descriptor and type glue stuff

 pub fn get_tydesc_simple(ccx: &CrateContext, t: ty::t) -> ValueRef {


@@ -150,14 +150,6 @@ pub fn mk_closure_tys(tcx: ty::ctxt,
     return cdata_ty;
 }

-fn heap_for_unique_closure(bcx: &Block, t: ty::t) -> heap {
-    if ty::type_contents(bcx.tcx(), t).owns_managed() {
-        heap_managed_unique
-    } else {
-        heap_exchange_closure
-    }
-}
-
 pub fn allocate_cbox<'a>(
                     bcx: &'a Block<'a>,
                     sigil: ast::Sigil,
@@ -173,7 +165,7 @@ pub fn allocate_cbox<'a>(
             tcx.sess.bug("trying to trans allocation of @fn")
         }
         ast::OwnedSigil => {
-            malloc_raw(bcx, cdata_ty, heap_for_unique_closure(bcx, cdata_ty))
+            malloc_raw(bcx, cdata_ty, heap_exchange_closure)
         }
         ast::BorrowedSigil => {
             let cbox_ty = tuplify_box_ty(tcx, cdata_ty);


@@ -316,7 +316,6 @@ pub fn warn_not_to_commit(ccx: &CrateContext, msg: &str) {
 #[deriving(Eq)]
 pub enum heap {
     heap_managed,
-    heap_managed_unique,
     heap_exchange,
     heap_exchange_closure
 }
@@ -498,7 +497,7 @@ pub fn add_clean_temp_mem_in_scope_(bcx: &Block, scope_id: Option<ast::NodeId>,
 pub fn add_clean_free(cx: &Block, ptr: ValueRef, heap: heap) {
     let free_fn = match heap {
-        heap_managed | heap_managed_unique => {
+        heap_managed => {
             @GCHeapFreeingCleanupFunction {
                 ptr: ptr,
             } as @CleanupFunction


@@ -570,11 +570,6 @@ impl Datum {
         let (content_ty, header) = match ty::get(self.ty).sty {
             ty::ty_box(typ) => (typ, true),
             ty::ty_uniq(typ) => (typ, false),
-            ty::ty_vec(_, ty::vstore_uniq) | ty::ty_str(ty::vstore_uniq) => {
-                let unit_ty = ty::sequence_element_type(bcx.tcx(), self.ty);
-                let unboxed_vec_ty = ty::mk_mut_unboxed_vec(bcx.tcx(), unit_ty);
-                (unboxed_vec_ty, true)
-            }
             _ => {
                 bcx.tcx().sess.bug(format!(
                     "box_body() invoked on non-box type {}",
@@ -582,7 +577,7 @@ impl Datum {
             }
         };

-        if !header && !ty::type_contents(bcx.tcx(), content_ty).owns_managed() {
+        if !header {
             let ptr = self.to_value_llval(bcx);
             let ty = type_of::type_of(bcx.ccx(), content_ty);
             let body = PointerCast(bcx, ptr, ty.ptr_to());


@@ -2147,10 +2147,6 @@ fn type_metadata(cx: &CrateContext,
                 ty::vstore_fixed(len) => {
                     fixed_vec_metadata(cx, mt.ty, len, usage_site_span)
                 }
-                ty::vstore_uniq if ty::type_contents(cx.tcx, mt.ty).owns_managed() => {
-                    let boxed_vec_metadata = boxed_vec_metadata(cx, mt.ty, usage_site_span);
-                    pointer_type_metadata(cx, t, boxed_vec_metadata)
-                }
                 ty::vstore_uniq => {
                     let vec_metadata = vec_metadata(cx, mt.ty, usage_site_span);
                     pointer_type_metadata(cx, t, vec_metadata)
@@ -2165,12 +2161,8 @@ fn type_metadata(cx: &CrateContext,
             }
         },
         ty::ty_uniq(typ) => {
-            if ty::type_contents(cx.tcx, typ).owns_managed() {
-                create_pointer_to_box_metadata(cx, t, typ)
-            } else {
-                let pointee = type_metadata(cx, typ, usage_site_span);
-                pointer_type_metadata(cx, t, pointee)
-            }
+            let pointee = type_metadata(cx, typ, usage_site_span);
+            pointer_type_metadata(cx, t, pointee)
         }
         ty::ty_ptr(ref mt) | ty::ty_rptr(_, ref mt) => {
             let pointee = type_metadata(cx, mt.ty, usage_site_span);


@@ -608,8 +608,7 @@ fn trans_rvalue_datum_unadjusted<'a>(bcx: &'a Block<'a>, expr: &ast::Expr)
                                                  expr, contents);
         }
         ast::ExprVstore(contents, ast::ExprVstoreUniq) => {
-            let heap = heap_for_unique(bcx, expr_ty(bcx, contents));
-            return tvec::trans_uniq_or_managed_vstore(bcx, heap,
+            return tvec::trans_uniq_or_managed_vstore(bcx, heap_exchange,
                                                       expr, contents);
         }
         ast::ExprBox(_, contents) => {
@@ -617,7 +616,7 @@ fn trans_rvalue_datum_unadjusted<'a>(bcx: &'a Block<'a>, expr: &ast::Expr)
             // `trans_rvalue_dps_unadjusted`.)
             let box_ty = expr_ty(bcx, expr);
             let contents_ty = expr_ty(bcx, contents);
-            let heap = heap_for_unique(bcx, contents_ty);
+            let heap = heap_exchange;
             return trans_boxed_expr(bcx, box_ty, contents, contents_ty, heap)
         }
         ast::ExprLit(lit) => {
@@ -1461,8 +1460,7 @@ fn trans_unary_datum<'a>(
             trans_boxed_expr(bcx, un_ty, sub_expr, sub_ty, heap_managed)
         }
         ast::UnUniq => {
-            let heap = heap_for_unique(bcx, un_ty);
-            trans_boxed_expr(bcx, un_ty, sub_expr, sub_ty, heap)
+            trans_boxed_expr(bcx, un_ty, sub_expr, sub_ty, heap_exchange)
         }
         ast::UnDeref => {
             bcx.sess().bug("deref expressions should have been \


@@ -303,11 +303,7 @@ pub fn make_free_glue<'a>(bcx: &'a Block<'a>, v: ValueRef, t: ty::t)
             with_cond(bcx, not_null, |bcx| {
                 let body_datum = box_datum.box_body(bcx);
                 let bcx = drop_ty(bcx, body_datum.to_ref_llval(bcx), body_datum.ty);
-                if ty::type_contents(bcx.tcx(), t).owns_managed() {
-                    trans_free(bcx, box_datum.val)
-                } else {
-                    trans_exchange_free(bcx, box_datum.val)
-                }
+                trans_exchange_free(bcx, box_datum.val)
             })
         }
         ty::ty_vec(_, ty::vstore_uniq) | ty::ty_str(ty::vstore_uniq) |
@@ -552,7 +548,6 @@ pub fn declare_tydesc(ccx: &CrateContext, t: ty::t) -> @tydesc_info {
     let has_header = match ty::get(t).sty {
         ty::ty_box(..) => true,
-        ty::ty_uniq(..) => ty::type_contents(ccx.tcx, t).owns_managed(),
         _ => false
     };


@@ -184,11 +184,7 @@ impl<'a> Reflector<'a> {
             ty::ty_vec(ref mt, vst) => {
                 let (name, extra) = self.vstore_name_and_extra(t, vst);
                 let extra = extra + self.c_mt(mt);
-                if "uniq" == name && ty::type_contents(bcx.tcx(), t).owns_managed() {
-                    self.visit("evec_uniq_managed", extra)
-                } else {
-                    self.visit(~"evec_" + name, extra)
-                }
+                self.visit(~"evec_" + name, extra)
             }
             // Should remove mt from box and uniq.
             ty::ty_box(typ) => {
@@ -203,11 +199,7 @@ impl<'a> Reflector<'a> {
                     ty: typ,
                     mutbl: ast::MutImmutable,
                 });
-                if ty::type_contents(bcx.tcx(), t).owns_managed() {
-                    self.visit("uniq_managed", extra)
-                } else {
-                    self.visit("uniq", extra)
-                }
+                self.visit("uniq", extra)
             }
             ty::ty_ptr(ref mt) => {
                 let extra = self.c_mt(mt);


@@ -64,7 +64,14 @@ pub fn get_alloc(bcx: &Block, vptr: ValueRef) -> ValueRef {
 }

 pub fn get_bodyptr(bcx: &Block, vptr: ValueRef, t: ty::t) -> ValueRef {
-    if ty::type_contents(bcx.tcx(), t).owns_managed() {
+    let vt = vec_types(bcx, t);
+    let managed = match ty::get(vt.vec_ty).sty {
+        ty::ty_str(ty::vstore_box) | ty::ty_vec(_, ty::vstore_box) => true,
+        _ => false
+    };
+
+    if managed {
         GEPi(bcx, vptr, [0u, abi::box_field_body])
     } else {
         vptr
@@ -106,7 +113,6 @@ pub fn alloc_raw<'a>(
         base::malloc_general_dyn(bcx, vecbodyty, heap, vecsize);
     Store(bcx, fill, GEPi(bcx, body, [0u, abi::vec_elt_fill]));
     Store(bcx, alloc, GEPi(bcx, body, [0u, abi::vec_elt_alloc]));
-    base::maybe_set_managed_unique_rc(bcx, bx, heap);
     return rslt(bcx, bx);
 }
 }
@@ -117,7 +123,7 @@ pub fn alloc_uniq_raw<'a>(
                  fill: ValueRef,
                  alloc: ValueRef)
               -> Result<'a> {
-    alloc_raw(bcx, unit_ty, fill, alloc, base::heap_for_unique(bcx, unit_ty))
+    alloc_raw(bcx, unit_ty, fill, alloc, heap_exchange)
 }

 pub fn alloc_vec<'a>(
@@ -350,7 +356,7 @@ pub fn trans_uniq_or_managed_vstore<'a>(
             }
         }
         heap_exchange_closure => fail!("vectors use exchange_alloc"),
-        heap_managed | heap_managed_unique => {}
+        heap_managed => {}
     }

     let vt = vec_types_from_expr(bcx, vstore_expr);


@@ -269,10 +269,6 @@ impl Type {
         Type::smart_ptr(ctx, &Type::opaque())
     }

-    pub fn unique(ctx: &CrateContext, ty: &Type) -> Type {
-        Type::smart_ptr(ctx, ty)
-    }
-
     pub fn opaque_cbox_ptr(cx: &CrateContext) -> Type {
         Type::opaque_box(cx).ptr_to()
     }
@@ -281,7 +277,7 @@ impl Type {
         let tydesc_ptr = ctx.tydesc_type.ptr_to();
         let box_ty = match store {
             ty::BoxTraitStore => Type::opaque_box(ctx),
-            ty::UniqTraitStore => Type::unique(ctx, &Type::i8()),
+            ty::UniqTraitStore => Type::i8(),
             ty::RegionTraitStore(..) => Type::i8()
         };
         Type::struct_([tydesc_ptr, box_ty.ptr_to()], false)


@@ -245,21 +245,11 @@ pub fn type_of(cx: &CrateContext, t: ty::t) -> Type {
             Type::smart_ptr(cx, &ty).ptr_to()
         }
         ty::ty_uniq(typ) => {
-            let ty = type_of(cx, typ);
-            if ty::type_contents(cx.tcx, typ).owns_managed() {
-                Type::unique(cx, &ty).ptr_to()
-            } else {
-                ty.ptr_to()
-            }
+            type_of(cx, typ).ptr_to()
         }
         ty::ty_vec(ref mt, ty::vstore_uniq) => {
             let ty = type_of(cx, mt.ty);
-            let ty = Type::vec(cx.sess.targ_cfg.arch, &ty);
-            if ty::type_contents(cx.tcx, mt.ty).owns_managed() {
-                Type::unique(cx, &ty).ptr_to()
-            } else {
-                ty.ptr_to()
-            }
+            Type::vec(cx.sess.targ_cfg.arch, &ty).ptr_to()
         }
         ty::ty_unboxed_vec(ref mt) => {
             let ty = type_of(cx, mt.ty);


@@ -230,4 +230,12 @@ mod tests {
         drop(x);
         assert!(y.upgrade().is_none());
     }
+
+    #[test]
+    fn gc_inside() {
+        // see issue #11532
+        use gc::Gc;
+        let a = Rc::new(RefCell::new(Gc::new(1)));
+        assert!(a.borrow().try_borrow_mut().is_some());
+    }
 }


@@ -227,6 +227,7 @@ impl<V:TyVisitor + MovePtr> TyVisitor for MovePtrAdaptor<V> {
         true
     }

+    #[cfg(stage0)]
     fn visit_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
         self.align_to::<~u8>();
         if ! self.inner.visit_uniq_managed(mtbl, inner) { return false; }
@@ -275,6 +276,7 @@ impl<V:TyVisitor + MovePtr> TyVisitor for MovePtrAdaptor<V> {
         true
     }

+    #[cfg(stage0)]
     fn visit_evec_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
         self.align_to::<~[@u8]>();
         if ! self.inner.visit_evec_uniq_managed(mtbl, inner) { return false; }


@@ -310,6 +310,7 @@ impl<'a> TyVisitor for ReprVisitor<'a> {
         })
     }

+    #[cfg(stage0)]
     fn visit_uniq_managed(&mut self, _mtbl: uint, inner: *TyDesc) -> bool {
         self.writer.write(['~' as u8]);
         self.get::<&raw::Box<()>>(|this, b| {
@@ -358,6 +359,7 @@ impl<'a> TyVisitor for ReprVisitor<'a> {
         })
     }

+    #[cfg(stage0)]
     fn visit_evec_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
         self.get::<&raw::Box<raw::Vec<()>>>(|this, b| {
             this.writer.write(['~' as u8]);


@@ -146,6 +146,7 @@ pub trait TyVisitor {
     fn visit_box(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_uniq(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
+    #[cfg(stage0)]
     fn visit_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_ptr(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_rptr(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
@@ -154,6 +155,7 @@ pub trait TyVisitor {
     fn visit_unboxed_vec(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_evec_box(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_evec_uniq(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
+    #[cfg(stage0)]
     fn visit_evec_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_evec_slice(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_evec_fixed(&mut self, n: uint, sz: uint, align: uint,


@@ -116,14 +116,18 @@ use ptr::to_unsafe_ptr;
 use ptr;
 use ptr::RawPtr;
 use rt::global_heap::{malloc_raw, realloc_raw, exchange_free};
+#[cfg(stage0)]
 use rt::local_heap::local_free;
 use mem;
 use mem::size_of;
 use uint;
 use unstable::finally::Finally;
 use unstable::intrinsics;
+#[cfg(stage0)]
 use unstable::intrinsics::{get_tydesc, owns_managed};
-use unstable::raw::{Box, Repr, Slice, Vec};
+use unstable::raw::{Repr, Slice, Vec};
+#[cfg(stage0)]
+use unstable::raw::Box;
 use util;

 /**
@@ -178,6 +182,7 @@ pub fn from_elem<T:Clone>(n_elts: uint, t: T) -> ~[T] {

 /// Creates a new vector with a capacity of `capacity`
 #[inline]
+#[cfg(stage0)]
 pub fn with_capacity<T>(capacity: uint) -> ~[T] {
     unsafe {
         if owns_managed::<T>() {
@@ -198,6 +203,23 @@ pub fn with_capacity<T>(capacity: uint) -> ~[T] {
     }
 }

+/// Creates a new vector with a capacity of `capacity`
+#[inline]
+#[cfg(not(stage0))]
+pub fn with_capacity<T>(capacity: uint) -> ~[T] {
+    unsafe {
+        let alloc = capacity * mem::nonzero_size_of::<T>();
+        let size = alloc + mem::size_of::<Vec<()>>();
+        if alloc / mem::nonzero_size_of::<T>() != capacity || size < alloc {
+            fail!("vector size is too large: {}", capacity);
+        }
+        let ptr = malloc_raw(size) as *mut Vec<()>;
+        (*ptr).alloc = alloc;
+        (*ptr).fill = 0;
+        cast::transmute(ptr)
+    }
+}
+
 /**
  * Builds a vector by calling a provided function with an argument
  * function that pushes an element to the back of a vector.
@@ -1481,6 +1503,7 @@ impl<T> OwnedVector<T> for ~[T] {
         self.move_iter().invert()
     }

+    #[cfg(stage0)]
     fn reserve(&mut self, n: uint) {
         // Only make the (slow) call into the runtime if we have to
         if self.capacity() < n {
@@ -1504,6 +1527,24 @@ impl<T> OwnedVector<T> for ~[T] {
         }
     }

+    #[cfg(not(stage0))]
+    fn reserve(&mut self, n: uint) {
+        // Only make the (slow) call into the runtime if we have to
+        if self.capacity() < n {
+            unsafe {
+                let ptr: *mut *mut Vec<()> = cast::transmute(self);
+                let alloc = n * mem::nonzero_size_of::<T>();
+                let size = alloc + mem::size_of::<Vec<()>>();
+                if alloc / mem::nonzero_size_of::<T>() != n || size < alloc {
+                    fail!("vector size is too large: {}", n);
+                }
+                *ptr = realloc_raw(*ptr as *mut c_void, size)
+                       as *mut Vec<()>;
+                (**ptr).alloc = alloc;
+            }
+        }
+    }
+
     #[inline]
     fn reserve_at_least(&mut self, n: uint) {
         self.reserve(uint::next_power_of_two_opt(n).unwrap_or(n));
@@ -1520,6 +1561,7 @@ impl<T> OwnedVector<T> for ~[T] {
     }

     #[inline]
+    #[cfg(stage0)]
     fn capacity(&self) -> uint {
         unsafe {
             if owns_managed::<T>() {
@@ -1532,6 +1574,15 @@ impl<T> OwnedVector<T> for ~[T] {
         }
     }

+    #[inline]
+    #[cfg(not(stage0))]
+    fn capacity(&self) -> uint {
+        unsafe {
+            let repr: **Vec<()> = cast::transmute(self);
+            (**repr).alloc / mem::nonzero_size_of::<T>()
+        }
+    }
+
     fn shrink_to_fit(&mut self) {
         unsafe {
             let ptr: *mut *mut Vec<()> = cast::transmute(self);
@@ -1543,6 +1594,7 @@ impl<T> OwnedVector<T> for ~[T] {
     }

     #[inline]
+    #[cfg(stage0)]
     fn push(&mut self, t: T) {
         unsafe {
             if owns_managed::<T>() {
@@ -1583,7 +1635,31 @@ impl<T> OwnedVector<T> for ~[T] {
             intrinsics::move_val_init(&mut(*p), t);
         }
     }
+    }
+
+    #[inline]
+    #[cfg(not(stage0))]
+    fn push(&mut self, t: T) {
+        unsafe {
+            let repr: **Vec<()> = cast::transmute(&mut *self);
+            let fill = (**repr).fill;
+            if (**repr).alloc <= fill {
+                self.reserve_additional(1);
+            }
+
+            push_fast(self, t);
+        }
+
+        // This doesn't bother to make sure we have space.
+        #[inline] // really pretty please
+        unsafe fn push_fast<T>(this: &mut ~[T], t: T) {
+            let repr: **mut Vec<u8> = cast::transmute(this);
+            let fill = (**repr).fill;
+            (**repr).fill += mem::nonzero_size_of::<T>();
+            let p = to_unsafe_ptr(&((**repr).data));
+            let p = ptr::offset(p, fill as int) as *mut T;
+            intrinsics::move_val_init(&mut(*p), t);
+        }
     }

     #[inline]
@@ -1746,6 +1822,7 @@ impl<T> OwnedVector<T> for ~[T] {
         }
     }
     #[inline]
+    #[cfg(stage0)]
     unsafe fn set_len(&mut self, new_len: uint) {
         if owns_managed::<T>() {
             let repr: **mut Box<Vec<()>> = cast::transmute(self);
@@ -1755,6 +1832,13 @@ impl<T> OwnedVector<T> for ~[T] {
             (**repr).fill = new_len * mem::nonzero_size_of::<T>();
         }
     }
+
+    #[inline]
+    #[cfg(not(stage0))]
+    unsafe fn set_len(&mut self, new_len: uint) {
+        let repr: **mut Vec<()> = cast::transmute(self);
+        (**repr).fill = new_len * mem::nonzero_size_of::<T>();
+    }
 }

 impl<T> Mutable for ~[T] {
@@ -2926,6 +3010,7 @@ impl<T> DoubleEndedIterator<T> for MoveIterator<T> {
 }

 #[unsafe_destructor]
+#[cfg(stage0)]
 impl<T> Drop for MoveIterator<T> {
     fn drop(&mut self) {
         // destroy the remaining elements
@@ -2940,6 +3025,18 @@ impl<T> Drop for MoveIterator<T> {
     }
 }

+#[unsafe_destructor]
+#[cfg(not(stage0))]
+impl<T> Drop for MoveIterator<T> {
+    fn drop(&mut self) {
+        // destroy the remaining elements
+        for _x in *self {}
+        unsafe {
+            exchange_free(self.allocation as *u8 as *c_char)
+        }
+    }
+}
+
 /// An iterator that moves out of a vector in reverse order.
 pub type MoveRevIterator<T> = Invert<MoveIterator<T>>;


@@ -17,16 +17,16 @@
 // debugger:run
 // debugger:finish

-// debugger:print unique->val.elements[0]->val
+// debugger:print unique->elements[0]->val
 // check:$1 = 10

-// debugger:print unique->val.elements[1]->val
+// debugger:print unique->elements[1]->val
 // check:$2 = 11

-// debugger:print unique->val.elements[2]->val
+// debugger:print unique->elements[2]->val
 // check:$3 = 12

-// debugger:print unique->val.elements[3]->val
+// debugger:print unique->elements[3]->val
 // check:$4 = 13

 #[allow(unused_variable)];


@@ -21,28 +21,22 @@
 // debugger:print *ordinary_unique
 // check:$1 = {-1, -2}

-// debugger:print managed_within_unique.val->x
+// debugger:print managed_within_unique->x
 // check:$2 = -3

-// debugger:print managed_within_unique.val->y->val
+// debugger:print managed_within_unique->y->val
 // check:$3 = -4

 #[allow(unused_variable)];

-struct ContainsManaged
-{
-    x: int,
-    y: @int
+struct ContainsManaged {
+    x: int,
+    y: @int
 }

 fn main() {
-    let ordinary_unique = ~(-1, -2);
-
-    // This is a special case: Normally values allocated in the exchange heap are not boxed, unless,
-    // however, if they contain managed pointers.
-    // This test case verifies that both cases are handled correctly.
+    let ordinary_unique = ~(-1, -2);
     let managed_within_unique = ~ContainsManaged { x: -3, y: @-4 };

     zzz();


@@ -49,9 +49,9 @@
 // debugger:print stack_managed.next.val->val.value
 // check:$12 = 11

-// debugger:print unique_managed->val.value
+// debugger:print unique_managed->value
 // check:$13 = 12

-// debugger:print unique_managed->val.next.val->val.value
+// debugger:print unique_managed->next.val->val.value
 // check:$14 = 13

 // debugger:print box_managed->val.value


@@ -223,13 +223,6 @@ impl<V:TyVisitor + movable_ptr> TyVisitor for ptr_visit_adaptor<V> {
         true
     }

-    fn visit_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
-        self.align_to::<~u8>();
-        if ! self.inner().visit_uniq_managed(mtbl, inner) { return false; }
-        self.bump_past::<~u8>();
-        true
-    }
-
     fn visit_ptr(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
         self.align_to::<*u8>();
         if ! self.inner().visit_ptr(mtbl, inner) { return false; }
@@ -275,13 +268,6 @@ impl<V:TyVisitor + movable_ptr> TyVisitor for ptr_visit_adaptor<V> {
         true
     }

-    fn visit_evec_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
-        self.align_to::<~[@u8]>();
-        if ! self.inner().visit_evec_uniq_managed(mtbl, inner) { return false; }
-        self.bump_past::<~[@u8]>();
-        true
-    }
-
     fn visit_evec_slice(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
         self.align_to::<&'static [u8]>();
         if ! self.inner().visit_evec_slice(mtbl, inner) { return false; }
@@ -549,7 +535,6 @@ impl TyVisitor for my_visitor {
     fn visit_box(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
     fn visit_uniq(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
-    fn visit_uniq_managed(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
     fn visit_ptr(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
     fn visit_rptr(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
@@ -557,7 +542,6 @@ impl TyVisitor for my_visitor {
     fn visit_unboxed_vec(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
     fn visit_evec_box(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
     fn visit_evec_uniq(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
-    fn visit_evec_uniq_managed(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
     fn visit_evec_slice(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
     fn visit_evec_fixed(&mut self, _n: uint, _sz: uint, _align: uint,
                         _mtbl: uint, _inner: *TyDesc) -> bool { true }


@@ -70,7 +70,6 @@ impl TyVisitor for MyVisitor {
     fn visit_box(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
     fn visit_uniq(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
-    fn visit_uniq_managed(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
     fn visit_ptr(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
     fn visit_rptr(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
@@ -83,12 +82,6 @@ impl TyVisitor for MyVisitor {
         self.types.push(~"]");
         true
     }
-    fn visit_evec_uniq_managed(&mut self, _mtbl: uint, inner: *TyDesc) -> bool {
-        self.types.push(~"[");
-        unsafe { visit_tydesc(inner, &mut *self as &mut TyVisitor) };
-        self.types.push(~"]");
-        true
-    }
     fn visit_evec_slice(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
     fn visit_evec_fixed(&mut self, _n: uint, _sz: uint, _align: uint,
                         _mtbl: uint, _inner: *TyDesc) -> bool { true }