1
Fork 0

Rollup merge of #32741 - tbu-:pr_remove_fixme_12808, r=bluss

Remove strange names created by lack of privacy-conscious name lookup

This cleanup was made possible by the fix for issue #12808 (field accesses no longer resolve through `Deref` to private fields of the contained type), so the leading-underscore field names are no longer needed.
This commit is contained in:
Manish Goregaokar 2016-04-07 23:26:19 +05:30
commit 1d59b91ed4
3 changed files with 74 additions and 86 deletions

View file

@ -124,9 +124,7 @@ const MAX_REFCOUNT: usize = (isize::MAX) as usize;
#[unsafe_no_drop_flag] #[unsafe_no_drop_flag]
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
pub struct Arc<T: ?Sized> { pub struct Arc<T: ?Sized> {
// FIXME #12808: strange name to try to avoid interfering with ptr: Shared<ArcInner<T>>,
// field accesses of the contained type via Deref
_ptr: Shared<ArcInner<T>>,
} }
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
@ -144,9 +142,7 @@ impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Arc<U>> for Arc<T> {}
#[unsafe_no_drop_flag] #[unsafe_no_drop_flag]
#[stable(feature = "arc_weak", since = "1.4.0")] #[stable(feature = "arc_weak", since = "1.4.0")]
pub struct Weak<T: ?Sized> { pub struct Weak<T: ?Sized> {
// FIXME #12808: strange name to try to avoid interfering with ptr: Shared<ArcInner<T>>,
// field accesses of the contained type via Deref
_ptr: Shared<ArcInner<T>>,
} }
#[stable(feature = "arc_weak", since = "1.4.0")] #[stable(feature = "arc_weak", since = "1.4.0")]
@ -198,7 +194,7 @@ impl<T> Arc<T> {
weak: atomic::AtomicUsize::new(1), weak: atomic::AtomicUsize::new(1),
data: data, data: data,
}; };
Arc { _ptr: unsafe { Shared::new(Box::into_raw(x)) } } Arc { ptr: unsafe { Shared::new(Box::into_raw(x)) } }
} }
/// Unwraps the contained value if the `Arc<T>` has exactly one strong reference. /// Unwraps the contained value if the `Arc<T>` has exactly one strong reference.
@ -230,11 +226,11 @@ impl<T> Arc<T> {
atomic::fence(Acquire); atomic::fence(Acquire);
unsafe { unsafe {
let ptr = *this._ptr; let ptr = *this.ptr;
let elem = ptr::read(&(*ptr).data); let elem = ptr::read(&(*ptr).data);
// Make a weak pointer to clean up the implicit strong-weak reference // Make a weak pointer to clean up the implicit strong-weak reference
let _weak = Weak { _ptr: this._ptr }; let _weak = Weak { ptr: this.ptr };
mem::forget(this); mem::forget(this);
Ok(elem) Ok(elem)
@ -275,7 +271,7 @@ impl<T: ?Sized> Arc<T> {
// synchronize with the write coming from `is_unique`, so that the // synchronize with the write coming from `is_unique`, so that the
// events prior to that write happen before this read. // events prior to that write happen before this read.
match this.inner().weak.compare_exchange_weak(cur, cur + 1, Acquire, Relaxed) { match this.inner().weak.compare_exchange_weak(cur, cur + 1, Acquire, Relaxed) {
Ok(_) => return Weak { _ptr: this._ptr }, Ok(_) => return Weak { ptr: this.ptr },
Err(old) => cur = old, Err(old) => cur = old,
} }
} }
@ -304,13 +300,13 @@ impl<T: ?Sized> Arc<T> {
// `ArcInner` structure itself is `Sync` because the inner data is // `ArcInner` structure itself is `Sync` because the inner data is
// `Sync` as well, so we're ok loaning out an immutable pointer to these // `Sync` as well, so we're ok loaning out an immutable pointer to these
// contents. // contents.
unsafe { &**self._ptr } unsafe { &**self.ptr }
} }
// Non-inlined part of `drop`. // Non-inlined part of `drop`.
#[inline(never)] #[inline(never)]
unsafe fn drop_slow(&mut self) { unsafe fn drop_slow(&mut self) {
let ptr = *self._ptr; let ptr = *self.ptr;
// Destroy the data at this time, even though we may not free the box // Destroy the data at this time, even though we may not free the box
// allocation itself (there may still be weak pointers lying around). // allocation itself (there may still be weak pointers lying around).
@ -368,7 +364,7 @@ impl<T: ?Sized> Clone for Arc<T> {
} }
} }
Arc { _ptr: self._ptr } Arc { ptr: self.ptr }
} }
} }
@ -436,7 +432,7 @@ impl<T: Clone> Arc<T> {
// Materialize our own implicit weak pointer, so that it can clean // Materialize our own implicit weak pointer, so that it can clean
// up the ArcInner as needed. // up the ArcInner as needed.
let weak = Weak { _ptr: this._ptr }; let weak = Weak { ptr: this.ptr };
// mark the data itself as already deallocated // mark the data itself as already deallocated
unsafe { unsafe {
@ -444,7 +440,7 @@ impl<T: Clone> Arc<T> {
// here (due to zeroing) because data is no longer accessed by // here (due to zeroing) because data is no longer accessed by
// other threads (due to there being no more strong refs at this // other threads (due to there being no more strong refs at this
// point). // point).
let mut swap = Arc::new(ptr::read(&(**weak._ptr).data)); let mut swap = Arc::new(ptr::read(&(**weak.ptr).data));
mem::swap(this, &mut swap); mem::swap(this, &mut swap);
mem::forget(swap); mem::forget(swap);
} }
@ -457,7 +453,7 @@ impl<T: Clone> Arc<T> {
// As with `get_mut()`, the unsafety is ok because our reference was // As with `get_mut()`, the unsafety is ok because our reference was
// either unique to begin with, or became one upon cloning the contents. // either unique to begin with, or became one upon cloning the contents.
unsafe { unsafe {
let inner = &mut **this._ptr; let inner = &mut **this.ptr;
&mut inner.data &mut inner.data
} }
} }
@ -489,7 +485,7 @@ impl<T: ?Sized> Arc<T> {
// the Arc itself to be `mut`, so we're returning the only possible // the Arc itself to be `mut`, so we're returning the only possible
// reference to the inner data. // reference to the inner data.
unsafe { unsafe {
let inner = &mut **this._ptr; let inner = &mut **this.ptr;
Some(&mut inner.data) Some(&mut inner.data)
} }
} else { } else {
@ -558,7 +554,7 @@ impl<T: ?Sized> Drop for Arc<T> {
// This structure has #[unsafe_no_drop_flag], so this drop glue may run // This structure has #[unsafe_no_drop_flag], so this drop glue may run
// more than once (but it is guaranteed to be zeroed after the first if // more than once (but it is guaranteed to be zeroed after the first if
// it's run more than once) // it's run more than once)
let thin = *self._ptr as *const (); let thin = *self.ptr as *const ();
if thin as usize == mem::POST_DROP_USIZE { if thin as usize == mem::POST_DROP_USIZE {
return; return;
@ -639,7 +635,7 @@ impl<T: ?Sized> Weak<T> {
// Relaxed is valid for the same reason it is on Arc's Clone impl // Relaxed is valid for the same reason it is on Arc's Clone impl
match inner.strong.compare_exchange_weak(n, n + 1, Relaxed, Relaxed) { match inner.strong.compare_exchange_weak(n, n + 1, Relaxed, Relaxed) {
Ok(_) => return Some(Arc { _ptr: self._ptr }), Ok(_) => return Some(Arc { ptr: self.ptr }),
Err(old) => n = old, Err(old) => n = old,
} }
} }
@ -648,7 +644,7 @@ impl<T: ?Sized> Weak<T> {
#[inline] #[inline]
fn inner(&self) -> &ArcInner<T> { fn inner(&self) -> &ArcInner<T> {
// See comments above for why this is "safe" // See comments above for why this is "safe"
unsafe { &**self._ptr } unsafe { &**self.ptr }
} }
} }
@ -682,7 +678,7 @@ impl<T: ?Sized> Clone for Weak<T> {
} }
} }
return Weak { _ptr: self._ptr }; return Weak { ptr: self.ptr };
} }
} }
@ -714,7 +710,7 @@ impl<T: ?Sized> Drop for Weak<T> {
/// } // implicit drop /// } // implicit drop
/// ``` /// ```
fn drop(&mut self) { fn drop(&mut self) {
let ptr = *self._ptr; let ptr = *self.ptr;
let thin = ptr as *const (); let thin = ptr as *const ();
// see comments above for why this check is here // see comments above for why this check is here
@ -886,7 +882,7 @@ impl<T: ?Sized + fmt::Debug> fmt::Debug for Arc<T> {
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> fmt::Pointer for Arc<T> { impl<T: ?Sized> fmt::Pointer for Arc<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Pointer::fmt(&*self._ptr, f) fmt::Pointer::fmt(&*self.ptr, f)
} }
} }
@ -931,7 +927,7 @@ impl<T> Weak<T> {
issue = "30425")] issue = "30425")]
pub fn new() -> Weak<T> { pub fn new() -> Weak<T> {
unsafe { unsafe {
Weak { _ptr: Shared::new(Box::into_raw(box ArcInner { Weak { ptr: Shared::new(Box::into_raw(box ArcInner {
strong: atomic::AtomicUsize::new(0), strong: atomic::AtomicUsize::new(0),
weak: atomic::AtomicUsize::new(1), weak: atomic::AtomicUsize::new(1),
data: uninitialized(), data: uninitialized(),

View file

@ -184,9 +184,7 @@ struct RcBox<T: ?Sized> {
#[unsafe_no_drop_flag] #[unsafe_no_drop_flag]
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
pub struct Rc<T: ?Sized> { pub struct Rc<T: ?Sized> {
// FIXME #12808: strange names to try to avoid interfering with field ptr: Shared<RcBox<T>>,
// accesses of the contained type via Deref
_ptr: Shared<RcBox<T>>,
} }
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
@ -215,7 +213,7 @@ impl<T> Rc<T> {
// pointers, which ensures that the weak destructor never frees // pointers, which ensures that the weak destructor never frees
// the allocation while the strong destructor is running, even // the allocation while the strong destructor is running, even
// if the weak pointer is stored inside the strong one. // if the weak pointer is stored inside the strong one.
_ptr: Shared::new(Box::into_raw(box RcBox { ptr: Shared::new(Box::into_raw(box RcBox {
strong: Cell::new(1), strong: Cell::new(1),
weak: Cell::new(1), weak: Cell::new(1),
value: value, value: value,
@ -254,7 +252,7 @@ impl<T> Rc<T> {
// pointer while also handling drop logic by just crafting a // pointer while also handling drop logic by just crafting a
// fake Weak. // fake Weak.
this.dec_strong(); this.dec_strong();
let _weak = Weak { _ptr: this._ptr }; let _weak = Weak { ptr: this.ptr };
forget(this); forget(this);
Ok(val) Ok(val)
} }
@ -287,7 +285,7 @@ impl<T: ?Sized> Rc<T> {
#[stable(feature = "rc_weak", since = "1.4.0")] #[stable(feature = "rc_weak", since = "1.4.0")]
pub fn downgrade(this: &Self) -> Weak<T> { pub fn downgrade(this: &Self) -> Weak<T> {
this.inc_weak(); this.inc_weak();
Weak { _ptr: this._ptr } Weak { ptr: this.ptr }
} }
/// Get the number of weak references to this value. /// Get the number of weak references to this value.
@ -348,7 +346,7 @@ impl<T: ?Sized> Rc<T> {
#[stable(feature = "rc_unique", since = "1.4.0")] #[stable(feature = "rc_unique", since = "1.4.0")]
pub fn get_mut(this: &mut Self) -> Option<&mut T> { pub fn get_mut(this: &mut Self) -> Option<&mut T> {
if Rc::is_unique(this) { if Rc::is_unique(this) {
let inner = unsafe { &mut **this._ptr }; let inner = unsafe { &mut **this.ptr };
Some(&mut inner.value) Some(&mut inner.value)
} else { } else {
None None
@ -390,7 +388,7 @@ impl<T: Clone> Rc<T> {
} else if Rc::weak_count(this) != 0 { } else if Rc::weak_count(this) != 0 {
// Can just steal the data, all that's left is Weaks // Can just steal the data, all that's left is Weaks
unsafe { unsafe {
let mut swap = Rc::new(ptr::read(&(**this._ptr).value)); let mut swap = Rc::new(ptr::read(&(**this.ptr).value));
mem::swap(this, &mut swap); mem::swap(this, &mut swap);
swap.dec_strong(); swap.dec_strong();
// Remove implicit strong-weak ref (no need to craft a fake // Remove implicit strong-weak ref (no need to craft a fake
@ -404,7 +402,7 @@ impl<T: Clone> Rc<T> {
// reference count is guaranteed to be 1 at this point, and we required // reference count is guaranteed to be 1 at this point, and we required
// the `Rc<T>` itself to be `mut`, so we're returning the only possible // the `Rc<T>` itself to be `mut`, so we're returning the only possible
// reference to the inner value. // reference to the inner value.
let inner = unsafe { &mut **this._ptr }; let inner = unsafe { &mut **this.ptr };
&mut inner.value &mut inner.value
} }
} }
@ -449,7 +447,7 @@ impl<T: ?Sized> Drop for Rc<T> {
#[unsafe_destructor_blind_to_params] #[unsafe_destructor_blind_to_params]
fn drop(&mut self) { fn drop(&mut self) {
unsafe { unsafe {
let ptr = *self._ptr; let ptr = *self.ptr;
let thin = ptr as *const (); let thin = ptr as *const ();
if thin as usize != mem::POST_DROP_USIZE { if thin as usize != mem::POST_DROP_USIZE {
@ -490,7 +488,7 @@ impl<T: ?Sized> Clone for Rc<T> {
#[inline] #[inline]
fn clone(&self) -> Rc<T> { fn clone(&self) -> Rc<T> {
self.inc_strong(); self.inc_strong();
Rc { _ptr: self._ptr } Rc { ptr: self.ptr }
} }
} }
@ -691,7 +689,7 @@ impl<T: ?Sized + fmt::Debug> fmt::Debug for Rc<T> {
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> fmt::Pointer for Rc<T> { impl<T: ?Sized> fmt::Pointer for Rc<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Pointer::fmt(&*self._ptr, f) fmt::Pointer::fmt(&*self.ptr, f)
} }
} }
@ -711,9 +709,7 @@ impl<T> From<T> for Rc<T> {
#[unsafe_no_drop_flag] #[unsafe_no_drop_flag]
#[stable(feature = "rc_weak", since = "1.4.0")] #[stable(feature = "rc_weak", since = "1.4.0")]
pub struct Weak<T: ?Sized> { pub struct Weak<T: ?Sized> {
// FIXME #12808: strange names to try to avoid interfering with ptr: Shared<RcBox<T>>,
// field accesses of the contained type via Deref
_ptr: Shared<RcBox<T>>,
} }
#[stable(feature = "rc_weak", since = "1.4.0")] #[stable(feature = "rc_weak", since = "1.4.0")]
@ -749,7 +745,7 @@ impl<T: ?Sized> Weak<T> {
None None
} else { } else {
self.inc_strong(); self.inc_strong();
Some(Rc { _ptr: self._ptr }) Some(Rc { ptr: self.ptr })
} }
} }
} }
@ -783,7 +779,7 @@ impl<T: ?Sized> Drop for Weak<T> {
/// ``` /// ```
fn drop(&mut self) { fn drop(&mut self) {
unsafe { unsafe {
let ptr = *self._ptr; let ptr = *self.ptr;
let thin = ptr as *const (); let thin = ptr as *const ();
if thin as usize != mem::POST_DROP_USIZE { if thin as usize != mem::POST_DROP_USIZE {
@ -816,7 +812,7 @@ impl<T: ?Sized> Clone for Weak<T> {
#[inline] #[inline]
fn clone(&self) -> Weak<T> { fn clone(&self) -> Weak<T> {
self.inc_weak(); self.inc_weak();
Weak { _ptr: self._ptr } Weak { ptr: self.ptr }
} }
} }
@ -848,7 +844,7 @@ impl<T> Weak<T> {
pub fn new() -> Weak<T> { pub fn new() -> Weak<T> {
unsafe { unsafe {
Weak { Weak {
_ptr: Shared::new(Box::into_raw(box RcBox { ptr: Shared::new(Box::into_raw(box RcBox {
strong: Cell::new(0), strong: Cell::new(0),
weak: Cell::new(1), weak: Cell::new(1),
value: uninitialized(), value: uninitialized(),
@ -910,8 +906,8 @@ impl<T: ?Sized> RcBoxPtr<T> for Rc<T> {
// the contract anyway. // the contract anyway.
// This allows the null check to be elided in the destructor if we // This allows the null check to be elided in the destructor if we
// manipulated the reference count in the same function. // manipulated the reference count in the same function.
assume(!(*(&self._ptr as *const _ as *const *const ())).is_null()); assume(!(*(&self.ptr as *const _ as *const *const ())).is_null());
&(**self._ptr) &(**self.ptr)
} }
} }
} }
@ -924,8 +920,8 @@ impl<T: ?Sized> RcBoxPtr<T> for Weak<T> {
// the contract anyway. // the contract anyway.
// This allows the null check to be elided in the destructor if we // This allows the null check to be elided in the destructor if we
// manipulated the reference count in the same function. // manipulated the reference count in the same function.
assume(!(*(&self._ptr as *const _ as *const *const ())).is_null()); assume(!(*(&self.ptr as *const _ as *const *const ())).is_null());
&(**self._ptr) &(**self.ptr)
} }
} }
} }

View file

@ -390,8 +390,8 @@ impl<T: ?Sized> RefCell<T> {
pub fn borrow(&self) -> Ref<T> { pub fn borrow(&self) -> Ref<T> {
match BorrowRef::new(&self.borrow) { match BorrowRef::new(&self.borrow) {
Some(b) => Ref { Some(b) => Ref {
_value: unsafe { &*self.value.get() }, value: unsafe { &*self.value.get() },
_borrow: b, borrow: b,
}, },
None => panic!("RefCell<T> already mutably borrowed"), None => panic!("RefCell<T> already mutably borrowed"),
} }
@ -438,8 +438,8 @@ impl<T: ?Sized> RefCell<T> {
pub fn borrow_mut(&self) -> RefMut<T> { pub fn borrow_mut(&self) -> RefMut<T> {
match BorrowRefMut::new(&self.borrow) { match BorrowRefMut::new(&self.borrow) {
Some(b) => RefMut { Some(b) => RefMut {
_value: unsafe { &mut *self.value.get() }, value: unsafe { &mut *self.value.get() },
_borrow: b, borrow: b,
}, },
None => panic!("RefCell<T> already borrowed"), None => panic!("RefCell<T> already borrowed"),
} }
@ -491,7 +491,7 @@ impl<T: ?Sized + PartialEq> PartialEq for RefCell<T> {
impl<T: ?Sized + Eq> Eq for RefCell<T> {} impl<T: ?Sized + Eq> Eq for RefCell<T> {}
struct BorrowRef<'b> { struct BorrowRef<'b> {
_borrow: &'b Cell<BorrowFlag>, borrow: &'b Cell<BorrowFlag>,
} }
impl<'b> BorrowRef<'b> { impl<'b> BorrowRef<'b> {
@ -501,7 +501,7 @@ impl<'b> BorrowRef<'b> {
WRITING => None, WRITING => None,
b => { b => {
borrow.set(b + 1); borrow.set(b + 1);
Some(BorrowRef { _borrow: borrow }) Some(BorrowRef { borrow: borrow })
}, },
} }
} }
@ -510,9 +510,9 @@ impl<'b> BorrowRef<'b> {
impl<'b> Drop for BorrowRef<'b> { impl<'b> Drop for BorrowRef<'b> {
#[inline] #[inline]
fn drop(&mut self) { fn drop(&mut self) {
let borrow = self._borrow.get(); let borrow = self.borrow.get();
debug_assert!(borrow != WRITING && borrow != UNUSED); debug_assert!(borrow != WRITING && borrow != UNUSED);
self._borrow.set(borrow - 1); self.borrow.set(borrow - 1);
} }
} }
@ -521,10 +521,10 @@ impl<'b> Clone for BorrowRef<'b> {
fn clone(&self) -> BorrowRef<'b> { fn clone(&self) -> BorrowRef<'b> {
// Since this Ref exists, we know the borrow flag // Since this Ref exists, we know the borrow flag
// is not set to WRITING. // is not set to WRITING.
let borrow = self._borrow.get(); let borrow = self.borrow.get();
debug_assert!(borrow != WRITING && borrow != UNUSED); debug_assert!(borrow != WRITING && borrow != UNUSED);
self._borrow.set(borrow + 1); self.borrow.set(borrow + 1);
BorrowRef { _borrow: self._borrow } BorrowRef { borrow: self.borrow }
} }
} }
@ -534,10 +534,8 @@ impl<'b> Clone for BorrowRef<'b> {
/// See the [module-level documentation](index.html) for more. /// See the [module-level documentation](index.html) for more.
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
pub struct Ref<'b, T: ?Sized + 'b> { pub struct Ref<'b, T: ?Sized + 'b> {
// FIXME #12808: strange name to try to avoid interfering with value: &'b T,
// field accesses of the contained type via Deref borrow: BorrowRef<'b>,
_value: &'b T,
_borrow: BorrowRef<'b>,
} }
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
@ -546,7 +544,7 @@ impl<'b, T: ?Sized> Deref for Ref<'b, T> {
#[inline] #[inline]
fn deref(&self) -> &T { fn deref(&self) -> &T {
self._value self.value
} }
} }
@ -565,8 +563,8 @@ impl<'b, T: ?Sized> Ref<'b, T> {
#[inline] #[inline]
pub fn clone(orig: &Ref<'b, T>) -> Ref<'b, T> { pub fn clone(orig: &Ref<'b, T>) -> Ref<'b, T> {
Ref { Ref {
_value: orig._value, value: orig.value,
_borrow: orig._borrow.clone(), borrow: orig.borrow.clone(),
} }
} }
@ -594,8 +592,8 @@ impl<'b, T: ?Sized> Ref<'b, T> {
where F: FnOnce(&T) -> &U where F: FnOnce(&T) -> &U
{ {
Ref { Ref {
_value: f(orig._value), value: f(orig.value),
_borrow: orig._borrow, borrow: orig.borrow,
} }
} }
@ -627,9 +625,9 @@ impl<'b, T: ?Sized> Ref<'b, T> {
pub fn filter_map<U: ?Sized, F>(orig: Ref<'b, T>, f: F) -> Option<Ref<'b, U>> pub fn filter_map<U: ?Sized, F>(orig: Ref<'b, T>, f: F) -> Option<Ref<'b, U>>
where F: FnOnce(&T) -> Option<&U> where F: FnOnce(&T) -> Option<&U>
{ {
f(orig._value).map(move |new| Ref { f(orig.value).map(move |new| Ref {
_value: new, value: new,
_borrow: orig._borrow, borrow: orig.borrow,
}) })
} }
} }
@ -667,8 +665,8 @@ impl<'b, T: ?Sized> RefMut<'b, T> {
where F: FnOnce(&mut T) -> &mut U where F: FnOnce(&mut T) -> &mut U
{ {
RefMut { RefMut {
_value: f(orig._value), value: f(orig.value),
_borrow: orig._borrow, borrow: orig.borrow,
} }
} }
@ -706,24 +704,24 @@ impl<'b, T: ?Sized> RefMut<'b, T> {
pub fn filter_map<U: ?Sized, F>(orig: RefMut<'b, T>, f: F) -> Option<RefMut<'b, U>> pub fn filter_map<U: ?Sized, F>(orig: RefMut<'b, T>, f: F) -> Option<RefMut<'b, U>>
where F: FnOnce(&mut T) -> Option<&mut U> where F: FnOnce(&mut T) -> Option<&mut U>
{ {
let RefMut { _value, _borrow } = orig; let RefMut { value, borrow } = orig;
f(_value).map(move |new| RefMut { f(value).map(move |new| RefMut {
_value: new, value: new,
_borrow: _borrow, borrow: borrow,
}) })
} }
} }
struct BorrowRefMut<'b> { struct BorrowRefMut<'b> {
_borrow: &'b Cell<BorrowFlag>, borrow: &'b Cell<BorrowFlag>,
} }
impl<'b> Drop for BorrowRefMut<'b> { impl<'b> Drop for BorrowRefMut<'b> {
#[inline] #[inline]
fn drop(&mut self) { fn drop(&mut self) {
let borrow = self._borrow.get(); let borrow = self.borrow.get();
debug_assert!(borrow == WRITING); debug_assert!(borrow == WRITING);
self._borrow.set(UNUSED); self.borrow.set(UNUSED);
} }
} }
@ -733,7 +731,7 @@ impl<'b> BorrowRefMut<'b> {
match borrow.get() { match borrow.get() {
UNUSED => { UNUSED => {
borrow.set(WRITING); borrow.set(WRITING);
Some(BorrowRefMut { _borrow: borrow }) Some(BorrowRefMut { borrow: borrow })
}, },
_ => None, _ => None,
} }
@ -745,10 +743,8 @@ impl<'b> BorrowRefMut<'b> {
/// See the [module-level documentation](index.html) for more. /// See the [module-level documentation](index.html) for more.
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
pub struct RefMut<'b, T: ?Sized + 'b> { pub struct RefMut<'b, T: ?Sized + 'b> {
// FIXME #12808: strange name to try to avoid interfering with value: &'b mut T,
// field accesses of the contained type via Deref borrow: BorrowRefMut<'b>,
_value: &'b mut T,
_borrow: BorrowRefMut<'b>,
} }
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
@ -757,7 +753,7 @@ impl<'b, T: ?Sized> Deref for RefMut<'b, T> {
#[inline] #[inline]
fn deref(&self) -> &T { fn deref(&self) -> &T {
self._value self.value
} }
} }
@ -765,7 +761,7 @@ impl<'b, T: ?Sized> Deref for RefMut<'b, T> {
impl<'b, T: ?Sized> DerefMut for RefMut<'b, T> { impl<'b, T: ?Sized> DerefMut for RefMut<'b, T> {
#[inline] #[inline]
fn deref_mut(&mut self) -> &mut T { fn deref_mut(&mut self) -> &mut T {
self._value self.value
} }
} }