Add interfaces for better integration with Foundation.

main
David Chisnall 8 years ago
parent bdc20a8bab
commit 458bd3c7a2

154
arc.m

@ -175,6 +175,45 @@ static const long weak_mask = ((size_t)1)<<((sizeof(size_t)*8)-1);
*/ */
static const long refcount_mask = ~weak_mask; static const long refcount_mask = ~weak_mask;
/**
 * Increments the inline reference count stored in the machine word
 * immediately before `obj`, and returns `obj`.  Returns nil when the retain
 * is "spurious" — i.e. it raced with a concurrent deallocation via a weak
 * reference (see the long comment inside the loop).
 *
 * NOTE(review): assumes `obj` is a normal heap object whose reference count
 * lives at ((uintptr_t*)obj) - 1 (the objc_class_flag_fast_arc layout) —
 * callers are expected to have checked the class flag first; confirm against
 * the callers, which are not fully visible in this view.
 */
id objc_retain_fast_np(id obj)
{
// The reference count is the word immediately before the object payload.
uintptr_t *refCount = ((uintptr_t*)obj) - 1;
// Atomic read: fetch-and-add of zero performs a full-barrier load.
uintptr_t refCountVal = __sync_fetch_and_add(refCount, 0);
uintptr_t newVal = refCountVal;
// Compare-and-swap loop: recompute and retry until the incremented value
// is installed without another thread having changed the word in between.
do {
refCountVal = newVal;
// Strip the weak-reference flag (the top bit, per weak_mask) to get the
// real count.
long realCount = refCountVal & refcount_mask;
// If this object's reference count is already less than 0, then
// this is a spurious retain. This can happen when one thread is
// attempting to acquire a strong reference from a weak reference
// and the other thread is attempting to destroy it. The
// deallocating thread will decrement the reference count with no
// locks held and will then acquire the weak ref table lock and
// attempt to zero the weak references. The caller of this will be
// `objc_loadWeakRetained`, which will also hold the lock. If the
// serialisation is such that the locked retain happens after the
// decrement, then we return nil here so that the weak-to-strong
// transition doesn't happen and the object is actually destroyed.
// If the serialisation happens the other way, then the locked
// check of the reference count will happen after we've referenced
// this and we don't zero the references or deallocate.
// NOTE(review): with refcount_mask == ~weak_mask the masked value has
// its top bit clear, so `realCount < 0` looks unreachable for a signed
// long — confirm the mask definitions (only visible in garbled form
// here) before relying on this path.
if (realCount < 0)
{
return nil;
}
// If the reference count is saturated, don't increment it.
// (A saturated count is pinned: the object is deliberately leaked
// rather than risking premature destruction after an overflow.)
if (realCount == refcount_mask)
{
return obj;
}
realCount++;
// Reinstate the weak-reference flag bit before writing the word back.
realCount |= refCountVal & weak_mask;
uintptr_t updated = (uintptr_t)realCount;
// Publish the new count.  On contention the returned value differs from
// refCountVal and the loop retries with the freshly observed word.
newVal = __sync_val_compare_and_swap(refCount, refCountVal, updated);
} while (newVal != refCountVal);
return obj;
}
static inline id retain(id obj) static inline id retain(id obj)
{ {
if (isSmallObject(obj)) { return obj; } if (isSmallObject(obj)) { return obj; }
@ -186,43 +225,55 @@ static inline id retain(id obj)
} }
if (objc_test_class_flag(cls, objc_class_flag_fast_arc)) if (objc_test_class_flag(cls, objc_class_flag_fast_arc))
{ {
uintptr_t *refCount = ((uintptr_t*)obj) - 1; return objc_retain_fast_np(obj);
uintptr_t refCountVal = __sync_fetch_and_add(refCount, 0); }
uintptr_t newVal = refCountVal; return [obj retain];
do { }
refCountVal = newVal;
long realCount = refCountVal & refcount_mask; BOOL objc_release_fast_no_destroy_np(id obj)
// If this object's reference count is already less than 0, then {
// this is a spurious retain. This can happen when one thread is uintptr_t *refCount = ((uintptr_t*)obj) - 1;
// attempting to acquire a strong reference from a weak reference uintptr_t refCountVal = __sync_fetch_and_add(refCount, 0);
// and the other thread is attempting to destroy it. The uintptr_t newVal = refCountVal;
// deallocating thread will decrement the reference count with no bool isWeak;
// locks held and will then acquire the weak ref table lock and bool shouldFree;
// attempt to zero the weak references. The caller of this will be do {
// `objc_loadWeakRetained`, which will also hold the lock. If the refCountVal = newVal;
// serialisation is such that the locked retain happens after the size_t realCount = refCountVal & refcount_mask;
// decrement, then we return nil here so that the weak-to-strong // If the reference count is saturated, don't decrement it.
// transition doesn't happen and the object is actually destroyed. if (realCount == refcount_mask)
// If the serialisation happens the other way, then the locked {
// check of the reference count will happen after we've referenced return NO;
// this and we don't zero the references or deallocate. }
if (realCount < 0) realCount--;
{ isWeak = (refCountVal & weak_mask) == weak_mask;
return nil; shouldFree = realCount == -1;
} realCount |= refCountVal & weak_mask;
// If the reference count is saturated, don't increment it. uintptr_t updated = (uintptr_t)realCount;
if (realCount == refcount_mask) newVal = __sync_val_compare_and_swap(refCount, refCountVal, updated);
} while (newVal != refCountVal);
// We allow refcounts to run into the negative, but should only
// deallocate once.
if (shouldFree)
{
if (isWeak)
{
if (!objc_delete_weak_refs(obj))
{ {
return obj; return NO;
} }
realCount++; }
realCount |= refCountVal & weak_mask; return YES;
uintptr_t updated = (uintptr_t)realCount; }
newVal = __sync_val_compare_and_swap(refCount, refCountVal, updated); return NO;
} while (newVal != refCountVal); }
return obj;
void objc_release_fast_np(id obj)
{
if (objc_release_fast_no_destroy_np(obj))
{
[obj dealloc];
} }
return [obj retain];
} }
static inline void release(id obj) static inline void release(id obj)
@ -241,39 +292,7 @@ static inline void release(id obj)
} }
if (objc_test_class_flag(cls, objc_class_flag_fast_arc)) if (objc_test_class_flag(cls, objc_class_flag_fast_arc))
{ {
uintptr_t *refCount = ((uintptr_t*)obj) - 1; objc_release_fast_np(obj);
uintptr_t refCountVal = __sync_fetch_and_add(refCount, 0);
uintptr_t newVal = refCountVal;
bool isWeak;
bool shouldFree;
do {
refCountVal = newVal;
size_t realCount = refCountVal & refcount_mask;
// If the reference count is saturated, don't decrement it.
if (realCount == refcount_mask)
{
return;
}
realCount--;
isWeak = (refCountVal & weak_mask) == weak_mask;
shouldFree = realCount == -1;
realCount |= refCountVal & weak_mask;
uintptr_t updated = (uintptr_t)realCount;
newVal = __sync_val_compare_and_swap(refCount, refCountVal, updated);
} while (newVal != refCountVal);
// We allow refcounts to run into the negative, but should only
// deallocate once.
if (shouldFree)
{
if (isWeak)
{
if (!objc_delete_weak_refs(obj))
{
return;
}
}
[obj dealloc];
}
return; return;
} }
[obj release]; [obj release];
@ -375,7 +394,6 @@ unsigned long objc_arc_autorelease_count_for_object_np(id obj)
return count; return count;
} }
void *objc_autoreleasePoolPush(void) void *objc_autoreleasePoolPush(void)
{ {
initAutorelease(); initAutorelease();

@ -32,6 +32,12 @@ id objc_loadWeakRetained(id* obj);
* Retains the argument. Equivalent to [obj retain]. * Retains the argument. Equivalent to [obj retain].
*/ */
id objc_retain(id obj); id objc_retain(id obj);
/**
* Retains the argument, assuming that the argument is a normal object and has
* its reference count managed by the runtime.
* This is intended to implement `-retain` in ARC-compatible root classes.
*/
id objc_retain_fast_np(id obj) OBJC_NONPORTABLE;
/** /**
* Retains and autoreleases an object. Equivalent to [[obj retain] autorelease]. * Retains and autoreleases an object. Equivalent to [[obj retain] autorelease].
*/ */
@ -85,6 +91,26 @@ void objc_destroyWeak(id* addr);
* Equivalent to objc_copyWeak(), but may also set src to nil. * Equivalent to objc_copyWeak(), but may also set src to nil.
*/ */
void objc_moveWeak(id *dest, id *src); void objc_moveWeak(id *dest, id *src);
/**
* Releases the argument, assuming that the argument is a normal object and has
* its reference count managed by the runtime. If the retain count reaches
* zero then all weak references will be zeroed and the object will be
* destroyed.
*
* This is intended to implement `-release` in ARC-compatible root
* classes.
*/
void objc_release_fast_np(id obj) OBJC_NONPORTABLE;
/**
* Releases the argument, assuming that the argument is a normal object and has
* its reference count managed by the runtime. If the retain count reaches
* zero then all weak references will be zeroed but the object will *NOT* be
* destroyed.
*
* This is intended to implement `NSDecrementExtraRefCountWasZero` for use with
* ARC-compatible classes.
*/
BOOL objc_release_fast_no_destroy_np(id obj) OBJC_NONPORTABLE;
/** /**
* Releases an object. Equivalent to [obj release]. * Releases an object. Equivalent to [obj release].
*/ */

Loading…
Cancel
Save