Merge branch 'master' into newabi

David Chisnall 8 years ago
commit 76e7d4b07f

@ -24,6 +24,26 @@ Highlights of this release include:
Thumb-2 code. This will also generate Thumb-2 message send functions,
improving instruction cache usage.
- Significant improvements to ARC, including
* The runtime no longer acquires a global lock on every object deallocation (a
global lock is still used for objects that have weak references). *NOTE:*
This is incompatible with other code directly inspecting the reference
count and will break with older versions of GNUstep Base!
* Weak references use a scheme closer to C++ `std::weak_ptr` and are
lazily zeroed on access. This reduces the space overheads for weak
references (a rough sketch of the new scheme follows this list).
* Some additional helper functions are added for use in `NSObject` and other
root classes, which simplifies the layering between the runtime and the
Foundation (or equivalent) implementation.
- Improvements to how the runtime handles layout of ivars with strong alignment
requirements, which should fix issues relating to using vector types in
Objective-C objects.
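As a rough sketch of the new scheme (names here are illustrative only; the real
definitions are weak_mask, refcount_mask and struct objc_weak_ref in the arc.m
hunks further down in this commit): each object's inline reference count word
reserves its top bit as a "has ever been weakly referenced" flag, and each
__weak variable points at a small reference-counted control block rather than
at the object itself. On deallocation the runtime only nils the object pointer
inside the control block; the __weak variables themselves are cleaned up lazily
the next time they are loaded or stored.

	/* Illustrative sketch only, not the runtime's actual declarations. */
	typedef struct
	{
		void   *isa;        /* marker class, so weak loads can recognise the block */
		id      obj;        /* the referenced object; set to nil on deallocation */
		size_t  weak_count; /* number of __weak variables sharing this block */
	} weak_control_block;

	/* Reference count word: top bit records that a weak reference was taken,
	 * the remaining bits hold the (biased) retain count. */
	#define WEAK_TAKEN_BIT   (((size_t)1) << (sizeof(size_t) * 8 - 1))
	#define RETAIN_COUNT(w)  ((w) & ~WEAK_TAKEN_BIT)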
You may obtain the code for this release from git and use the 1.x branch:
https://github.com/gnustep/libobjc2.git

@ -2,6 +2,8 @@ cmake_minimum_required(VERSION 2.8)
add_executable(test_cxx_runtime typeinfo_test.cc)
set(CMAKE_CXX_IMPLICIT_LINK_LIBRARIES "")
target_link_libraries(test_cxx_runtime ${CXX_RUNTIME})
set_target_properties(test_cxx_runtime PROPERTIES
LINKER_LANGUAGE C)
if (CXX_RUNTIME)
target_link_libraries(test_cxx_runtime ${CXX_RUNTIME})
set_target_properties(test_cxx_runtime PROPERTIES
LINKER_LANGUAGE C)
endif()

@ -21,7 +21,6 @@ namespace std
bool operator==(const type_info &) const;
bool operator!=(const type_info &) const;
bool before(const type_info &) const;
type_info();
private:
type_info(const type_info& rhs);
type_info& operator= (const type_info& rhs);
@ -44,18 +43,17 @@ namespace std
class type_info2 : public std::type_info
{
public:
type_info2() : type_info("foo") {}
virtual bool __is_pointer_p() const;
virtual bool __is_function_p() const { return true; }
virtual bool __do_catch(const type_info *thrown_type,
void **thrown_object,
unsigned outer) const { return true; }
virtual bool __do_upcast(
const __class_type_info *target,
void **thrown_object) const { return true; }
};
bool type_info2::__is_pointer_p() const { return true; }
int main()
{
return 0;
type_info2 s;
return s.__is_pointer_p();
}

@ -162,18 +162,42 @@ set_source_files_properties(
# C++ Runtime interaction
#
function(test_cxx CXX_RUNTIME_NAME IS_STDLIB)
set(CXX_RUNTIME_NAME "${CMAKE_SHARED_LIBRARY_PREFIX}${CXX_RUNTIME_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX}")
find_library(CXX_RUNTIME_LIB NAMES ${CXX_RUNTIME_NAME})
if (CXX_RUNTIME_LIB)
message(STATUS "Testing ${CXX_RUNTIME_LIB} as the C++ runtime library")
try_compile(USERUNTIME
"${CMAKE_BINARY_DIR}/CMake"
"${CMAKE_SOURCE_DIR}/CMake"
test_cxx_runtime
CMAKE_FLAGS "-DCXX_RUNTIME=${CXX_RUNTIME_LIB}")
if (USERUNTIME)
set(CXX_RUNTIME ${CXX_RUNTIME_LIB} PARENT_SCOPE)
endif()
endif()
endfunction()
set(ENABLE_OBJCXX true CACHE BOOL
"Enable support for Objective-C++")
set(FORCE_LIBOBJCXX false CACHE BOOL
"Force building a separate Objective-C++ runtime library")
set(CXXRT_IS_STDLIB false)
add_library(objc SHARED ${libobjc_C_SRCS} ${libobjc_ASM_SRCS} ${libobjc_OBJC_SRCS})
if (ENABLE_OBJCXX)
message(STATUS "Testing C++ interop")
# Try to find libcxxrt.so. We can link to this to provide the C++ ABI
# layer, if it exists.
find_library(CXX_RUNTIME NAMES libcxxrt.so)
test_cxx(cxxrt false)
# If it doesn't, then look for GNU libsupc++.so instead (either works,
# they're ABI compatible).
if (NOT CXX_RUNTIME)
find_library(CXX_RUNTIME NAMES libsupc++.so)
test_cxx(supc++ false)
endif (NOT CXX_RUNTIME)
if (NOT CXX_RUNTIME)
test_cxx(c++abi false)
endif (NOT CXX_RUNTIME)
# If we have a C++ ABI library, then we can produce a single libobjc that
@ -181,41 +205,34 @@ if (ENABLE_OBJCXX)
# a separate libobjcxx.
if (CXX_RUNTIME)
message(STATUS "Using ${CXX_RUNTIME} as the C++ runtime library")
try_compile( USERUNTIME
else()
message(STATUS "Testing C++ standard library")
try_compile(USERUNTIME
"${CMAKE_BINARY_DIR}/CMake"
"${CMAKE_SOURCE_DIR}/CMake"
test_cxx_runtime
CMAKE_FLAGS "-DCXX_RUNTIME=${CXX_RUNTIME}")
message(STATUS "Is runtime useable? ${USERUNTIME}")
if (${FORCE_LIBOBJCXX} OR NOT ${USERUNTIME})
message(STATUS "Forcing build of stand-alone libobjcxx")
add_library(objcxx SHARED ${libobjcxx_CXX_SRCS})
set_target_properties(objcxx PROPERTIES
LINKER_LANGUAGE C
SOVERSION ${libobjc_VERSION}
)
target_link_libraries(objcxx ${CXX_RUNTIME})
set(CXX_RUNTIME "")
list(APPEND INSTALL_TARGETS objcxx)
else ()
set(libobjc_CXX_SRCS ${libobjcxx_CXX_SRCS})
# We don't want to link the STL implementation (e.g. libstdc++) if
# we have a separate C++ runtime.
set(CMAKE_CXX_IMPLICIT_LINK_LIBRARIES "")
endif ()
else ()
message(STATUS "No C++ runtime library found")
add_library(objcxx SHARED ${libobjcxx_CXX_SRCS})
set_target_properties(objcxx PROPERTIES
LINKER_LANGUAGE C
SOVERSION ${libobjc_VERSION}
)
set(CXX_RUNTIME "")
list(APPEND INSTALL_TARGETS objcxx)
test_cxx_runtime)
if (${USERUNTIME})
message(STATUS "libobjc will depend on C++ standard library")
set(CXXRT_IS_STDLIB true)
else()
message(STATUS "No useable C++ runtime found")
set(ENABLE_OBJCXX false)
endif()
endif ()
endif (ENABLE_OBJCXX)
if (ENABLE_OBJCXX)
if (NOT CXXRT_IS_STDLIB)
# We don't want to link the STL implementation (e.g. libstdc++) if
# we have a separate C++ runtime.
set(CMAKE_CXX_IMPLICIT_LINK_LIBRARIES "")
target_link_libraries(objc ${CXX_RUNTIME})
endif()
set(libobjc_CXX_SRCS ${libobjcxx_CXX_SRCS})
target_sources(objc PRIVATE ${libobjcxx_CXX_SRCS})
endif()
# Currently, we actually need pthreads, but we should use the platform's native
# threading implementation (we do for everything except thread-local storage)
@ -225,7 +242,6 @@ set(objc_LINK_FLAGS "${objc_LINK_FLAGS} ${CMAKE_THREAD_LIBS_INIT}")
add_library(objc SHARED ${libobjc_C_SRCS} ${libobjc_ASM_SRCS} ${libobjc_OBJC_SRCS} ${libobjc_CXX_SRCS})
set_target_properties(objc PROPERTIES
LINKER_LANGUAGE C
@ -248,8 +264,7 @@ endif ()
# Explicitly link the C++ runtime and libgc if we are compiling with gc support.
target_link_libraries(objc ${CXX_RUNTIME})
# Explicitly link libgc if we are compiling with gc support.
if (LIBGC)
target_link_libraries(objc ${LIBGC})
endif ()

@ -64,6 +64,10 @@ function(addtest_variants TEST TEST_SOURCE)
addtest_flags("${TEST}_optimised" "-O3 -fobjc-runtime=gnustep-2.0 -UNDEBUG" "${TEST_SOURCE}")
addtest_flags("${TEST}_legacy" "-O0 -fobjc-runtime=gnustep-1.7 -UNDEBUG" "${TEST_SOURCE}")
addtest_flags("${TEST}_legacy_optimised" "-O3 -fobjc-runtime=gnustep-1.7 -UNDEBUG" "${TEST_SOURCE}")
target_compile_definitions("${TEST}_legacy" PRIVATE SINGLE_FILE_TEST=1)
target_compile_definitions("${TEST}_legacy_optimised" PRIVATE SINGLE_FILE_TEST=1)
target_compile_definitions(${TEST} PRIVATE SINGLE_FILE_TEST=1)
target_compile_definitions("${TEST}_optimised" PRIVATE SINGLE_FILE_TEST=1)
endfunction(addtest_variants)
foreach(TEST_SOURCE ${TESTS})
@ -73,3 +77,6 @@ endforeach()
# Tests that are more than a single file.
addtest_variants("CXXExceptions" "CXXException.m;CXXException.cc")
if (ENABLE_OBJCXX)
addtest_variants(ObjCXXEHInterop "ObjCXXEHInterop.mm;ObjCXXEHInterop.m;Test.m")
endif()

@ -0,0 +1,22 @@
#import "Test.h"
#import "stdio.h"
void poke_objcxx(void);
void rethrow(id x)
{
@throw x;
}
int main(void)
{
@try {
printf("Poking from minRepM\n");
poke_objcxx();
printf("Poked from minRepM\n");
} @catch (Test *localException) {
printf("In NS_HANDLER block, %p\n", localException);
}
}

@ -0,0 +1,20 @@
#import "Test.h"
#import "stdio.h"
extern "C" void rethrow(id);
extern "C" void poke_objcxx(void)
{
@try {
printf("Raising MyException\n");
Test *e = [Test new];
@throw e;
} @catch (Test *localException) {
printf("Caught - re-raising\n");
[localException retain];
localException = [localException autorelease];
rethrow(localException);
}
}

@ -23,6 +23,11 @@ __attribute__((objc_root_class))
#endif
@end
@interface NSAutoreleasePool : Test
@end
#ifdef SINGLE_FILE_TEST
#if !__has_feature(objc_arc)
@implementation Test
+ (Class)class { return self; }
@ -48,10 +53,7 @@ __attribute__((objc_root_class))
}
- (void)_ARCCompliantRetainRelease {}
@end
#endif
@interface NSAutoreleasePool : Test
@end
@implementation NSAutoreleasePool
- (void)_ARCCompatibleAutoreleasePool {}
+ (void)addObject:(id)anObject
@ -59,3 +61,6 @@ __attribute__((objc_root_class))
objc_autorelease(anObject);
}
@end
#endif
#endif

@ -4,37 +4,7 @@
#undef NDEBUG
#endif
#include <assert.h>
#ifndef __has_attribute
#define __has_attribute(x) 0
#ifndef SINGLE_FILE_TEST
#define SINGLE_FILE_TEST 1
#endif
#if __has_attribute(objc_root_class)
__attribute__((objc_root_class))
#endif
@interface Test { id isa; }
@end
@implementation Test
+ (Class)class { return self; }
+ (id)new
{
return class_createInstance(self, 0);
}
- (void)dealloc
{
object_dispose(self);
}
- (id)autorelease
{
return objc_autorelease(self);
}
- (id)retain
{
return objc_retain(self);
}
- (void)release
{
objc_release(self);
}
- (void)_ARCCompliantRetainRelease {}
@end
#include "Test.h"

@ -0,0 +1,15 @@
#import "Test.h"
#import "minRep1.h"
#import "stdio.h"
@implementation MinRep1
- (void)poke
{
printf("Poking from minRep1\n");
poke_objcxx();
}
@end

@ -1,3 +1,5 @@
// Needed with glibc to expose vasprintf
#define _GNU_SOURCE
#include <time.h>
#include <stdio.h>
#include <assert.h>

arc.m

@ -1,4 +1,5 @@
#include <stdlib.h>
#include <stdbool.h>
#include <assert.h>
#import "stdio.h"
#import "objc/runtime.h"
@ -163,6 +164,63 @@ extern BOOL FastARCAutorelease;
static BOOL useARCAutoreleasePool;
/**
* We use the top bit of the reference count to indicate whether an object has
* ever had a weak reference taken. This lets us avoid acquiring the weak
* table lock for most objects on deallocation.
*/
static const long weak_mask = ((size_t)1)<<((sizeof(size_t)*8)-1);
/**
* All of the bits other than the top bit are the real reference count.
*/
static const long refcount_mask = ~weak_mask;
size_t object_getRetainCount_np(id obj)
{
uintptr_t *refCount = ((uintptr_t*)obj) - 1;
uintptr_t refCountVal = __sync_fetch_and_add(refCount, 0);
return (((size_t)refCountVal) & refcount_mask) + 1;
}
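A concrete example of this encoding, as a small stand-alone C sketch (the
64-bit word value is hypothetical, not taken from a real run):

	#include <stdint.h>
	#include <stdio.h>

	int main(void)
	{
		uint64_t weak_mask     = UINT64_C(1) << 63;   /* top bit           */
		uint64_t refcount_mask = ~weak_mask;          /* remaining 63 bits */
		/* Weak bit set, stored count 2: the count is biased by one, so this
		 * object has a retain count of 3. */
		uint64_t word = weak_mask | 2;
		printf("weak reference taken: %s\n",
		       (word & weak_mask) ? "yes" : "no");
		printf("retain count: %llu\n",
		       (unsigned long long)((word & refcount_mask) + 1));
		return 0;
	}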
id objc_retain_fast_np(id obj)
{
uintptr_t *refCount = ((uintptr_t*)obj) - 1;
uintptr_t refCountVal = __sync_fetch_and_add(refCount, 0);
uintptr_t newVal = refCountVal;
do {
refCountVal = newVal;
long realCount = refCountVal & refcount_mask;
// If this object's reference count is already less than 0, then
// this is a spurious retain. This can happen when one thread is
// attempting to acquire a strong reference from a weak reference
// and the other thread is attempting to destroy it. The
// deallocating thread will decrement the reference count with no
// locks held and will then acquire the weak ref table lock and
// attempt to zero the weak references. The caller of this will be
// `objc_loadWeakRetained`, which will also hold the lock. If the
// serialisation is such that the locked retain happens after the
// decrement, then we return nil here so that the weak-to-strong
// transition doesn't happen and the object is actually destroyed.
// If the serialisation happens the other way, then the locked
// check of the reference count will happen after we've referenced
// this and we don't zero the references or deallocate.
if (realCount < 0)
{
return nil;
}
// If the reference count is saturated, don't increment it.
if (realCount == refcount_mask)
{
return obj;
}
realCount++;
realCount |= refCountVal & weak_mask;
uintptr_t updated = (uintptr_t)realCount;
newVal = __sync_val_compare_and_swap(refCount, refCountVal, updated);
} while (newVal != refCountVal);
return obj;
}
static inline id retain(id obj)
{
if (isSmallObject(obj)) { return obj; }
@ -174,17 +232,55 @@ static inline id retain(id obj)
}
if (objc_test_class_flag(cls, objc_class_flag_fast_arc))
{
intptr_t *refCount = ((intptr_t*)obj) - 1;
// Note: this should be an atomic read, so that a sufficiently clever
// compiler doesn't notice that there's no happens-before relationship
// here.
if (*refCount >= 0)
return objc_retain_fast_np(obj);
}
return [obj retain];
}
BOOL objc_release_fast_no_destroy_np(id obj)
{
uintptr_t *refCount = ((uintptr_t*)obj) - 1;
uintptr_t refCountVal = __sync_fetch_and_add(refCount, 0);
uintptr_t newVal = refCountVal;
bool isWeak;
bool shouldFree;
do {
refCountVal = newVal;
size_t realCount = refCountVal & refcount_mask;
// If the reference count is saturated, don't decrement it.
if (realCount == refcount_mask)
{
__sync_add_and_fetch(refCount, 1);
return NO;
}
return obj;
realCount--;
isWeak = (refCountVal & weak_mask) == weak_mask;
shouldFree = realCount == -1;
realCount |= refCountVal & weak_mask;
uintptr_t updated = (uintptr_t)realCount;
newVal = __sync_val_compare_and_swap(refCount, refCountVal, updated);
} while (newVal != refCountVal);
// We allow refcounts to run into the negative, but should only
// deallocate once.
if (shouldFree)
{
if (isWeak)
{
if (!objc_delete_weak_refs(obj))
{
return NO;
}
}
return YES;
}
return NO;
}
void objc_release_fast_np(id obj)
{
if (objc_release_fast_no_destroy_np(obj))
{
[obj dealloc];
}
return [obj retain];
}
static inline void release(id obj)
@ -203,14 +299,7 @@ static inline void release(id obj)
}
if (objc_test_class_flag(cls, objc_class_flag_fast_arc))
{
intptr_t *refCount = ((intptr_t*)obj) - 1;
// We allow refcounts to run into the negative, but should only
// deallocate once.
if (__sync_sub_and_fetch(refCount, 1) == -1)
{
objc_delete_weak_refs(obj);
[obj dealloc];
}
objc_release_fast_np(obj);
return;
}
[obj release];
@ -312,7 +401,6 @@ unsigned long objc_arc_autorelease_count_for_object_np(id obj)
return count;
}
void *objc_autoreleasePoolPush(void)
{
initAutorelease();
@ -467,17 +555,19 @@ id objc_storeStrong(id *addr, id value)
// Weak references
////////////////////////////////////////////////////////////////////////////////
static int weakref_class;
typedef struct objc_weak_ref
{
void *isa;
id obj;
id *ref[4];
struct objc_weak_ref *next;
size_t weak_count;
} WeakRef;
static int weak_ref_compare(const id obj, const WeakRef weak_ref)
static int weak_ref_compare(const id obj, const WeakRef *weak_ref)
{
return obj == weak_ref.obj;
return obj == weak_ref->obj;
}
static uint32_t ptr_hash(const void *ptr)
@ -486,23 +576,14 @@ static uint32_t ptr_hash(const void *ptr)
// always be 0, which is not so useful for a hash value
return ((uintptr_t)ptr >> 4) | ((uintptr_t)ptr << ((sizeof(id) * 8) - 4));
}
static int weak_ref_hash(const WeakRef weak_ref)
{
return ptr_hash(weak_ref.obj);
}
static int weak_ref_is_null(const WeakRef weak_ref)
static int weak_ref_hash(const WeakRef *weak_ref)
{
return weak_ref.obj == NULL;
return ptr_hash(weak_ref->obj);
}
const static WeakRef NullWeakRef;
#define MAP_TABLE_NAME weak_ref
#define MAP_TABLE_COMPARE_FUNCTION weak_ref_compare
#define MAP_TABLE_HASH_KEY ptr_hash
#define MAP_TABLE_HASH_VALUE weak_ref_hash
#define MAP_TABLE_VALUE_TYPE struct objc_weak_ref
#define MAP_TABLE_VALUE_NULL weak_ref_is_null
#define MAP_TABLE_VALUE_PLACEHOLDER NullWeakRef
#define MAP_TABLE_ACCESS_BY_REFERENCE 1
#define MAP_TABLE_SINGLE_THREAD 1
#define MAP_TABLE_NO_LOCK 1
@ -520,12 +601,58 @@ PRIVATE void init_arc(void)
#endif
}
/**
* Load from a weak pointer and return whether this really was a weak
* reference or a strong (not deallocatable) object in a weak pointer. The
* object will be stored in `obj` and the weak reference in `ref`, if one
* exists.
*/
__attribute__((always_inline))
static BOOL loadWeakPointer(id *addr, id *obj, WeakRef **ref)
{
id oldObj = *addr;
if (oldObj == nil)
{
*ref = NULL;
*obj = nil;
return NO;
}
if (classForObject(oldObj) == (Class)&weakref_class)
{
*ref = (WeakRef*)oldObj;
*obj = (*ref)->obj;
return YES;
}
*ref = NULL;
*obj = oldObj;
return NO;
}
__attribute__((always_inline))
static inline BOOL weakRefRelease(WeakRef *ref)
{
ref->weak_count--;
if (ref->weak_count == 0)
{
free(ref);
return YES;
}
return NO;
}
void* block_load_weak(void *block);
id objc_storeWeak(id *addr, id obj)
{
id old = *addr;
LOCK_FOR_SCOPE(&weakRefLock);
WeakRef *oldRef;
id old;
loadWeakPointer(addr, &old, &oldRef);
// If the old and new values are the same, then we don't need to do anything.
if (old == obj)
{
return obj;
}
BOOL isGlobalObject = (obj == nil) || isSmallObject(obj);
Class cls = Nil;
if (!isGlobalObject)
@ -538,33 +665,51 @@ id objc_storeWeak(id *addr, id obj)
isGlobalObject = YES;
}
}
if (cls && objc_test_class_flag(cls, objc_class_flag_fast_arc))
{
intptr_t *refCount = ((intptr_t*)obj) - 1;
if (obj && *refCount < 0)
{
obj = nil;
cls = Nil;
}
}
LOCK_FOR_SCOPE(&weakRefLock);
if (nil != old)
if (obj && cls && objc_test_class_flag(cls, objc_class_flag_fast_arc))
{
WeakRef *oldRef = weak_ref_table_get(weakRefs, old);
while (NULL != oldRef)
uintptr_t *refCount = ((uintptr_t*)obj) - 1;
if (obj)
{
for (int i=0 ; i<4 ; i++)
{
if (oldRef->ref[i] == addr)
uintptr_t refCountVal = __sync_fetch_and_add(refCount, 0);
uintptr_t newVal = refCountVal;
do {
refCountVal = newVal;
long realCount = refCountVal & refcount_mask;
// If this object has already been deallocated (or is in the
// process of being deallocated) then don't bother storing it.
if (realCount < 0)
{
oldRef->ref[i] = 0;
oldRef = 0;
obj = nil;
cls = Nil;
break;
}
}
oldRef = (oldRef == NULL) ? NULL : oldRef->next;
// The weak ref flag is monotonic (it is set, never cleared) so
// don't bother trying to re-set it.
if ((refCountVal & weak_mask) == weak_mask)
{
break;
}
// Set the flag in the reference count to indicate that a weak
// reference has been taken.
//
// We currently hold the weak ref lock, so another thread
// racing to deallocate this object will have to wait to do so
// if we manage to do the reference count update first. This
// shouldn't be possible, because `obj` should be a strong
// reference and so it shouldn't be possible to deallocate it
// while we're assigning it.
uintptr_t updated = ((uintptr_t)realCount | weak_mask);
newVal = __sync_val_compare_and_swap(refCount, refCountVal, updated);
} while (newVal != refCountVal);
}
}
// If an old ref exists, decrement its reference count. This may also
// delete the weak reference control block.
if (oldRef != NULL)
{
weakRefRelease(oldRef);
}
// If we're storing nil, then just write a null pointer.
if (nil == obj)
{
*addr = obj;
@ -573,115 +718,90 @@ id objc_storeWeak(id *addr, id obj)
if (isGlobalObject)
{
// If this is a global object, it's never deallocated, so secretly make
// this a strong reference
// this a strong reference.
*addr = obj;
return obj;
}
if (&_NSConcreteMallocBlock == cls)
{
obj = block_load_weak(obj);
}
else if (objc_test_class_flag(cls, objc_class_flag_fast_arc))
{
if ((*(((intptr_t*)obj) - 1)) < 0)
{
return nil;
}
}
else
{
obj = _objc_weak_load(obj);
}
if (nil != obj)
{
WeakRef *ref = weak_ref_table_get(weakRefs, obj);
while (NULL != ref)
{
for (int i=0 ; i<4 ; i++)
{
if (0 == ref->ref[i])
{
ref->ref[i] = addr;
*addr = obj;
return obj;
}
}
if (ref->next == NULL)
{
break;
}
ref = ref->next;
}
if (NULL != ref)
if (ref == NULL)
{
ref->next = calloc(sizeof(WeakRef), 1);
ref->next->ref[0] = addr;
ref = calloc(1, sizeof(WeakRef));
ref->isa = (Class)&weakref_class;
ref->obj = obj;
ref->weak_count = 1;
weak_ref_insert(weakRefs, ref);
}
else
{
WeakRef newRef = {0};
newRef.obj = obj;
newRef.ref[0] = addr;
weak_ref_insert(weakRefs, newRef);
ref->weak_count++;
}
*addr = (id)ref;
}
*addr = obj;
return obj;
}
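For orientation, a hedged sketch of how these entry points fit together from a
caller's point of view (the variables are hypothetical; in practice the
compiler emits these calls for __weak variables rather than user code calling
them directly):

	/* Illustrative only; someObject and otherObject are hypothetical strong refs. */
	static void weak_api_sketch(id someObject, id otherObject)
	{
		id weakSlot = nil;
		objc_initWeak(&weakSlot, someObject);          // start tracking someObject
		id strong = objc_loadWeakRetained(&weakSlot);  // nil if it was deallocated
		if (nil != strong)
		{
			// ... use strong ...
			objc_release(strong);
		}
		objc_storeWeak(&weakSlot, otherObject);        // retarget the weak reference
		objc_destroyWeak(&weakSlot);                   // drop it at end of scope
	}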
static void zeroRefs(WeakRef *ref, BOOL shouldFree)
BOOL objc_delete_weak_refs(id obj)
{
if (NULL != ref->next)
{
zeroRefs(ref->next, YES);
}
for (int i=0 ; i<4 ; i++)
LOCK_FOR_SCOPE(&weakRefLock);
if (objc_test_class_flag(classForObject(obj), objc_class_flag_fast_arc))
{
if (0 != ref->ref[i])
// If another thread has done a load of a weak reference, then it will
// have incremented the reference count with the lock held. It may
// have done so in between this thread's decrementing the reference
// count and its acquiring the lock. In this case, report failure.
uintptr_t *refCount = ((uintptr_t*)obj) - 1;
if ((long)((__sync_fetch_and_add(refCount, 0) & refcount_mask)) < 0)
{
*ref->ref[i] = 0;
return NO;
}
}
if (shouldFree)
{
free(ref);
}
}
void objc_delete_weak_refs(id obj)
{
LOCK_FOR_SCOPE(&weakRefLock);
WeakRef *oldRef = weak_ref_table_get(weakRefs, obj);
if (0 != oldRef)
{
zeroRefs(oldRef, NO);
// Zero the object pointer. This prevents any other weak
// accesses from loading from this.
oldRef->obj = nil;
// The address of obj is likely to be reused, so remove it from
// the table so that we don't accidentally alias weak
// references
weak_ref_remove(weakRefs, obj);
// If the weak reference count is zero, then we should have
// already removed this.
assert(oldRef->weak_count > 0);
}
return YES;
}
id objc_loadWeakRetained(id* addr)
{
LOCK_FOR_SCOPE(&weakRefLock);
id obj = *addr;
if (nil == obj) { return nil; }
// Small objects don't need reference count modification
if (isSmallObject(obj))
id obj;
WeakRef *ref;
// If this is really a strong reference (nil, or a non-deallocatable
// object), just return it.
if (!loadWeakPointer(addr, &obj, &ref))
{
return obj;
}
// The object cannot be deallocated while we hold the lock (release
// will acquire the lock before attempting to deallocate)
if (obj == nil)
{
// If we've destroyed this weak ref, then make sure that we also deallocate the object.
if (weakRefRelease(ref))
{
*addr = nil;
}
return nil;
}
Class cls = classForObject(obj);
if (&_NSConcreteMallocBlock == cls)
{
obj = block_load_weak(obj);
}
else if (objc_test_class_flag(cls, objc_class_flag_fast_arc))
{
if ((*(((intptr_t*)obj) - 1)) < 0)
{
return nil;
}
}
else
else if (!objc_test_class_flag(cls, objc_class_flag_fast_arc))
{
obj = _objc_weak_load(obj);
}
@ -695,7 +815,24 @@ id objc_loadWeak(id* object)
void objc_copyWeak(id *dest, id *src)
{
objc_release(objc_initWeak(dest, objc_loadWeakRetained(src)));
// Don't retain or release. While the weak ref lock is held, we know that
// the object can't be deallocated, so we just move the value and update
// the weak reference table entry to indicate the new address.
LOCK_FOR_SCOPE(&weakRefLock);
id obj;
WeakRef *srcRef;
WeakRef *dstRef;
loadWeakPointer(dest, &obj, &dstRef);
loadWeakPointer(src, &obj, &srcRef);
*dest = *src;
if (srcRef)
{
srcRef->weak_count++;
}
if (dstRef)
{
weakRefRelease(dstRef);
}
}
void objc_moveWeak(id *dest, id *src)
@ -704,19 +841,15 @@ void objc_moveWeak(id *dest, id *src)
// the object can't be deallocated, so we just move the value and update
// the weak reference table entry to indicate the new address.
LOCK_FOR_SCOPE(&weakRefLock);
id obj;
WeakRef *oldRef;
// If the destination is a weak ref, free it.
loadWeakPointer(dest, &obj, &oldRef);
*dest = *src;
*src = nil;
WeakRef *oldRef = weak_ref_table_get(weakRefs, *dest);
while (NULL != oldRef)
if (oldRef != NULL)
{
for (int i=0 ; i<4 ; i++)
{
if (oldRef->ref[i] == src)
{
oldRef->ref[i] = dest;
return;
}
}
weakRefRelease(oldRef);
}
}
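A similarly hedged illustration of the difference between copying and moving a
weak reference (hypothetical slots; srcSlot is assumed to already hold a weak
reference, e.g. one set up with objc_initWeak):

	static void copy_vs_move_sketch(id *srcSlot)
	{
		id copied = nil;
		id moved  = nil;
		objc_copyWeak(&copied, srcSlot);  // both slots track the object; the
		                                  // control block's weak_count is bumped
		objc_moveWeak(&moved, srcSlot);   // moved takes over srcSlot's reference
		                                  // and *srcSlot is left as nil
		objc_destroyWeak(&copied);
		objc_destroyWeak(&moved);
		// *srcSlot is now nil, so there is nothing further for the caller to drop.
	}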

@ -247,7 +247,7 @@ static void deallocHiddenClass(id obj, SEL _cmd)
DESTROY_LOCK(&list->lock);
cleanupReferenceList(list);
freeReferenceList(list->next);
fprintf(stderr, "Deallocating dtable %p\n", hiddenClass->dtable);
//fprintf(stderr, "Deallocating dtable %p\n", hiddenClass->dtable);
free_dtable(hiddenClass->dtable);
// We shouldn't have any subclasses left at this point
assert(hiddenClass->subclass_list == 0);

@ -42,9 +42,9 @@ static void *_HeapBlockByRef = (void*)1;
/**
* Returns the Objective-C type encoding for the block.
*/
const char *block_getType_np(void *b)
const char *block_getType_np(const void *b)
{
struct Block_layout *block = b;
const struct Block_layout *block = b;
if ((NULL == block) || !(block->flags & BLOCK_HAS_SIGNATURE))
{
return NULL;
@ -60,8 +60,10 @@ static int increment24(int *ref)
{
int old = *ref;
int val = old & BLOCK_REFCOUNT_MASK;
// FIXME: We should gracefully handle refcount overflow, but for now we
// just give up
if (val == BLOCK_REFCOUNT_MASK)
{
return val;
}
assert(val < BLOCK_REFCOUNT_MASK);
if (!__sync_bool_compare_and_swap(ref, old, old+1))
{
@ -74,8 +76,10 @@ static int decrement24(int *ref)
{
int old = *ref;
int val = old & BLOCK_REFCOUNT_MASK;
// FIXME: We should gracefully handle refcount overflow, but for now we
// just give up
if (val == BLOCK_REFCOUNT_MASK)
{
return val;
}
assert(val > 0);
if (!__sync_bool_compare_and_swap(ref, old, old-1))
{
@ -231,10 +235,10 @@ void _Block_object_dispose(const void *object, const int flags)
// Copies a block to the heap if it's still on the stack, or increments its retain count.
void *_Block_copy(void *src)
void *_Block_copy(const void *src)
{
if (NULL == src) { return NULL; }
struct Block_layout *self = src;
struct Block_layout *self = (struct Block_layout*)src;
struct Block_layout *ret = self;
extern void _NSConcreteStackBlock;
@ -265,10 +269,10 @@ void *_Block_copy(void *src)
}
// Releases a block and frees the memory when the retain count hits zero.
void _Block_release(void *src)
void _Block_release(const void *src)
{
if (NULL == src) { return; }
struct Block_layout *self = src;
struct Block_layout *self = (struct Block_layout*)src;
extern void _NSConcreteStackBlock;
extern void _NSConcreteMallocBlock;

@ -54,6 +54,13 @@ static BOOL ownsMethod(Class cls, SEL sel)
return NO;
}
#ifdef DEBUG_ARC_COMPAT
#define ARC_DEBUG_LOG(...) fprintf(stderr, __VA_ARGS__)
#else
#define ARC_DEBUG_LOG(...) do {} while(0)
#endif
/**
* Checks whether the class implements memory management methods, and whether
* they are safe to use with ARC.
@ -71,18 +78,21 @@ static void checkARCAccessors(Class cls)
struct objc_slot *slot = objc_get_slot2(cls, retain);
if ((NULL != slot) && !ownsMethod(slot->owner, isARC))
{
ARC_DEBUG_LOG("%s does not support ARC correctly (implements retain)\n", cls->name);
objc_clear_class_flag(cls, objc_class_flag_fast_arc);
return;
}
slot = objc_get_slot2(cls, release);
if ((NULL != slot) && !ownsMethod(slot->owner, isARC))
{
ARC_DEBUG_LOG("%s does not support ARC correctly (implements release)\n", cls->name);
objc_clear_class_flag(cls, objc_class_flag_fast_arc);
return;
}
slot = objc_get_slot2(cls, autorelease);
if ((NULL != slot) && !ownsMethod(slot->owner, isARC))
{
ARC_DEBUG_LOG("%s does not support ARC correctly (implements autorelease)\n", cls->name);
objc_clear_class_flag(cls, objc_class_flag_fast_arc);
return;
}

@ -330,7 +330,7 @@ static inline _Unwind_Reason_Code internal_objc_personality(int version,
// The object to return
void *object = NULL;
#ifdef NO_OBJCXX
#ifndef NO_OBJCXX
if (exceptionClass == cxx_exception_class)
{
int objcxx;
@ -421,7 +421,7 @@ static inline _Unwind_Reason_Code internal_objc_personality(int version,
// If this is not a cleanup, ignore it and keep unwinding.
//if (check_action_record(context, foreignException, &lsda,
//action.action_record, thrown_class, &selector) != handler_cleanup)
if (handler != handler_cleanup)
if ((handler != handler_cleanup) && !objcxxException)
{
DEBUG_LOG("Ignoring handler! %d\n",handler);
return continueUnwinding(exceptionObject, context);

@ -13,9 +13,9 @@
#define BLOCKS_EXPORT extern
#endif
BLOCKS_EXPORT void *_Block_copy(void *);
BLOCKS_EXPORT void _Block_release(void *);
BLOCKS_EXPORT const char *block_getType_np(void *b) OBJC_NONPORTABLE;
BLOCKS_EXPORT void *_Block_copy(const void *);
BLOCKS_EXPORT void _Block_release(const void *);
BLOCKS_EXPORT const char *block_getType_np(const void *b) OBJC_NONPORTABLE;
#define Block_copy(x) ((__typeof(x))_Block_copy((void *)(x)))
#define Block_release(x) _Block_release((void *)(x))
#define Block_copy(x) ((__typeof(x))_Block_copy((const void *)(x)))
#define Block_release(x) _Block_release((const void *)(x))
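For reference, typical use of these macros is unchanged by the const-correctness
fix; a minimal sketch (the header path is an assumption, adjust to wherever the
declarations above are installed):

	#include <Block.h>   /* assumed install path for the declarations above */

	static int use_block(void)
	{
		int base = 10;
		int (^adder)(int) = ^(int x) { return base + x; };  // stack block literal
		int (^heapAdder)(int) = Block_copy(adder);           // heap copy (or +1)
		int r = heapAdder(32);                                // r == 42
		Block_release(heapAdder);                             // balance the copy
		return r;
	}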

@ -32,6 +32,12 @@ id objc_loadWeakRetained(id* obj);
* Retains the argument. Equivalent to [obj retain].
*/
id objc_retain(id obj);
/**
* Retains the argument, assuming that the argument is a normal object and has
* its reference count managed by the runtime.
* This is intended to implement `-retain` in ARC-compatible root classes.
*/
id objc_retain_fast_np(id obj) OBJC_NONPORTABLE;
/**
* Retains and autoreleases an object. Equivalent to [[obj retain] autorelease].
*/
@ -85,6 +91,30 @@ void objc_destroyWeak(id* addr);
* Equivalent to objc_copyWeak(), but may also set src to nil.
*/
void objc_moveWeak(id *dest, id *src);
/**
* Releases the argument, assuming that the argument is a normal object and has
* its reference count managed by the runtime. If the retain count reaches
* zero then all weak references will be zeroed and the object will be
* destroyed.
*
* This is intended to implement `-release` in ARC-compatible root
* classes.
*/
void objc_release_fast_np(id obj) OBJC_NONPORTABLE;
/**
* Releases the argument, assuming that the argument is a normal object and has
* its reference count managed by the runtime. If the retain count reaches
* zero then all weak references will be zeroed but the object will *NOT* be
* destroyed.
*
* This is intended to implement `NSDecrementExtraRefCountWasZero` for use with
* ARC-compatible classes.
*/
BOOL objc_release_fast_no_destroy_np(id obj) OBJC_NONPORTABLE;
/**
* Returns the retain count of an object.
*/
size_t object_getRetainCount_np(id obj) OBJC_NONPORTABLE;
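Putting the new hooks together, a hedged sketch of an ARC-compatible root class
built on them (the class is hypothetical; compare the Test root class used in
this commit's tests, which currently goes through objc_retain/objc_release
instead):

	__attribute__((objc_root_class))
	@interface SketchRoot { id isa; }
	@end
	@implementation SketchRoot
	+ (Class)class { return self; }
	+ (id)new { return class_createInstance(self, 0); }
	- (void)dealloc { object_dispose(self); }
	- (id)retain { return objc_retain_fast_np(self); }
	- (void)release { objc_release_fast_np(self); }  // zeroes weak refs, then -dealloc
	- (size_t)retainCount { return object_getRetainCount_np(self); }
	// Owning this method means checkARCAccessors() will not clear
	// objc_class_flag_fast_arc, so objc_retain()/objc_release() can keep
	// using the inline reference count without a message send.
	- (void)_ARCCompliantRetainRelease {}
	@end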
/**
* Releases an object. Equivalent to [obj release].
*/
@ -94,9 +124,11 @@ void objc_release(id obj);
* weak pointers will return 0. This function should be called in -release,
* before calling [self dealloc].
*
* This will return `YES` if the weak references were deleted, `NO` otherwise.
*
* Nonstandard extension.
*/
void objc_delete_weak_refs(id obj);
BOOL objc_delete_weak_refs(id obj);
/**
* Returns the total number of objects in the ARC-managed autorelease pool.
*/
