Removed GCKit.

parent 3769a48cd7
commit 3b96938128
@@ -1,20 +0,0 @@
Copyright (c) 2009 David Chisnall

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
@@ -1,126 +0,0 @@
GCKit
=====

GCKit is a garbage collection kit designed for Objective-C. It is a hybrid
collector, using a combination of reference counting, cycle detection, and
tracing. The design goals are:

- Easy interoperability with non-GC code using retain/release semantics.
- Easy interoperability with code designed for Apple's GC implementation.
- Support for assisted reference counting with no compiler support.
- Support for automatic garbage collection with compiler support.
- Low overheads.
- Performance in memory-constrained conditions, without causing undue swapping.

Memory Types
------------

There are three types of memory in GCKit's model:

- Objects
- Traced regions
- Untraced regions

Objects have a fixed layout and may contain strong, weak, and traced pointers.

Traced regions include the stack and any regions explicitly designated for
tracing. Stacks are traced synchronously, from the thread that owns them,
while other regions are not.

Untraced regions are opaque to GCKit. They may contain pointers to GC'd
objects only if the pointers are manually reference counted using GCRetain()
and GCRelease().

Object Types
------------

GCKit will allocate two kinds of memory: Objective-C objects and buffers.

Reference Types
---------------

There are four kinds of reference (pointer) in GCKit's memory model:

- Strong.
- Zeroing weak.
- Traced.
- Invisible.

Strong references use reference counting. When an object is strongly assigned
to a pointer, its reference count is incremented and the reference count of the
old object is decremented. Objects will never be deleted as long as they have
a strong reference count greater than 0 and their references cannot all be
accounted for by cycles.
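As a minimal sketch of assisted reference counting without compiler support, a
strong setter written against GCRetain() and GCRelease() might look like the
following. The accessor and ivar names are illustrative, not part of GCKit:

    - (void) setTarget: (id)aTarget
    {
        id old = target;
        // Strong assignment: retain the new value, then release the old one.
        target = GCRetain(aTarget);
        GCRelease(old);
    }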
Zeroing weak references are also reference counted; however, they do not
prevent an object from being finalized. Zeroing weak references follow similar
assignment semantics to strong references. When an object is only referenced
by zeroing weak references, it will be finalized, but not freed. Subsequent
reads of zeroing weak pointers to the object will decrement its reference count
and it will be freed once this reaches 0.

Traced pointers do not perform any reference counting. All pointers on the
stack are traced, as are pointers in memory buffers explicitly allocated for
tracing. Objects with a reference count of 0 will not be freed until tracing
these regions determines that there are no unseen references to them.

Copying traced pointers directly between stacks is not supported. If a thread
somehow gets a reference to another thread's stack and copies a pointer, the
compiler will not generate a write barrier. This means that, if the two
threads' stacks are not traced in the right order relative to each other (a 50%
chance) and there are no locatable heap references to the object, then it may
be freed.

Invisible pointers are outside regions that the garbage collector knows about.
Objects pointed to by these may be deleted if their reference count hits 0.

Interior pointers are not supported. A pointer to the start of an object or
managed buffer must be maintained.

Object Deletion
---------------

Objects marked as using CoreFoundation semantics are deleted as soon as their
reference counts hit 0.

All other objects are marked as potential garbage once their reference count
drops to a value that is equal to the number of references that the cycle
detector can find. If A and B both hold strong references to each other, then
they are marked as potential garbage once their reference counts hit 1.
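To make the A-and-B case concrete, here is an illustrative sketch; the `Pair`
class and its `other` ivar are hypothetical, modelled on the Pair test class
bundled with GCKit:

    Pair *a = GCRetain([Pair new]);
    Pair *b = GCRetain([Pair new]);
    a->other = GCRetain(b);   // each object now has one external reference
    b->other = GCRetain(a);   // and one reference from inside the cycle
    GCRelease(a);             // dropping the external references leaves each
    GCRelease(b);             // count at 1, equal to what the cycle detector
                              // can find, so both become potential garbage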
Interfaces
----------

Writing pointers into traced heap memory requires a write barrier. The
objc_assign_strongCast() function generates this barrier for a single write
(another function, as yet unwritten, will generate it for multiple writes).

Assignments to instance variables or globals must increment the strong
reference count of the new value and decrement that of the old one. The
objc_assignIvar() and objc_assignGlobal() functions perform this for you.

If you are storing pointers in memory that is not managed by GCKit, then you
must call the GCRetain() function on the pointer to prevent the object from
being freed and the GCRelease() function when you are finished with it.
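For example, storing a newly allocated object into a traced heap buffer goes
through the single-write barrier. This sketch follows the bundled test program
(`SimpleObject` is one of its test classes):

    id *slot = GCAllocateBufferWithZone(NULL, sizeof(id), YES); // traced region
    slot[0] = objc_assign_strongCast([SimpleObject new], slot); // write barrier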
Degenerate Cases
----------------

Objects using the Core Foundation or OpenStep models may set a flag indicating
that they do not contain cycles (or, more accurately, that the programmer takes
responsibility for freeing cycles). In this case, GCKit will trace the stack,
catching bugs where you might have used -release instead of -autorelease, but
aside from that it will not provide any benefits.

Objects may also be marked as having CF semantics. In this case, they will be
checked for cycles (unless explicitly marked as acyclic), but will be finalised
when their reference count hits zero and subsequently destroyed when their weak
reference count hits zero.

Finally, you can use traced memory for everything. Don't do this. GCKit is
designed to be efficient when only a relatively small proportion of allocated
memory needs to be traced.

Outstanding Bugs
----------------

Lots. Seriously, don't use GCKit yet. Cycles in traced memory are not yet
detected. Much of GCKit is completely untested.
@@ -1,28 +0,0 @@
include $(GNUSTEP_MAKEFILES)/common.make

#LIBRARY_NAME = GCKit
TOOL_NAME = GCKit

GCKit_VERSION = 1

GCKit_OBJC_FILES = \
	cycle.m\
	inline.m\
	malloc.m\
	thread.m\
	trace.m\
	test.m\
	visit.m\
	workqueue.m

GCKit_HEADER_FILES_INSTALL_DIR = GCKit

GCKit_HEADER_FILES = \
	GCKit.h

GCKit_LIBRARIES_DEPEND_UPON += -lpthread

GCKit_OBJCFLAGS += -Werror -std=c99 -fno-inline

include $(GNUSTEP_MAKEFILES)/library.make
include $(GNUSTEP_MAKEFILES)/tool.make
@@ -1,29 +0,0 @@
/*
 * Category on NSObject to support automatic cycle detection.
 */
@implementation NSObject (CycleDetection)
/**
 * Increments the 16-bit reference count. Replaces the version that sets a
 * one-word reference count.
 */
- (id) retain
{
	return GCRetain(self);
}
/**
 * Decrements the reference count for an object. If the reference count
 * reaches zero, calls -dealloc. If the reference count is not zero then the
 * object may be part of a cycle. In this case, it is added to a buffer and
 * cycle detection is invoked later.
 */
- (void) release
{
	GCRelease(self);
}
/**
 * Dealloc no longer frees objects; they are freed after -dealloc is called.
 */
- (void) dealloc
{
}
@end
@@ -1,3 +0,0 @@
void GCScanForCycles(id *loopBuffer, unsigned count);
id GCRetain(id anObject);
void GCRelease(id anObject);
@@ -1,253 +0,0 @@
|
||||
#include "../objc/runtime.h"
|
||||
#import "object.h"
|
||||
#import "malloc.h"
|
||||
#import "thread.h"
|
||||
#import "visit.h"
|
||||
#include <stdio.h>
|
||||
|
||||
id GCRetain(id anObject)
|
||||
{
|
||||
GCIncrementRetainCount(anObject);
|
||||
GCSetFlag(anObject, GCFlagEscaped);
|
||||
return anObject;
|
||||
}
|
||||
/**
|
||||
* Collect garbage cycles. Inspects every object in the loopBuffer and frees
|
||||
* any that are part of garbage cycles. This is an implementation of the
|
||||
* algorithm described in:
|
||||
*
|
||||
* http://www.research.ibm.com/people/d/dfb/papers/Bacon01Concurrent.pdf
|
||||
*
|
||||
*/
|
||||
void GCRelease(id anObject)
|
||||
{
|
||||
	// If decrementing the strong retain count brings it to 0, the object is
	// probably garbage. Add it to the list to trace and throw it away if it is.
|
||||
if (GCDecrementRetainCount(anObject) <= 0)
|
||||
{
|
||||
// FIXME: Discard it immediately if it is using CF semantics
|
||||
// Mark this object as in-use or free
|
||||
GCSetColourOfObject(anObject, GCColourBlack);
|
||||
// Clear its buffered flag (we won't look at it again)
|
||||
GCClearFlag(anObject, GCFlagBuffered);
|
||||
// Add it for freeing if tracing doesn't find any references to it
|
||||
GCAddObject(anObject);
|
||||
}
|
||||
else
|
||||
{
|
||||
// If this object is not marked as acyclic
|
||||
if (GCColourOfObject(anObject) == GCColourGreen)
|
||||
{
|
||||
// Mark it as the possible root of a cycle. The object was
|
||||
// released, but there are still strong references to it. That
|
||||
// means that it has
|
||||
GCSetColourOfObject(anObject, GCColourPurple);
|
||||
GCSetFlag(anObject, GCFlagBuffered);
|
||||
GCAddObject(anObject);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void GCAddObjectForTracing(id object);
|
||||
|
||||
/**
|
||||
* Scan children turning them black and incrementing the reference count. Used
|
||||
* for objects which have been determined to be acyclic.
|
||||
*/
|
||||
static void GCScanBlackChild(id anObject, void *unused, BOOL isWeak)
|
||||
{
|
||||
GCIncrementRetainCount(anObject);
|
||||
if (GCColourOfObject(anObject) != GCColourBlack)
|
||||
{
|
||||
GCSetColourOfObject(anObject, GCColourBlack);
|
||||
GCVisitChildren(anObject, GCScanBlackChild, NULL, NO);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Scan objects turning them black if they are not part of a cycle and white if
|
||||
* they are.
|
||||
*/
|
||||
static void GCScan(id anObject, void* unused, BOOL isWeak)
|
||||
{
|
||||
GCColour colour = GCColourOfObject(anObject);
|
||||
// If the object is not grey, then we've visited it already.
|
||||
if (colour == GCColourGrey)
|
||||
{
|
||||
//fprintf(stderr, "%x has retain count of %d\n", (int)anObject, (int)GCGetRetainCount(anObject));
|
||||
// If the retain count is still > 0, we didn't account for all of the
|
||||
// references with cycle detection, so mark it as black and reset the
|
||||
// retain count of every object that it references.
|
||||
//
|
||||
// If it did reach 0, then this is part of a garbage cycle so colour it
|
||||
// accordingly. Any objects reachable from this object do not get
|
||||
// their reference counts restored.
|
||||
//
|
||||
// FIXME: We need to be able to resurrect objects if they are
|
||||
// GCRetain()'d when they are white
|
||||
if (GCGetRetainCount(anObject) > 0)
|
||||
{
|
||||
GCSetColourOfObject(anObject, GCColourBlack);
|
||||
GCVisitChildren(anObject, GCScanBlackChild, NULL, NO);
|
||||
}
|
||||
else
|
||||
{
|
||||
GCSetColourOfObject(anObject, GCColourWhite);
|
||||
GCVisitChildren(anObject, GCScan, NULL, NO);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Collect objects which are coloured white.
|
||||
*
|
||||
* In the original algorithm, white objects were collected immediately. In
|
||||
* this version, it's possible that they have traced pointers referencing them,
|
||||
* so we defer collection. We can only collect a garbage cycle when there are
|
||||
* no traced pointers to any of the nodes.
|
||||
*/
|
||||
static void GCCollectWhite(id anObject, void *ignored, BOOL isWeak)
|
||||
{
|
||||
//fprintf(stderr, "Looking at object %x with colour %s\n", (unsigned) anObject, [GCStringFromColour(GCColourOfObject(anObject)) UTF8String]);
|
||||
if ((GCColourOfObject(anObject) == GCColourWhite))
|
||||
{
|
||||
GCSetColourOfObject(anObject, GCColourRed);
|
||||
//fprintf(stderr, "%x marked red. Red's dead, baby!\n", (int)anObject);
|
||||
//fprintf(stderr, " has refcount %d!\n", (int)GCGetRetainCount(anObject));
|
||||
GCAddObjectForTracing(anObject);
|
||||
GCVisitChildren(anObject, GCCollectWhite, NULL, NO);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
 * Mark objects grey if they are not already grey.
|
||||
*
|
||||
* Grey indicates that an object is possibly a member of a cycle. We check
|
||||
* that by traversing all reachable objects from the potential root of a cycle,
|
||||
* decrementing their reference count, and marking them grey. If the reference
|
||||
* count drops to 0, it indicates that all of the strong references to this
|
||||
* object come from cycles.
|
||||
*/
|
||||
void GCMarkGreyChildren(id anObject, void *ignored, BOOL isWeak)
|
||||
{
|
||||
//fprintf(stderr, "Marking %x as grey\n", (int)anObject);
|
||||
// FIXME: This should probably check if the colour is green. Green objects
|
||||
// can't be parts of cycles, and we need to restore the green colour after
|
||||
// scanning anyway.
|
||||
GCDecrementRetainCount(anObject);
|
||||
if (GCColourOfObject(anObject) != GCColourGrey)
|
||||
{
|
||||
GCSetColourOfObject(anObject, GCColourGrey);
|
||||
GCVisitChildren(anObject, GCMarkGreyChildren, NULL, NO);
|
||||
}
|
||||
}
|
||||
|
||||
void GCScanForCycles(id *loopBuffer, unsigned count)
|
||||
{
|
||||
//fprintf(stderr, "Starting to detect cycles...\n");
|
||||
// Mark Roots
|
||||
id next;
|
||||
for (unsigned i=0 ; i<count ; i++)
|
||||
{
|
||||
next = loopBuffer[i];
|
||||
//fprintf(stderr, "Looking at %x\n", (int)next);
|
||||
// Check that this object is still eligible for cycle detection
|
||||
if (nil == next) continue;
|
||||
if (GCTestFlag(next, GCFlagNotObject)) continue;
|
||||
if (!GCTestFlag(next, GCFlagBuffered))
|
||||
{
|
||||
loopBuffer[i] = nil;
|
||||
continue;
|
||||
}
|
||||
GCColour colour = GCColourOfObject(next);
|
||||
// If this is the potential root of a cycle (which it might not be
|
||||
// anymore, if something else has changed its colour)
|
||||
if (colour == GCColourPurple)
|
||||
{
|
||||
// Mark it, and all of its children, as grey.
|
||||
//fprintf(stderr, "Marking grey: %d...\n", colour);
|
||||
GCSetColourOfObject(next, GCColourGrey);
|
||||
GCVisitChildren(next, GCMarkGreyChildren, nil, NO);
|
||||
}
|
||||
else
|
||||
{
|
||||
GCClearFlag(next, GCFlagBuffered);
|
||||
// If the object's refcount is 0, add it to the list to free if the
|
||||
// tracer can't find them.
|
||||
if ((colour == GCColourBlack) && (GCGetRetainCount(next) <= 0))
|
||||
{
|
||||
GCAddObjectForTracing(next);
|
||||
}
|
||||
loopBuffer[i] = nil;
|
||||
}
|
||||
}
|
||||
// Scan roots
|
||||
for (unsigned i=0 ; i<count ; i++)
|
||||
{
|
||||
next = loopBuffer[i];
|
||||
if (nil == next) continue;
|
||||
//fprintf(stderr, "scanning object...\n");
|
||||
GCScan(next, NULL, NO);
|
||||
}
|
||||
|
||||
for (unsigned i=0 ; i<count ; i++)
|
||||
{
|
||||
next = loopBuffer[i];
|
||||
if (nil == next) continue;
|
||||
GCCollectWhite(next, NULL, NO);
|
||||
}
|
||||
void GCRunTracerIfNeeded(BOOL);
|
||||
GCRunTracerIfNeeded(YES);
|
||||
}
|
||||
|
||||
#if 0
|
||||
// Code from the old GCKit for drawing pretty pictures.
|
||||
// FIXME: Make it draw pretty pictures with the new GCKit too.
|
||||
/**
|
||||
* Table of objects that have already been visualised.
|
||||
*/
|
||||
NSHashTable __thread drawnObjects;
|
||||
/**
|
||||
* Recursively output connections from this object in GraphViz .dot format.
|
||||
*/
|
||||
void vizGraph(id self, SEL _cmd, NSString *parent)
|
||||
{
|
||||
NSString *me = [NSString stringWithFormat:@"object%d", (unsigned)self];
|
||||
if (NULL != NSHashGet(drawnObjects, self))
|
||||
{
|
||||
if (nil != parent)
|
||||
{
|
||||
printf("\t%s -> %s\n", [parent UTF8String], [me UTF8String]);
|
||||
}
|
||||
return;
|
||||
}
|
||||
// Add the node:
|
||||
if (GCColourOfObject(self) == black)
|
||||
{
|
||||
printf("\t%s [style=filled, fillcolor=black, fontcolor=white, label=\"%s\"]\n", [me UTF8String], self->class_pointer->name);
|
||||
}
|
||||
else
|
||||
{
|
||||
printf("\t%s [style=filled, fillcolor=%s, label=\"%s\"]\n", [me UTF8String], [GCStringFromColour(GCColourOfObject(self)) UTF8String], self->class_pointer->name);
|
||||
}
|
||||
// Add the connection to the parent
|
||||
if (nil != parent)
|
||||
{
|
||||
printf("\t%s -> %s\n", [parent UTF8String], [me UTF8String]);
|
||||
}
|
||||
NSHashInsert(drawnObjects, self);
|
||||
for_all_children(self, (IMP)vizGraph, _cmd, me);
|
||||
}
|
||||
/**
|
||||
* Print a GraphViz-compatible graph of all objects reachable from this one and
|
||||
* their colours.
|
||||
*/
|
||||
void visObject(id object, NSString *graphName)
|
||||
{
|
||||
drawnObjects = NSCreateHashTable(NSNonOwnedPointerHashCallBacks, 100);
|
||||
printf("digraph %s {\n", [graphName UTF8String]);
|
||||
vizGraph(object, @selector(vizGraph:), nil);
|
||||
printf("}\n");
|
||||
NSFreeHashTable(drawnObjects);
|
||||
}
|
||||
#endif
|
||||
@@ -1,6 +0,0 @@
/**
 * Make sure that all inline functions that are part of the public API are emitted.
 */
#include "../objc/runtime.h"
#define GCINLINEPUBLIC
#include "object.h"
@@ -1,31 +0,0 @@
/**
 * malloc.h - defines allocation and deallocation hooks and functions for GCKit.
 */
#include <string.h>

/**
 * Allocate new memory.
 */
extern void *(*gc_alloc_with_zone)(void *zone, size_t bytes);

/**
 * Free memory allocated by gc_alloc_with_zone().
 */
extern void (*gc_free_with_zone)(void *zone, void *mem);

/**
 * Allocates an instance of a class, optionally with some extra bytes at the
 * end.
 */
id GCAllocateObjectWithZone(Class cls, void *zone, size_t extraBytes);
/**
 * Allocates a buffer of the specified size. The third parameter indicates
 * whether this memory should be scanned for untracked references. The
 * buffer itself will be freed when the last reference to it is lost. If the
 * scan parameter is set to YES then pointer assignments in this region should
 * not use strong-cast assigns or GCRetain().
 */
void *GCAllocateBufferWithZone(void *zone, size_t size, BOOL scan);

void GCFreeObject(id object);
void GCFreeObjectUnsafe(id object);
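/*
 * Illustrative usage sketch (not part of the original header): the allocation
 * hooks are plain function pointers, so a host such as GNUstep can route
 * GCKit's allocations through its own zone implementation. The functions
 * below are hypothetical, mirroring the default hooks in malloc.m, which
 * return zeroed memory.
 *
 *     static void *my_alloc(void *zone, size_t bytes) { return calloc(1, bytes); }
 *     static void my_free(void *zone, void *mem) { free(mem); }
 *     ...
 *     gc_alloc_with_zone = my_alloc;
 *     gc_free_with_zone  = my_free;
 */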
@@ -1,220 +0,0 @@
|
||||
#include "../objc/runtime.h"
|
||||
#import "malloc.h"
|
||||
#import "object.h"
|
||||
#import "thread.h"
|
||||
#import "trace.h"
|
||||
#import "cycle.h"
|
||||
#import "visit.h"
|
||||
#import "workqueue.h"
|
||||
#import "static.h"
|
||||
#include <stdlib.h>
|
||||
#include <stdio.h>
|
||||
|
||||
/**
|
||||
* Pointer comparison. Needed for the hash table.
|
||||
*/
|
||||
static int pointer_compare(const void *a, const void *b)
|
||||
{
|
||||
return a == b;
|
||||
}
|
||||
static int pointer_hash(const void *obj)
|
||||
{
|
||||
intptr_t ptr = (intptr_t)obj;
|
||||
return (ptr >> 8) | (ptr << 8);
|
||||
}
|
||||
#define MAP_TABLE_NAME known_object
|
||||
#define MAP_TABLE_COMPARE_FUNCTION pointer_compare
|
||||
#define MAP_TABLE_HASH_KEY pointer_hash
|
||||
#define MAP_TABLE_HASH_VALUE pointer_hash
|
||||
#include "../hash_table.h"
|
||||
|
||||
@interface GCObject
|
||||
- (void)finalize;
|
||||
@end
|
||||
|
||||
static void* malloc_zone_alloc(void *zone, size_t bytes)
|
||||
{
|
||||
return calloc(1, bytes);
|
||||
}
|
||||
|
||||
void *(*gc_alloc_with_zone)(void *zone, size_t bytes) = malloc_zone_alloc;
|
||||
|
||||
static void malloc_zone_free(void *zone, void *mem)
|
||||
{
|
||||
free(mem);
|
||||
}
|
||||
|
||||
void (*gc_free_with_zone)(void *zone, void *mem) = malloc_zone_free;
|
||||
|
||||
/**
|
||||
* Macro for calculating the size of a header structure, including padding
|
||||
* required for alignment.
|
||||
*/
|
||||
#define headerSize(header)\
|
||||
({\
|
||||
size_t headerSize = sizeof(struct header);\
|
||||
/* Everything else expects the isa pointer to be correctly aligned and all\
|
||||
* subsequent ivars will be placed with the assumption that they have the\
|
||||
* correct alignment, so make sure this is really the case. */\
|
||||
if (headerSize % __alignof(void*))\
|
||||
{\
|
||||
		headerSize += __alignof(void*) - (headerSize % __alignof(void*));\
|
||||
}\
|
||||
headerSize;\
|
||||
})
|
||||
|
||||
id GCAllocateObjectWithZone(Class cls, void *zone, size_t extraBytes)
|
||||
{
|
||||
// Allocate space for the header and ivars.
|
||||
size_t allocSize = headerSize(gc_object_header) + class_getInstanceSize(cls);
|
||||
// And for the extra space that we were asked for.
|
||||
allocSize += extraBytes;
|
||||
struct gc_object_header *region = gc_alloc_with_zone(zone, allocSize);
|
||||
region->zone = zone;
|
||||
id obj = (id)((char*)region + headerSize(gc_object_header));
|
||||
obj->isa = cls;
|
||||
// Reference count is 0, so set visited to prevent it from being collected
|
||||
// immediately
|
||||
GCSetFlag(obj, GCFlagVisited);
|
||||
// Mark as free or in use.
|
||||
GCSetColourOfObject(obj, GCColourBlack);
|
||||
// Add to traced map later, if it hasn't been retained
|
||||
GCAddObject(obj);
|
||||
return obj;
|
||||
}
|
||||
|
||||
void *GCAllocateBufferWithZone(void *zone, size_t size, BOOL scan)
|
||||
{
|
||||
size_t allocSize = headerSize(gc_buffer_header) + size;
|
||||
struct gc_buffer_header *region = gc_alloc_with_zone(zone, allocSize);
|
||||
region->size = size;
|
||||
region->object_header.zone = zone;
|
||||
char *buffer = ((char*)region) + headerSize(gc_buffer_header);
|
||||
if (scan)
|
||||
{
|
||||
GCAddBufferForTracing(region);
|
||||
}
|
||||
// Reference count is 0, so set visited to prevent it from being collected
|
||||
// immediately
|
||||
GCSetFlag((id)buffer, GCFlagVisited);
|
||||
GCSetFlag((id)buffer, GCFlagNotObject);
|
||||
// Mark as free or in use.
|
||||
GCSetColourOfObject((id)buffer, GCColourBlack);
|
||||
// Add to traced map later, if it hasn't been retained
|
||||
GCAddObject((id)buffer);
|
||||
return buffer;
|
||||
}
|
||||
|
||||
static void freeObject(id object)
|
||||
{
|
||||
void * addr;
|
||||
if (GCTestFlag(object, GCFlagNotObject))
|
||||
{
|
||||
fprintf(stderr, "Freeing bufer %x\n", (int)object);
|
||||
addr = GCHeaderForBuffer(object);
|
||||
}
|
||||
else
|
||||
{
|
||||
addr = GCHeaderForObject(object);
|
||||
}
|
||||
fprintf(stderr, "Freeing %x\n", (int)object);
|
||||
gc_free_with_zone(GCHeaderForObject(object)->zone, addr);
|
||||
}
|
||||
|
||||
void GCWeakRelease(id anObject)
|
||||
{
|
||||
if (!GCObjectIsDynamic(anObject)) { return; }
|
||||
long count = GCDecrementWeakCount(anObject);
|
||||
// If the object has been finalized and this is the last weak ref, free it.
|
||||
if (count == 0 && GCColourOfObject(anObject) == GCColourOrange)
|
||||
{
|
||||
freeObject(anObject);
|
||||
}
|
||||
}
|
||||
id GCWeakRetain(id anObject)
|
||||
{
|
||||
if (!GCObjectIsDynamic(anObject)) { return anObject; }
|
||||
// If this object has already been finalized, return nil.
|
||||
if (GCColourOfObject(anObject) == GCColourOrange)
|
||||
{
|
||||
return nil;
|
||||
}
|
||||
GCIncrementWeakCount(anObject);
|
||||
return anObject;
|
||||
}
|
||||
// NOTE: Weak read should add the object for tracing.
|
||||
|
||||
void GCAddObjectForTracing(id object);
|
||||
|
||||
static BOOL foundRedObjects;
|
||||
|
||||
static void releaseObjects(id object, void *context, BOOL isWeak)
|
||||
{
|
||||
//fprintf(stderr, "Releasing %x\n", (int)object);
|
||||
if (isWeak)
|
||||
{
|
||||
GCWeakRelease(object);
|
||||
}
|
||||
else
|
||||
{
|
||||
GCColour colour = GCColourOfObject(object);
|
||||
// If we're freeing a cycle, mark this object as orange, finalize it,
|
||||
// then tell the tracing code to really delete it later
|
||||
if (colour == GCColourRed)
|
||||
{
|
||||
foundRedObjects = YES;
|
||||
GCSetColourOfObject(object, GCColourOrange);
|
||||
[object finalize];
|
||||
}
|
||||
else if (colour != GCColourOrange)
|
||||
{
|
||||
GCRelease(object);
|
||||
}
|
||||
//fprintf(stderr, "Object has refcount %d\n", (int)GCGetRetainCount(object));
|
||||
if (GCGetRetainCount(object) <= 0)
|
||||
{
|
||||
GCAddObjectForTracing(object);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Finalizes an object and frees it if its weak reference count is 0.
|
||||
*
|
||||
* This version must only be called from the GC thread.
|
||||
*/
|
||||
void GCFreeObjectUnsafe(id object)
|
||||
{
|
||||
if (!GCObjectIsDynamic(object)) { return; }
|
||||
|
||||
//fprintf(stderr, "Going to Free object %x\n", (int)(object));
|
||||
|
||||
foundRedObjects = NO;
|
||||
if (GCColourOrange != GCSetColourOfObject(object, GCColourOrange))
|
||||
{
|
||||
// If this is really an object, kill all of its references and then
|
||||
// finalize it.
|
||||
if (!GCTestFlag(object, GCFlagNotObject))
|
||||
{
|
||||
if (0)
|
||||
GCVisitChildren(object, releaseObjects, NULL, YES);
|
||||
[object finalize];
|
||||
}
|
||||
// FIXME: Implement this.
|
||||
//GCRemoveRegionFromTracingUnsafe(region);
|
||||
}
|
||||
if (GCGetWeakRefCount(object) == 0)
|
||||
{
|
||||
//fprintf(stderr, "Freeing object %x\n", (int)(object));
|
||||
freeObject(object);
|
||||
}
|
||||
if (foundRedObjects)
|
||||
{
|
||||
GCRunTracerIfNeeded(NO);
|
||||
}
|
||||
}
|
||||
|
||||
void GCFreeObject(id object)
|
||||
{
|
||||
GCPerform((void(*)(void*))GCFreeObjectUnsafe, object);
|
||||
}
|
||||
@@ -1,233 +0,0 @@
|
||||
/**
|
||||
* object.h defines the layout of the object header for GCKit-managed objects.
|
||||
* These objects are allocated with GCAllocateObject() and are not freed by
|
||||
* code outside of GCKit.
|
||||
*/
|
||||
|
||||
/**
|
||||
* GCINLINEPUBLIC functions are functions that are inline for GCKit but
|
||||
* exported symbols for the rest of the world.
|
||||
*/
|
||||
#ifndef GCINLINEPUBLIC
|
||||
#define GCINLINEPUBLIC inline static
|
||||
#endif
|
||||
/**
|
||||
* GCINLINEPRIVATE functions are inline in GCKit and are not exported.
|
||||
*/
|
||||
#define GCINLINEPRIVATE inline static __attribute__((unused))
|
||||
|
||||
/**
|
||||
* Modified version of the object header. Stores a 16-bit reference count and
|
||||
* a 16-bit flags field. Three bits of the flags are used for the object
|
||||
* colour and one to indicate if it is buffered.
|
||||
*
|
||||
* Note: On 64-bit platforms we have to add some padding, so it might be better
|
||||
 * to make the ref count fields bigger.
|
||||
*/
|
||||
__attribute__((packed))
|
||||
struct gc_object_header
|
||||
{
|
||||
/**
|
||||
* Garbage collection Flags associated with this object. This includes the
|
||||
* object's colour while performing cycle detection. */
|
||||
char flags;
|
||||
/**
|
||||
* Number of weak references held to this object. An object may be
|
||||
* finalized, but may not be deleted while weak references are held to it.
|
||||
*/
|
||||
char weak_ref_count;
|
||||
/**
|
||||
* Number of strong references to the object. This count is modified by
|
||||
* GCRetain() and GCRelease(). When it reaches 0, the object has no strong
|
||||
* references to it. It may, however, have references from the stack or
|
||||
* traced memory. When the strong reference count reaches 0, the object
|
||||
* will be added to the trace pile.
|
||||
*/
|
||||
short strong_ref_count;
|
||||
/**
|
||||
* The allocation zone for this object. This is an opaque pointer from the
|
||||
* perspective of GCKit. In GNUstep, this will be an NSZone.
|
||||
*/
|
||||
void *zone;
|
||||
};
|
||||
|
||||
__attribute__((packed))
|
||||
struct gc_buffer_header
|
||||
{
|
||||
size_t size;
|
||||
struct gc_object_header object_header;
|
||||
};
|
||||
|
||||
/**
|
||||
* Cycle detection is a graph colouring algorithm. This type specifies the
|
||||
* possible colours.
|
||||
*/
|
||||
typedef enum
|
||||
{
|
||||
/** Acyclic */
|
||||
GCColourGreen = 0,
|
||||
/** In use or free. */
|
||||
GCColourBlack = 1,
|
||||
/** Possible member of a cycle. */
|
||||
GCColourGrey = 2,
|
||||
/** Member of a garbage cycle. */
|
||||
GCColourWhite = 3,
|
||||
/** Potential root of a cycle. */
|
||||
GCColourPurple = 4,
|
||||
/** Object currently being freed. */
|
||||
GCColourOrange = 5,
|
||||
/** Object is a member of a cycle to be freed when the last traced
|
||||
* reference is removed, or resurrected if retained. */
|
||||
GCColourRed = 6
|
||||
} GCColour;
|
||||
|
||||
typedef enum
|
||||
{
|
||||
/** Set when the object has been added to the potential-garbage list. */
|
||||
GCFlagBuffered = (1<<3),
|
||||
/** Set when an object has been assigned on a traced part of the heap. */
|
||||
GCFlagEscaped = (1<<4),
|
||||
/** Visited by the tracing code. */
|
||||
GCFlagVisited = (1<<5),
|
||||
/** This object is a memory buffer, not an Objective-C object. */
|
||||
GCFlagNotObject = (1<<6),
|
||||
	/** Object uses CoreFoundation-style semantics and won't ever be traced. */
|
||||
GCFlagCFObject = (1<<7)
|
||||
} GCFlag;
|
||||
|
||||
/**
|
||||
* Debugging function used to return a colour as a human-readable string.
|
||||
*/
|
||||
__attribute__((unused))
|
||||
inline static const char *GCStringFromColour(GCColour aColour)
|
||||
{
|
||||
switch(aColour)
|
||||
{
|
||||
case GCColourBlack: return "black";
|
||||
case GCColourGrey: return "grey";
|
||||
case GCColourWhite: return "white";
|
||||
case GCColourPurple: return "purple";
|
||||
case GCColourGreen: return "green";
|
||||
case GCColourOrange: return "orange";
|
||||
case GCColourRed: return "red";
|
||||
}
|
||||
return "unknown";
|
||||
}
|
||||
GCINLINEPRIVATE struct gc_object_header*GCHeaderForObject(id anObject)
|
||||
{
|
||||
return &((struct gc_object_header*)anObject)[-1];
|
||||
}
|
||||
GCINLINEPRIVATE struct gc_buffer_header*GCHeaderForBuffer(id anObject)
|
||||
{
|
||||
return &((struct gc_buffer_header*)anObject)[-1];
|
||||
}
|
||||
/**
|
||||
* Returns the flags for a specified object.
|
||||
*/
|
||||
GCINLINEPRIVATE unsigned short GCObjectFlags(id anObject)
|
||||
{
|
||||
return GCHeaderForObject(anObject)->flags;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the colour of the specified object.
|
||||
*/
|
||||
GCINLINEPRIVATE GCColour GCColourOfObject(id anObject)
|
||||
{
|
||||
// Lowest 3 bits of the flags field contain the colour.
|
||||
return GCObjectFlags(anObject) & 0x7;
|
||||
}
|
||||
|
||||
/**
|
||||
 * Tries to atomically replace the flags for a given object, expecting them to
 * still hold the given old value. Returns non-zero on success.
|
||||
*/
|
||||
GCINLINEPRIVATE unsigned short GCTrySetFlags(id anObject, unsigned char old,
|
||||
unsigned char value)
|
||||
{
|
||||
return __sync_bool_compare_and_swap(
|
||||
&(((struct gc_object_header*)anObject)[-1].flags), old, value);
|
||||
}
|
||||
/**
|
||||
* Sets the colour of the specified object, returning the old colour
|
||||
*/
|
||||
GCINLINEPRIVATE GCColour GCSetColourOfObject(id anObject, GCColour colour)
|
||||
{
|
||||
char oldFlags;
|
||||
char newFlags;
|
||||
do
|
||||
{
|
||||
oldFlags = GCObjectFlags(anObject);
|
||||
newFlags = oldFlags;
|
||||
// Clear the old colour.
|
||||
newFlags &= 0xf8;
|
||||
// Set the new colour
|
||||
newFlags |= colour;
|
||||
} while(!GCTrySetFlags(anObject, oldFlags, newFlags));
|
||||
return oldFlags & 0x7;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the specified flag for a given object.
|
||||
*/
|
||||
GCINLINEPRIVATE void GCSetFlag(id anObject, GCFlag flag)
|
||||
{
|
||||
unsigned oldFlags;
|
||||
unsigned newFlags;
|
||||
do
|
||||
{
|
||||
oldFlags = GCObjectFlags(anObject);
|
||||
newFlags = oldFlags;
|
||||
newFlags |= flag;
|
||||
} while(!GCTrySetFlags(anObject, oldFlags, newFlags));
|
||||
}
|
||||
|
||||
/**
|
||||
* Clears the specified flag on an object.
|
||||
*/
|
||||
GCINLINEPRIVATE void GCClearFlag(id anObject, GCFlag flag)
|
||||
{
|
||||
unsigned oldFlags;
|
||||
unsigned newFlags;
|
||||
do
|
||||
{
|
||||
oldFlags = GCObjectFlags(anObject);
|
||||
newFlags = oldFlags;
|
||||
newFlags &= ~flag;
|
||||
} while(!GCTrySetFlags(anObject, oldFlags, newFlags));
|
||||
}
|
||||
|
||||
/**
|
||||
 * Returns whether the specified flag is set on the given object.
|
||||
*/
|
||||
GCINLINEPRIVATE BOOL GCTestFlag(id anObject, GCFlag flag)
|
||||
{
|
||||
return GCObjectFlags(anObject) & flag;
|
||||
}
|
||||
|
||||
GCINLINEPUBLIC long GCGetRetainCount(id anObject)
|
||||
{
|
||||
unsigned short refcount = ((struct gc_object_header*)anObject)[-1].strong_ref_count;
|
||||
return (long) refcount;
|
||||
}
|
||||
GCINLINEPRIVATE long GCDecrementRetainCount(id anObject)
|
||||
{
|
||||
return __sync_sub_and_fetch(&(GCHeaderForObject(anObject)->strong_ref_count), 1);
|
||||
}
|
||||
GCINLINEPRIVATE long GCIncrementRetainCount(id anObject)
|
||||
{
|
||||
return __sync_add_and_fetch(&(GCHeaderForObject(anObject)->strong_ref_count), 1);
|
||||
}
|
||||
GCINLINEPUBLIC long GCGetWeakRefCount(id anObject)
|
||||
{
|
||||
unsigned short refcount = ((struct gc_object_header*)anObject)[-1].weak_ref_count;
|
||||
return (long) refcount;
|
||||
}
|
||||
|
||||
GCINLINEPRIVATE long GCDecrementWeakCount(id anObject)
|
||||
{
|
||||
return __sync_sub_and_fetch(&(GCHeaderForObject(anObject)->weak_ref_count), 1);
|
||||
}
|
||||
GCINLINEPRIVATE long GCIncrementWeakCount(id anObject)
|
||||
{
|
||||
return __sync_add_and_fetch(&(GCHeaderForObject(anObject)->weak_ref_count), 1);
|
||||
}
|
||||
@@ -1,12 +0,0 @@
#include <dlfcn.h>

/**
 * Check if an object is in one of the sections that the loader allocated. If
 * so, it won't have a GCKit header, so we just assume that it never needs
 * collecting.
 */
static inline BOOL GCObjectIsDynamic(id obj)
{
	Dl_info i;
	return !dladdr(obj, &i);
}
@@ -1,244 +0,0 @@
|
||||
#if 0
|
||||
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
// TESTING:
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
/**
|
||||
* Simple object which stores pointers to two objects. Used to test whether
|
||||
* cycle detection is really working by creating garbage cycles and checking
|
||||
* that they are free'd.
|
||||
*/
|
||||
@interface Pair
|
||||
{
|
||||
Class isa;
|
||||
@public
|
||||
id a, b;
|
||||
}
|
||||
@end
|
||||
@implementation Pair
|
||||
/**
|
||||
* Create a new pair and enable cycle detection for it.
|
||||
*/
|
||||
+ (id) new
|
||||
{
|
||||
	id new = GCAllocateObjectWithZone(self, NULL, 0);
|
||||
// Enable automatic cycle detection for this object.
|
||||
setColourOfObject(new, black);
|
||||
return new;
|
||||
}
|
||||
/**
|
||||
* Release both pointers and log that the object has been freed.
|
||||
*/
|
||||
- (void) dealloc
|
||||
{
|
||||
fprintf(stderr, "Pair destroyed\n");
|
||||
[a release];
|
||||
[b release];
|
||||
[super dealloc];
|
||||
}
|
||||
@end
|
||||
|
||||
int main(int argc, char **argv, char **env)
|
||||
{
|
||||
id pool = [GCAutoreleasePool new];
|
||||
// FIXME: Test object -> traced region -> object
|
||||
Pair * a1 = [Pair new];
|
||||
Pair * a2 = [Pair new];
|
||||
Pair * a3 = [Pair new];
|
||||
Pair * a4 = [Pair new];
|
||||
Pair * a5 = [Pair new];
|
||||
a1->a = [a2 retain];
|
||||
a1->b = [a5 retain];
|
||||
a2->a = [a2 retain];
|
||||
a2->b = [a4 retain];
|
||||
a3->a = [a3 retain];
|
||||
a3->b = [a4 retain];
|
||||
a4->a = [a3 retain];
|
||||
a4->b = [a5 retain];
|
||||
a5->a = [a5 retain];
|
||||
a5->b = [a1 retain];
|
||||
a5->b = [NSObject new];
|
||||
visObject(a1, @"Test");
|
||||
// Check that we haven't broken anything yet...
|
||||
NSLog(@"Testing? %@", a1);
|
||||
[a1 release];
|
||||
[a2 release];
|
||||
[a3 release];
|
||||
[a4 release];
|
||||
[a5 release];
|
||||
//[pool drain];
|
||||
[pool release];
|
||||
//fprintf(stderr, "Buffered Objects: %d\n", loopBufferInsert);
|
||||
return 0;
|
||||
}
|
||||
#endif
|
||||
#include "../objc/runtime.h"
|
||||
#import "malloc.h"
|
||||
#import "thread.h"
|
||||
#import "trace.h"
|
||||
#import "cycle.h"
|
||||
#include <stdio.h>
|
||||
#include <unistd.h>
|
||||
#include <pthread.h>
|
||||
|
||||
@interface NSConstantString
|
||||
{
|
||||
id isa;
|
||||
char *c_str;
|
||||
unsigned len;
|
||||
}
|
||||
@end
|
||||
|
||||
@interface SimpleObject
|
||||
{
|
||||
Class isa;
|
||||
}
|
||||
+ (id)new;
|
||||
@end
|
||||
|
||||
@implementation SimpleObject
|
||||
+ (id)new
|
||||
{
|
||||
id obj = GCAllocateObjectWithZone(self, NULL, 0);
|
||||
return obj;
|
||||
}
|
||||
- (void)log
|
||||
{
|
||||
printf("Simple object %x is still alive\n", (int)self);
|
||||
}
|
||||
- (void)finalize
|
||||
{
|
||||
printf("%s %x finalised\n", class_getName(isa), (int)self);
|
||||
}
|
||||
@end
|
||||
// The test program calls GCDrain() repeatedly to force the GC to run. In real
|
||||
// code, this will be triggered automatically as a result of object allocations
|
||||
// and reference count changes. In this code, however, it is not. The test
|
||||
// case will exit before the GC would run in normal use. This is not a bug;
|
||||
// there's no point spending CPU time collecting objects a few milliseconds
|
||||
// before the process exits and the OS reclaims them all at once. The point of
|
||||
// a garbage collector is to reclaim memory for reuse, and if no reuse is going
|
||||
// to take place, there is no point reclaiming it.
|
||||
|
||||
void makeObject(void)
|
||||
{
|
||||
SimpleObject *foo = [SimpleObject new];
|
||||
[foo log];
|
||||
GCDrain(YES);
|
||||
GCDrain(YES);
|
||||
[foo log];
|
||||
foo = nil;
|
||||
[foo log];
|
||||
GCDrain(YES);
|
||||
}
|
||||
|
||||
void doStuff(void)
|
||||
{
|
||||
makeObject();
|
||||
}
|
||||
|
||||
void makeRefCountedObject(void)
|
||||
{
|
||||
SimpleObject *foo = [SimpleObject new];
|
||||
GCRelease(GCRetain(foo));
|
||||
[foo log];
|
||||
GCDrain(YES);
|
||||
GCDrain(YES);
|
||||
}
|
||||
|
||||
void doRefCountStuff(void)
|
||||
{
|
||||
makeRefCountedObject();
|
||||
}
|
||||
|
||||
static id *buffer;
|
||||
|
||||
void putObjectInBuffer(void)
|
||||
{
|
||||
buffer = (id*)GCRetain((id)GCAllocateBufferWithZone(NULL, sizeof(id), YES));
|
||||
buffer[0] = objc_assign_strongCast([SimpleObject new], buffer);
|
||||
//fprintf(stderr, "Storing pointer %x in traced memory %x\n", (int)buffer[0], (int)buffer);
|
||||
[*buffer log];
|
||||
GCDrain(YES);
|
||||
GCDrain(YES);
|
||||
}
|
||||
|
||||
void testTracedMemory(void)
|
||||
{
|
||||
putObjectInBuffer();
|
||||
GCDrain(YES);
|
||||
}
|
||||
@interface Pair : SimpleObject
|
||||
{
|
||||
@public
|
||||
Pair *a, *b;
|
||||
}
|
||||
@end
|
||||
@implementation Pair @end
|
||||
|
||||
void makeObjectCycle(void)
|
||||
{
|
||||
Pair *obj = [Pair new];
|
||||
obj->a = GCRetain([Pair new]);
|
||||
obj->b = GCRetain([Pair new]);
|
||||
obj->a->a = GCRetain(obj->b);
|
||||
obj->b->b = GCRetain(obj->a);
|
||||
obj->a->b = GCRetain(obj);
|
||||
obj->b->a = GCRetain(obj);
|
||||
[obj log];
|
||||
GCRelease(GCRetain(obj));
|
||||
GCDrain(YES);
|
||||
}
|
||||
|
||||
void testCycle(void)
|
||||
{
|
||||
makeObjectCycle();
|
||||
GCDrain(YES);
|
||||
GCDrain(YES);
|
||||
}
|
||||
|
||||
void makeTracedCycle(void)
|
||||
{
|
||||
// These two buffers are pointing to each other
|
||||
id *b1 = GCAllocateBufferWithZone(NULL, sizeof(id), YES);
|
||||
Pair *p = [Pair new];
|
||||
id *b2 = GCAllocateBufferWithZone(NULL, sizeof(id), YES);
|
||||
fprintf(stderr, "Expected to leak %x and %x\n", (int)b1, (int)b2);
|
||||
objc_assign_strongCast((id)b2, b1);
|
||||
//objc_assign_strongCast(p, b1);
|
||||
objc_assign_strongCast((id)b1, b2);
|
||||
p->a = GCRetain((id)b2);
|
||||
}
|
||||
|
||||
void testTracedCycle(void)
|
||||
{
|
||||
makeTracedCycle();
|
||||
}
|
||||
|
||||
int main(void)
|
||||
{
|
||||
testTracedCycle();
|
||||
/*
|
||||
// Not required on main thread:
|
||||
//GCRegisterThread();
|
||||
doStuff();
|
||||
GCDrain(YES);
|
||||
doRefCountStuff();
|
||||
GCDrain(YES);
|
||||
testTracedMemory();
|
||||
buffer[0] = objc_assign_strongCast(nil, buffer);
|
||||
GCDrain(YES);
|
||||
|
||||
testCycle();
|
||||
*/
|
||||
GCDrain(YES);
|
||||
GCDrain(YES);
|
||||
GCDrain(YES);
|
||||
sched_yield();
|
||||
GCDrain(YES);
|
||||
GCDrain(YES);
|
||||
printf("Waiting to make sure the GC thread has caught up before the test exits\n");
|
||||
sleep(1);
|
||||
}
|
||||
@@ -1,74 +0,0 @@
/**
 * Modified autorelease pool which performs automatic detection and collection
 * of garbage cycles.
 */
typedef struct _GCThread
{
	/**
	 * Next thread in the list.
	 */
	struct _GCThread *next;
	/**
	 * Last thread in the list.
	 */
	struct _GCThread *last;
	/**
	 * Map of objects that haven't yet escaped from the thread.
	 */
	void *unescapedObjects;
	/**
	 * Top of the stack.
	 */
	void *stackTop;
	/**
	 * Bottom of the stack.
	 */
	void *stackBottom;
	/**
	 * Per-thread buffer into which objects that are potentially roots of
	 * garbage cycles are stored.
	 */
	id *cycleBuffer;
	/**
	 * Insert point into the cycle buffer.
	 */
	unsigned int cycleBufferInsert;
	/**
	 * Buffer for objects whose reference count has reached 0. These may be
	 * freed if there are no references to them on the stack.
	 */
	id *freeBuffer;
	/**
	 * Insert point into the to-free buffer.
	 */
	unsigned int freeBufferInsert;
	/**
	 * Condition variable that prevents the thread from really exiting (and
	 * having its stack deallocated) until the GC thread has removed the
	 * thread.
	 */
	void *exitCondition;
	/**
	 * The generation when this stack was last scanned.
	 */
	volatile int scannedInGeneration;
} GCThread;

extern GCThread *GCThreadList;

/**
 * Registers the current thread for garbage collection.
 */
void GCRegisterThread(void);
/**
 * Adds an object for tracing or cycle detection.
 */
void GCAddObject(id anObject);
/**
 * Drains the objects queued for (potential) collection on the current thread.
 * Passing YES as the argument forces a full sweep of the heap-allocated traced
 * regions.
 *
 * Note that this function performs the collection in a second thread, so some
 * objects may not be collected until after it has run.
 */
void GCDrain(BOOL forceCollect);
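/*
 * Illustrative usage (not part of the original header), following the pattern
 * in the bundled test program: secondary threads register themselves once,
 * then periodically drain their per-thread buffers; GCDrain(YES) also forces
 * a full sweep of the heap-allocated traced regions.
 *
 *     GCRegisterThread();   // not required on the main thread
 *     // ...allocate, retain, and release objects...
 *     GCDrain(YES);
 */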
@@ -1,230 +0,0 @@
|
||||
#include "../objc/runtime.h"
|
||||
BOOL forceCollect;
|
||||
#import "object.h"
|
||||
#import "thread.h"
|
||||
#import "cycle.h"
|
||||
#import "trace.h"
|
||||
#import "workqueue.h"
|
||||
|
||||
#include <pthread.h>
|
||||
#include <stdlib.h>
|
||||
#include <assert.h>
|
||||
#include <stdio.h>
|
||||
|
||||
#include <sys/mman.h>
|
||||
#include <unistd.h>
|
||||
/**
|
||||
* Size of the buffers used in each thread before passing stuff over to the GC
|
||||
* thread. Once either BUFFER_SIZE objects are queued waiting for tracing or
|
||||
* cycle detection, the queue is passed over to the GC thread, which wakes up
|
||||
* and tries to find some things to delete.
|
||||
*/
|
||||
static const int BUFFER_SIZE = 256;
|
||||
|
||||
|
||||
/**
|
||||
* Thread local storage key used for the thread structure.
|
||||
*/
|
||||
static pthread_key_t gc_thread_key;
|
||||
/**
|
||||
*
|
||||
*/
|
||||
static pthread_mutex_t thread_lock;
|
||||
|
||||
static void GCDrainThread(GCThread *thread, BOOL forceCollect);
|
||||
|
||||
GCThread *GCThreadList;
|
||||
|
||||
/**
|
||||
* Removes this thread from the list. Must run in the GC thread.
|
||||
*/
|
||||
static void GCUnregisterThread(void *t)
|
||||
{
|
||||
GCThread *thr = t;
|
||||
if (NULL == thr->last)
|
||||
{
|
||||
GCThreadList = thr->next;
|
||||
}
|
||||
else
|
||||
{
|
||||
thr->last->next = thr->next;
|
||||
}
|
||||
	if (NULL != thr->next)
	{
		// Fix up the back pointer of the following thread. thr->last is NULL
		// when thr was the head of the list, which is exactly what the new
		// head's back pointer should be.
		thr->next->last = thr->last;
	}
|
||||
//FIXME: Delete tracer references to this stack
|
||||
// Wake up the caller thread and let it do the real cleanup
|
||||
pthread_mutex_lock(&thread_lock);
|
||||
pthread_cond_signal(thr->exitCondition);
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds the thread to the list. Must run in the GC thread.
|
||||
*/
|
||||
static void GCAddThread(void *t)
|
||||
{
|
||||
GCThread *thr = t;
|
||||
thr->next = GCThreadList;
|
||||
if (GCThreadList)
|
||||
{
|
||||
GCThreadList->last = thr;
|
||||
}
|
||||
GCThreadList = thr;
|
||||
}
|
||||
|
||||
/**
|
||||
* Cleanup function called when a thread is destroyed. Pops all autorelease
|
||||
* pools, deletes all unaliased objects, and so on.
|
||||
*/
|
||||
static void cleanup_thread(void *thread)
|
||||
{
|
||||
GCThread *thr = thread;
|
||||
GCDrainThread(thread, NO);
|
||||
pthread_setspecific(gc_thread_key, NULL);
|
||||
pthread_cond_t thread_exit_condition;
|
||||
pthread_cond_init(&thread_exit_condition, NULL);
|
||||
thr->exitCondition = &thread_exit_condition;
|
||||
pthread_mutex_lock(&thread_lock);
|
||||
GCPerform(GCUnregisterThread, thread);
|
||||
pthread_cond_wait(&thread_exit_condition, &thread_lock);
|
||||
pthread_cond_destroy(&thread_exit_condition);
|
||||
free(thr->cycleBuffer);
|
||||
free(thr->freeBuffer);
|
||||
free(thr);
|
||||
}
|
||||
|
||||
/**
|
||||
* Thread system initialization.
|
||||
*/
|
||||
__attribute__((constructor))
|
||||
static void init_thread_system(void)
|
||||
{
|
||||
pthread_key_create(&gc_thread_key, cleanup_thread);
|
||||
pthread_mutex_init(&thread_lock, NULL);
|
||||
GCRegisterThread();
|
||||
}
|
||||
|
||||
|
||||
void GCRegisterThread(void)
|
||||
{
|
||||
assert(NULL == pthread_getspecific(gc_thread_key) &&
|
||||
"Only one thread per thread!");
|
||||
GCThread *thr = calloc(sizeof(GCThread),1);
|
||||
// Store this in TLS
|
||||
pthread_setspecific(gc_thread_key, thr);
|
||||
// FIXME: Use non-portable pthread calls to find the stack.
|
||||
// This code is, basically, completely wrong.
|
||||
char a;
|
||||
thr->stackTop = &a;
|
||||
while ((intptr_t)thr->stackTop % 4096)
|
||||
{
|
||||
thr->stackTop = ((char*)thr->stackTop)+1;
|
||||
}
|
||||
|
||||
thr->cycleBuffer = calloc(BUFFER_SIZE, sizeof(void*));
|
||||
thr->freeBuffer = calloc(BUFFER_SIZE, sizeof(void*));
|
||||
GCPerform(GCAddThread, thr);
|
||||
}
|
||||
void GCAddObject(id anObject)
|
||||
{
|
||||
GCThread *thr = pthread_getspecific(gc_thread_key);
|
||||
// If the reference count is 0, we add this
|
||||
if (GCGetRetainCount(anObject) == 0)
|
||||
{
|
||||
thr->freeBuffer[thr->freeBufferInsert++] = anObject;
|
||||
if (thr->freeBufferInsert == BUFFER_SIZE)
|
||||
{
|
||||
GCDrainThread(thr, NO);
|
||||
}
|
||||
}
|
||||
else if (!GCTestFlag(anObject, GCFlagBuffered))
|
||||
{
|
||||
// Note: there is a potential race here. If this occurs then two
|
||||
// GCAutoreleasePools might add the same object to their buffers. This
|
||||
// is not important. If it does happen then we run the cycle detector
|
||||
// on an object twice. This increases the complexity of the collector
|
||||
// above linear, but the cost of making sure that it never happens is
|
||||
// much greater than the cost of (very) occasionally checking an object
|
||||
// twice if it happens to be added at exactly the same time by two
|
||||
// threads.
|
||||
GCSetFlag(anObject, GCFlagBuffered);
|
||||
thr->cycleBuffer[thr->cycleBufferInsert++] = anObject;
|
||||
if (thr->cycleBufferInsert == BUFFER_SIZE)
|
||||
{
|
||||
GCDrainThread(thr, NO);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
typedef struct
|
||||
{
|
||||
id *cycleBuffer;
|
||||
unsigned int cycleBufferSize;
|
||||
BOOL forceTrace;
|
||||
} GCTraceContext;
|
||||
|
||||
/**
|
||||
* Trampoline that is run in the GC thread.
|
||||
*/
|
||||
static void traceTrampoline(void *c)
|
||||
{
|
||||
GCTraceContext *context = c;
|
||||
|
||||
// Scan for any new garbage cycles.
|
||||
if (context->cycleBufferSize)
|
||||
{
|
||||
GCScanForCycles(context->cycleBuffer, context->cycleBufferSize);
|
||||
free(context->cycleBuffer);
|
||||
}
|
||||
// Now add the objects that might be garbage to the collector.
|
||||
// These won't actually be freed until after this
|
||||
GCRunTracerIfNeeded(context->forceTrace);
|
||||
|
||||
//free(c);
|
||||
}
|
||||
|
||||
/**
|
||||
* Collect garbage cycles.
|
||||
*/
|
||||
static void GCDrainThread(GCThread *thread, BOOL forceCollect)
|
||||
{
|
||||
// Register these objects for tracing
|
||||
GCAddObjectsForTracing(thread);
|
||||
// Tweak the bottom of the stack to be in this stack frame. Anything in
|
||||
// the caller will be traced, but anything in the callee will be ignored
|
||||
// (this is important because otherwise you'd find objects because you were
|
||||
// looking for them)
|
||||
void *stackBottom = &stackBottom;
|
||||
thread->stackBottom = stackBottom;
|
||||
// Mark all objects on this thread's stack as visited.
|
||||
GCTraceStackSynchronous(thread);
|
||||
|
||||
GCTraceContext *context = calloc(sizeof(GCTraceContext), 1);
|
||||
void *
|
||||
valloc(size_t size);
|
||||
//GCTraceContext *context = valloc(4096);
|
||||
if (thread->cycleBufferInsert)
|
||||
{
|
||||
context->cycleBuffer = thread->cycleBuffer;
|
||||
context->cycleBufferSize = thread->cycleBufferInsert;
|
||||
thread->cycleBuffer = calloc(BUFFER_SIZE, sizeof(void*));
|
||||
thread->cycleBufferInsert = 0;
|
||||
}
|
||||
context->forceTrace = forceCollect;
|
||||
//mprotect(context, 4096, PROT_READ);
|
||||
GCPerform(traceTrampoline, context);
|
||||
thread->freeBufferInsert = 0;
|
||||
}
|
||||
void GCDrain(BOOL forceCollect)
|
||||
{
|
||||
GCThread *thr = pthread_getspecific(gc_thread_key);
|
||||
GCDrainThread(thr, forceCollect);
|
||||
}
|
||||
@@ -1,22 +0,0 @@
/**
 * Structure identifying a traced region in memory.
 */
typedef struct
{
	struct gc_buffer_header *buffer;
	void *start;
	void *end;
} GCTracedRegion;


void GCRunTracerIfNeeded(BOOL);

void GCAddObjectsForTracing(GCThread *thr);
void GCTraceStackSynchronous(GCThread *thr);


void GCAddBufferForTracing(struct gc_buffer_header *buffer);

extern volatile int GCGeneration;
void GCCollect(void);
id objc_assign_strongCast(id obj, id *ptr);
@@ -1,744 +0,0 @@
|
||||
/**
|
||||
* trace.m implements the tracing part of the collector. This is responsible
|
||||
* for checking for references to objects on stacks and in traced regions on
|
||||
* the heap before finally freeing them.
|
||||
*/
|
||||
#include <stdlib.h>
|
||||
#include <sys/limits.h>
|
||||
#include <assert.h>
|
||||
#include <stdio.h>
|
||||
#include <dlfcn.h>
|
||||
#include "../objc/runtime.h"
|
||||
#import "object.h"
|
||||
#import "thread.h"
|
||||
#import "trace.h"
|
||||
#import "cycle.h"
|
||||
#import "malloc.h"
|
||||
#import "static.h"
|
||||
#import "workqueue.h"
|
||||
|
||||
/**
|
||||
* Structure storing pointers that are currently being traced.
|
||||
*
|
||||
* We store the last location where an object was seen so that, when no objects
|
||||
* are found, we can
|
||||
*
|
||||
* TODO: We currently don't use the last-seen addresses for objects. If we
|
||||
* did, we could quickly verify that the last reference to them was still valid
|
||||
* and eliminate some
|
||||
*/
|
||||
typedef struct
|
||||
{
|
||||
/** The object that might be ready to be freed. */
|
||||
id pointer;
|
||||
/** Last on-heap address that we saw for this object. */
|
||||
id *heapAddress;
|
||||
/** Highest on-stack address that we saw for this object. */
|
||||
id *stackAddress;
|
||||
/** Sweep pass number when this object's visited flag was cleared. All
|
||||
* traced regions - stack and heap - must have been traced at least once
|
||||
* before this object can be freed. */
|
||||
int visitClearedGeneration;
|
||||
} GCTracedPointer;
|
||||
|
||||
// HUGE FIXME: Handle wrapping of this sensibly.
|
||||
volatile int GCGeneration;
|
||||
|
||||
static const GCTracedPointer GCNullTracedPointer = {0,0};
|
||||
|
||||
/**
|
||||
* Pointer comparison. Needed for the hash table.
|
||||
*/
|
||||
static int traced_pointer_compare(const void *a, const GCTracedPointer b)
|
||||
{
|
||||
return a == b.pointer;
|
||||
}
|
||||
/**
|
||||
* Pointer hash function. The lowest bits of a pointer have very little
|
||||
* entropy - we have lots of objects the same size and alignment, so they will
|
||||
* end up at the same place within a page.
|
||||
*/
|
||||
static int traced_pointer_hash(const GCTracedPointer obj)
|
||||
{
|
||||
intptr_t ptr = (intptr_t)obj.pointer;
|
||||
return (ptr >> 4) | (ptr << 4);
|
||||
}
|
||||
static int traced_pointer_key_hash(const void *obj)
|
||||
{
|
||||
intptr_t ptr = (intptr_t)obj;
|
||||
return (ptr >> 4) | (ptr << 4);
|
||||
}
|
||||
static int traced_pointer_is_null(const GCTracedPointer obj)
|
||||
{
|
||||
return obj.pointer == NULL;
|
||||
}
|
||||
#define MAP_TABLE_NAME traced_object
|
||||
#define MAP_TABLE_COMPARE_FUNCTION traced_pointer_compare
|
||||
#define MAP_TABLE_HASH_VALUE traced_pointer_hash
|
||||
#define MAP_TABLE_HASH_KEY traced_pointer_key_hash
|
||||
#define MAP_TABLE_VALUE_TYPE GCTracedPointer
|
||||
#define MAP_TABLE_VALUE_NULL traced_pointer_is_null
|
||||
#define MAP_TABLE_VALUE_PLACEHOLDER GCNullTracedPointer
|
||||
#define MAP_TABLE_NO_LOCK
|
||||
|
||||
#include "../hash_table.h"
|
||||
/**
|
||||
* Pointer comparison. Needed for the hash table.
|
||||
*/
|
||||
static int pointer_compare(const void *a, const void *b)
|
||||
{
|
||||
return a == b;
|
||||
}
|
||||
#define MAP_TABLE_NAME unescaped_object
|
||||
#define MAP_TABLE_COMPARE_FUNCTION pointer_compare
|
||||
#define MAP_TABLE_HASH_KEY traced_pointer_key_hash
|
||||
#define MAP_TABLE_HASH_VALUE traced_pointer_key_hash
|
||||
#define MAP_TABLE_NO_LOCK
|
||||
#define MAP_TABLE_SINGLE_THREAD
|
||||
|
||||
#include "../hash_table.h"
|
||||
|
||||
static traced_object_table *traced_objects;
|
||||
/**
|
||||
* Read write lock for modifying the traced object set. The GC thread may read
|
||||
* from the tree without acquiring this lock, but other threads must acquire a
|
||||
* read lock before reading from it. Any thread must acquire the write lock
|
||||
* before modifying the traced object set. Only the GC thread may remove
|
||||
* objects, other threads may modify them.
|
||||
*/
|
||||
static pthread_rwlock_t traced_objects_lock;
|
||||
|
||||
typedef struct _GCTracedRegionTreeNode
|
||||
{
|
||||
GCTracedRegion region;
|
||||
struct _GCTracedRegionTreeNode *child[2];
|
||||
enum { RED, BLACK=0 } colour;
|
||||
} GCTracedRegionTreeNode;

/**
 * Root of a red-black tree used to store regions that are traced.  Note that
 * this is not protected by any locks.  We ensure serialisation by doing both
 * tracing and freeing of traced regions in the same thread.
 *
 * Red-black implementation based on Julienne Walker's public domain version.
 */
static GCTracedRegionTreeNode *GCRegionTreeRoot;
/**
 * Compare two traced regions and return a value that can be compared to 0 to
 * find their ordering.
 */
static int GCCompareRegions(GCTracedRegion region1, GCTracedRegion region2)
{
	// Region 1 is entirely before region 2
	if (region1.end < region2.start)
	{
		return -1;
	}
	// Region 2 is entirely before region 1
	if (region2.end < region1.start)
	{
		return 1;
	}
	// Regions overlap
	return 0;
}

static GCTracedRegion mergeRegions(
		GCTracedRegion region1, GCTracedRegion region2)
{
	if (region1.start > region2.start)
	{
		region1.start = region2.start;
	}
	if (region1.end < region2.end)
	{
		region1.end = region2.end;
	}
	return region1;
}


static GCTracedRegionTreeNode *GCTracedRegionTreeNodeCreate(GCTracedRegion region)
{
	GCTracedRegionTreeNode *node = calloc(1, sizeof(GCTracedRegionTreeNode));
	node->region = region;
	node->colour = RED;
	return node;
}

static int isNodeRed(GCTracedRegionTreeNode *node)
{
	return (node != NULL) && node->colour;
}

static GCTracedRegionTreeNode *rotateTree(GCTracedRegionTreeNode *node,
		int direction)
{
	GCTracedRegionTreeNode *save = node->child[!direction];

	node->child[!direction] = save->child[direction];
	save->child[direction] = node;

	node->colour = RED;
	save->colour = BLACK;

	return save;
}

static GCTracedRegionTreeNode *rotateTreeDouble(GCTracedRegionTreeNode *node,
		int direction)
{
	node->child[!direction] = rotateTree(node->child[!direction], !direction);
	return rotateTree(node, direction);
}

/**
 * Check the red-black tree is really a red-black tree and not a nonsense tree.
 */
static int debugTree(GCTracedRegionTreeNode *node)
{
#ifdef DEBUG
	if (NULL == node)
	{
		return 1;
	}
	GCTracedRegionTreeNode *left = node->child[0];
	GCTracedRegionTreeNode *right = node->child[1];

	/* Consecutive red children */
	if (isNodeRed(node))
	{
		assert(!(isNodeRed(left) || isNodeRed(right)) && "Red violation");
	}

	/* Invalid binary search tree */
	assert(left == NULL || (GCCompareRegions(left->region, node->region) < 0));
	assert(right == NULL || (GCCompareRegions(right->region, node->region) > 0));

	int leftHeight = debugTree(left);
	int rightHeight = debugTree(right);

	//assert(leftHeight == 0 || rightHeight == 0 || leftHeight == rightHeight);

	/* Only count black children */
	if (leftHeight != 0 && rightHeight != 0)
	{
		return isNodeRed(node) ? leftHeight : leftHeight + 1;
	}
	return 0;
#else
	return 0;
#endif //DEBUG
}


/**
 * Recursively inserts a region into the correct location.
 */
static GCTracedRegionTreeNode *tracedRegionInsert(
		GCTracedRegionTreeNode *root, GCTracedRegion region)
{
	if (NULL == root)
	{
		return GCTracedRegionTreeNodeCreate(region);
	}
	int child = GCCompareRegions(root->region, region);
	// If the regions overlap, just merge them.  Note that this will only
	// affect the structure of the tree if things have already gone badly wrong
	// somewhere else, because memory regions can not be extended into already
	// allocated regions unless you broke something.
	if (child == 0)
	{
		root->region = mergeRegions(root->region, region);
		return root;
	}
	// If root->region < region, the comparison is -1 and the new region
	// belongs in child[1]; otherwise it belongs in child[0].  This matches
	// the search direction used in GCTracedRegionDelete and the ordering
	// checked in debugTree.
	child = child < 0;
	root->child[child] = tracedRegionInsert(root->child[child], region);
	if (isNodeRed(root->child[child]))
	{
		if (isNodeRed(root->child[!child]))
		{
			root->colour = RED;
			root->child[0]->colour = BLACK;
			root->child[1]->colour = BLACK;
		}
		else
		{
			if (isNodeRed(root->child[child]->child[child]))
			{
				root = rotateTree(root, !child);
			}
			else if (isNodeRed(root->child[child]->child[!child]))
			{
				root = rotateTreeDouble(root, !child);
			}
		}
	}
	return root;
}

/**
 * Inserts the new region into the tree.
 */
__attribute__((unused))
static void GCTracedRegionInsert(GCTracedRegion region)
{
	GCRegionTreeRoot = tracedRegionInsert(GCRegionTreeRoot, region);
	GCRegionTreeRoot->colour = BLACK;
	debugTree(GCRegionTreeRoot);
}

__attribute__((unused))
static void GCTracedRegionDelete(GCTracedRegion region)
{
	if (GCRegionTreeRoot == NULL)
	{
		return;
	}
	GCTracedRegionTreeNode head = {{0}}; /* False tree root */
	GCTracedRegionTreeNode *q, *p, *g;   /* Helpers */
	GCTracedRegionTreeNode *f = NULL;    /* Found item */
	int dir = 1;

	/* Set up helpers */
	q = &head;
	g = p = NULL;
	q->child[1] = GCRegionTreeRoot;

	/* Search and push a red down */
	while (q->child[dir] != NULL)
	{
		int last = dir;

		/* Update helpers */
		g = p, p = q;
		q = q->child[dir];
		dir = GCCompareRegions(q->region, region) < 0;

		/* Save found node */
		if (GCCompareRegions(q->region, region) == 0)
		{
			f = q;
		}

		/* Push the red node down */
		if (!isNodeRed(q) && !isNodeRed(q->child[dir]))
		{
			if (isNodeRed(q->child[!dir]))
			{
				p = p->child[last] = rotateTree(q, dir);
			}
			else if (!isNodeRed(q->child[!dir]))
			{
				GCTracedRegionTreeNode *s = p->child[!last];

				if (s != NULL)
				{
					if (!isNodeRed(s->child[!last]) && !isNodeRed(s->child[last]))
					{
						/* Colour flip */
						p->colour = BLACK;
						s->colour = RED;
						q->colour = RED;
					}
					else
					{
						int dir2 = g->child[1] == p;

						if (isNodeRed(s->child[last]))
						{
							g->child[dir2] = rotateTreeDouble(p, last);
						}
						else if (isNodeRed(s->child[!last]))
						{
							g->child[dir2] = rotateTree(p, last);
						}

						/* Ensure correct colouring */
						q->colour = g->child[dir2]->colour = RED;
						g->child[dir2]->child[0]->colour = BLACK;
						g->child[dir2]->child[1]->colour = BLACK;
					}
				}
			}
		}
	}

	/* Replace and remove if found */
	if (f != NULL)
	{
		f->region = q->region;
		p->child[p->child[1] == q] =
			q->child[q->child[0] == NULL];
		free(q);
	}

	/* Update root and make it black */
	GCRegionTreeRoot = head.child[1];
	if (GCRegionTreeRoot != NULL)
	{
		GCRegionTreeRoot->colour = BLACK;
	}
	debugTree(GCRegionTreeRoot);
}


typedef void(*gc_region_visitor)(GCTracedRegion, void*);

static void GCVisitTracedRegion(GCTracedRegionTreeNode *node,
		gc_region_visitor visitor, void *context)
{
	visitor(node->region, context);
	if (node->child[0])
	{
		GCVisitTracedRegion(node->child[0], visitor, context);
	}
	if (node->child[1])
	{
		GCVisitTracedRegion(node->child[1], visitor, context);
	}
}

void GCVisitTracedRegions(gc_region_visitor visitor, void *context)
{
	if (GCRegionTreeRoot)
	{
		GCVisitTracedRegion(GCRegionTreeRoot, visitor, context);
	}
}
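GCVisitTracedRegions() is how the tracer below walks every registered region. A small illustrative example of the callback shape, assuming only the start/end fields that the rest of this file uses (the byte-counting helper is hypothetical, not part of GCKit):

// Hypothetical visitor: totals the size of every traced region.
static void regionByteCounter(GCTracedRegion region, void *context)
{
	size_t *total = context;
	*total += (size_t)((char*)region.end - (char*)region.start);
}

static size_t totalTracedBytes(void)
{
	size_t total = 0;
	GCVisitTracedRegions(regionByteCounter, &total);
	return total;
}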


__attribute__((constructor))
static void GCTraceInitialise(void)
{
	traced_objects = traced_object_create(128);
	pthread_rwlock_init(&traced_objects_lock, NULL);
}


struct GCTraceContext
{
	int foundObjects;
};

static void GCTraceRegion(GCTracedRegion region, void *c)
{
	struct GCTraceContext *context = c;
	id *object = region.start;
	fprintf(stderr, "Region starts at %p (%ld bytes)\n", (void*)object,
			(long)((char*)region.end - (char*)region.start));
	while (object < (id*)region.end)
	{
		// Stop if we've already found references to everything that might be
		// garbage.
		if (context->foundObjects == traced_objects->table_used)
		{
			return;
		}
		GCTracedPointer *foundObject = traced_object_table_get(traced_objects, *object);
		if (foundObject && foundObject->pointer)
		{
			//fprintf(stderr, "Found traced heap pointer to %x\n", (int)foundObject->pointer);
			if (!GCTestFlag(foundObject->pointer, GCFlagVisited))
			{
				context->foundObjects++;
				GCSetFlag(foundObject->pointer, GCFlagVisited);
			}
		}
		object++;
	}
}
/**
 * Traces the current thread's stack.
 */
void GCTraceStackSynchronous(GCThread *thr)
{
	//fprintf(stderr, "Scanning the stack...\n");
	int generation = GCGeneration;
	pthread_rwlock_rdlock(&traced_objects_lock);
	if (NULL == thr->unescapedObjects)
	{
		thr->unescapedObjects = unescaped_object_create(256);
	}
	else
	{
		struct unescaped_object_table_enumerator *e = NULL;
		id ptr;
		while ((ptr = unescaped_object_next(thr->unescapedObjects, &e)))
		{
			GCClearFlag(ptr, GCFlagVisited);
		}
	}
	id *object = thr->stackBottom;
	while (object < (id*)thr->stackTop)
	{
		if (unescaped_object_table_get(thr->unescapedObjects, *object))
		{
			// Note: This doesn't actually have to use atomic ops; this object
			// is guaranteed, at this point, not to be referenced by another
			// thread.
			GCSetFlag(*object, GCFlagVisited);
			//fprintf(stderr, "Tracing found %x\n", (int)*object);
		}
		GCTracedPointer *foundObject =
			traced_object_table_get(traced_objects, *object);
		// FIXME: This second test should not be required.  Why are we being
		// returned pointers to NULL?
		if (foundObject && foundObject->pointer)
		{
			if (!GCTestFlag(foundObject->pointer, GCFlagVisited))
			{
				GCSetFlag(foundObject->pointer, GCFlagVisited);
				if (foundObject->stackAddress)
				{
					// If the recorded stack address is in this stack and above
					// the current slot, prefer the lower address.
					if ((foundObject->stackAddress < (id*)thr->stackTop &&
						foundObject->stackAddress > object))
					{
						foundObject->stackAddress = object;
					}
				}
				else
				{
					// Record this address if there isn't an existing stack
					// address.
					foundObject->stackAddress = object;
				}
			}
		}
		object++;
	}
	pthread_rwlock_unlock(&traced_objects_lock);

	struct unescaped_object_table_enumerator *e = NULL;
	id ptr;
	while ((ptr = unescaped_object_next(thr->unescapedObjects, &e)))
	{
		id oldPtr;
		// Repeat on the current enumerator spot while we are deleting things.
		do
		{
			oldPtr = ptr;
			if (!GCTestFlag(ptr, GCFlagVisited))
			{
				GCFreeObject(ptr);
				unescaped_object_remove(thr->unescapedObjects, ptr);
			}
		} while ((oldPtr != (ptr = unescaped_object_current(thr->unescapedObjects, &e)))
				&& ptr);
	}
	thr->scannedInGeneration = generation;
}

void GCRunTracer(void)
{
	struct GCTraceContext context = {0};
	// Mark any objects that we can see as really existing
	GCVisitTracedRegions(GCTraceRegion, &context);
	// Free any objects that we couldn't find references for
	struct traced_object_table_enumerator *e = NULL;
	int threadGeneration = INT_MAX;
	for (GCThread *thr = GCThreadList ; thr != NULL ; thr = thr->next)
	{
		int thrGeneration = thr->scannedInGeneration;
		if (thrGeneration < threadGeneration)
		{
			threadGeneration = thrGeneration;
		}
	}
	GCTracedPointer *object;
	while ((object = traced_object_next(traced_objects, &e)))
	{
		GCTracedPointer *oldPtr;
		// Repeat on the current enumerator spot while we are deleting things.
		do
		{
			oldPtr = object;
			//fprintf(stderr, "Thinking of freeing %x.  Visited: %d, clear gen: %d, thread gen: %d\n", (int)object->pointer, GCTestFlag(object->pointer, GCFlagVisited), object->visitClearedGeneration , threadGeneration);
			// If an object hasn't been visited and we have scanned everywhere
			// since we cleared its visited flag, delete it.  This works
			// because the heap write barrier sets the visited flag.
			if (!GCTestFlag(object->pointer, GCFlagVisited) &&
				object->visitClearedGeneration < threadGeneration)
			{
				GCFreeObjectUnsafe(object->pointer);
				traced_object_remove(traced_objects, object->pointer);
			}
		} while (oldPtr != ((object = traced_object_current(traced_objects, &e)))
				&& object);
	}
}

void GCRunTracerIfNeeded(BOOL forceCollect)
{
	struct traced_object_table_enumerator *e = NULL;
	GCTracedPointer *ptr;
	// See if we can avoid running the tracer.
	while ((ptr = traced_object_next(traced_objects, &e)))
	{
		id object = ptr->pointer;
		// Throw away any objects that are referenced by the heap
		if (GCGetRetainCount(object) > 0 &&
			GCColourOfObject(object) != GCColourRed)
		{
			// Make sure that the retain count is still > 0.  If not then it
			// may have been released but not added to the tracing list
			// (because it was marked for tracing already)
			if (GCGetRetainCount(object) > 0 &&
				GCColourOfObject(object) != GCColourRed)
			{
				pthread_rwlock_wrlock(&traced_objects_lock);
				traced_object_remove(traced_objects, object);
				pthread_rwlock_unlock(&traced_objects_lock);
				continue;
			}
		}
		if (GCTestFlag(object, GCFlagVisited))
		{
			//fprintf(stderr, "Clearing visited flag for %x\n", (int)object);
			GCClearFlag(object, GCFlagVisited);
			ptr->visitClearedGeneration = GCGeneration;
		}
	}
	//fprintf(stderr, "Incrementing generation\n");
	// Invalidate all stack scans.
	GCGeneration++;
	// Only actually run the tracer if we have more than a few objects that
	// might need freeing.  No point killing the cache just to reclaim one or
	// two objects...
	if (traced_objects->table_used > 256 || forceCollect)
	{
		GCRunTracer();
	}
}

/**
 * Adds an object for tracing that the cycle detector has decided needs freeing.
 */
void GCAddObjectForTracing(id object)
{
	if (!traced_object_table_get(traced_objects, object))
	{
		//fprintf(stderr, "Cycle detector nominated %x for tracing\n", (int)object);
		GCTracedPointer obj = {object, 0, 0, 0};
		traced_object_insert(traced_objects, obj);
	}
}

void GCAddObjectsForTracing(GCThread *thr)
{
	id *buffer = thr->freeBuffer;
	unsigned int count = thr->freeBufferInsert;
	//unsigned int generation = GCHeapScanGeneration;
	// No locking is needed for this table, because it is always accessed from
	// the same thread
	if (NULL == thr->unescapedObjects)
	{
		thr->unescapedObjects = unescaped_object_create(256);
	}
	unescaped_object_table *unescaped = thr->unescapedObjects;

	pthread_rwlock_wrlock(&traced_objects_lock);
	for (unsigned int i=0 ; i<count ; i++)
	{
		id object = buffer[i];
		if (!GCObjectIsDynamic(object))
		{
			// Don't return while still holding the write lock.
			pthread_rwlock_unlock(&traced_objects_lock);
			return;
		}
		// Skip objects that have a strong retain count > 0.  They are
		// definitely still referenced...
		if (GCGetRetainCount(object) > 0 &&
			GCColourOfObject(object) != GCColourRed)
		{
			// ...but they might have become part of a cycle
			GCSetFlag(object, GCFlagBuffered);
			GCSetColourOfObject(object, GCColourPurple);
			GCScanForCycles(&object, 1);
			continue;
		}
		if (GCTestFlag(object, GCFlagEscaped))
		{
			// FIXME: Check if the object is already there, don't add it again
			// if it is, but do update its generation.  It was seen by
			// something in this thread, so it might still be on the stack
			// here, or have been moved to the heap.
			if (!traced_object_table_get(traced_objects, object))
			{
				GCTracedPointer obj = {object, 0, 0, 0};
				traced_object_insert(traced_objects, obj);
				// Make sure that this object is not in the thread's list as well.
				unescaped_object_remove(unescaped, object);
			}
		}
		else
		{
			if (!unescaped_object_table_get(unescaped, object))
			{
				unescaped_object_insert(unescaped, object);
			}
		}
	}
	pthread_rwlock_unlock(&traced_objects_lock);
}

static void GCAddBufferForTracingTrampoline(void *b)
{
	struct gc_buffer_header *buffer = b;
	GCTracedRegion region = { buffer, (char*)buffer + sizeof(struct gc_buffer_header),
		(char*)buffer + sizeof(struct gc_buffer_header) + buffer->size };
	fprintf(stderr, "Buffer has size %d (%ld bytes traced)\n", (int)buffer->size,
			(long)((char*)region.end - (char*)region.start));
	GCTracedRegionInsert(region);
}

void GCAddBufferForTracing(struct gc_buffer_header *buffer)
{
	GCPerform(GCAddBufferForTracingTrampoline, buffer);
}

// TODO: memmove_collectable does this for a whole region, but only does the
// locking once.
id objc_assign_strongCast(id obj, id *ptr)
{
	BOOL objIsDynamic = GCObjectIsDynamic(obj);
	// This object is definitely stored somewhere, so mark it as visited
	// for now.
	if (objIsDynamic && obj)
	{
		GCSetFlag(obj, GCFlagVisited);
		// Tracing semantics do not apply to objects with CF semantics, so skip
		// the next bits if the CF flag is set.
		if (obj && !GCTestFlag(obj, GCFlagCFObject))
		{
			// Don't free this just after scanning the stack.
			GCSetFlag(obj, GCFlagEscaped);
		}
	}
	pthread_rwlock_wrlock(&traced_objects_lock);
	GCTracedPointer *old = traced_object_table_get(traced_objects, *ptr);
	if (old)
	{
		// If the value that we are overwriting is a traced pointer and this is
		// the pointer to it that we are tracking then mark it as not visited.
		//
		// This object may still have been copied to a stack.  If it hasn't
		// been copied to this stack, then we can collect it in future if it
		// isn't in any other heap blocks?
		if (old->heapAddress == ptr)
		{
			old->heapAddress = 0;
			old->visitClearedGeneration = GCGeneration + 1;
			GCClearFlag(*ptr, GCFlagVisited);
		}
	}
	if (objIsDynamic && obj)
	{
		GCTracedPointer *new = traced_object_table_get(traced_objects, obj);
		if (new)
		{
			new->heapAddress = ptr;
		}
	}
	pthread_rwlock_unlock(&traced_objects_lock);
	*ptr = obj;
	return obj;
}
@ -1,10 +0,0 @@

/**
 * Visitor function.  Visiting is non-recursive.  You must call
 * GCVisitChildren() on the object argument if you wish to explore the entire
 * graph.
 */
typedef void (*visit_function_t)(id object, void *context, BOOL isWeak);

void GCVisitChildren(id object, visit_function_t function, void *argument,
		BOOL visitWeakChildren);
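Because visiting is non-recursive, a caller that wants the whole object graph has to recurse itself, as the comment above says. A small illustrative sketch of such a visitor (the counting helper is hypothetical, not part of GCKit, and a real caller would also track objects it has already seen to avoid looping on cycles):

// Illustrative visitor: counts every strongly-referenced object reachable
// from a root.
static void countReachable(id object, void *context, BOOL isWeak)
{
	if (isWeak)
	{
		return;
	}
	unsigned *count = context;
	(*count)++;
	// GCVisitChildren() only enumerates direct children, so recurse manually.
	GCVisitChildren(object, countReachable, context, NO);
}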
@ -1,145 +0,0 @@
#include "../objc/objc-api.h"
#include "../objc/runtime.h"
#import "visit.h"
#include <stdlib.h>
#include <string.h>
#include <stdio.h>


/**
 * Structure storing information about object children.
 *
 * Note: This structure is quite inefficient.  We can optimise it a lot later,
 * if required.
 */
struct GCChildInfo
{
	/** Number of children of this class. */
	unsigned int count;
	/** Offsets of children. */
	size_t *offsets;
	/** Method pointer for enumerating extra children. */
	IMP extraChildren;
};

@interface NSObject
- (BOOL)instancesRespondToSelector: (SEL)aSel;
- (IMP)instanceMethodForSelector: (SEL)aSel;
- (void)_visitChildrenWithFunction: (visit_function_t)function
                           context: (void*)context
                         visitWeak: (BOOL)aFlag;
@end
static SEL visitSelector = @selector(_visitChildrenWithFunction:context:visitWeak:);

/**
 * Macro for adding an offset to the offset buffer and resizing it if required.
 */
#define ADD_OFFSET(offset) \
	do {\
		if (found == space)\
		{\
			space *= 2;\
			buffer = realloc(buffer, sizeof(size_t[space]));\
		}\
		buffer[found++] = offset;\
	} while(0)

// Note: If we want to save space we could use char*s and short*s for objects
// less than 2^8 and 2^16 big and add a header indicating this.
/**
 * Create an instance variable map for the specified class.  Inspects the ivars
 * metadata and creates a GCChildInfo structure for the class.  This is cached
 * in the gc_object_type field in the class structure.
 *
 * FIXME: This is a hack.  The compiler should generate this stuff, not the
 * runtime.
 */
struct GCChildInfo *GCMakeIVarMap(Class aClass)
{
	struct GCChildInfo *info = calloc(1, sizeof(struct GCChildInfo));

	unsigned int ivarCount;
	Ivar *ivars = class_copyIvarList(aClass, &ivarCount);

	if (0 == ivarCount)
	{
		info->count = 0;
		info->offsets = NULL;
	}
	else
	{
		unsigned found = 0;
		// First guess - every instance variable is an object.  Note that
		// space counts slots, not bytes; ADD_OFFSET() doubles it when the
		// buffer fills up.
		size_t space = ivarCount;
		size_t *buffer = malloc(sizeof(size_t[space]));

		for (unsigned i=0 ; i<ivarCount ; ++i)
		{
			Ivar ivar = ivars[i];
			const char *type = ivar_getTypeEncoding(ivar);
			switch (type[0])
			{
				case '@':
				{
					// If it's an object, add it to the list.
					// FIXME: Weak ivars
					ADD_OFFSET(ivar_getOffset(ivar));
					break;
				}
				case '[':
				case '{':
				{
					if (strchr(type, '@'))
					{
						//FIXME: Parse structures and arrays correctly
						fprintf(stderr, "Compound type found in class %s, type: %s is "
								"incorrectly handled\n", class_getName(aClass),
								ivar_getTypeEncoding(ivar));
					}
					break;
				}
			}
		}
		info->count = found;
		info->offsets = realloc(buffer, sizeof(size_t[found]));
	}
	free(ivars);
	/* FIXME: Use the runtime functions for this
	if ([aClass instancesRespondToSelector: visitSelector])
	{
		info->extraChildren =
			[aClass instanceMethodForSelector: visitSelector];
	}
	*/
	aClass->gc_object_type = info;
	return info;
}

void GCVisitChildren(id object, visit_function_t function, void *argument,
		BOOL visitWeakChildren)
{
	Class cls = object->class_pointer;
	while (Nil != cls)
	{
		if (NULL == cls->gc_object_type)
		{
			// FIXME: Locking
			GCMakeIVarMap(cls);
		}
		struct GCChildInfo *info = cls->gc_object_type;
		for (unsigned i=0 ; i<info->count ; ++i)
		{
			id child = *(id*)(((char*)object) + info->offsets[i]);
			if (child != nil)
			{
				BOOL isWeak = (intptr_t)child & 1;
				// Pass the child to the visitor, not the object being visited.
				function(child, argument, isWeak);
			}
		}
		if (NULL != info->extraChildren)
		{
			info->extraChildren(object, visitSelector, function, argument,
					visitWeakChildren);
		}
		cls = cls->super_class;
	}
}
@ -1,4 +0,0 @@

void GCPerformDeferred(void(*function)(void*), void *data,
		int useconds);
void GCPerform(void(*function)(void*), void *data);
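A minimal usage sketch of these two entry points, assuming a caller that wants a collection-related callback to run on the GC queue now and again a little later (the flushCaches callback is illustrative, not part of GCKit):

// Illustrative callback: receives the data pointer passed to GCPerform().
static void flushCaches(void *data)
{
	(void)data;
	// ... do some GC housekeeping on the collector's queue ...
}

static void scheduleFlush(void *data)
{
	// Run as soon as the GC queue gets to it.
	GCPerform(flushCaches, data);
	// Run again no sooner than 100ms from now.
	GCPerformDeferred(flushCaches, data, 100000);
}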
@ -1,46 +0,0 @@
#include "../toydispatch/toydispatch.h"
#include <unistd.h>
#include <stdlib.h>

static dispatch_queue_t gc_deferred_queue;
static dispatch_queue_t gc_queue;

__attribute__((constructor)) static void create_queue(void)
{
	gc_deferred_queue = dispatch_queue_create("GCKit collection deferred queue", 0);
	gc_queue = dispatch_queue_create("GCKit collection queue", 0);
}

struct gc_deferred_execution_t
{
	useconds_t time;
	dispatch_function_t function;
	void *data;
};

static void perform_deferred(void *c)
{
	struct gc_deferred_execution_t *context = c;
	usleep(context->time);
	// Same argument order as the other dispatch_async_f() calls in this file:
	// queue, context pointer, function.
	dispatch_async_f(gc_queue, context->data, context->function);
	free(context);
}

/**
 * Runs function(data) at some point at least useconds microseconds in the
 * future.
 */
void GCPerformDeferred(dispatch_function_t function, void *data,
		int useconds)
{
	struct gc_deferred_execution_t *context = malloc(sizeof(struct gc_deferred_execution_t));
	context->time = useconds;
	context->function = function;
	context->data = data;
	dispatch_async_f(gc_deferred_queue, context, perform_deferred);
}

void GCPerform(dispatch_function_t function, void *data)
{
	dispatch_async_f(gc_queue, data, function);
}