diff --git a/associate.m b/associate.m index 79c9713a..f161e8fa 100644 --- a/associate.m +++ b/associate.m @@ -12,6 +12,8 @@ #include "lock.h" #include "gc_ops.h" +PRIVATE int associate_spinlocks[spinlock_count]; + /** * A single associative reference. Contains the key, value, and association * policy. @@ -140,7 +142,7 @@ static void setReference(struct reference_list *list, break; } // While inserting into the list, we need to lock it temporarily. - volatile int *lock = lock_for_pointer(list); + volatile int *lock = associate_lock_for_pointer(list); lock_spinlock(lock); struct reference *r = findReference(list, key); // If there's an existing reference, then we can update it, otherwise we @@ -165,7 +167,7 @@ static void setReference(struct reference_list *list, BOOL needLock = isAtomic(r->policy) || isAtomic(policy); if (needLock) { - lock = lock_for_pointer(r); + lock = associate_lock_for_pointer(r); lock_spinlock(lock); } @try @@ -289,7 +291,7 @@ static void deallocHiddenClass(id obj, SEL _cmd) Class cls = (Class)object; if ((NULL == cls->extra_data) && create) { - volatile int *lock = lock_for_pointer(cls); + volatile int *lock = associate_lock_for_pointer(cls); struct reference_list *list = gc->malloc(sizeof(struct reference_list)); lock_spinlock(lock); if (NULL == cls->extra_data) @@ -309,7 +311,7 @@ static void deallocHiddenClass(id obj, SEL _cmd) Class hiddenClass = findHiddenClass(object); if ((NULL == hiddenClass) && create) { - volatile int *lock = lock_for_pointer(object); + volatile int *lock = associate_lock_for_pointer(object); lock_spinlock(lock); hiddenClass = findHiddenClass(object); if (NULL == hiddenClass) @@ -423,7 +425,7 @@ static Class hiddenClassForObject(id object) Class hiddenClass = findHiddenClass(object); if (NULL == hiddenClass) { - volatile int *lock = lock_for_pointer(object); + volatile int *lock = associate_lock_for_pointer(object); lock_spinlock(lock); hiddenClass = findHiddenClass(object); if (NULL == hiddenClass) diff --git 
a/properties.m b/properties.m index 7c3dcc31..9c72ed48 100644 --- a/properties.m +++ b/properties.m @@ -12,7 +12,7 @@ #include "gc_ops.h" #include "lock.h" -PRIVATE int spinlocks[spinlock_count]; +PRIVATE int prop_spinlocks[spinlock_count]; /** * Public function for getting a property. @@ -26,7 +26,7 @@ id objc_getProperty(id obj, SEL _cmd, ptrdiff_t offset, BOOL isAtomic) id ret; if (isAtomic) { - volatile int *lock = lock_for_pointer(addr); + volatile int *lock = prop_lock_for_pointer(addr); lock_spinlock(lock); ret = *(id*)addr; ret = objc_retain(ret); @@ -59,7 +59,7 @@ void objc_setProperty(id obj, SEL _cmd, ptrdiff_t offset, id arg, BOOL isAtomic, id old; if (isAtomic) { - volatile int *lock = lock_for_pointer(addr); + volatile int *lock = prop_lock_for_pointer(addr); lock_spinlock(lock); old = *(id*)addr; *(id*)addr = arg; @@ -79,7 +79,7 @@ void objc_setProperty_atomic(id obj, SEL _cmd, id arg, ptrdiff_t offset) char *addr = (char*)obj; addr += offset; arg = objc_retain(arg); - volatile int *lock = lock_for_pointer(addr); + volatile int *lock = prop_lock_for_pointer(addr); lock_spinlock(lock); id old = *(id*)addr; *(id*)addr = arg; @@ -94,7 +94,7 @@ void objc_setProperty_atomic_copy(id obj, SEL _cmd, id arg, ptrdiff_t offset) addr += offset; arg = [arg copy]; - volatile int *lock = lock_for_pointer(addr); + volatile int *lock = prop_lock_for_pointer(addr); lock_spinlock(lock); id old = *(id*)addr; *(id*)addr = arg; @@ -127,20 +127,25 @@ void objc_setProperty_nonatomic_copy(id obj, SEL _cmd, id arg, ptrdiff_t offset) void objc_copyCppObjectAtomic(void *dest, const void *src, void (*copyHelper) (void *dest, const void *source)) { - volatile int *lock = lock_for_pointer(src < dest ? src : dest); - volatile int *lock2 = lock_for_pointer(src < dest ? dest : src); + volatile int *lock = prop_lock_for_pointer(src < dest ? src : dest); + volatile int *lock2 = prop_lock_for_pointer(src < dest ? 
dest : src); lock_spinlock(lock); - lock_spinlock(lock2); + if (lock != lock2) { + // this might look odd, but anytime we take two spinlocks simultaneously, there + // is a non-zero chance that the pointers might hash to the same spinlock. + // in which case, locking one locks both, and locking both would deadlock us. + lock_spinlock(lock2); + } copyHelper(dest, src); unlock_spinlock(lock); - unlock_spinlock(lock2); + if (lock != lock2) { unlock_spinlock(lock2); } } OBJC_PUBLIC void objc_getCppObjectAtomic(void *dest, const void *src, void (*copyHelper) (void *dest, const void *source)) { - volatile int *lock = lock_for_pointer(src); + volatile int *lock = prop_lock_for_pointer(src); lock_spinlock(lock); copyHelper(dest, src); unlock_spinlock(lock); @@ -150,7 +155,7 @@ void objc_getCppObjectAtomic(void *dest, const void *src, void objc_setCppObjectAtomic(void *dest, const void *src, void (*copyHelper) (void *dest, const void *source)) { - volatile int *lock = lock_for_pointer(dest); + volatile int *lock = prop_lock_for_pointer(dest); lock_spinlock(lock); copyHelper(dest, src); unlock_spinlock(lock); @@ -172,13 +177,13 @@ void objc_copyPropertyStruct(void *dest, { if (atomic) { - volatile int *lock = lock_for_pointer(src < dest ? src : dest); - volatile int *lock2 = lock_for_pointer(src < dest ? dest : src); + volatile int *lock = prop_lock_for_pointer(src < dest ? src : dest); + volatile int *lock2 = prop_lock_for_pointer(src < dest ? 
dest : src); lock_spinlock(lock); - lock_spinlock(lock2); + if (lock != lock2) { lock_spinlock(lock2); } memcpy(dest, src, size); unlock_spinlock(lock); - unlock_spinlock(lock2); + if (lock != lock2) { unlock_spinlock(lock2); } } else { @@ -199,7 +204,7 @@ void objc_getPropertyStruct(void *dest, { if (atomic) { - volatile int *lock = lock_for_pointer(src); + volatile int *lock = prop_lock_for_pointer(src); lock_spinlock(lock); memcpy(dest, src, size); unlock_spinlock(lock); @@ -223,7 +228,7 @@ void objc_setPropertyStruct(void *dest, { if (atomic) { - volatile int *lock = lock_for_pointer(dest); + volatile int *lock = prop_lock_for_pointer(dest); lock_spinlock(lock); memcpy(dest, src, size); unlock_spinlock(lock); diff --git a/spinlock.h b/spinlock.h index 2efec475..5c974ffb 100644 --- a/spinlock.h +++ b/spinlock.h @@ -17,7 +17,20 @@ static const int spinlock_mask = spinlock_count - 1; /** * Integers used as spinlocks for atomic property access. */ -extern int spinlocks[spinlock_count]; +extern int prop_spinlocks[spinlock_count]; +extern int associate_spinlocks[spinlock_count]; + +static inline intptr_t hash_for_pointer(const void *ptr) +{ + intptr_t hash = (intptr_t)ptr; + // Most properties will be pointers, so disregard the lowest few bits + hash >>= sizeof(void*) == 4 ? 2 : 8; + intptr_t low = hash & spinlock_mask; + hash >>= 16; + hash |= low; + return (hash & spinlock_mask); +} + /** * Get a spin lock from a pointer. We want to prevent lock contention between * properties in the same object - if someone is stupid enough to be using @@ -26,15 +39,14 @@ extern int spinlocks[spinlock_count]; * contention between the same property in different objects, so we can't just * use the ivar offset. */ -static inline volatile int *lock_for_pointer(const void *ptr) +static inline volatile int *prop_lock_for_pointer(const void *ptr) { - intptr_t hash = (intptr_t)ptr; - // Most properties will be pointers, so disregard the lowest few bits - hash >>= sizeof(void*) == 4 ? 
2 : 8; - intptr_t low = hash & spinlock_mask; - hash >>= 16; - hash |= low; - return spinlocks + (hash & spinlock_mask); + return prop_spinlocks + hash_for_pointer(ptr); +} + +static inline volatile int *associate_lock_for_pointer(const void *ptr) +{ + return associate_spinlocks + hash_for_pointer(ptr); } /**