
Commit 99eaf0e

Merge pull request #69 from glessard/unload

remove `load()` on `AtomicReference`

2 parents 10138f4 + 5798ee6

File tree: 9 files changed, +74 -476 lines

.travis.yml

Lines changed: 9 additions & 0 deletions
@@ -2,6 +2,10 @@ language: swift
 
 matrix:
   include:
+    - os: osx
+      osx_image: xcode8.3
+      env: SWIFT=3.1.1
+
     - os: osx
       osx_image: xcode9.2
       env: SWIFT=4.0.3
@@ -22,6 +26,11 @@ matrix:
       osx_image: xcode11.2
       env: SWIFT=5.1.2
 
+    - os: linux
+      dist: xenial
+      language: generic
+      env: SWIFT=3.1.1
+
     - os: linux
       dist: xenial
       language: generic

Package.swift

Lines changed: 13 additions & 0 deletions
@@ -2,6 +2,8 @@
 
 import PackageDescription
 
+#if swift(>=4.0)
+
 let package = Package(
   name: "SwiftAtomics",
   products: [
@@ -16,3 +18,14 @@ let package = Package(
   ],
   swiftLanguageVersions: [.v3, .v4, .v4_2, .version("5")]
 )
+
+#else
+
+let package = Package(
+  name: "SwiftAtomics",
+  targets: [
+    Target(name: "SwiftAtomics", dependencies: ["CAtomics"]),
+  ]
+)
+
+#endif
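
For reference, a minimal sketch of a client manifest that depends on this package under the Swift 4+ branch above. The package URL, the version requirement, and the assumption that the library product is named "SwiftAtomics" are illustrative, not taken from this commit.

// swift-tools-version:4.0
import PackageDescription

let package = Package(
  name: "ExampleClient",  // hypothetical client package
  dependencies: [
    // URL and version requirement are illustrative
    .package(url: "https://github.com/glessard/swift-atomics.git", from: "4.0.0"),
  ],
  targets: [
    .target(name: "ExampleClient", dependencies: ["SwiftAtomics"]),
  ]
)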

README.md

Lines changed: 1 addition & 1 deletion
@@ -1,5 +1,5 @@
 # swift-atomics [![Build Status](https://travis-ci.org/glessard/swift-atomics.svg?branch=master)](https://travis-ci.org/glessard/swift-atomics)
-Some atomic functions made available to Swift 3.2 and up, thanks to Clang
+Some atomic functions made available to Swift 3.1 and up, thanks to Clang
 
 The atomic functions available in `/usr/include/libkern/OSAtomic.h` are quite limiting in Swift, due to impedance mismatches between the type systems of Swift and C. Furthermore, some simple things such as a synchronized load or a synchronized store are not immediately available. On top of that, they have now been deprecated.
 

Sources/CAtomics/include/CAtomics.h

Lines changed: 4 additions & 89 deletions
@@ -409,112 +409,27 @@ void CAtomicsThreadFence(enum MemoryOrder order)
   atomic_thread_fence(order);
 }
 
-// define struct + functions for nearly-atomic handling of Swift.Unmanaged
-
-#define __OPAQUE_UNMANAGED_LOCKED (uintptr_t)0x7
-#define __OPAQUE_UNMANAGED_SPINMASK (char)0xc0
+// define struct + functions for handling of Swift.Unmanaged
 
 CLANG_ATOMICS_STRUCT(OpaqueUnmanagedHelper, atomic_uintptr_t, a, _Alignof(atomic_uintptr_t))
 CLANG_ATOMICS_IS_LOCK_FREE(OpaqueUnmanagedHelper)
 CLANG_ATOMICS_POINTER_INITIALIZE(OpaqueUnmanagedHelper, const void*, _Nullable)
-
-// this should only be used for unlocking
-CLANG_ATOMICS_POINTER_STORE(OpaqueUnmanagedHelper, const void*, _Nullable)
+CLANG_ATOMICS_POINTER_SWAP(OpaqueUnmanagedHelper, const void*, _Nullable)
 
 // this should only be used for debugging and testing
 CLANG_ATOMICS_POINTER_LOAD(OpaqueUnmanagedHelper, const void*, _Nullable)
 
-static __inline__ __attribute__((__always_inline__)) \
-const void *_Nullable CAtomicsUnmanagedLockAndLoad(OpaqueUnmanagedHelper *_Nonnull atomic)
-{ // load the pointer value, and leave the pointer either LOCKED or NULL; spin for the lock if necessary
-#ifndef __SSE2__
-  char c;
-  c = 0;
-#endif
-  uintptr_t pointer;
-  pointer = atomic_load_explicit(&(atomic->a), __ATOMIC_ACQUIRE);
-  do { // don't fruitlessly invalidate the cache line if the value is locked
-    while (pointer == __OPAQUE_UNMANAGED_LOCKED)
-    {
-#ifdef __SSE2__
-      _mm_pause();
-#else
-      c += 1;
-      if ((c&__OPAQUE_UNMANAGED_SPINMASK) != 0) { sched_yield(); }
-#endif
-      pointer = atomic_load_explicit(&(atomic->a), __ATOMIC_ACQUIRE);
-    }
-    // return immediately if pointer is NULL (importantly: without locking)
-    if (pointer == (uintptr_t) NULL) { return NULL; }
-  } while(!atomic_compare_exchange_weak_explicit(&(atomic->a), &pointer, __OPAQUE_UNMANAGED_LOCKED, __ATOMIC_ACQ_REL, __ATOMIC_ACQUIRE));
-
-  return (void*) pointer;
-}
-
-static __inline__ __attribute__((__always_inline__)) \
-__attribute__((overloadable)) \
-const void *_Nullable CAtomicsExchange(OpaqueUnmanagedHelper *_Nonnull atomic,
-                                       const void *_Nullable value, enum MemoryOrder order)
-{ // swap the pointer with `value`, spinning until the lock becomes unlocked if necessary
-#ifndef __SSE2__
-  char c;
-  c = 0;
-#endif
-  uintptr_t pointer;
-  pointer = atomic_load_explicit(&(atomic->a), order);
-  do { // don't fruitlessly invalidate the cache line if the value is locked
-    while (pointer == __OPAQUE_UNMANAGED_LOCKED)
-    {
-#ifdef __SSE2__
-      _mm_pause();
-#else
-      c += 1;
-      if ((c&__OPAQUE_UNMANAGED_SPINMASK) != 0) { sched_yield(); }
-#endif
-      pointer = atomic_load_explicit(&(atomic->a), order);
-    }
-  } while(!atomic_compare_exchange_weak_explicit(&(atomic->a), &pointer, (uintptr_t)value, order, order));
-
-  return (void*) pointer;
-}
-
 static __inline__ __attribute__((__always_inline__)) \
 __attribute__((overloadable)) \
 _Bool CAtomicsCompareAndExchange(OpaqueUnmanagedHelper *_Nonnull atomic,
                                  const void *_Nullable current, const void *_Nullable future,
                                  enum CASType type, enum MemoryOrder order)
 {
+  uintptr_t pointer = (uintptr_t) current;
   if(type == __ATOMIC_CAS_TYPE_WEAK)
-  {
-    uintptr_t pointer = (uintptr_t) current;
     return atomic_compare_exchange_weak_explicit(&(atomic->a), &pointer, (uintptr_t)future, order, memory_order_relaxed);
-  }
   else
-  { // we should consider that __OPAQUE_UNMANAGED_LOCKED is a spurious value
-#ifndef __SSE2__
-    char c;
-    c = 0;
-#endif
-    _Bool success;
-    while (true)
-    {
-      uintptr_t pointer = (uintptr_t) current;
-      success = atomic_compare_exchange_strong_explicit(&(atomic->a), &pointer, (uintptr_t)future, order, memory_order_relaxed);
-      if (pointer != __OPAQUE_UNMANAGED_LOCKED) { break; }
-
-      while (pointer == __OPAQUE_UNMANAGED_LOCKED)
-      { // don't fruitlessly invalidate the cache line if the value is locked
-#ifdef __SSE2__
-        _mm_pause();
-#else
-        c += 1;
-        if ((c&__OPAQUE_UNMANAGED_SPINMASK) != 0) { sched_yield(); }
-#endif
-        pointer = atomic_load_explicit(&(atomic->a), __ATOMIC_RELAXED);
-      }
-    }
-    return success;
-  }
+    return atomic_compare_exchange_strong_explicit(&(atomic->a), &pointer, (uintptr_t)future, order, memory_order_relaxed);
 }
 
 #endif
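
With the lock-and-load path removed, the C layer for OpaqueUnmanagedHelper is reduced to initialize, swap, a debug-only load, and a plain compare-and-exchange; there is no locked sentinel left to spin on. Below is a minimal Swift-side sketch of the ownership discipline those two remaining mutating primitives support. The free functions and the use of .sequential ordering are illustrative; the CAtomics calls mirror the ones used in the Swift sources changed by this commit.

import CAtomics

// storeIfNil-style insertion: retain only after the CAS has published the pointer.
func insertIfEmpty<T: AnyObject>(_ reference: T, into ptr: inout OpaqueUnmanagedHelper) -> Bool
{
  let u = Unmanaged.passUnretained(reference)
  if CAtomicsCompareAndExchange(&ptr, nil, u.toOpaque(), .strong, .sequential)
  {
    _ = u.retain()  // the slot now owns one reference
    return true
  }
  return false
}

// take-style removal: swap nil in; a non-nil result hands its retain to the caller.
func takeReference<T: AnyObject>(of type: T.Type, from ptr: inout OpaqueUnmanagedHelper) -> T?
{
  let pointer = CAtomicsExchange(&ptr, nil, .sequential)
  return pointer.map { Unmanaged<T>.fromOpaque($0).takeRetainedValue() }
}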

Sources/SwiftAtomics/atomics-reference.swift

Lines changed: 33 additions & 57 deletions
@@ -14,6 +14,21 @@ import CAtomics
 
 import struct CAtomics.OpaqueUnmanagedHelper
 
+#if !swift(>=3.2)
+extension MemoryOrder
+{
+  @_versioned init(order: LoadMemoryOrder)
+  {
+    self = MemoryOrder.init(rawValue: order.rawValue) ?? .sequential
+  }
+
+  @_versioned init(order: StoreMemoryOrder)
+  {
+    self = MemoryOrder.init(rawValue: order.rawValue) ?? .sequential
+  }
+}
+#endif
+
 public struct AtomicReference<T: AnyObject>
 {
 #if swift(>=4.2)
@@ -79,7 +94,7 @@ extension AtomicReference
     }
     return false
   }
-#else
+#elseif swift(>=3.2)
   @inline(__always)
   public mutating func storeIfNil(_ reference: T, order: StoreMemoryOrder = .release) -> Bool
   {
@@ -91,6 +106,18 @@ extension AtomicReference
     }
     return false
   }
+#else
+  @inline(__always)
+  public mutating func storeIfNil(_ reference: T, order: StoreMemoryOrder = .sequential) -> Bool
+  {
+    let u = Unmanaged.passUnretained(reference)
+    if CAtomicsCompareAndExchange(&ptr, nil, u.toOpaque(), .strong, MemoryOrder(order: order))
+    {
+      _ = u.retain()
+      return true
+    }
+    return false
+  }
 #endif
 
 #if swift(>=4.2)
@@ -100,70 +127,19 @@ extension AtomicReference
     let pointer = CAtomicsExchange(&ptr, nil, MemoryOrder(rawValue: order.rawValue)!)
     return pointer.map { Unmanaged.fromOpaque($0).takeRetainedValue() }
   }
-#else
+#elseif swift(>=3.2)
   @inline(__always)
   public mutating func take(order: LoadMemoryOrder = .acquire) -> T?
   {
     let pointer = CAtomicsExchange(&ptr, nil, MemoryOrder(rawValue: order.rawValue)!)
     return pointer.map { Unmanaged.fromOpaque($0).takeRetainedValue() }
   }
-#endif
-
-#if swift(>=4.2)
-  /// load the reference currently stored in this AtomicReference
-  ///
-  /// This is *not* an atomic operation if the reference is non-nil.
-  /// In order to ensure the integrity of the automatic reference
-  /// counting, the AtomicReference gets locked (internally) until the
-  /// reference count has been incremented. The critical section
-  /// protected by the lock is extremely short (nanoseconds), but necessary.
-  ///
-  /// This is the only AtomicReference operation that needs a lock;
-  /// the others operations will spin until a `load` operation is complete,
-  /// but are otherwise atomic.
-  @inlinable
-  public mutating func load() -> T?
-  {
-    if let pointer = CAtomicsUnmanagedLockAndLoad(&ptr)
-    {
-      assert(CAtomicsLoad(&ptr, .acquire) == UnsafeRawPointer(bitPattern: 0x7))
-      CAtomicsThreadFence(.acquire)
-      let unmanaged = Unmanaged<T>.fromOpaque(pointer).retain()
-      // ensure the reference counting operation has occurred before unlocking,
-      // by performing our store operation with StoreMemoryOrder.release
-      CAtomicsThreadFence(.release)
-      CAtomicsStore(&ptr, pointer, .release)
-      return unmanaged.takeRetainedValue()
-    }
-    return nil
-  }
-#else
-  /// load the reference currently stored in this AtomicReference
-  ///
-  /// This is *not* an atomic operation if the reference is non-nil.
-  /// In order to ensure the integrity of the automatic reference
-  /// counting, the AtomicReference gets locked (internally) until the
-  /// reference count has been incremented. The critical section
-  /// protected by the lock is extremely short (nanoseconds), but necessary.
-  ///
-  /// This is the only AtomicReference operation that needs a lock;
-  /// the others operations will spin until a `load` operation is complete,
-  /// but are otherwise atomic.
+#else // swift 3.1
  @inline(__always)
-  public mutating func load() -> T?
+  public mutating func take(order: LoadMemoryOrder = .sequential) -> T?
  {
-    if let pointer = CAtomicsUnmanagedLockAndLoad(&ptr)
-    {
-      assert(CAtomicsLoad(&ptr, .acquire) == UnsafeRawPointer(bitPattern: 0x7))
-      CAtomicsThreadFence(.acquire)
-      let unmanaged = Unmanaged<T>.fromOpaque(pointer).retain()
-      // ensure the reference counting operation has occurred before unlocking,
-      // by performing our store operation with StoreMemoryOrder.release
-      CAtomicsThreadFence(.release)
-      CAtomicsStore(&ptr, pointer, .release)
-      return unmanaged.takeRetainedValue()
-    }
-    return nil
+    let pointer = CAtomicsExchange(&ptr, nil, MemoryOrder(order: order))
+    return pointer.map { Unmanaged.fromOpaque($0).takeRetainedValue() }
   }
 #endif
 
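
With `load()` removed, there is no way to peek at the stored reference without taking ownership of it. The following is a hedged migration sketch; the `Node` type and the initializer call are illustrative, and re-inserting with `storeIfNil` is not equivalent to the old `load()`, since other threads can observe nil (or store their own reference) in between.

import SwiftAtomics

final class Node { let value: Int; init(_ value: Int) { self.value = value } }

var ref = AtomicReference(Node(42))  // assumes the existing initializer that takes an initial reference

// Before this change:  let node = ref.load()
// After: take ownership, use the reference, then optionally try to put it back.
if let node = ref.take()
{
  print(node.value)
  _ = ref.storeIfNil(node)  // may fail if another thread stored a reference meanwhile
}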
