| //===----------------------------------------------------------------------===// |
| // |
| // This source file is part of the Swift.org open source project |
| // |
| // Copyright (c) 2014 - 2018 Apple Inc. and the Swift project authors |
| // Licensed under Apache License v2.0 with Runtime Library Exception |
| // |
| // See https://swift.org/LICENSE.txt for license information |
| // See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors |
| // |
| //===----------------------------------------------------------------------===// |
| |
/// A heap-allocated integer supporting sequentially-consistent atomic
/// operations (load, store, read-modify-write, compare-exchange).
///
/// Deprecated compatibility shim: kept only so pre-Swift-5 clients of
/// `SwiftPrivate._stdlib_AtomicInt` continue to work.
@available(swift, deprecated: 4.2, obsoleted: 5.0)
public final class _stdlib_AtomicInt {
  // Backing storage. After `init`, it must only be accessed through
  // `_valuePtr` so every access goes through the atomic entry points below.
  internal var _value: Int

  // Pointer to `_value` inside this instance's stored-property area.
  // NOTE(review): this assumes `_value` is the first (and only) stored
  // property, since `_getUnsafePointerToStoredProperties` returns the start
  // of the property area — confirm if more stored properties are ever added.
  internal var _valuePtr: UnsafeMutablePointer<Int> {
    return _getUnsafePointerToStoredProperties(self).assumingMemoryBound(
      to: Int.self)
  }

  /// Creates an atomic integer with the given initial value (default 0).
  public init(_ value: Int = 0) {
    _value = value
  }

  /// Atomically stores `desired` with sequentially-consistent ordering.
  public func store(_ desired: Int) {
    return _swift_stdlib_atomicStoreInt(object: _valuePtr, desired: desired)
  }

  /// Atomically loads the current value with sequentially-consistent ordering.
  public func load() -> Int {
    return _swift_stdlib_atomicLoadInt(object: _valuePtr)
  }

% for operation_name, operation in [ ('Add', '+'), ('And', '&'), ('Or', '|'), ('Xor', '^') ]:
  /// Atomically applies `${operation}` with `operand` and returns the value
  /// the variable held *before* the operation. No overflow checking.
  @discardableResult
  public func fetchAnd${operation_name}(_ operand: Int) -> Int {
    return _swift_stdlib_atomicFetch${operation_name}Int(
      object: _valuePtr,
      operand: operand)
  }

  /// Atomically applies `${operation}` with `operand` and returns the value
  /// *after* the operation (recomputed locally from the fetched old value).
  public func ${operation_name.lower()}AndFetch(_ operand: Int) -> Int {
    return fetchAnd${operation_name}(operand) ${operation} operand
  }
% end

  /// Atomically compares the current value against `expected` and, if equal,
  /// replaces it with `desired`.
  ///
  /// - Returns: `true` on a successful exchange. On return, `expected` is
  ///   updated to the value that was actually observed (unchanged in the
  ///   success case, the current value in the failure case).
  public func compareExchange(expected: inout Int, desired: Int) -> Bool {
    // Copy to a local so the Builtin-facing helper gets a plain pointer
    // rather than the `inout` parameter itself.
    var expectedVar = expected
    let result = _swift_stdlib_atomicCompareExchangeStrongInt(
      object: _valuePtr,
      expected: &expectedVar,
      desired: desired)
    expected = expectedVar
    return result
  }
}
| |
@usableFromInline // used by SwiftPrivate._stdlib_AtomicInt
/// Strong sequentially-consistent compare-and-exchange on a word-sized Int.
///
/// On return, `expected.pointee` holds the value observed in `target`
/// (the old value on failure; equal to the original expected value on
/// success). Returns `true` iff the exchange happened.
///
/// Note: only word sizes for the listed architectures are handled; on any
/// other architecture neither `#if` branch applies and this will not compile.
internal func _swift_stdlib_atomicCompareExchangeStrongInt(
  object target: UnsafeMutablePointer<Int>,
  expected: UnsafeMutablePointer<Int>,
  desired: Int) -> Bool {
#if arch(i386) || arch(arm)
  // 32-bit platforms: Int is 32 bits wide.
  let (oldValue, won) = Builtin.cmpxchg_seqcst_seqcst_Int32(
    target._rawValue, expected.pointee._value, desired._value)
#elseif arch(x86_64) || arch(arm64) || arch(powerpc64) || arch(powerpc64le) || arch(s390x)
  // 64-bit platforms: Int is 64 bits wide.
  let (oldValue, won) = Builtin.cmpxchg_seqcst_seqcst_Int64(
    target._rawValue, expected.pointee._value, desired._value)
#endif
  // Report the observed value back to the caller regardless of outcome.
  expected.pointee._value = oldValue
  return Bool(won)
}
| |
| |
| // FIXME: ideally it should not be here, at the very least not public, but |
| // @usableFromInline internal to be used by SwiftPrivate._stdlib_AtomicInt |
| public // Existing uses outside stdlib |
| func _swift_stdlib_atomicLoadInt( |
| object target: UnsafeMutablePointer<Int>) -> Int { |
| #if arch(i386) || arch(arm) |
| let value = Builtin.atomicload_seqcst_Int32(target._rawValue) |
| return Int(value) |
| #elseif arch(x86_64) || arch(arm64) || arch(powerpc64) || arch(powerpc64le) || arch(s390x) |
| let value = Builtin.atomicload_seqcst_Int64(target._rawValue) |
| return Int(value) |
| #endif |
| } |
| |
@usableFromInline // used by SwiftPrivate._stdlib_AtomicInt
/// Sequentially-consistent atomic store of `desired` into `target`.
///
/// Dispatches to the 32- or 64-bit Builtin according to the target
/// architecture's word size; unlisted architectures will fail to compile.
internal func _swift_stdlib_atomicStoreInt(
  object target: UnsafeMutablePointer<Int>,
  desired: Int) {
#if arch(i386) || arch(arm)
  Builtin.atomicstore_seqcst_Int32(target._rawValue, desired._value)
#elseif arch(x86_64) || arch(arm64) || arch(powerpc64) || arch(powerpc64le) || arch(s390x)
  Builtin.atomicstore_seqcst_Int64(target._rawValue, desired._value)
#endif
}
| |
% for operation in ['Add', 'And', 'Or', 'Xor']:
// Warning: no overflow checking.
// FIXME: ideally it should not be here, at the very least not public, but
// @usableFromInline internal to be used by SwiftPrivate._stdlib_AtomicInt
/// Sequentially-consistent atomic fetch-${operation.lower()} on a word-sized
/// Int: applies the operation with `operand` and returns the *previous* value.
///
/// Rebinds the pointer to the fixed-width type matching the architecture's
/// word size and forwards to the sized variant below.
public // Existing uses outside stdlib
func _swift_stdlib_atomicFetch${operation}Int(
  object target: UnsafeMutablePointer<Int>,
  operand: Int) -> Int {
  let rawTarget = UnsafeMutableRawPointer(target)
#if arch(i386) || arch(arm)
  let value = _swift_stdlib_atomicFetch${operation}Int32(
    object: rawTarget.assumingMemoryBound(to: Int32.self),
    operand: Int32(operand))
#elseif arch(x86_64) || arch(arm64) || arch(powerpc64) || arch(powerpc64le) || arch(s390x)
  let value = _swift_stdlib_atomicFetch${operation}Int64(
    object: rawTarget.assumingMemoryBound(to: Int64.self),
    operand: Int64(operand))
#endif
  return Int(value)
}

% for bits in [ 32, 64 ]:

// Warning: no overflow checking.
@usableFromInline // used by SwiftPrivate._stdlib_AtomicInt
/// Fixed-width (${bits}-bit) atomic fetch-${operation.lower()} backing the
/// word-sized entry point above; wraps the LLVM `atomicrmw` Builtin and
/// returns the value held before the operation.
internal func _swift_stdlib_atomicFetch${operation}Int${bits}(
  object target: UnsafeMutablePointer<Int${bits}>,
  operand: Int${bits}) -> Int${bits} {

  let value = Builtin.atomicrmw_${operation.lower()}_seqcst_Int${bits}(
    target._rawValue, operand._value)

  return Int${bits}(value)
}

% end

% end
| |