Mildly rework the NIOLock storage (#2395)
Motivation:

NIOLock uses a storage object constructed from a ManagedBuffer. In general this is fine, but it's a tricky API to use safely and we want to avoid violating any of its guarantees.

Modifications:

- Store the value in the header and the lock in the elements
- Add debug assertions on alignment

Result:

We'll be a bit more confident in the use of NIOLock.
parent 9b2848d76f
commit 8423040a9b
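For readers unfamiliar with ManagedBuffer, here is a minimal, self-contained sketch of the storage shape the commit message above describes: the user's value lives in the buffer's header and a single tail-allocated element holds the lock primitive. This is not SwiftNIO code; DemoStorage and DemoLock are hypothetical names, and the real LockStorage below also destroys the lock in deinit.

struct DemoLock {}  // stand-in for the platform lock primitive

final class DemoStorage<Value>: ManagedBuffer<Value, DemoLock> {
    static func make(_ value: Value) -> DemoStorage<Value> {
        // The factory closure initializes the header (the value); the single
        // tail-allocated element (the lock) is initialized separately.
        let buffer = Self.create(minimumCapacity: 1) { _ in value }
        let storage = unsafeDowncast(buffer, to: DemoStorage<Value>.self)
        storage.withUnsafeMutablePointerToElements { lockPtr in
            lockPtr.initialize(to: DemoLock())
        }
        return storage
    }
}

let demo = DemoStorage.make(42)
demo.withUnsafeMutablePointers { valuePtr, _ in
    // Header pointer first (the value); the elements pointer (the lock) comes second.
    print(valuePtr.pointee)  // 42
}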
@@ -35,6 +35,8 @@ enum LockOperations { }
 extension LockOperations {
     @inlinable
     static func create(_ mutex: UnsafeMutablePointer<LockPrimitive>) {
+        mutex.assertValidAlignment()
+
 #if os(Windows)
         InitializeSRWLock(mutex)
 #else
@@ -51,6 +53,8 @@ extension LockOperations {
 
     @inlinable
     static func destroy(_ mutex: UnsafeMutablePointer<LockPrimitive>) {
+        mutex.assertValidAlignment()
+
 #if os(Windows)
         // SRWLOCK does not need to be free'd
 #else
@@ -61,6 +65,8 @@ extension LockOperations {
 
     @inlinable
     static func lock(_ mutex: UnsafeMutablePointer<LockPrimitive>) {
+        mutex.assertValidAlignment()
+
 #if os(Windows)
         AcquireSRWLockExclusive(mutex)
 #else
@@ -71,6 +77,8 @@ extension LockOperations {
 
     @inlinable
     static func unlock(_ mutex: UnsafeMutablePointer<LockPrimitive>) {
+        mutex.assertValidAlignment()
+
 #if os(Windows)
         ReleaseSRWLockExclusive(mutex)
 #else
@@ -83,19 +91,43 @@ extension LockOperations {
 // Tail allocate both the mutex and a generic value using ManagedBuffer.
 // Both the header pointer and the elements pointer are stable for
 // the class's entire lifetime.
+//
+// However, for safety reasons, we elect to place the lock in the "elements"
+// section of the buffer instead of the head. The reasoning here is subtle,
+// so buckle in.
+//
+// _As a practical matter_, the implementation of ManagedBuffer ensures that
+// the pointer to the header is stable across the lifetime of the class, and so
+// each time you call `withUnsafeMutablePointers` or `withUnsafeMutablePointerToHeader`
+// the value of the header pointer will be the same. This is because ManagedBuffer uses
+// `Builtin.addressOf` to load the value of the header, and that does ~magic~ to ensure
+// that it does not invoke any weird Swift accessors that might copy the value.
+//
+// _However_, the header is also available via the `.header` field on the ManagedBuffer.
+// This presents a problem! The reason there's an issue is that `Builtin.addressOf` and friends
+// do not interact with Swift's exclusivity model. That is, the various `with` functions do not
+// conceptually trigger a mutating access to `.header`. For elements this isn't a concern because
+// there's literally no other way to perform the access, but for `.header` it's entirely possible
+// to accidentally recursively read it.
+//
+// Our implementation is free from these issues, so we don't _really_ need to worry about it.
+// However, out of an abundance of caution, we store the Value in the header, and the LockPrimitive
+// in the trailing elements. We still don't use `.header`, but it's better to be safe than sorry,
+// and future maintainers will be happier that we were cautious.
+//
+// See also: https://github.com/apple/swift/pull/40000
 @usableFromInline
-final class LockStorage<Value>: ManagedBuffer<LockPrimitive, Value> {
+final class LockStorage<Value>: ManagedBuffer<Value, LockPrimitive> {
 
     @inlinable
     static func create(value: Value) -> Self {
         let buffer = Self.create(minimumCapacity: 1) { _ in
-            return LockPrimitive()
+            return value
         }
         let storage = unsafeDowncast(buffer, to: Self.self)
 
-        storage.withUnsafeMutablePointers { lockPtr, valuePtr in
+        storage.withUnsafeMutablePointers { _, lockPtr in
             LockOperations.create(lockPtr)
-            valuePtr.initialize(to: value)
         }
 
         return storage
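The comment block added above carries the core reasoning. As a compact, hypothetical illustration of the hazard it describes (HeaderLockStorage and FakeLockPrimitive are made-up names, not NIOLock code): if the lock primitive lived in the header, code that already holds a pointer from withUnsafeMutablePointerToHeader could still read the ordinary .header property, and that read is not caught by exclusivity checking and copies the lock value out of the buffer, which is never something you want to rely on for a platform lock primitive. Keeping only the user's Value in the header removes that temptation.

struct FakeLockPrimitive {}  // stand-in; imagine pthread_mutex_t or SRWLOCK here

// Hypothetical "lock in the header" layout, the arrangement the comment argues against.
final class HeaderLockStorage: ManagedBuffer<FakeLockPrimitive, Void> {
    func poke() {
        self.withUnsafeMutablePointerToHeader { lockPtr in
            // `lockPtr` and `self.header` alias the same storage, yet this read
            // compiles and runs without any exclusivity diagnostic and copies the
            // lock value: the accidental recursive access the comment warns about.
            let copied = self.header
            _ = (lockPtr, copied)
        }
    }
}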
@@ -103,36 +135,35 @@ final class LockStorage<Value>: ManagedBuffer<LockPrimitive, Value> {
 
     @inlinable
     func lock() {
-        self.withUnsafeMutablePointerToHeader { lockPtr in
+        self.withUnsafeMutablePointerToElements { lockPtr in
             LockOperations.lock(lockPtr)
         }
     }
 
     @inlinable
     func unlock() {
-        self.withUnsafeMutablePointerToHeader { lockPtr in
+        self.withUnsafeMutablePointerToElements { lockPtr in
             LockOperations.unlock(lockPtr)
         }
     }
 
     @inlinable
     deinit {
-        self.withUnsafeMutablePointers { lockPtr, valuePtr in
+        self.withUnsafeMutablePointerToElements { lockPtr in
             LockOperations.destroy(lockPtr)
-            valuePtr.deinitialize(count: 1)
         }
     }
 
     @inlinable
     func withLockPrimitive<T>(_ body: (UnsafeMutablePointer<LockPrimitive>) throws -> T) rethrows -> T {
-        try self.withUnsafeMutablePointerToHeader { lockPtr in
+        try self.withUnsafeMutablePointerToElements { lockPtr in
             return try body(lockPtr)
         }
     }
 
     @inlinable
     func withLockedValue<T>(_ mutate: (inout Value) throws -> T) rethrows -> T {
-        try self.withUnsafeMutablePointers { lockPtr, valuePtr in
+        try self.withUnsafeMutablePointers { valuePtr, lockPtr in
             LockOperations.lock(lockPtr)
             defer { LockOperations.unlock(lockPtr) }
             return try mutate(&valuePtr.pointee)
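For context, the withLockedValue method above backs the public NIOLockedValueBox API that the new test below exercises. A minimal usage sketch, assuming only the public NIOConcurrencyHelpers API:

import NIOConcurrencyHelpers

let counter = NIOLockedValueBox(0)
counter.withLockedValue { $0 += 1 }
let snapshot = counter.withLockedValue { $0 }  // 1; the lock is held only inside each closure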
@@ -209,3 +240,10 @@ extension NIOLock {
 }
 
 extension NIOLock: Sendable {}
+
+extension UnsafeMutablePointer {
+    @inlinable
+    func assertValidAlignment() {
+        assert(UInt(bitPattern: self) % UInt(MemoryLayout<Pointee>.alignment) == 0)
+    }
+}
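The assertValidAlignment helper added at the end of the file is plain pointer arithmetic: the pointer's address must be a multiple of its pointee's alignment. A standalone illustration with hypothetical values, not part of the diff:

let raw = UnsafeMutableRawPointer.allocate(
    byteCount: MemoryLayout<UInt64>.stride,
    alignment: MemoryLayout<UInt64>.alignment
)
defer { raw.deallocate() }

let typed = raw.bindMemory(to: UInt64.self, capacity: 1)
// The same check the new helper performs: address modulo alignment is zero.
assert(UInt(bitPattern: typed) % UInt(MemoryLayout<UInt64>.alignment) == 0)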
@@ -2,7 +2,7 @@
 //
 // This source file is part of the SwiftNIO open source project
 //
-// Copyright (c) 2017-2022 Apple Inc. and the SwiftNIO project authors
+// Copyright (c) 2017-2023 Apple Inc. and the SwiftNIO project authors
 // Licensed under Apache License v2.0
 //
 // See LICENSE.txt for license information
@@ -60,6 +60,7 @@ extension NIOConcurrencyHelpersTests {
             ("testLoadAndCASHammering", testLoadAndCASHammering),
             ("testMultipleLoadsRacingWhilstStoresAreGoingOn", testMultipleLoadsRacingWhilstStoresAreGoingOn),
             ("testNIOLockedValueBox", testNIOLockedValueBox),
+            ("testNIOLockedValueBoxHandlesThingsWithTransitiveClassesProperly", testNIOLockedValueBoxHandlesThingsWithTransitiveClassesProperly),
         ]
     }
 }
@@ -1034,6 +1034,23 @@ class NIOConcurrencyHelpersTests: XCTestCase {
 
         XCTAssertEqual(50_000, lv.withLockedValue { $0.count })
     }
+
+    func testNIOLockedValueBoxHandlesThingsWithTransitiveClassesProperly() {
+        struct State {
+            var counts: [Int] = []
+        }
+
+        let lv = NIOLockedValueBox<State>(State())
+        spawnAndJoinRacingThreads(count: 50) { _ in
+            for i in 0..<1000 {
+                lv.withLockedValue { state in
+                    state.counts.append(i)
+                }
+            }
+        }
+
+        XCTAssertEqual(50_000, lv.withLockedValue { $0.counts.count })
+    }
 }
 
 func spawnAndJoinRacingThreads(count: Int, _ body: @escaping (Int) -> Void) {