Improve the performance of copying BBV (#2039)
Motivation:

When turning a BBV into a Collection, several of the collection algorithms can do better if you implement the semi-private `_copyContents` hook. We can easily implement it, so we should. While I'm here, we should also clean up some other bits.

Modifications:

- Implement `_copyContents`.
- Implement a faster `.count` for `ByteBufferView`.
- Make `ByteBuffer.capacity` `@inlinable` to reduce ARC traffic.

Result:

Copying a BBV into an Array is faster.
This commit is contained in:
parent a4ad5eb54d
commit 4ad4c11526
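Not part of the commit itself: a minimal usage sketch of the path this change speeds up, assuming a SwiftNIO version where `ByteBuffer` lives in the `NIOCore` module. Turning a buffer's readable bytes into an `Array` goes through the standard `Collection` copy machinery, which is exactly where `_copyContents` and the cheaper `count` take effect.

    import NIOCore

    var buffer = ByteBufferAllocator().buffer(capacity: 64)
    buffer.writeString("hello, world")

    // Array(_:) copies the view's bytes; with _copyContents implemented this
    // becomes one contiguous bulk copy instead of a byte-by-byte iterator loop.
    let bytes = Array(buffer.readableBytesView)
    print(bytes.count)  // 12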
@@ -35,13 +35,15 @@ extension _ByteBufferSlice: Equatable {}
 /// fits within 24 bits, otherwise the behaviour is undefined.
 @usableFromInline
 struct _ByteBufferSlice {
-    @usableFromInline var upperBound: ByteBuffer._Index
-    @usableFromInline var _begin: _UInt24
+    @usableFromInline private(set) var upperBound: ByteBuffer._Index
+    @usableFromInline private(set) var _begin: _UInt24
     @inlinable var lowerBound: ByteBuffer._Index {
         return UInt32(self._begin)
     }
     @inlinable var count: Int {
-        return Int(self.upperBound - self.lowerBound)
+        // Safe: the only constructors that set this enforce that upperBound > lowerBound, so
+        // this cannot underflow.
+        return Int(self.upperBound &- self.lowerBound)
     }
     init() {
         self._begin = .init(0)
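For readers less familiar with Swift's overflow operators, here is a small standalone sketch (not part of the diff) of what the swap from `-` to `&-` buys: the checked operator has to emit a trap-on-underflow branch, while the wrapping operator compiles to a plain subtraction, which is safe here because the slice's invariant already guarantees `upperBound >= lowerBound`.

    let upperBound: UInt32 = 4096
    let lowerBound: UInt32 = 1024

    // `-` traps if the result would underflow, so the compiler emits a check.
    let checked = upperBound - lowerBound
    // `&-` wraps modulo 2^32 and never traps; with the invariant above it
    // always yields the same value as the checked form.
    let wrapping = upperBound &- lowerBound
    print(checked, wrapping)  // 3072 3072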
@@ -453,6 +455,7 @@ public struct ByteBuffer {
 
     /// The current capacity of the storage of this `ByteBuffer`, this is not constant and does _not_ signify the number
     /// of bytes that have been written to this `ByteBuffer`.
+    @inlinable
     public var capacity: Int {
         return self._slice.count
     }
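A sketch (with a hypothetical `remainingCapacity` helper, not part of the commit) of why `@inlinable` matters for a trivial getter like `capacity`: it lets callers in other modules inline the getter's body instead of making an opaque cross-module call, which is what allows the compiler to drop the retain/release traffic the commit message refers to.

    import NIOCore

    // Hypothetical cross-module caller. With ByteBuffer.capacity marked
    // @inlinable, `return self._slice.count` can be inlined here, so the
    // compiler no longer treats the access as an opaque call that may need
    // to retain/release the buffer's storage around it.
    func remainingCapacity(of buffer: ByteBuffer) -> Int {
        return buffer.capacity - buffer.writerIndex
    }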
@@ -60,6 +60,13 @@ public struct ByteBufferView: RandomAccessCollection {
         return i + 1
     }
 
+    @inlinable
+    public var count: Int {
+        // Unchecked is safe here: Range enforces that upperBound is strictly greater than
+        // lower bound, and we guarantee that _range.lowerBound >= 0.
+        return self._range.upperBound &- self._range.lowerBound
+    }
+
     @inlinable
     public subscript(position: Index) -> UInt8 {
         get {
@@ -106,6 +113,23 @@ public struct ByteBufferView: RandomAccessCollection {
             return ptr.lastIndex(of: element).map { $0 + self._range.lowerBound }
         })
     }
+
+    @inlinable
+    public func _copyContents(
+        initializing ptr: UnsafeMutableBufferPointer<UInt8>
+    ) -> (Iterator, UnsafeMutableBufferPointer<UInt8>.Index) {
+        precondition(ptr.count >= self.count)
+
+        let bytesToWrite = self.count
+
+        let endIndex = self.withContiguousStorageIfAvailable { ourBytes in
+            ptr.initialize(from: ourBytes).1
+        }
+        precondition(endIndex == bytesToWrite)
+
+        let iterator = self[self.endIndex..<self.endIndex].makeIterator()
+        return (iterator, bytesToWrite)
+    }
 }
 
 extension ByteBufferView: MutableCollection {}
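Not part of the diff: a sketch of the contract the new `_copyContents` has to honour, exercised by calling the (public but underscored) hook directly. Normally `Array.init` and similar standard-library paths call it for you; the point here is simply that the destination must have room for at least `count` elements, and that the return value reports the resume iterator plus how many elements were initialized.

    import NIOCore

    var buffer = ByteBufferAllocator().buffer(capacity: 16)
    buffer.writeBytes([1, 2, 3, 4])
    let view = buffer.readableBytesView

    let destination = UnsafeMutableBufferPointer<UInt8>.allocate(capacity: view.count)
    defer { destination.deallocate() }

    let result = view._copyContents(initializing: destination)
    var resumeIterator = result.0
    precondition(result.1 == view.count)        // every byte was copied
    precondition(resumeIterator.next() == nil)  // nothing left to iterate
    print(Array(destination))                   // [1, 2, 3, 4]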