Compare commits

...

1 Commit

Author: shogo4405
SHA1: 987d77eca6
Message: Support canPerformMultiPass options.
Date: 2020-03-07 19:38:05 +09:00
10 changed files with 265 additions and 49 deletions

View File

@@ -58,7 +58,8 @@ final class LiveViewController: UIViewController {
]
rtmpStream.videoSettings = [
.width: 720,
.height: 1280
.height: 1280,
.multiPassCount: 2
]
rtmpStream.mixer.recorder.delegate = ExampleRecorderDelegate.shared
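
The example change above is all an application needs to opt in: setting .multiPassCount to a value greater than 1 switches the encoder into multi-pass mode, while 0 or 1 keeps the existing single-pass behaviour. For reference, the full assignment reads:

rtmpStream.videoSettings = [
    .width: 720,
    .height: 1280,
    .multiPassCount: 2   // values greater than 1 enable multi-pass encoding
]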

View File

@@ -353,6 +353,15 @@
29FD1B5222FF13190095A0BE /* VTSessionPropertyKey.swift in Sources */ = {isa = PBXBuildFile; fileRef = 29FD1B4F22FF13190095A0BE /* VTSessionPropertyKey.swift */; };
BC3FA38B2413AEDA009C83D3 /* AVFoundation+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = 292F6DB01EEBB2040097EDBE /* AVFoundation+Extension.swift */; };
BC3FA38C2413AEDA009C83D3 /* AVFoundation+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = 292F6DB01EEBB2040097EDBE /* AVFoundation+Extension.swift */; };
BC0E12B22403E3A10091CAE3 /* VTFrameSilo+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC0E12B12403E3A10091CAE3 /* VTFrameSilo+Extension.swift */; };
BC0E12B32403E3A10091CAE3 /* VTFrameSilo+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC0E12B12403E3A10091CAE3 /* VTFrameSilo+Extension.swift */; };
BC0E12B42403E3A10091CAE3 /* VTFrameSilo+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC0E12B12403E3A10091CAE3 /* VTFrameSilo+Extension.swift */; };
BC0E12BA2403F0060091CAE3 /* OSError.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC0E12B92403F0060091CAE3 /* OSError.swift */; };
BC0E12BB2403F0060091CAE3 /* OSError.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC0E12B92403F0060091CAE3 /* OSError.swift */; };
BC0E12BC2403F0060091CAE3 /* OSError.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC0E12B92403F0060091CAE3 /* OSError.swift */; };
BC0E12BE2403FE1D0091CAE3 /* VTMultiPassStorage+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC0E12BD2403FE1D0091CAE3 /* VTMultiPassStorage+Extension.swift */; };
BC0E12BF2403FE1D0091CAE3 /* VTMultiPassStorage+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC0E12BD2403FE1D0091CAE3 /* VTMultiPassStorage+Extension.swift */; };
BC0E12C02403FE1D0091CAE3 /* VTMultiPassStorage+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC0E12BD2403FE1D0091CAE3 /* VTMultiPassStorage+Extension.swift */; };
BC44A1A923D31E92002D4297 /* AudioConverter.AudioBuffer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC44A1A823D31E92002D4297 /* AudioConverter.AudioBuffer.swift */; };
BC44A1AA23D31E92002D4297 /* AudioConverter.AudioBuffer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC44A1A823D31E92002D4297 /* AudioConverter.AudioBuffer.swift */; };
BC44A1AB23D31E92002D4297 /* AudioConverter.AudioBuffer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC44A1A823D31E92002D4297 /* AudioConverter.AudioBuffer.swift */; };
@@ -656,6 +665,9 @@
29F6F4841DFB83E200920A3A /* RTMPHandshake.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = RTMPHandshake.swift; sourceTree = "<group>"; };
29F97F232336A4FA00A4C317 /* SettingTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SettingTests.swift; sourceTree = "<group>"; };
29FD1B4F22FF13190095A0BE /* VTSessionPropertyKey.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VTSessionPropertyKey.swift; sourceTree = "<group>"; };
BC0E12B12403E3A10091CAE3 /* VTFrameSilo+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "VTFrameSilo+Extension.swift"; sourceTree = "<group>"; };
BC0E12B92403F0060091CAE3 /* OSError.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OSError.swift; sourceTree = "<group>"; };
BC0E12BD2403FE1D0091CAE3 /* VTMultiPassStorage+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "VTMultiPassStorage+Extension.swift"; sourceTree = "<group>"; };
BC44A1A823D31E92002D4297 /* AudioConverter.AudioBuffer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioConverter.AudioBuffer.swift; sourceTree = "<group>"; wrapsLines = 1; };
BC4C9EAE23F2E736004A14F2 /* AudioStreamBasicDescription+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AudioStreamBasicDescription+Extension.swift"; sourceTree = "<group>"; };
BC558267240BB40E00011AC0 /* RTMPStreamInfo.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RTMPStreamInfo.swift; sourceTree = "<group>"; };
@@ -752,6 +764,7 @@
29B876571CD70A7900FC07DA /* AudioConverter.swift */,
29B876581CD70A7900FC07DA /* H264Decoder.swift */,
29B876591CD70A7900FC07DA /* H264Encoder.swift */,
BC0E12B92403F0060091CAE3 /* OSError.swift */,
29FD1B4F22FF13190095A0BE /* VTSessionPropertyKey.swift */,
);
path = Codec;
@@ -1162,6 +1175,8 @@
29EA87D41E799F670043A5F8 /* Mirror+Extension.swift */,
29EA87D71E79A0090043A5F8 /* URL+Extension.swift */,
BC83A4722403D83B006BDE06 /* VTCompressionSession+Extension.swift */,
BC0E12B12403E3A10091CAE3 /* VTFrameSilo+Extension.swift */,
BC0E12BD2403FE1D0091CAE3 /* VTMultiPassStorage+Extension.swift */,
);
path = Extension;
sourceTree = "<group>";
@@ -1620,10 +1635,12 @@
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
BC0E12BE2403FE1D0091CAE3 /* VTMultiPassStorage+Extension.swift in Sources */,
295891161EEB8DFC00CE51E1 /* FLVTagType.swift in Sources */,
29B876B11CD70B2800FC07DA /* RTMPMessage.swift in Sources */,
2941746B22D069B300A2944F /* AudioEffect.swift in Sources */,
295891011EEB7A8B00CE51E1 /* ScalingMode.swift in Sources */,
BC0E12B22403E3A10091CAE3 /* VTFrameSilo+Extension.swift in Sources */,
BC83A4732403D83B006BDE06 /* VTCompressionSession+Extension.swift in Sources */,
299B131D1D35272D00A1E8F5 /* ScreenCaptureSession.swift in Sources */,
2943ED53232FCA7C00ED6301 /* Setting.swift in Sources */,
@@ -1700,6 +1717,7 @@
29B876881CD70AE800FC07DA /* TransportStream.swift in Sources */,
29B876BE1CD70B3900FC07DA /* EventDispatcher.swift in Sources */,
29B8769D1CD70B1100FC07DA /* NetService.swift in Sources */,
BC0E12BA2403F0060091CAE3 /* OSError.swift in Sources */,
29B8769E1CD70B1100FC07DA /* NetSocket.swift in Sources */,
2958911A1EEB8E3F00CE51E1 /* FLVAudioCodec.swift in Sources */,
295891261EEB8EF300CE51E1 /* FLVAACPacket.swift in Sources */,
@@ -1802,6 +1820,7 @@
295891131EEB8D7200CE51E1 /* FLVFrameType.swift in Sources */,
29B877011CD70D5A00FC07DA /* ProgramSpecific.swift in Sources */,
295891271EEB8EF300CE51E1 /* FLVAACPacket.swift in Sources */,
BC0E12BB2403F0060091CAE3 /* OSError.swift in Sources */,
299D6A512051A9720090E10A /* MTHKView-macOS.swift in Sources */,
29B877021CD70D5A00FC07DA /* TransportStream.swift in Sources */,
296242641D8DBA9000C451A3 /* TSWriter.swift in Sources */,
@@ -1819,6 +1838,7 @@
29B877091CD70D5A00FC07DA /* VideoEffect.swift in Sources */,
29B8770A1CD70D5A00FC07DA /* MIME.swift in Sources */,
29B8770B1CD70D5A00FC07DA /* NetClient.swift in Sources */,
BC0E12BF2403FE1D0091CAE3 /* VTMultiPassStorage+Extension.swift in Sources */,
29EA87EE1E79A3E30043A5F8 /* CVPixelBuffer+Extension.swift in Sources */,
29B8770C1CD70D5A00FC07DA /* NetService.swift in Sources */,
2958911B1EEB8E3F00CE51E1 /* FLVAudioCodec.swift in Sources */,
@@ -1855,6 +1875,7 @@
2926A9F21DE6F08E0074E3D2 /* TimerDriver.swift in Sources */,
2958912B1EEB8F1D00CE51E1 /* FLVSoundSize.swift in Sources */,
29B8771D1CD70D5A00FC07DA /* EventDispatcher.swift in Sources */,
BC0E12B32403E3A10091CAE3 /* VTFrameSilo+Extension.swift in Sources */,
2901A4EF1D437662002BBD23 /* DisplayLinkedQueue.swift in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
@@ -1908,6 +1929,8 @@
29EB3DF01ED05768001CAE8B /* H264Encoder.swift in Sources */,
29EB3E351ED05A33001CAE8B /* DeviceUtil.swift in Sources */,
29DC17B521D0CC0600E26CED /* Atomic.swift in Sources */,
BC0E12C02403FE1D0091CAE3 /* VTMultiPassStorage+Extension.swift in Sources */,
BC0E12B42403E3A10091CAE3 /* VTFrameSilo+Extension.swift in Sources */,
BC44A1AB23D31E92002D4297 /* AudioConverter.AudioBuffer.swift in Sources */,
29EB3E261ED05A07001CAE8B /* RTMPStream.swift in Sources */,
29DF20642312A3DD004057C3 /* RTMPNWSocket.swift in Sources */,
@@ -1923,6 +1946,7 @@
29EB3E221ED059FD001CAE8B /* RTMPMessage.swift in Sources */,
29EB3E371ED05A38001CAE8B /* MachUtil.swift in Sources */,
29EB3E001ED05854001CAE8B /* HTTPService.swift in Sources */,
BC0E12BC2403F0060091CAE3 /* OSError.swift in Sources */,
29EB3DFF1ED05852001CAE8B /* HTTPResponse.swift in Sources */,
29EB3E021ED05858001CAE8B /* M3U.swift in Sources */,
29EB3E0D1ED05877001CAE8B /* AudioIOComponent.swift in Sources */,

View File

@@ -62,11 +62,17 @@ extension VideoIOComponent: ScreenCaptureOutputPixelBufferDelegate {
}
context?.render(effect(pixelBuffer, info: nil), to: pixelBuffer)
}
encoder.encodeImageBuffer(
pixelBuffer,
presentationTimeStamp: withPresentationTime,
duration: CMTime.invalid
)
do {
try encoder.encodeImageBuffer(
pixelBuffer,
presentationTimeStamp: withPresentationTime,
duration: CMTime.invalid
)
} catch {
logger.error(error)
}
mixer?.recorder.appendPixelBuffer(pixelBuffer, withPresentationTime: withPresentationTime)
}
}

View File

@@ -24,6 +24,7 @@ public final class H264Encoder {
#endif
case maxKeyFrameIntervalDuration
case scalingMode
case multiPassCount
public var keyPath: AnyKeyPath {
switch self {
@@ -45,10 +46,18 @@ public final class H264Encoder {
return \H264Encoder.scalingMode
case .profileLevel:
return \H264Encoder.profileLevel
case .multiPassCount:
return \H264Encoder.multiPassCount
}
}
}
private struct ImageBuffer {
let image: CVImageBuffer
let presentationTimeStamp: CMTime
let duration: CMTime
}
public static let defaultWidth: Int32 = 480
public static let defaultHeight: Int32 = 272
public static let defaultBitrate: UInt32 = 160 * 1024
@@ -82,7 +91,6 @@ public final class H264Encoder {
invalidateSession = true
}
}
var width: Int32 = H264Encoder.defaultWidth {
didSet {
guard width != oldValue else {
@@ -114,7 +122,7 @@ public final class H264Encoder {
guard bitrate != oldValue else {
return
}
setProperty(kVTCompressionPropertyKey_AverageBitRate, Int(bitrate) as CFTypeRef)
try? session?.setProperty(.averageBitRate, value: Int(bitrate) as CFTypeRef)
}
}
var profileLevel: String = kVTProfileLevel_H264_Baseline_3_1 as String {
@@ -140,7 +148,7 @@ public final class H264Encoder {
guard expectedFPS != oldValue else {
return
}
setProperty(kVTCompressionPropertyKey_ExpectedFrameRate, NSNumber(value: expectedFPS))
try? session?.setProperty(.expectedFrameRate, value: NSNumber(value: expectedFPS))
}
}
var formatDescription: CMFormatDescription? {
@@ -151,16 +159,34 @@ public final class H264Encoder {
delegate?.didSetFormatDescription(video: formatDescription)
}
}
var multiPassCount: Int = 1 {
didSet {
guard multiPassCount != oldValue else {
return
}
invalidateSession = true
}
}
weak var delegate: VideoEncoderDelegate?
private(set) var status: OSStatus = noErr
private var frameSilo: VTFrameSilo?
private var multiPassStorage: VTMultiPassStorage? {
didSet {
try? oldValue?.close()
}
}
private var multiPassBuffers: [ImageBuffer] = []
private var multiPassDuration: Double = 0.2
private var canperformMultiPass: Bool {
1 < multiPassCount
}
private var attributes: [NSString: AnyObject] {
var attributes: [NSString: AnyObject] = H264Encoder.defaultAttributes
attributes[kCVPixelBufferWidthKey] = NSNumber(value: width)
attributes[kCVPixelBufferHeightKey] = NSNumber(value: height)
return attributes
}
private var invalidateSession: Bool = true
private var invalidateSession = true
private var lastImageBuffer: CVImageBuffer?
// @see: https://developer.apple.com/library/mac/releasenotes/General/APIDiffsMacOSX10_8/VideoToolbox.html
@@ -207,7 +233,11 @@ public final class H264Encoder {
}
let encoder: H264Encoder = Unmanaged<H264Encoder>.fromOpaque(refcon).takeUnretainedValue()
encoder.formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer)
encoder.delegate?.sampleOutput(video: sampleBuffer)
if encoder.canperformMultiPass {
encoder.performMultiPass(sampleBuffer)
} else {
encoder.delegate?.sampleOutput(video: sampleBuffer)
}
}
private var _session: VTCompressionSession?
@@ -230,10 +260,19 @@ public final class H264Encoder {
return nil
}
invalidateSession = false
status = session.setProperties(properties)
status = session.prepareToEncodeFrame()
guard status == noErr else {
logger.error("setup failed VTCompressionSessionPrepareToEncodeFrames. Size = \(width)x\(height)")
do {
try session.setProperties(properties)
if canperformMultiPass {
VTMultiPassStorageCreate(allocator: kCFAllocatorDefault, fileURL: nil, timeRange: .invalid, options: nil, multiPassStorageOut: &multiPassStorage)
try session.setProperty(.multiPassStorage, value: multiPassStorage)
try session.setProperty(.realTime, value: kCFBooleanFalse)
try session.beginPass()
VTFrameSiloCreate(allocator: kCFAllocatorDefault, fileURL: nil, timeRange: .invalid, options: nil, frameSiloOut: &frameSilo)
} else {
try session.prepareToEncodeFrame()
}
} catch {
logger.error(error)
return nil
}
}
@@ -249,44 +288,69 @@ public final class H264Encoder {
settings.observer = self
}
func encodeImageBuffer(_ imageBuffer: CVImageBuffer, presentationTimeStamp: CMTime, duration: CMTime) {
func encodeImageBuffer(_ imageBuffer: CVImageBuffer, presentationTimeStamp: CMTime, duration: CMTime) throws {
guard isRunning.value && locked == 0 else {
return
}
if invalidateSession {
session = nil
}
guard let session: VTCompressionSession = session else {
guard let session = session else {
return
}
var flags: VTEncodeInfoFlags = []
VTCompressionSessionEncodeFrame(
session,
imageBuffer: muted ? lastImageBuffer ?? imageBuffer : imageBuffer,
presentationTimeStamp: presentationTimeStamp,
duration: duration,
frameProperties: nil,
sourceFrameRefcon: nil,
infoFlagsOut: &flags
)
let currentImageBuffer = muted ? lastImageBuffer ?? imageBuffer : imageBuffer
if canperformMultiPass {
multiPassBuffers.append(ImageBuffer(image: currentImageBuffer, presentationTimeStamp: presentationTimeStamp, duration: duration))
try multiPassEndOfRoundIfNeeded(session)
}
try session.encodeFrame(currentImageBuffer, presentaionTimeStamp: presentationTimeStamp, duration: duration)
if !muted || lastImageBuffer == nil {
lastImageBuffer = imageBuffer
}
}
private func setProperty(_ key: CFString, _ value: CFTypeRef?) {
lockQueue.async {
guard let session: VTCompressionSession = self._session else {
return
}
self.status = VTSessionSetProperty(
session,
key: key,
value: value
)
func performMultiPass(_ sampleBuffer: CMSampleBuffer) {
do {
try frameSilo?.addSampleBuffer(sampleBuffer)
} catch {
logger.error(error)
}
}
private func multiPassEndOfRoundIfNeeded(_ session: VTCompressionSession) throws {
let timeRange = makeTimeRange()
guard multiPassDuration < timeRange.duration.seconds else {
return
}
print(try session.endPass())
print(try session.timeRangeForNextPass())
for i in 0...multiPassCount {
print(i, ":", multiPassCount)
try session.beginPass(i == multiPassCount ? .beginFinalPass : .init(rawValue: 0))
for buffer in multiPassBuffers {
try? session.encodeFrame(buffer.image, presentaionTimeStamp: buffer.presentationTimeStamp, duration: buffer.duration)
}
guard (try session.endPass()).boolValue else {
break
}
try session.timeRangeForNextPass()
}
multiPassBuffers.removeAll()
try frameSilo?.forEachSampleBuffer(timeRange) { [weak self] sampleBuffer -> OSStatus in
self?.delegate?.sampleOutput(video: sampleBuffer)
return noErr
}
try session.beginPass()
}
private func makeTimeRange() -> CMTimeRange {
guard let first = multiPassBuffers.first, let last = multiPassBuffers.last else {
return .invalid
}
return CMTimeRange(start: first.presentationTimeStamp, end: last.presentationTimeStamp)
}
#if os(iOS)
@objc
private func applicationWillEnterForeground(_ notification: Notification) {
@@ -338,6 +402,9 @@ extension H264Encoder: Running {
self.session = nil
self.lastImageBuffer = nil
self.formatDescription = nil
self.frameSilo = nil
self.multiPassStorage = nil
self.multiPassBuffers.removeAll()
#if os(iOS)
NotificationCenter.default.removeObserver(self)
#endif
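
Taken together, the H264Encoder changes above work as follows when more than one pass is requested: incoming frames are buffered for roughly multiPassDuration seconds, the current pass is ended, the buffered frames are re-encoded for up to multiPassCount passes (the last one opened with .beginFinalPass), and the compressed samples accumulated in the VTFrameSilo are finally replayed to the delegate before a new pass begins. A minimal sketch of the two decision rules this flow hinges on, with names mirroring the diff (the standalone form is illustrative, not part of the commit):

import CoreMedia

let multiPassCount = 2        // from videoSettings[.multiPassCount]
let multiPassDuration = 0.2   // seconds of frames buffered per round

// Multi-pass is active only when more than one pass is requested
// (mirrors the canperformMultiPass computed property).
var canPerformMultiPass: Bool { 1 < multiPassCount }

// A round is flushed once the buffered frames span more than multiPassDuration
// (mirrors the guard at the top of multiPassEndOfRoundIfNeeded).
func shouldEndRound(_ buffered: CMTimeRange) -> Bool {
    multiPassDuration < buffered.duration.seconds
}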

View File

@@ -0,0 +1,5 @@
import Foundation
enum OSError: Swift.Error {
case invoke(function: String, status: OSStatus)
}
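
OSError is the error type thrown by the VideoToolbox wrappers added later in this diff whenever a call returns a non-zero OSStatus. As an illustration of the pattern, a hypothetical helper (not part of the commit) could centralize the check:

import Foundation

// Hypothetical convenience: fail with the wrapped status and the calling function's name.
func checkOSStatus(_ status: OSStatus, function: String = #function) throws {
    guard status == noErr else {
        throw OSError.invoke(function: function, status: status)
    }
}

// Usage sketch, assuming a valid VTCompressionSession `session` is in scope:
// try checkOSStatus(VTCompressionSessionPrepareToEncodeFrames(session))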

View File

@@ -153,16 +153,66 @@ enum VTCompressionSessionPropertyKey: VTSessionPropertyKey {
}
extension VTCompressionSession {
func setProperty(_ key: VTCompressionSessionPropertyKey, value: CFTypeRef?) -> OSStatus {
VTSessionSetProperty(self, key: key.CFString, value: value)
func encodeFrame(_ imageBuffer: CVImageBuffer, presentaionTimeStamp: CMTime, duration: CMTime, frameProperties: CFDictionary? = nil, sourceFrameRefcon: UnsafeMutableRawPointer? = nil, infoFlagsOut: UnsafeMutablePointer<VTEncodeInfoFlags>? = nil) throws {
let status = VTCompressionSessionEncodeFrame(
self,
imageBuffer: imageBuffer,
presentationTimeStamp: presentaionTimeStamp,
duration: duration,
frameProperties: frameProperties,
sourceFrameRefcon: sourceFrameRefcon,
infoFlagsOut: infoFlagsOut
)
guard status == noErr else {
throw OSError.invoke(function: #function, status: status)
}
}
func setProperties(_ propertyDictionary: [NSString: NSObject]) -> OSStatus {
VTSessionSetProperties(self, propertyDictionary: propertyDictionary as CFDictionary)
func timeRangeForNextPass() throws -> CMTimeRange {
var itemCount: CMItemCount = 0
var timeRange: UnsafePointer<CMTimeRange>?
let status = VTCompressionSessionGetTimeRangesForNextPass(self, timeRangeCountOut: &itemCount, timeRangeArrayOut: &timeRange)
guard status == noErr else {
throw OSError.invoke(function: #function, status: status)
}
return timeRange?.pointee ?? .invalid
}
func prepareToEncodeFrame() -> OSStatus {
VTCompressionSessionPrepareToEncodeFrames(self)
func setProperty(_ key: VTCompressionSessionPropertyKey, value: CFTypeRef?) throws {
let status = VTSessionSetProperty(self, key: key.CFString, value: value)
guard status == noErr else {
throw OSError.invoke(function: #function, status: status)
}
}
func setProperties(_ propertyDictionary: [NSString: NSObject]) throws {
let status = VTSessionSetProperties(self, propertyDictionary: propertyDictionary as CFDictionary)
guard status == noErr else {
throw OSError.invoke(function: #function, status: status)
}
}
func prepareToEncodeFrame() throws {
let status = VTCompressionSessionPrepareToEncodeFrames(self)
guard status == noErr else {
throw OSError.invoke(function: #function, status: status)
}
}
func beginPass(_ flags: VTCompressionSessionOptionFlags = .init(rawValue: 0)) throws {
let status = VTCompressionSessionBeginPass(self, flags: flags, nil)
guard status == noErr else {
throw OSError.invoke(function: #function, status: status)
}
}
func endPass() throws -> DarwinBoolean {
var furtherPassesRequestedOut: DarwinBoolean = false
let status = VTCompressionSessionEndPass(self, furtherPassesRequestedOut: &furtherPassesRequestedOut, nil)
guard status == noErr else {
throw OSError.invoke(function: #function, status: status)
}
return furtherPassesRequestedOut
}
func invalidate() {
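
With these wrappers, callers handle failures through do/catch instead of checking each OSStatus return value. A minimal usage sketch (the key and helper names come from this diff; the surrounding setup and the bitrate value are assumed):

import VideoToolbox

// Sketch: configure an already-created compression session with the throwing helpers above.
func configure(_ session: VTCompressionSession, bitrate: Int) {
    do {
        try session.setProperty(.averageBitRate, value: bitrate as CFTypeRef)
        try session.prepareToEncodeFrame()
    } catch {
        print("session configuration failed:", error)
    }
}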

View File

@@ -0,0 +1,18 @@
import Foundation
import VideoToolbox
extension VTFrameSilo {
func addSampleBuffer(_ sampleBuffer: CMSampleBuffer) throws {
let status = VTFrameSiloAddSampleBuffer(self, sampleBuffer: sampleBuffer)
guard status == noErr else {
throw OSError.invoke(function: #function, status: status)
}
}
func forEachSampleBuffer(_ range: CMTimeRange, handler: (CMSampleBuffer) -> OSStatus) throws {
let status = VTFrameSiloCallBlockForEachSampleBuffer(self, in: range, handler: handler)
guard status == noErr else {
throw OSError.invoke(function: #function, status: status)
}
}
}
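
A sketch of how H264Encoder uses these helpers during multi-pass encoding: each compressed sample produced by the passes is stored with addSampleBuffer, and once the final pass ends the finished round is replayed over its time range (the emit closure below stands in for the delegate callback; the function itself is illustrative):

import CoreMedia
import VideoToolbox

// Replay every sample the silo collected for the given time range.
func replay(_ silo: VTFrameSilo, over range: CMTimeRange, emit: (CMSampleBuffer) -> Void) {
    do {
        try silo.forEachSampleBuffer(range) { sampleBuffer in
            emit(sampleBuffer)
            return noErr
        }
    } catch {
        print("silo replay failed:", error)
    }
}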

View File

@@ -0,0 +1,10 @@
import VideoToolbox
extension VTMultiPassStorage {
func close() throws {
let status = VTMultiPassStorageClose(self)
guard status == noErr else {
throw OSError.invoke(function: #function, status: status)
}
}
}

View File

@@ -439,11 +439,15 @@ extension VideoIOComponent {
renderer?.render(image: image)
}
encoder.encodeImageBuffer(
imageBuffer ?? buffer,
presentationTimeStamp: sampleBuffer.presentationTimeStamp,
duration: sampleBuffer.duration
)
do {
try encoder.encodeImageBuffer(
imageBuffer ?? buffer,
presentationTimeStamp: sampleBuffer.presentationTimeStamp,
duration: sampleBuffer.duration
)
} catch {
logger.error(error)
}
mixer?.recorder.appendPixelBuffer(imageBuffer ?? buffer, withPresentationTime: sampleBuffer.presentationTimeStamp)
}

View File

@@ -46,6 +46,10 @@ public class Setting<T: AnyObject, Key: KeyPathRepresentable>: ExpressibleByDict
if let newValue = toDouble(value: newValue) {
observer?[keyPath: path] = newValue
}
case let path as ReferenceWritableKeyPath<T, Int>:
if let newValue = toInt(value: newValue) {
observer?[keyPath: path] = newValue
}
case let path as ReferenceWritableKeyPath<T, String>:
if let newValue = newValue as? String {
observer?[keyPath: path] = newValue
@@ -146,6 +150,33 @@ public class Setting<T: AnyObject, Key: KeyPathRepresentable>: ExpressibleByDict
return nil
}
}
private func toInt(value: Any?) -> Int? {
switch value {
case let value as Int:
return value
case let value as Int8:
return numericCast(value)
case let value as Int16:
return numericCast(value)
case let value as Int32:
return numericCast(value)
case let value as Int64:
return numericCast(value)
case let value as UInt:
return numericCast(value)
case let value as UInt8:
return numericCast(value)
case let value as UInt16:
return numericCast(value)
case let value as UInt32:
return numericCast(value)
case let value as UInt64:
return numericCast(value)
default:
return nil
}
}
}
extension Setting: CustomDebugStringConvertible {
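
The new ReferenceWritableKeyPath<T, Int> branch together with toInt(value:) is what lets the .multiPassCount entry in videoSettings reach H264Encoder.multiPassCount no matter which integer type the caller supplies. An illustrative example (value and call site assumed, based on the LiveViewController change above):

rtmpStream.videoSettings[.multiPassCount] = UInt8(2)   // normalized to Int(2) via toInt(value:)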