add Setting, SettingObserver

commit c24bfb54a4 (parent 16633344f7)
@@ -51,17 +51,17 @@ final class LiveViewController: UIViewController {
         rtmpStream = RTMPStream(connection: rtmpConnection)
         rtmpStream.syncOrientation = true
         rtmpStream.captureSettings = [
-            "sessionPreset": AVCaptureSession.Preset.hd1280x720.rawValue,
-            "continuousAutofocus": true,
-            "continuousExposure": true,
-            "preferredVideoStabilizationMode": AVCaptureVideoStabilizationMode.auto.rawValue
+            .sessionPreset: AVCaptureSession.Preset.hd1280x720,
+            .continuousAutofocus: true,
+            .continuousExposure: true,
+            .preferredVideoStabilizationMode: AVCaptureVideoStabilizationMode.auto
         ]
         rtmpStream.videoSettings = [
-            "width": 720,
-            "height": 1280
+            .width: 720,
+            .height: 1280
         ]
         rtmpStream.audioSettings = [
-            "sampleRate": sampleRate
+            .sampleRate: sampleRate
         ]
         rtmpStream.mixer.recorder.delegate = ExampleRecorderDelegate.shared

@@ -106,11 +106,11 @@ final class LiveViewController: UIViewController {
     @IBAction func on(slider: UISlider) {
         if slider == audioBitrateSlider {
             audioBitrateLabel?.text = "audio \(Int(slider.value))/kbps"
-            rtmpStream.audioSettings["bitrate"] = slider.value * 1024
+            rtmpStream.audioSettings[.bitrate] = slider.value * 1024
         }
         if slider == videoBitrateSlider {
             videoBitrateLabel?.text = "video \(Int(slider.value))/kbps"
-            rtmpStream.videoSettings["bitrate"] = slider.value * 1024
+            rtmpStream.videoSettings[.bitrate] = slider.value * 1024
         }
         if slider == zoomSlider {
             rtmpStream.setZoomFactor(CGFloat(slider.value), ramping: true, withRate: 5.0)
@@ -187,11 +187,11 @@ final class LiveViewController: UIViewController {
     @IBAction func onFPSValueChanged(_ segment: UISegmentedControl) {
        switch segment.selectedSegmentIndex {
        case 0:
-            rtmpStream.captureSettings["fps"] = 15.0
+            rtmpStream.captureSettings[.fps] = 15.0
        case 1:
-            rtmpStream.captureSettings["fps"] = 30.0
+            rtmpStream.captureSettings[.fps] = 30.0
        case 2:
-            rtmpStream.captureSettings["fps"] = 60.0
+            rtmpStream.captureSettings[.fps] = 60.0
        default:
            break
        }
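Note (not part of the commit): a minimal before/after sketch of the call sites above, assuming the same rtmpStream from the example app. The enum-keyed subscript rejects a misspelled key at compile time, where the old string keys failed silently at runtime.

    // Old, stringly-typed: a typo such as "birtate" compiled and was ignored.
    // rtmpStream.captureSettings["fps"] = 30.0
    // New, enum-keyed: only Option cases are accepted.
    rtmpStream.captureSettings[.fps] = 30.0
    rtmpStream.videoSettings[.bitrate] = 160 * 1024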
@@ -32,9 +32,9 @@ open class SampleHandler: RPBroadcastSampleHandler {
         if let description: CMVideoFormatDescription = CMSampleBufferGetFormatDescription(sampleBuffer) {
             let dimensions: CMVideoDimensions = CMVideoFormatDescriptionGetDimensions(description)
             broadcaster.stream.videoSettings = [
-                "width": dimensions.width,
-                "height": dimensions.height ,
-                "profileLevel": kVTProfileLevel_H264_Baseline_AutoLevel
+                .width: dimensions.width,
+                .height: dimensions.height,
+                .profileLevel: kVTProfileLevel_H264_Baseline_AutoLevel
             ]
         }
         broadcaster.appendSampleBuffer(sampleBuffer, withType: .video)
@@ -68,6 +68,9 @@
         2942A4FA21A9418A004E1BEE /* Running.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2942A4F721A9418A004E1BEE /* Running.swift */; };
         2942EF841DFF4D06008E620C /* HaishinKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 2945CBBD1B4BE66000104112 /* HaishinKit.framework */; };
         2942EF861DFF4D3C008E620C /* HaishinKit.framework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = 2945CBBD1B4BE66000104112 /* HaishinKit.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; };
+        2943ED53232FCA7C00ED6301 /* Setting.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2943ED52232FCA7C00ED6301 /* Setting.swift */; };
+        2943ED54232FCA7C00ED6301 /* Setting.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2943ED52232FCA7C00ED6301 /* Setting.swift */; };
+        2943ED55232FCA7C00ED6301 /* Setting.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2943ED52232FCA7C00ED6301 /* Setting.swift */; };
         294637A41EC8961C008EEC71 /* RTMPReaderTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 294637A31EC8961C008EEC71 /* RTMPReaderTests.swift */; };
         294637A81EC89BC9008EEC71 /* Config.swift in Sources */ = {isa = PBXBuildFile; fileRef = 294637A71EC89BC9008EEC71 /* Config.swift */; };
         294637AA1EC8A79F008EEC71 /* SampleVideo_360x240_5mb.flv in Resources */ = {isa = PBXBuildFile; fileRef = 294637A91EC8A79F008EEC71 /* SampleVideo_360x240_5mb.flv */; };
@@ -337,6 +340,7 @@
         29EF03781CD79A5400473D99 /* HaishinKit.h in Headers */ = {isa = PBXBuildFile; fileRef = 299F7E3B1CD71A97001E7272 /* HaishinKit.h */; settings = {ATTRIBUTES = (Public, ); }; };
         29F6F4851DFB83E200920A3A /* RTMPHandshake.swift in Sources */ = {isa = PBXBuildFile; fileRef = 29F6F4841DFB83E200920A3A /* RTMPHandshake.swift */; };
         29F6F4861DFB862400920A3A /* RTMPHandshake.swift in Sources */ = {isa = PBXBuildFile; fileRef = 29F6F4841DFB83E200920A3A /* RTMPHandshake.swift */; };
+        29F97F242336A4FA00A4C317 /* SettingTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 29F97F232336A4FA00A4C317 /* SettingTests.swift */; };
         29FD1B5022FF13190095A0BE /* VTSessionPropertyKey.swift in Sources */ = {isa = PBXBuildFile; fileRef = 29FD1B4F22FF13190095A0BE /* VTSessionPropertyKey.swift */; };
         29FD1B5122FF13190095A0BE /* VTSessionPropertyKey.swift in Sources */ = {isa = PBXBuildFile; fileRef = 29FD1B4F22FF13190095A0BE /* VTSessionPropertyKey.swift */; };
         29FD1B5222FF13190095A0BE /* VTSessionPropertyKey.swift in Sources */ = {isa = PBXBuildFile; fileRef = 29FD1B4F22FF13190095A0BE /* VTSessionPropertyKey.swift */; };
@@ -490,6 +494,7 @@
         2941746A22D069B300A2944F /* AudioEffect.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioEffect.swift; sourceTree = "<group>"; };
         2942424C1CF4C01300D65DCB /* MD5.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MD5.swift; sourceTree = "<group>"; };
         2942A4F721A9418A004E1BEE /* Running.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Running.swift; sourceTree = "<group>"; };
+        2943ED52232FCA7C00ED6301 /* Setting.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Setting.swift; sourceTree = "<group>"; };
         2945CBBD1B4BE66000104112 /* HaishinKit.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = HaishinKit.framework; sourceTree = BUILT_PRODUCTS_DIR; };
         294637A31EC8961C008EEC71 /* RTMPReaderTests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = RTMPReaderTests.swift; sourceTree = "<group>"; };
         294637A71EC89BC9008EEC71 /* Config.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Config.swift; sourceTree = "<group>"; };
@@ -623,6 +628,7 @@
         29EA87EC1E79A3E30043A5F8 /* CVPixelBuffer+Extension.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "CVPixelBuffer+Extension.swift"; sourceTree = "<group>"; };
         29F04FF21F3388B000172706 /* HaishinKit.podspec */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = HaishinKit.podspec; sourceTree = "<group>"; };
         29F6F4841DFB83E200920A3A /* RTMPHandshake.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = RTMPHandshake.swift; sourceTree = "<group>"; };
+        29F97F232336A4FA00A4C317 /* SettingTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SettingTests.swift; sourceTree = "<group>"; };
         29FD1B4F22FF13190095A0BE /* VTSessionPropertyKey.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VTSessionPropertyKey.swift; sourceTree = "<group>"; };
         29FD1B5322FF1C2D0095A0BE /* VTCompressionSessionPropertyKey.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VTCompressionSessionPropertyKey.swift; sourceTree = "<group>"; };
 /* End PBXFileReference section */
@@ -736,6 +742,7 @@
                 2926A9EB1DE6B71D0074E3D2 /* MachUtil.swift */,
                 2942424C1CF4C01300D65DCB /* MD5.swift */,
                 2942A4F721A9418A004E1BEE /* Running.swift */,
+                2943ED52232FCA7C00ED6301 /* Setting.swift */,
                 2926A9F01DE6EAEB0074E3D2 /* TimerDriver.swift */,
                 29245AEC1D3233EB00AFFB9A /* VideoGravityUtil.swift */,
             );
@@ -775,6 +782,7 @@
                 290EA8A51DFB61E700053022 /* CRC32Tests.swift */,
                 290EA8A61DFB61E700053022 /* EventDispatcherTests.swift */,
                 290EA8A71DFB61E700053022 /* MD5Tests.swift */,
+                29F97F232336A4FA00A4C317 /* SettingTests.swift */,
                 290EA8A81DFB61E700053022 /* TimerDriverTests.swift */,
             );
             path = Util;
@@ -1578,6 +1586,7 @@
                 2941746B22D069B300A2944F /* AudioEffect.swift in Sources */,
                 295891011EEB7A8B00CE51E1 /* ScalingMode.swift in Sources */,
                 299B131D1D35272D00A1E8F5 /* ScreenCaptureSession.swift in Sources */,
+                2943ED53232FCA7C00ED6301 /* Setting.swift in Sources */,
                 2915EC4D1D85BB8C00621092 /* RTMPTSocket.swift in Sources */,
                 2958910A1EEB8D1800CE51E1 /* FLVReader.swift in Sources */,
                 29C2631C1D0083B50098D4EF /* VideoIOComponent.swift in Sources */,
@@ -1685,6 +1694,7 @@
                 290EA8AB1DFB61E700053022 /* EventDispatcherTests.swift in Sources */,
                 290EA8901DFB616000053022 /* Foundation+ExtensionTests.swift in Sources */,
                 290EA8991DFB619600053022 /* PacketizedElementaryStreamTests.swift in Sources */,
+                29F97F242336A4FA00A4C317 /* SettingTests.swift in Sources */,
                 290EA8911DFB616000053022 /* SwiftCore+ExtensionTests.swift in Sources */,
                 290EA89A1DFB619600053022 /* ProgramSpecificTests.swift in Sources */,
                 290EA8931DFB617800053022 /* HTTPRequestTests.swift in Sources */,
@@ -1707,6 +1717,7 @@
                 29EA87EB1E79A3B70043A5F8 /* CMBlockBuffer+Extension.swift in Sources */,
                 29B876F01CD70D5900FC07DA /* Constants.swift in Sources */,
                 29EA87D91E79A0090043A5F8 /* URL+Extension.swift in Sources */,
+                2943ED54232FCA7C00ED6301 /* Setting.swift in Sources */,
                 292AC17C1CF4C871004F5730 /* MD5.swift in Sources */,
                 2958910B1EEB8D1800CE51E1 /* FLVReader.swift in Sources */,
                 295747921E3A30A400EF056E /* RTMPStreamDelegate.swift in Sources */,
@@ -1894,6 +1905,7 @@
                 2958912C1EEB8F1D00CE51E1 /* FLVSoundSize.swift in Sources */,
                 295891241EEB8EC500CE51E1 /* FLVAVCPacketType.swift in Sources */,
                 299D6A532051A9920090E10A /* MTHKView.swift in Sources */,
+                2943ED55232FCA7C00ED6301 /* Setting.swift in Sources */,
                 29EB3DF31ED05773001CAE8B /* CMAudioFormatDescription+Extension.swift in Sources */,
                 29EB3DFC1ED057AC001CAE8B /* HTTPRequest.swift in Sources */,
                 29EB3E161ED0588F001CAE8B /* MIME.swift in Sources */,
@@ -34,10 +34,8 @@ extension VideoIOComponent {
         input = nil
         output = nil
         if useScreenSize {
-            encoder.setValuesForKeys([
-                "width": screen.attributes["Width"]!,
-                "height": screen.attributes["Height"]!
-            ])
+            encoder.width = screen.attributes["Width"] as! Int32
+            encoder.height = screen.attributes["Height"] as! Int32
         }
         self.screen = screen
     }
README.md
@@ -164,24 +164,24 @@ do {
 var rtmpStream = RTMPStream(connection: rtmpConnection)

 rtmpStream.captureSettings = [
-    "fps": 30, // FPS
-    "sessionPreset": AVCaptureSession.Preset.medium.rawValue, // input video width/height
-    "continuousAutofocus": false, // use camera autofocus mode
-    "continuousExposure": false, // use camera exposure mode
-    // "preferredVideoStabilizationMode": AVCaptureVideoStabilizationMode.auto.rawValue
+    .fps: 30, // FPS
+    .sessionPreset: AVCaptureSession.Preset.medium, // input video width/height
+    .continuousAutofocus: false, // use camera autofocus mode
+    .continuousExposure: false, // use camera exposure mode
+    // .preferredVideoStabilizationMode: AVCaptureVideoStabilizationMode.auto.rawValue
 ]
 rtmpStream.audioSettings = [
-    "muted": false, // mute audio
-    "bitrate": 32 * 1024,
-    "sampleRate": sampleRate,
+    .muted: false, // mute audio
+    .bitrate: 32 * 1000,
+    .sampleRate: sampleRate,
 ]
 rtmpStream.videoSettings = [
-    "width": 640, // video output width
-    "height": 360, // video output height
-    "bitrate": 160 * 1024, // video output bitrate
-    // "dataRateLimits": [160 * 1024 / 8, 1], optional kVTCompressionPropertyKey_DataRateLimits property
-    "profileLevel": kVTProfileLevel_H264_Baseline_3_1, // H264 Profile require "import VideoToolbox"
-    "maxKeyFrameIntervalDuration": 2, // key frame / sec
+    .width: 640, // video output width
+    .height: 360, // video output height
+    .bitrate: 160 * 1000, // video output bitrate
+    // .dataRateLimits: [160 * 1000 / 8, 1], optional kVTCompressionPropertyKey_DataRateLimits property
+    .profileLevel: kVTProfileLevel_H264_Baseline_3_1, // H264 Profile require "import VideoToolbox"
+    .maxKeyFrameIntervalDuration: 2, // key frame / sec
 ]
 // "0" means the same of input
 rtmpStream.recorderSettings = [
@@ -10,19 +10,30 @@ public protocol AudioConverterDelegate: class {
  - seealse:
   - https://developer.apple.com/library/ios/technotes/tn2236/_index.html
  */
-public class AudioConverter: NSObject {
+public class AudioConverter {
     enum Error: Swift.Error {
         case setPropertyError(id: AudioConverterPropertyID, status: OSStatus)
     }

-    var effects: Set<AudioEffect> = []
-
-    static let supportedSettingsKeys: [String] = [
-        "muted",
-        "bitrate",
-        "sampleRate", // down,up sampleRate not supported yet #58
-        "actualBitrate"
-    ]
+    public enum Option: String, KeyPathRepresentable {
+        case muted
+        case bitrate
+        case sampleRate
+        case actualBitrate
+
+        public var keyPath: AnyKeyPath {
+            switch self {
+            case .muted:
+                return \AudioConverter.muted
+            case .bitrate:
+                return \AudioConverter.bitrate
+            case .sampleRate:
+                return \AudioConverter.sampleRate
+            case .actualBitrate:
+                return \AudioConverter.actualBitrate
+            }
+        }
+    }

     public static let minimumBitrate: UInt32 = 8 * 1024
     public static let defaultBitrate: UInt32 = 32 * 1024
@@ -36,10 +47,14 @@ public class AudioConverter: NSObject {
     public var destination: Destination = .AAC
     public weak var delegate: AudioConverterDelegate?
     public private(set) var isRunning: Atomic<Bool> = .init(false)
+    public var settings: Setting<AudioConverter, Option> = [:] {
+        didSet {
+            settings.observer = self
+        }
+    }

-    @objc var muted: Bool = false
-
-    @objc var bitrate: UInt32 = AudioConverter.defaultBitrate {
+    var muted: Bool = false
+    var bitrate: UInt32 = AudioConverter.defaultBitrate {
         didSet {
             guard bitrate != oldValue else {
                 return
@@ -51,13 +66,12 @@ public class AudioConverter: NSObject {
             }
         }
     }
-    @objc var sampleRate: Double = AudioConverter.defaultSampleRate
-    @objc var actualBitrate: UInt32 = AudioConverter.defaultBitrate {
+    var sampleRate: Double = AudioConverter.defaultSampleRate
+    var actualBitrate: UInt32 = AudioConverter.defaultBitrate {
         didSet {
             logger.info(actualBitrate)
         }
     }
-
     var channels: UInt32 = AudioConverter.defaultChannels
     var formatDescription: CMFormatDescription? {
         didSet {
@@ -78,6 +92,7 @@ public class AudioConverter: NSObject {
             bufferListSize = nonInterleaved ? AudioBufferList.sizeInBytes(maximumBuffers: maximumBuffers) : AudioConverter.defaultBufferListSize
         }
     }
+    var effects: Set<AudioEffect> = []
     private var maximumBuffers: Int = AudioConverter.defaultMaximumBuffers
     private var bufferListSize: Int = AudioConverter.defaultBufferListSize
     private var currentBufferList: UnsafeMutableAudioBufferListPointer?
@@ -124,6 +139,10 @@ public class AudioConverter: NSObject {
         )
     }

+    public init() {
+        settings.observer = self
+    }
+
     private var _converter: AudioConverterRef?
     private var converter: AudioConverterRef {
         var status: OSStatus = noErr
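Note (illustration only, not in the commit; the values are arbitrary): the didSet { settings.observer = self } pattern above means that assigning a whole dictionary literal creates a fresh Setting whose pairs are buffered, and rebinding the observer flushes each buffered pair through its key path.

    let converter = AudioConverter()
    converter.settings = [.bitrate: 64 * 1024, .muted: false]
    // didSet rebinds settings.observer to converter, which applies the
    // buffered pairs; converter.bitrate is now 64 * 1024.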
@@ -8,23 +8,43 @@ protocol VideoEncoderDelegate: class {
 }

 // MARK: -
-final class H264Encoder: NSObject {
-    static let supportedSettingsKeys: [String] = [
-        "muted",
-        "width",
-        "height",
-        "bitrate",
-        "profileLevel",
-        "dataRateLimits",
-        "enabledHardwareEncoder", // macOS only
-        "maxKeyFrameIntervalDuration",
-        "scalingMode"
-    ]
-
-    static let defaultWidth: Int32 = 480
-    static let defaultHeight: Int32 = 272
-    static let defaultBitrate: UInt32 = 160 * 1024
-    static let defaultScalingMode: String = "Trim"
+public final class H264Encoder {
+    public enum Option: String, KeyPathRepresentable, CaseIterable {
+        case muted
+        case width
+        case height
+        case bitrate
+        case profileLevel
+        case enabledHardwareEncoder
+        case maxKeyFrameIntervalDuration
+        case scalingMode
+
+        public var keyPath: AnyKeyPath {
+            switch self {
+            case .muted:
+                return \H264Encoder.muted
+            case .width:
+                return \H264Encoder.width
+            case .height:
+                return \H264Encoder.height
+            case .bitrate:
+                return \H264Encoder.bitrate
+            case .enabledHardwareEncoder:
+                return \H264Encoder.enabledHardwareEncoder
+            case .maxKeyFrameIntervalDuration:
+                return \H264Encoder.maxKeyFrameIntervalDuration
+            case .scalingMode:
+                return \H264Encoder.scalingMode
+            case .profileLevel:
+                return \H264Encoder.profileLevel
+            }
+        }
+    }
+
+    public static let defaultWidth: Int32 = 480
+    public static let defaultHeight: Int32 = 272
+    public static let defaultBitrate: UInt32 = 160 * 1024
+    public static let defaultScalingMode: ScalingMode = .trim

 #if os(iOS)
     static let defaultAttributes: [NSString: AnyObject] = [
@@ -37,10 +57,15 @@ final class H264Encoder: NSObject {
         kCVPixelBufferOpenGLCompatibilityKey: kCFBooleanTrue
     ]
 #endif
-    static let defaultDataRateLimits: [Int] = [0, 0]

-    @objc var muted: Bool = false
-    @objc var scalingMode: String = H264Encoder.defaultScalingMode {
+    public var settings: Setting<H264Encoder, Option> = [:] {
+        didSet {
+            settings.observer = self
+        }
+    }
+
+    var muted: Bool = false
+    var scalingMode: ScalingMode = H264Encoder.defaultScalingMode {
         didSet {
             guard scalingMode != oldValue else {
                 return
@@ -49,7 +74,7 @@ final class H264Encoder: NSObject {
         }
     }

-    @objc var width: Int32 = H264Encoder.defaultWidth {
+    var width: Int32 = H264Encoder.defaultWidth {
         didSet {
             guard width != oldValue else {
                 return
@@ -57,7 +82,7 @@ final class H264Encoder: NSObject {
             invalidateSession = true
         }
     }
-    @objc var height: Int32 = H264Encoder.defaultHeight {
+    var height: Int32 = H264Encoder.defaultHeight {
         didSet {
             guard height != oldValue else {
                 return
@@ -65,7 +90,7 @@ final class H264Encoder: NSObject {
             invalidateSession = true
         }
     }
-    @objc var enabledHardwareEncoder: Bool = true {
+    var enabledHardwareEncoder: Bool = true {
         didSet {
             guard enabledHardwareEncoder != oldValue else {
                 return
@@ -73,7 +98,7 @@ final class H264Encoder: NSObject {
             invalidateSession = true
         }
     }
-    @objc var bitrate: UInt32 = H264Encoder.defaultBitrate {
+    var bitrate: UInt32 = H264Encoder.defaultBitrate {
         didSet {
             guard bitrate != oldValue else {
                 return
@@ -81,20 +106,7 @@ final class H264Encoder: NSObject {
             setProperty(kVTCompressionPropertyKey_AverageBitRate, Int(bitrate) as CFTypeRef)
         }
     }
-
-    @objc var dataRateLimits: [Int] = H264Encoder.defaultDataRateLimits {
-        didSet {
-            guard dataRateLimits != oldValue else {
-                return
-            }
-            if dataRateLimits == H264Encoder.defaultDataRateLimits {
-                invalidateSession = true
-                return
-            }
-            setProperty(kVTCompressionPropertyKey_DataRateLimits, dataRateLimits as CFTypeRef)
-        }
-    }
-    @objc var profileLevel: String = kVTProfileLevel_H264_Baseline_3_1 as String {
+    var profileLevel: String = kVTProfileLevel_H264_Baseline_3_1 as String {
         didSet {
             guard profileLevel != oldValue else {
                 return
@@ -102,7 +114,7 @@ final class H264Encoder: NSObject {
             invalidateSession = true
         }
     }
-    @objc var maxKeyFrameIntervalDuration: Double = 2.0 {
+    var maxKeyFrameIntervalDuration: Double = 2.0 {
         didSet {
             guard maxKeyFrameIntervalDuration != oldValue else {
                 return
@@ -110,7 +122,6 @@ final class H264Encoder: NSObject {
             invalidateSession = true
         }
     }
-
     var locked: UInt32 = 0
     var lockQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.H264Encoder.lock")
     var expectedFPS: Float64 = AVMixer.defaultFPS {
@@ -131,7 +142,7 @@ final class H264Encoder: NSObject {
     }
     weak var delegate: VideoEncoderDelegate?

-    private(set) var isRunning: Atomic<Bool> = .init(false)
+    public private(set) var isRunning: Atomic<Bool> = .init(false)
     private(set) var status: OSStatus = noErr
     private var attributes: [NSString: AnyObject] {
         var attributes: [NSString: AnyObject] = H264Encoder.defaultAttributes
@@ -153,10 +164,9 @@ final class H264Encoder: NSObject {
             kVTCompressionPropertyKey_MaxKeyFrameIntervalDuration: NSNumber(value: maxKeyFrameIntervalDuration),
             kVTCompressionPropertyKey_AllowFrameReordering: !isBaseline as NSObject,
             kVTCompressionPropertyKey_PixelTransferProperties: [
-                "ScalingMode": scalingMode
+                "ScalingMode": scalingMode.rawValue
             ] as NSObject
         ]
-
 #if os(OSX)
         if enabledHardwareEncoder {
             properties[kVTVideoEncoderSpecification_EncoderID] = "com.apple.videotoolbox.videoencoder.h264.gva" as NSObject
@@ -164,10 +174,6 @@ final class H264Encoder: NSObject {
             properties["RequireHardwareAcceleratedVideoEncoder"] = kCFBooleanTrue
         }
 #endif
-
-        if dataRateLimits != H264Encoder.defaultDataRateLimits {
-            properties[kVTCompressionPropertyKey_DataRateLimits] = dataRateLimits as NSObject
-        }
         if !isBaseline {
             properties[kVTCompressionPropertyKey_H264EntropyMode] = kVTH264EntropyMode_CABAC
         }
@@ -223,6 +229,10 @@ final class H264Encoder: NSObject {
         }
     }

+    init() {
+        settings.observer = self
+    }
+
     func encodeImageBuffer(_ imageBuffer: CVImageBuffer, presentationTimeStamp: CMTime, duration: CMTime) {
         guard isRunning.value && locked == 0 else {
             return
@@ -287,7 +297,7 @@ final class H264Encoder: NSObject {

 extension H264Encoder: Running {
     // MARK: Running
-    func startRunning() {
+    public func startRunning() {
         lockQueue.async {
             self.isRunning.mutate { $0 = true }
 #if os(iOS)
@@ -307,7 +317,7 @@ extension H264Encoder: Running {
         }
     }

-    func stopRunning() {
+    public func stopRunning() {
         lockQueue.async {
             self.session = nil
             self.lastImageBuffer = nil
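Note (illustration only): each Option case exposes an AnyKeyPath; Setting's subscript downcasts it to a ReferenceWritableKeyPath of the matching value type before writing, so integer inputs of any width are normalized (see toInt32/toUInt32 in Setting.swift below).

    let encoder = H264Encoder()
    encoder.settings[.width] = 1280                    // written through as Int32
    encoder.settings[.scalingMode] = ScalingMode.letterbox
    encoder.settings[.bitrate] = UInt16(8_000)         // widened to UInt32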
@@ -11,7 +11,7 @@ protocol AVMixerDelegate: class {
     func didOutputVideo(_ buffer: CMSampleBuffer)
 }

-public class AVMixer: NSObject {
+public class AVMixer {
     public static let bufferEmpty: Notification.Name = .init("AVMixerBufferEmpty")

     public static let defaultFPS: Float64 = 30
@@ -19,48 +19,66 @@ public class AVMixer: NSObject {
         kCVPixelBufferPixelFormatTypeKey: NSNumber(value: kCVPixelFormatType_32BGRA)
     ]

-#if os(iOS)
-    static let supportedSettingsKeys: [String] = [
-        "fps",
-        "sessionPreset",
-        "continuousAutofocus",
-        "continuousExposure",
-        "preferredVideoStabilizationMode"
-    ]
+#if os(iOS) || os(macOS)
+    public enum Option: String, KeyPathRepresentable, CaseIterable {
+        case fps
+        case sessionPreset
+        case continuousAutofocus
+        case continuousExposure
+#if os(iOS)
+        case preferredVideoStabilizationMode
+#endif

-    @objc var preferredVideoStabilizationMode: AVCaptureVideoStabilizationMode {
+        public var keyPath: AnyKeyPath {
+            switch self {
+            case .fps:
+                return \AVMixer.fps
+            case .sessionPreset:
+                return \AVMixer.sessionPreset
+            case .continuousAutofocus:
+                return \AVMixer.continuousAutofocus
+            case .continuousExposure:
+                return \AVMixer.continuousExposure
+#if os(iOS)
+            case .preferredVideoStabilizationMode:
+                return \AVMixer.preferredVideoStabilizationMode
+#endif
+            }
+        }
+    }
+#else
+    public struct Option: KeyPathRepresentable {
+        public static var allCases: [AVMixer.Option] = []
+        public var keyPath: AnyKeyPath
+        public typealias AllCases = [Option]
+    }
+#endif
+
+#if os(iOS)
+    var preferredVideoStabilizationMode: AVCaptureVideoStabilizationMode {
         get { return videoIO.preferredVideoStabilizationMode }
         set { videoIO.preferredVideoStabilizationMode = newValue }
     }
-#elseif os(macOS)
-    static let supportedSettingsKeys: [String] = [
-        "fps",
-        "sessionPreset",
-        "continuousAutofocus",
-        "continuousExposure"
-    ]
-#else
-    static let supportedSettingsKeys: [String] = [
-    ]
-#endif
+#endif

 #if os(iOS) || os(macOS)
-    @objc var fps: Float64 {
+    var fps: Float64 {
         get { return videoIO.fps }
         set { videoIO.fps = newValue }
     }

-    @objc var continuousExposure: Bool {
+    var continuousExposure: Bool {
         get { return videoIO.continuousExposure }
         set { videoIO.continuousExposure = newValue }
     }

-    @objc var continuousAutofocus: Bool {
+    var continuousAutofocus: Bool {
         get { return videoIO.continuousAutofocus }
         set { videoIO.continuousAutofocus = newValue }
     }

-    @objc var sessionPreset: AVCaptureSession.Preset = .default {
+    var sessionPreset: AVCaptureSession.Preset = .default {
         didSet {
             guard sessionPreset != oldValue else {
                 return
@@ -86,6 +104,12 @@ public class AVMixer: NSObject {
     }
 #endif

+    var settings: Setting<AVMixer, Option> = [:] {
+        didSet {
+            settings.observer = self
+        }
+    }
+
     weak var delegate: AVMixerDelegate?

     private var _recorder: AVRecorder?
@@ -117,6 +141,10 @@ public class AVMixer: NSObject {
         dispose()
     }

+    public init() {
+        settings.observer = self
+    }
+
     public func dispose() {
 #if os(iOS) || os(macOS)
         if let session = _session, session.isRunning {
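Note (illustration only): the stabilization option only exists on iOS, so a cross-platform caller wraps that one assignment the same way the Option enum does.

    let mixer = AVMixer()
    mixer.settings[.sessionPreset] = AVCaptureSession.Preset.hd1280x720
    #if os(iOS)
    mixer.settings[.preferredVideoStabilizationMode] = AVCaptureVideoStabilizationMode.auto
    #endif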
@@ -69,52 +69,30 @@ open class NetStream: NSObject {
     }
 #endif

-    open var audioSettings: [String: Any] {
+    open var audioSettings: Setting<AudioConverter, AudioConverter.Option> {
         get {
-            var audioSettings: [String: Any]!
-            ensureLockQueue {
-                audioSettings = self.mixer.audioIO.encoder.dictionaryWithValues(forKeys: AudioConverter.supportedSettingsKeys)
-            }
-            return audioSettings
+            return mixer.audioIO.encoder.settings
         }
         set {
-            ensureLockQueue {
-                self.mixer.audioIO.encoder.setValuesForKeys(newValue)
-            }
+            mixer.audioIO.encoder.settings = newValue
         }
     }

-    open var videoSettings: [String: Any] {
+    open var videoSettings: Setting<H264Encoder, H264Encoder.Option> {
         get {
-            var videoSettings: [String: Any]!
-            ensureLockQueue {
-                videoSettings = self.mixer.videoIO.encoder.dictionaryWithValues(forKeys: H264Encoder.supportedSettingsKeys)
-            }
-            return videoSettings
+            return mixer.videoIO.encoder.settings
         }
         set {
-            if DispatchQueue.getSpecific(key: NetStream.queueKey) == NetStream.queueValue {
-                self.mixer.videoIO.encoder.setValuesForKeys(newValue)
-            } else {
-                ensureLockQueue {
-                    self.mixer.videoIO.encoder.setValuesForKeys(newValue)
-                }
-            }
+            mixer.videoIO.encoder.settings = newValue
         }
     }

-    open var captureSettings: [String: Any] {
+    open var captureSettings: Setting<AVMixer, AVMixer.Option> {
         get {
-            var captureSettings: [String: Any]!
-            ensureLockQueue {
-                captureSettings = self.mixer.dictionaryWithValues(forKeys: AVMixer.supportedSettingsKeys)
-            }
-            return captureSettings
+            return mixer.settings
         }
         set {
-            ensureLockQueue {
-                self.mixer.setValuesForKeys(newValue)
-            }
+            mixer.settings = newValue
         }
     }

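Note (illustration only): with the typed Setting properties, NetStream forwards reads and writes straight to the owning objects; the old lock-queue copies are gone. Values read back as Any?, so callers cast.

    let stream = RTMPStream(connection: RTMPConnection())
    stream.videoSettings[.bitrate] = 512 * 1024
    let bitrate = stream.videoSettings[.bitrate] as? UInt32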
@@ -0,0 +1,150 @@
+import AVFoundation
+import Foundation
+
+public protocol KeyPathRepresentable: Hashable, CaseIterable {
+    var keyPath: AnyKeyPath { get }
+}
+
+public class Setting<T: AnyObject, Key: KeyPathRepresentable>: ExpressibleByDictionaryLiteral {
+    public typealias Key = Key
+    public typealias Value = Any
+
+    weak var observer: T? {
+        didSet {
+            for (key, value) in elements {
+                self[key] = value
+            }
+            elements.removeAll()
+        }
+    }
+
+    private var elements: [(Key, Any)] = []
+
+    public required init(dictionaryLiteral elements: (Key, Any)...) {
+        self.elements = elements
+    }
+
+    public subscript(key: Key) -> Any? {
+        get {
+            return observer?[keyPath: key.keyPath]
+        }
+        set {
+            switch key.keyPath {
+            case let path as ReferenceWritableKeyPath<T, Bool>:
+                if let newValue = newValue as? Bool {
+                    observer?[keyPath: path] = newValue
+                }
+            case let path as ReferenceWritableKeyPath<T, UInt32>:
+                if let newValue = toUInt32(value: newValue) {
+                    observer?[keyPath: path] = newValue
+                }
+            case let path as ReferenceWritableKeyPath<T, Int32>:
+                if let newValue = toInt32(value: newValue) {
+                    observer?[keyPath: path] = newValue
+                }
+            case let path as ReferenceWritableKeyPath<T, Double>:
+                if let newValue = toDouble(value: newValue) {
+                    observer?[keyPath: path] = newValue
+                }
+            case let path as ReferenceWritableKeyPath<T, String>:
+                if let newValue = newValue as? String {
+                    observer?[keyPath: path] = newValue
+                }
+            case let path as ReferenceWritableKeyPath<T, ScalingMode>:
+                if let newValue = newValue as? ScalingMode {
+                    observer?[keyPath: path] = newValue
+                }
+#if os(iOS)
+            case let path as ReferenceWritableKeyPath<T, AVCaptureVideoStabilizationMode>:
+                if let newValue = newValue as? AVCaptureVideoStabilizationMode {
+                    observer?[keyPath: path] = newValue
+                }
+#endif
+#if !os(tvOS)
+            case let path as ReferenceWritableKeyPath<T, AVCaptureSession.Preset>:
+                if let newValue = newValue as? AVCaptureSession.Preset {
+                    observer?[keyPath: path] = newValue
+                }
+#endif
+            default:
+                return
+            }
+        }
+    }
+
+    private func toDouble(value: Any?) -> Double? {
+        switch value {
+        case let value as Float:
+            return Double(value)
+        case let value as Double:
+            return value
+        default:
+            return nil
+        }
+    }
+
+    private func toUInt32(value: Any?) -> UInt32? {
+        switch value {
+        case let value as Int:
+            return numericCast(value)
+        case let value as Int8:
+            return numericCast(value)
+        case let value as Int16:
+            return numericCast(value)
+        case let value as Int32:
+            return numericCast(value)
+        case let value as Int64:
+            return numericCast(value)
+        case let value as UInt:
+            return numericCast(value)
+        case let value as UInt8:
+            return numericCast(value)
+        case let value as UInt16:
+            return numericCast(value)
+        case let value as UInt32:
+            return value
+        case let value as UInt64:
+            return numericCast(value)
+        default:
+            return nil
+        }
+    }
+
+    private func toInt32(value: Any?) -> Int32? {
+        switch value {
+        case let value as Int:
+            return numericCast(value)
+        case let value as Int8:
+            return numericCast(value)
+        case let value as Int16:
+            return numericCast(value)
+        case let value as Int32:
+            return value
+        case let value as Int64:
+            return numericCast(value)
+        case let value as UInt:
+            return numericCast(value)
+        case let value as UInt8:
+            return numericCast(value)
+        case let value as UInt16:
+            return numericCast(value)
+        case let value as UInt32:
+            return numericCast(value)
+        case let value as UInt64:
+            return numericCast(value)
+        default:
+            return nil
+        }
+    }
+}
+
+extension Setting: CustomDebugStringConvertible {
+    // MARK: CustomDebugStringConvertible
+    public var debugDescription: String {
+        var data: [Key: Any] = [:]
+        for key in Key.allCases {
+            data[key] = observer?[keyPath: key.keyPath]
+        }
+        return data.description
+    }
+}
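Note (illustration only; Volume and VolumeOption are hypothetical types, not part of HaishinKit): a self-contained sketch of the whole mechanism. Since observer is internal, outside the module this only works under @testable import, as SettingTests does below.

    final class Volume {
        var level: Double = 1.0
    }

    enum VolumeOption: String, KeyPathRepresentable, CaseIterable {
        case level
        var keyPath: AnyKeyPath { return \Volume.level }
    }

    let volume = Volume()
    let settings: Setting<Volume, VolumeOption> = [.level: 0.5]
    settings.observer = volume  // flushes the buffered literal via the key path
    // volume.level == 0.5; settings[.level] now reads back through the key path.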
@@ -0,0 +1,46 @@
+import Foundation
+import XCTest
+import AVFoundation
+
+@testable import HaishinKit
+
+final class SettingTests: XCTestCase {
+    func testH264Encoder() {
+        let encoder = H264Encoder()
+        XCTAssertEqual(encoder.settings[.muted] as? Bool, encoder.muted)
+        XCTAssertEqual(encoder.settings[.width] as? Int32, encoder.width)
+        XCTAssertEqual(encoder.settings[.height] as? Int32, encoder.height)
+        XCTAssertEqual(encoder.settings[.profileLevel] as? String, encoder.profileLevel)
+        XCTAssertEqual(encoder.settings[.scalingMode] as? ScalingMode, encoder.scalingMode)
+        XCTAssertEqual(encoder.settings[.maxKeyFrameIntervalDuration] as? Double, encoder.maxKeyFrameIntervalDuration)
+
+        encoder.settings[.width] = Int8(100)
+        XCTAssertEqual(100, encoder.width)
+
+        encoder.settings[.scalingMode] = ScalingMode.letterbox
+        XCTAssertEqual(encoder.settings[.scalingMode] as? ScalingMode, ScalingMode.letterbox)
+
+        encoder.settings[.maxKeyFrameIntervalDuration] = Float(5.0)
+        XCTAssertEqual(5.0, encoder.maxKeyFrameIntervalDuration)
+    }
+
+    func testAVMixer() {
+        let mixier = AVMixer()
+        XCTAssertEqual(mixier.settings[.fps] as? Float64, mixier.fps)
+        XCTAssertEqual(mixier.settings[.continuousAutofocus] as? Bool, mixier.continuousAutofocus)
+        XCTAssertEqual(mixier.settings[.continuousExposure] as? Bool, mixier.continuousExposure)
+        XCTAssertEqual(mixier.settings[.sessionPreset] as? AVCaptureSession.Preset, mixier.sessionPreset)
+
+        mixier.settings[.sessionPreset] = AVCaptureSession.Preset.high
+        XCTAssertEqual(AVCaptureSession.Preset.high, mixier.sessionPreset)
+
+        mixier.settings = [
+            .sessionPreset: AVCaptureSession.Preset.hd1280x720,
+            .continuousAutofocus: false,
+            .continuousExposure: false,
+        ]
+        XCTAssertEqual(false, mixier.continuousAutofocus)
+        XCTAssertEqual(false, mixier.continuousExposure)
+        XCTAssertEqual(AVCaptureSession.Preset.hd1280x720, mixier.sessionPreset)
+    }
+}