Compliant codable for VideoCodecSettings and AudioCodecSettings.

This commit is contained in:
shogo4405 2023-02-16 21:29:51 +09:00
parent 67db4b55fd
commit c2b37f416f
24 changed files with 238 additions and 625 deletions

View File

@ -32,7 +32,7 @@ final class LiveViewController: UIViewController {
pipIntentView.layer.borderWidth = 1.0
pipIntentView.layer.borderColor = UIColor.white.cgColor
pipIntentView.bounds = MultiCamCaptureSetting.default.regionOfInterest
pipIntentView.bounds = MultiCamCaptureSettings.default.regionOfInterest
pipIntentView.isUserInteractionEnabled = true
view.addSubview(pipIntentView)
@ -40,14 +40,10 @@ final class LiveViewController: UIViewController {
if let orientation = DeviceUtil.videoOrientation(by: UIApplication.shared.statusBarOrientation) {
rtmpStream.videoOrientation = orientation
}
rtmpStream.videoSettings = [
.width: 720,
.height: 1280
]
rtmpStream.videoSettings.videoSize = .init(width: 720, height: 1280)
rtmpStream.mixer.recorder.delegate = self
videoBitrateSlider?.value = Float(RTMPStream.defaultVideoBitrate) / 1000
audioBitrateSlider?.value = Float(RTMPStream.defaultAudioBitrate) / 1000
videoBitrateSlider?.value = Float(VideoCodecSettings.default.bitRate) / 1000
audioBitrateSlider?.value = Float(AudioCodecSettings.default.bitRate) / 1000
NotificationCenter.default.addObserver(self, selector: #selector(on(_:)), name: UIDevice.orientationDidChangeNotification, object: nil)
}
@ -107,10 +103,11 @@ final class LiveViewController: UIViewController {
currentFrame.origin.x += deltaX
currentFrame.origin.y += deltaY
pipIntentView.frame = currentFrame
rtmpStream.multiCamCaptureSettings = MultiCamCaptureSetting(
rtmpStream.multiCamCaptureSettings = MultiCamCaptureSettings(
mode: rtmpStream.multiCamCaptureSettings.mode,
cornerRadius: 16.0,
regionOfInterest: currentFrame
regionOfInterest: currentFrame,
direction: .east
)
}
}
@ -138,11 +135,11 @@ final class LiveViewController: UIViewController {
@IBAction func on(slider: UISlider) {
if slider == audioBitrateSlider {
audioBitrateLabel?.text = "audio \(Int(slider.value))/kbps"
rtmpStream.audioSettings[.bitrate] = slider.value * 1000
rtmpStream.audioSettings.bitRate = UInt32(slider.value * 1000)
}
if slider == videoBitrateSlider {
videoBitrateLabel?.text = "video \(Int(slider.value))/kbps"
rtmpStream.videoSettings[.bitrate] = slider.value * 1000
rtmpStream.videoSettings.bitRate = UInt32(slider.value * 1000)
}
if slider == zoomSlider {
let zoomFactor = CGFloat(slider.value)

View File

@ -43,11 +43,7 @@ open class SampleHandler: RPBroadcastSampleHandler {
case .video:
if let description = CMSampleBufferGetFormatDescription(sampleBuffer) {
let dimensions = CMVideoFormatDescriptionGetDimensions(description)
rtmpStream.videoSettings = [
.width: dimensions.width,
.height: dimensions.height,
.profileLevel: kVTProfileLevel_H264_Baseline_AutoLevel
]
rtmpStream.videoSettings.videoSize = .init(width: dimensions.width, height: dimensions.height)
}
rtmpStream.appendSampleBuffer(sampleBuffer, withType: .video)
case .audioMic:

View File

@ -65,10 +65,7 @@ final class ViewModel: ObservableObject {
rtmpStream.videoOrientation = orientation
}
rtmpStream.sessionPreset = .hd1280x720
rtmpStream.videoSettings = [
.width: 720,
.height: 1280
]
rtmpStream.videoSettings.videoSize = .init(width: 720, height: 1280)
rtmpStream.mixer.recorder.delegate = self
nc.publisher(for: UIDevice.orientationDidChangeNotification, object: nil)
@ -195,11 +192,11 @@ final class ViewModel: ObservableObject {
}
func changeVideoRate(level: CGFloat) {
rtmpStream.videoSettings[.bitrate] = level * 1000
rtmpStream.videoSettings.bitRate = UInt32(level * 1000)
}
func changeAudioRate(level: CGFloat) {
rtmpStream.audioSettings[.bitrate] = level * 1000
rtmpStream.audioSettings.bitRate = UInt32(level * 1000)
}
@objc

View File

@ -55,9 +55,6 @@
2942A4FA21A9418A004E1BEE /* Running.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2942A4F721A9418A004E1BEE /* Running.swift */; };
2942EF841DFF4D06008E620C /* HaishinKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 2945CBBD1B4BE66000104112 /* HaishinKit.framework */; };
2942EF861DFF4D3C008E620C /* HaishinKit.framework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = 2945CBBD1B4BE66000104112 /* HaishinKit.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; };
2943ED53232FCA7C00ED6301 /* Setting.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2943ED52232FCA7C00ED6301 /* Setting.swift */; };
2943ED54232FCA7C00ED6301 /* Setting.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2943ED52232FCA7C00ED6301 /* Setting.swift */; };
2943ED55232FCA7C00ED6301 /* Setting.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2943ED52232FCA7C00ED6301 /* Setting.swift */; };
294637A41EC8961C008EEC71 /* RTMPReaderTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 294637A31EC8961C008EEC71 /* RTMPReaderTests.swift */; };
294637A81EC89BC9008EEC71 /* Config.swift in Sources */ = {isa = PBXBuildFile; fileRef = 294637A71EC89BC9008EEC71 /* Config.swift */; };
294637AA1EC8A79F008EEC71 /* SampleVideo_360x240_5mb.flv in Resources */ = {isa = PBXBuildFile; fileRef = 294637A91EC8A79F008EEC71 /* SampleVideo_360x240_5mb.flv */; };
@ -321,9 +318,9 @@
BC110253292DD6E900D48035 /* vImage_Buffer+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC110252292DD6E900D48035 /* vImage_Buffer+Extension.swift */; };
BC110254292DD6E900D48035 /* vImage_Buffer+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC110252292DD6E900D48035 /* vImage_Buffer+Extension.swift */; };
BC110255292DD6E900D48035 /* vImage_Buffer+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC110252292DD6E900D48035 /* vImage_Buffer+Extension.swift */; };
BC110257292E661E00D48035 /* MultiCamCaptureSetting.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC110256292E661E00D48035 /* MultiCamCaptureSetting.swift */; };
BC110258292E661E00D48035 /* MultiCamCaptureSetting.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC110256292E661E00D48035 /* MultiCamCaptureSetting.swift */; };
BC110259292E661E00D48035 /* MultiCamCaptureSetting.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC110256292E661E00D48035 /* MultiCamCaptureSetting.swift */; };
BC110257292E661E00D48035 /* MultiCamCaptureSettings.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC110256292E661E00D48035 /* MultiCamCaptureSettings.swift */; };
BC110258292E661E00D48035 /* MultiCamCaptureSettings.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC110256292E661E00D48035 /* MultiCamCaptureSettings.swift */; };
BC110259292E661E00D48035 /* MultiCamCaptureSettings.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC110256292E661E00D48035 /* MultiCamCaptureSettings.swift */; };
BC11D94625A1B01000D710BA /* Screencast.appex in Embed Foundation Extensions */ = {isa = PBXBuildFile; fileRef = 2915EC521D85BDF100621092 /* Screencast.appex */; settings = {ATTRIBUTES = (RemoveHeadersOnCopy, ); }; };
BC20DF38250377A3007BC608 /* IOUIScreenCaptureUnit.swift in Sources */ = {isa = PBXBuildFile; fileRef = 299B131C1D35272D00A1E8F5 /* IOUIScreenCaptureUnit.swift */; };
BC3004CE296B0A1700119932 /* Shape.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC3004CD296B0A1700119932 /* Shape.swift */; };
@ -393,9 +390,15 @@
BC7A23F525171C8F0089F77C /* MTHKView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2999C3742071138F00892E55 /* MTHKView.swift */; };
BC7A23F625171C8F0089F77C /* MTHKView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2999C3742071138F00892E55 /* MTHKView.swift */; };
BC7C56892995082700C41A9B /* NetStreamTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC7C56882995082700C41A9B /* NetStreamTests.swift */; };
BC7C568C299538CF00C41A9B /* H264Profile.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC7C568A299526F800C41A9B /* H264Profile.swift */; };
BC7C568D299538D000C41A9B /* H264Profile.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC7C568A299526F800C41A9B /* H264Profile.swift */; };
BC7C568E299538D000C41A9B /* H264Profile.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC7C568A299526F800C41A9B /* H264Profile.swift */; };
BC7C56B7299E579F00C41A9B /* AudioCodecSettings.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC7C56B6299E579F00C41A9B /* AudioCodecSettings.swift */; };
BC7C56B8299E579F00C41A9B /* AudioCodecSettings.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC7C56B6299E579F00C41A9B /* AudioCodecSettings.swift */; };
BC7C56B9299E579F00C41A9B /* AudioCodecSettings.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC7C56B6299E579F00C41A9B /* AudioCodecSettings.swift */; };
BC7C56BB299E595000C41A9B /* VideoCodecSettings.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC7C56BA299E595000C41A9B /* VideoCodecSettings.swift */; };
BC7C56BC299E595000C41A9B /* VideoCodecSettings.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC7C56BA299E595000C41A9B /* VideoCodecSettings.swift */; };
BC7C56BD299E595000C41A9B /* VideoCodecSettings.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC7C56BA299E595000C41A9B /* VideoCodecSettings.swift */; };
BC7C56BF299FC38D00C41A9B /* VideoSize.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC7C56BE299FC38D00C41A9B /* VideoSize.swift */; };
BC7C56C0299FC38D00C41A9B /* VideoSize.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC7C56BE299FC38D00C41A9B /* VideoSize.swift */; };
BC7C56C1299FC38D00C41A9B /* VideoSize.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC7C56BE299FC38D00C41A9B /* VideoSize.swift */; };
BC83A4732403D83B006BDE06 /* VTCompressionSession+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC83A4722403D83B006BDE06 /* VTCompressionSession+Extension.swift */; };
BC83A4742403D83B006BDE06 /* VTCompressionSession+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC83A4722403D83B006BDE06 /* VTCompressionSession+Extension.swift */; };
BC83A4752403D83B006BDE06 /* VTCompressionSession+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC83A4722403D83B006BDE06 /* VTCompressionSession+Extension.swift */; };
@ -588,7 +591,6 @@
2941746A22D069B300A2944F /* AudioEffect.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioEffect.swift; sourceTree = "<group>"; };
2942424C1CF4C01300D65DCB /* MD5.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MD5.swift; sourceTree = "<group>"; };
2942A4F721A9418A004E1BEE /* Running.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Running.swift; sourceTree = "<group>"; };
2943ED52232FCA7C00ED6301 /* Setting.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Setting.swift; sourceTree = "<group>"; };
2945CBBD1B4BE66000104112 /* HaishinKit.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = HaishinKit.framework; sourceTree = BUILT_PRODUCTS_DIR; };
294637A31EC8961C008EEC71 /* RTMPReaderTests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = RTMPReaderTests.swift; sourceTree = "<group>"; };
294637A71EC89BC9008EEC71 /* Config.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Config.swift; sourceTree = "<group>"; };
@ -719,7 +721,7 @@
BC11023D2917C35B00D48035 /* CVPixelBufferPool+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CVPixelBufferPool+Extension.swift"; sourceTree = "<group>"; };
BC1102492925147300D48035 /* IOCaptureUnit.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOCaptureUnit.swift; sourceTree = "<group>"; };
BC110252292DD6E900D48035 /* vImage_Buffer+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "vImage_Buffer+Extension.swift"; sourceTree = "<group>"; };
BC110256292E661E00D48035 /* MultiCamCaptureSetting.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MultiCamCaptureSetting.swift; sourceTree = "<group>"; };
BC110256292E661E00D48035 /* MultiCamCaptureSettings.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MultiCamCaptureSettings.swift; sourceTree = "<group>"; };
BC3004CD296B0A1700119932 /* Shape.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Shape.swift; sourceTree = "<group>"; };
BC3004D3296BFFF600119932 /* MainSplitViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MainSplitViewController.swift; sourceTree = "<group>"; };
BC3004F0296C0C7400119932 /* MenuViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MenuViewController.swift; sourceTree = "<group>"; };
@ -744,7 +746,9 @@
BC6FC91D29609A6800A746EE /* ShapeFactory.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ShapeFactory.swift; sourceTree = "<group>"; };
BC6FC9212961B3D800A746EE /* vImage_CGImageFormat+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "vImage_CGImageFormat+Extension.swift"; sourceTree = "<group>"; };
BC7C56882995082700C41A9B /* NetStreamTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NetStreamTests.swift; sourceTree = "<group>"; };
BC7C568A299526F800C41A9B /* H264Profile.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = H264Profile.swift; sourceTree = "<group>"; };
BC7C56B6299E579F00C41A9B /* AudioCodecSettings.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioCodecSettings.swift; sourceTree = "<group>"; };
BC7C56BA299E595000C41A9B /* VideoCodecSettings.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VideoCodecSettings.swift; sourceTree = "<group>"; };
BC7C56BE299FC38D00C41A9B /* VideoSize.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VideoSize.swift; sourceTree = "<group>"; };
BC83A4722403D83B006BDE06 /* VTCompressionSession+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "VTCompressionSession+Extension.swift"; sourceTree = "<group>"; };
BC959EEE296EE4190067BA97 /* ImageTransform.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ImageTransform.swift; sourceTree = "<group>"; };
BC959F0D29705B1B0067BA97 /* SCStreamPublishViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SCStreamPublishViewController.swift; sourceTree = "<group>"; };
@ -865,8 +869,10 @@
29B876571CD70A7900FC07DA /* AudioCodec.swift */,
BC44A1A823D31E92002D4297 /* AudioCodecBuffer.swift */,
297E69112324E38800D418AB /* AudioCodecFormat.swift */,
BC7C568A299526F800C41A9B /* H264Profile.swift */,
BC7C56B6299E579F00C41A9B /* AudioCodecSettings.swift */,
29B876591CD70A7900FC07DA /* VideoCodec.swift */,
BC7C56BA299E595000C41A9B /* VideoCodecSettings.swift */,
BC7C56BE299FC38D00C41A9B /* VideoSize.swift */,
BC4914A128DDD33D009E2DF6 /* VTSessionConvertible.swift */,
BC4914B528DEC2FE009E2DF6 /* VTSessionMode.swift */,
BC4914A528DDD367009E2DF6 /* VTSessionOption.swift */,
@ -888,7 +894,6 @@
2942424C1CF4C01300D65DCB /* MD5.swift */,
294B2D3123785E3800CE7BDC /* RingBuffer.swift */,
2942A4F721A9418A004E1BEE /* Running.swift */,
2943ED52232FCA7C00ED6301 /* Setting.swift */,
);
path = Util;
sourceTree = "<group>";
@ -1190,7 +1195,7 @@
29B8768E1CD70AFE00FC07DA /* IOVideoUnit.swift */,
2901A4ED1D437170002BBD23 /* MediaLink.swift */,
2999C3742071138F00892E55 /* MTHKView.swift */,
BC110256292E661E00D48035 /* MultiCamCaptureSetting.swift */,
BC110256292E661E00D48035 /* MultiCamCaptureSettings.swift */,
BC34FA0A286CB90A00EFAF27 /* PiPHKView.swift */,
295891001EEB7A8B00CE51E1 /* ScalingMode.swift */,
BC3004CD296B0A1700119932 /* Shape.swift */,
@ -1833,7 +1838,6 @@
BCB9773F2621812800C9A649 /* AVCFormatStream.swift in Sources */,
295891011EEB7A8B00CE51E1 /* ScalingMode.swift in Sources */,
BC83A4732403D83B006BDE06 /* VTCompressionSession+Extension.swift in Sources */,
2943ED53232FCA7C00ED6301 /* Setting.swift in Sources */,
BC4914A228DDD33D009E2DF6 /* VTSessionConvertible.swift in Sources */,
2915EC4D1D85BB8C00621092 /* RTMPTSocket.swift in Sources */,
BC11023E2917C35B00D48035 /* CVPixelBufferPool+Extension.swift in Sources */,
@ -1877,7 +1881,7 @@
BC34FA0B286CB90A00EFAF27 /* PiPHKView.swift in Sources */,
293B42E92340B4840086F973 /* RTMPObjectEncoding.swift in Sources */,
2976A47E1D48C5C700B53EF2 /* IORecorder.swift in Sources */,
BC110257292E661E00D48035 /* MultiCamCaptureSetting.swift in Sources */,
BC110257292E661E00D48035 /* MultiCamCaptureSettings.swift in Sources */,
29B876B21CD70B2800FC07DA /* RTMPMuxer.swift in Sources */,
2958912E1EEB8F4100CE51E1 /* FLVSoundType.swift in Sources */,
BC0D236D26331BAB001DDA0C /* DataBuffer.swift in Sources */,
@ -1894,7 +1898,6 @@
BC562DCB29576D220048D89A /* AVCaptureSession.Preset+Extension.swift in Sources */,
29B876B51CD70B2800FC07DA /* RTMPSocket.swift in Sources */,
29B876AB1CD70B2800FC07DA /* AMF0Serializer.swift in Sources */,
BC7C568E299538D000C41A9B /* H264Profile.swift in Sources */,
29B8765B1CD70A7900FC07DA /* AudioCodec.swift in Sources */,
29EA87D51E799F670043A5F8 /* Mirror+Extension.swift in Sources */,
297E69122324E38800D418AB /* AudioCodecFormat.swift in Sources */,
@ -1915,15 +1918,18 @@
29B876781CD70ACE00FC07DA /* HTTPService.swift in Sources */,
BC570B4828E9ACC10098A12C /* IOUnit.swift in Sources */,
2976A4861D4903C300B53EF2 /* DeviceUtil.swift in Sources */,
BC7C56BB299E595000C41A9B /* VideoCodecSettings.swift in Sources */,
29B876881CD70AE800FC07DA /* TSPacket.swift in Sources */,
29B876BE1CD70B3900FC07DA /* EventDispatcher.swift in Sources */,
29B8769D1CD70B1100FC07DA /* NetService.swift in Sources */,
BC7C56BF299FC38D00C41A9B /* VideoSize.swift in Sources */,
29B8769E1CD70B1100FC07DA /* NetSocket.swift in Sources */,
2958911A1EEB8E3F00CE51E1 /* FLVAudioCodec.swift in Sources */,
BC4914B628DEC2FE009E2DF6 /* VTSessionMode.swift in Sources */,
295891261EEB8EF300CE51E1 /* FLVAACPacket.swift in Sources */,
29B876791CD70ACE00FC07DA /* HTTPStream.swift in Sources */,
BC6FC91E29609A6800A746EE /* ShapeFactory.swift in Sources */,
BC7C56B7299E579F00C41A9B /* AudioCodecSettings.swift in Sources */,
29B876AC1CD70B2800FC07DA /* AMF3Serializer.swift in Sources */,
2916196C1E7F0768009FB344 /* CMFormatDescription+Extension.swift in Sources */,
BCB976DF26107B5600C9A649 /* TSField.swift in Sources */,
@ -1977,14 +1983,12 @@
buildActionMask = 2147483647;
files = (
29B876EC1CD70D5900FC07DA /* AudioCodec.swift in Sources */,
BC7C568D299538D000C41A9B /* H264Profile.swift in Sources */,
BCB977402621812800C9A649 /* AVCFormatStream.swift in Sources */,
29B876EE1CD70D5900FC07DA /* VideoCodec.swift in Sources */,
BCA2252D293CC5B600DD7CB2 /* IOScreenCaptureUnit.swift in Sources */,
29EA87EB1E79A3B70043A5F8 /* CMBlockBuffer+Extension.swift in Sources */,
29B876F01CD70D5900FC07DA /* Constants.swift in Sources */,
29EA87D91E79A0090043A5F8 /* URL+Extension.swift in Sources */,
2943ED54232FCA7C00ED6301 /* Setting.swift in Sources */,
292AC17C1CF4C871004F5730 /* MD5.swift in Sources */,
2958910B1EEB8D1800CE51E1 /* FLVReader.swift in Sources */,
29B876F41CD70D5900FC07DA /* DataConvertible.swift in Sources */,
@ -2005,6 +2009,7 @@
BC11024B2925147300D48035 /* IOCaptureUnit.swift in Sources */,
29B876FA1CD70D5900FC07DA /* M3U.swift in Sources */,
29B876FD1CD70D5A00FC07DA /* AudioSpecificConfig.swift in Sources */,
BC7C56C0299FC38D00C41A9B /* VideoSize.swift in Sources */,
2958911F1EEB8E9600CE51E1 /* FLVSoundRate.swift in Sources */,
2941746C22D069B300A2944F /* AudioEffect.swift in Sources */,
BC9CFA9423BDE8B700917EEF /* NetStreamDrawable.swift in Sources */,
@ -2014,7 +2019,7 @@
294852571D852499002DE492 /* RTMPTSocket.swift in Sources */,
BC83A4742403D83B006BDE06 /* VTCompressionSession+Extension.swift in Sources */,
BCC1A72C264FAC1800661156 /* ESSpecificData.swift in Sources */,
BC110258292E661E00D48035 /* MultiCamCaptureSetting.swift in Sources */,
BC110258292E661E00D48035 /* MultiCamCaptureSettings.swift in Sources */,
29B877001CD70D5A00FC07DA /* PacketizedElementaryStream.swift in Sources */,
BC570B4928E9ACC10098A12C /* IOUnit.swift in Sources */,
BC566F6F25D2ECC500573C4C /* HLSService.swift in Sources */,
@ -2078,6 +2083,7 @@
29B8771B1CD70D5A00FC07DA /* ByteArray.swift in Sources */,
295891231EEB8EC500CE51E1 /* FLVAVCPacketType.swift in Sources */,
29EA87DA1E79A00E0043A5F8 /* ExpressibleByIntegerLiteral+Extension.swift in Sources */,
BC7C56B8299E579F00C41A9B /* AudioCodecSettings.swift in Sources */,
29D0E3681DD4CE3700863B3B /* AnyUtil.swift in Sources */,
29B8771C1CD70D5A00FC07DA /* CRC32.swift in Sources */,
2958912B1EEB8F1D00CE51E1 /* FLVSoundSize.swift in Sources */,
@ -2085,6 +2091,7 @@
29B8771D1CD70D5A00FC07DA /* EventDispatcher.swift in Sources */,
BC4914AF28DDF445009E2DF6 /* VTDecompressionSession+Extension.swift in Sources */,
2901A4EF1D437662002BBD23 /* MediaLink.swift in Sources */,
BC7C56BC299E595000C41A9B /* VideoCodecSettings.swift in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
@ -2146,6 +2153,7 @@
29EB3E1A1ED0589B001CAE8B /* NetStream.swift in Sources */,
29EB3E211ED059FB001CAE8B /* RTMPHandshake.swift in Sources */,
29EB3DF41ED05776001CAE8B /* CMBlockBuffer+Extension.swift in Sources */,
BC7C56B9299E579F00C41A9B /* AudioCodecSettings.swift in Sources */,
29EB3DF01ED05768001CAE8B /* VideoCodec.swift in Sources */,
29EB3E351ED05A33001CAE8B /* DeviceUtil.swift in Sources */,
29DC17B521D0CC0600E26CED /* Atomic.swift in Sources */,
@ -2172,9 +2180,8 @@
BCC1A72D264FAC1800661156 /* ESSpecificData.swift in Sources */,
BCB976E126107B5600C9A649 /* TSField.swift in Sources */,
BC4914B028DDF445009E2DF6 /* VTDecompressionSession+Extension.swift in Sources */,
BC7C568C299538CF00C41A9B /* H264Profile.swift in Sources */,
BC4914A828DDD367009E2DF6 /* VTSessionOption.swift in Sources */,
BC110259292E661E00D48035 /* MultiCamCaptureSetting.swift in Sources */,
BC110259292E661E00D48035 /* MultiCamCaptureSettings.swift in Sources */,
BC4914A428DDD33D009E2DF6 /* VTSessionConvertible.swift in Sources */,
2941746D22D069B300A2944F /* AudioEffect.swift in Sources */,
29EB3E151ED0588C001CAE8B /* VideoEffect.swift in Sources */,
@ -2182,6 +2189,7 @@
29EB3DF71ED05797001CAE8B /* URL+Extension.swift in Sources */,
29DF20682312A436004057C3 /* RTMPSocketCompatible.swift in Sources */,
29EB3E0B1ED05871001CAE8B /* TSReader.swift in Sources */,
BC7C56BD299E595000C41A9B /* VideoCodecSettings.swift in Sources */,
297E69142324E38800D418AB /* AudioCodecFormat.swift in Sources */,
29EB3DF51ED05779001CAE8B /* CMFormatDescription+Extension.swift in Sources */,
BC110255292DD6E900D48035 /* vImage_Buffer+Extension.swift in Sources */,
@ -2207,11 +2215,11 @@
29EB3E011ED05856001CAE8B /* HTTPStream.swift in Sources */,
29EB3E171ED05893001CAE8B /* NetClient.swift in Sources */,
BC0D236F26331BAB001DDA0C /* DataBuffer.swift in Sources */,
BC7C56C1299FC38D00C41A9B /* VideoSize.swift in Sources */,
BCB977412621812800C9A649 /* AVCFormatStream.swift in Sources */,
BC4C9EB123F2E736004A14F2 /* AudioStreamBasicDescription+Extension.swift in Sources */,
2958912C1EEB8F1D00CE51E1 /* FLVSoundSize.swift in Sources */,
295891241EEB8EC500CE51E1 /* FLVAVCPacketType.swift in Sources */,
2943ED55232FCA7C00ED6301 /* Setting.swift in Sources */,
BC3004D0296B0A1700119932 /* Shape.swift in Sources */,
29EB3DF31ED05773001CAE8B /* CMAudioFormatDescription+Extension.swift in Sources */,
29EB3DFC1ED057AC001CAE8B /* HTTPRequest.swift in Sources */,

View File

@ -16,37 +16,13 @@ public protocol AudioCodecDelegate: AnyObject {
* - seealso: https://developer.apple.com/library/ios/technotes/tn2236/_index.html
*/
public class AudioCodec {
/// The AudioCodec error domain codes.
enum Error: Swift.Error {
case setPropertyError(id: AudioConverterPropertyID, status: OSStatus)
}
/**
* The audio encoding or decoding options.
*/
public enum Option: String, KeyPathRepresentable {
/// Specifies the bitRate of audio output.
case bitrate
/// Specifies the sampleRate of audio output.
case sampleRate
/// The bitRate of audio output.
case actualBitrate
public var keyPath: AnyKeyPath {
switch self {
case .bitrate:
return \AudioCodec.bitrate
case .sampleRate:
return \AudioCodec.sampleRate
case .actualBitrate:
return \AudioCodec.actualBitrate
}
}
}
/// The default minimum bitrate for an AudioCodec, value is 8000.
public static let minimumBitrate: UInt32 = 8 * 1000
/// The default bitrate for an AudioCodec, the value is 32000.
public static let defaultBitrate: UInt32 = 32 * 1000
/// The default channels for an AudioCodec; the value 0 means it follows the input source.
public static let defaultChannels: UInt32 = 0
/// The default sampleRate for an AudioCodec; the value 0 means it follows the input source.
@ -54,33 +30,28 @@ public class AudioCodec {
/// The default maximum buffers for an AudioCodec.
public static let defaultMaximumBuffers: Int = 1
private static let numSamples: Int = 1024
/// Specifies the output format.
public var destination: AudioCodecFormat = .aac
/// Specifies the delegate.
public weak var delegate: AudioCodecDelegate?
public private(set) var isRunning: Atomic<Bool> = .init(false)
/// Specifies the settings for audio codec.
public var settings: Setting<AudioCodec, Option> = [:] {
public var settings: AudioCodecSettings = .default {
didSet {
settings.observer = self
}
}
private static let numSamples: Int = 1024
var bitrate: UInt32 = AudioCodec.defaultBitrate {
didSet {
guard bitrate != oldValue else {
return
}
lockQueue.async {
if let format = self._inDestinationFormat {
self.setBitrateUntilNoErr(self.bitrate * format.mChannelsPerFrame)
if settings.bitRate != oldValue.bitRate {
lockQueue.async {
if let format = self._inDestinationFormat {
self.setBitrateUntilNoErr(self.settings.bitRate * format.mChannelsPerFrame)
}
}
}
}
}
var sampleRate: Double = AudioCodec.defaultSampleRate
var actualBitrate: UInt32 = AudioCodec.defaultBitrate {
var actualBitrate: UInt32 = AudioCodecSettings.default.bitRate {
didSet {
logger.info(actualBitrate)
}
@ -146,11 +117,6 @@ public class AudioCodec {
)
}
/// Create an AudioCodec instance.
public init() {
settings.observer = self
}
private var _converter: AudioConverterRef?
private var converter: AudioConverterRef {
var status: OSStatus = noErr
@ -163,7 +129,7 @@ public class AudioCodec {
&inClassDescriptions,
&_converter
)
setBitrateUntilNoErr(bitrate * inDestinationFormat.mChannelsPerFrame)
setBitrateUntilNoErr(settings.bitRate * inDestinationFormat.mChannelsPerFrame)
}
if status != noErr {
logger.warn("\(status)")

View File

@ -0,0 +1,15 @@
import Foundation
/// The AudioCodecSettings class specifying audio compression settings.
public struct AudioCodecSettings: Codable {
/// The default value.
public static let `default` = AudioCodecSettings()
/// Specifies the bitRate of audio output.
public var bitRate: UInt32 = 32 * 1000
/// Creates a new AudioCodecSettings instance.
public init(bitRate: UInt32 = 32 * 1000) {
self.bitRate = bitRate
}
}

View File

@ -1,102 +0,0 @@
import Foundation
import VideoToolbox
/// The type of VideoCodec supports H264 profiles.
/// - Notes: For flutter plugin.
public enum H264Profile: String {
/// Baseline Profile.
case baseline
/// Main Profile.
case main
/// High Profile.
case high
func CFString(_ level: H264Level) -> CFString {
switch self {
case .baseline:
switch level {
case .auto:
return kVTProfileLevel_H264_Baseline_AutoLevel
case .level3_0:
return kVTProfileLevel_H264_Baseline_3_0
case .level3_1:
return kVTProfileLevel_H264_Baseline_3_1
case .level3_2:
return kVTProfileLevel_H264_Baseline_3_2
case .level4_0:
return kVTProfileLevel_H264_Baseline_4_0
case .level4_1:
return kVTProfileLevel_H264_Baseline_4_1
case .level4_2:
return kVTProfileLevel_H264_Baseline_4_2
case .level5_0:
return kVTProfileLevel_H264_Baseline_5_0
case .level5_1:
return kVTProfileLevel_H264_Baseline_5_1
case .level5_2:
return kVTProfileLevel_H264_Baseline_5_2
}
case .main:
switch level {
case .auto:
return kVTProfileLevel_H264_Main_AutoLevel
case .level3_0:
return kVTProfileLevel_H264_Main_3_0
case .level3_1:
return kVTProfileLevel_H264_Main_3_1
case .level3_2:
return kVTProfileLevel_H264_Main_3_2
case .level4_0:
return kVTProfileLevel_H264_Main_4_0
case .level4_1:
return kVTProfileLevel_H264_Main_4_1
case .level4_2:
return kVTProfileLevel_H264_Main_4_2
case .level5_0:
return kVTProfileLevel_H264_Main_5_0
case .level5_1:
return kVTProfileLevel_H264_Main_5_1
case .level5_2:
return kVTProfileLevel_H264_Main_5_2
}
case .high:
switch level {
case .auto:
return kVTProfileLevel_H264_High_AutoLevel
case .level3_0:
return kVTProfileLevel_H264_High_3_0
case .level3_1:
return kVTProfileLevel_H264_High_3_1
case .level3_2:
return kVTProfileLevel_H264_High_3_2
case .level4_0:
return kVTProfileLevel_H264_High_4_0
case .level4_1:
return kVTProfileLevel_H264_High_4_1
case .level4_2:
return kVTProfileLevel_H264_High_4_2
case .level5_0:
return kVTProfileLevel_H264_High_5_0
case .level5_1:
return kVTProfileLevel_H264_High_5_1
case .level5_2:
return kVTProfileLevel_H264_High_5_2
}
}
}
}
/// The type of VideoCodec supports profile levels.
/// - Note: For flutter plugin.
public enum H264Level {
case auto
case level3_0
case level3_1
case level3_2
case level4_0
case level4_1
case level4_2
case level5_0
case level5_1
case level5_2
}

View File

@ -11,8 +11,8 @@ enum VTSessionMode {
var session: VTCompressionSession?
var status = VTCompressionSessionCreate(
allocator: kCFAllocatorDefault,
width: videoCodec.width,
height: videoCodec.height,
width: videoCodec.settings.videoSize.width,
height: videoCodec.settings.videoSize.height,
codecType: kCMVideoCodecType_H264,
encoderSpecification: nil,
imageBufferAttributes: videoCodec.attributes as CFDictionary?,
@ -25,7 +25,7 @@ enum VTSessionMode {
videoCodec.delegate?.videoCodec(videoCodec, errorOccurred: .failedToCreate(status: status))
return nil
}
status = session.setOptions(videoCodec.options())
status = session.setOptions(videoCodec.settings.options())
guard status == noErr else {
videoCodec.delegate?.videoCodec(videoCodec, errorOccurred: .failedToPrepare(status: status))
return nil

View File

@ -34,7 +34,7 @@ public class VideoCodec {
#endif
/// A bitRate mode that affects how to encode the video source.
public enum BitRateMode {
public enum BitRateMode: String, Codable {
/// The average bit rate.
case average
/// The constant bit rate.
@ -68,171 +68,29 @@ public class VideoCodec {
case failedToSetOption(status: OSStatus, option: VTSessionOption)
}
/**
* The video encoding or decoding options.
*/
public enum Option: String, KeyPathRepresentable, CaseIterable {
/// Specifies the width of video.
case width
/// Specifies the height of video.
case height
/// Specifies the bitrate.
case bitrate
/// Specifies the H264 profile level.
case profileLevel
#if os(macOS)
/// Specifies the HardwareEncoder is enabled(TRUE), or not(FALSE).
case enabledHardwareEncoder
#endif
/// Specifies the keyframeInterval.
case maxKeyFrameIntervalDuration
/// Specifies the scalingMode.
case scalingMode
/// Specifies the allowFrameRecording.
case allowFrameReordering
/// Specifies the bitRateMode.
case bitRateMode
public var keyPath: AnyKeyPath {
switch self {
case .width:
return \VideoCodec.width
case .height:
return \VideoCodec.height
case .bitrate:
return \VideoCodec.bitrate
#if os(macOS)
case .enabledHardwareEncoder:
return \VideoCodec.enabledHardwareEncoder
#endif
case .maxKeyFrameIntervalDuration:
return \VideoCodec.maxKeyFrameIntervalDuration
case .scalingMode:
return \VideoCodec.scalingMode
case .profileLevel:
return \VideoCodec.profileLevel
case .allowFrameReordering:
return \VideoCodec.allowFrameReordering
case .bitRateMode:
return \VideoCodec.bitRateMode
}
}
}
/// The videoCodec's width value. The default value is 480.
public static let defaultWidth: Int32 = 480
/// The videoCodec's height value. The default value is 272.
public static let defaultHeight: Int32 = 272
/// The videoCodec's bitrate value. The default value is 160,000.
public static let defaultBitrate: UInt32 = 160 * 1000
/// The videoCodec's scalingMode value. The default value is trim.
public static let defaultScalingMode: ScalingMode = .trim
/// The videoCodec's attributes value.
public static var defaultAttributes: [NSString: AnyObject]? = [
kCVPixelBufferIOSurfacePropertiesKey: [:] as AnyObject,
kCVPixelBufferMetalCompatibilityKey: kCFBooleanTrue
]
/// Specifies the settings for a VideoCodec.
public var settings: Setting<VideoCodec, Option> = [:] {
public var settings: VideoCodecSettings = .default {
didSet {
settings.observer = self
let invalidateSession = settings.invalidateSession(oldValue)
if invalidateSession {
self.invalidateSession = invalidateSession
} else {
settings.apply(self, rhs: oldValue)
}
}
}
/// The running value indicating whether the VideoCodec is running.
public private(set) var isRunning: Atomic<Bool> = .init(false)
var scalingMode = VideoCodec.defaultScalingMode {
didSet {
guard scalingMode != oldValue else {
return
}
invalidateSession = true
}
}
var bitRateMode: BitRateMode = .average {
didSet {
guard bitRateMode != oldValue else {
return
}
invalidateSession = true
}
}
var width = VideoCodec.defaultWidth {
didSet {
guard width != oldValue else {
return
}
invalidateSession = true
}
}
var height = VideoCodec.defaultHeight {
didSet {
guard height != oldValue else {
return
}
invalidateSession = true
}
}
#if os(macOS)
var enabledHardwareEncoder = true {
didSet {
guard enabledHardwareEncoder != oldValue else {
return
}
invalidateSession = true
}
}
#endif
var bitrate = VideoCodec.defaultBitrate {
didSet {
guard bitrate != oldValue else {
return
}
let option = VTSessionOption(key: bitRateMode.key, value: NSNumber(value: bitrate))
if let status = session?.setOption(option), status != noErr {
delegate?.videoCodec(self, errorOccurred: .failedToSetOption(status: status, option: option))
}
}
}
var profileLevel = kVTProfileLevel_H264_Baseline_3_1 as String {
didSet {
guard profileLevel != oldValue else {
return
}
invalidateSession = true
}
}
var maxKeyFrameIntervalDuration = 2.0 {
didSet {
guard maxKeyFrameIntervalDuration != oldValue else {
return
}
invalidateSession = true
}
}
// swiftlint:disable discouraged_optional_boolean
var allowFrameReordering: Bool? = false {
didSet {
guard allowFrameReordering != oldValue else {
return
}
invalidateSession = true
}
}
var lockQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.VideoCodec.lock")
var expectedFrameRate = IOMixer.defaultFrameRate {
didSet {
guard expectedFrameRate != oldValue else {
return
}
let option = VTSessionOption(key: .expectedFrameRate, value: NSNumber(value: expectedFrameRate))
if let status = session?.setOption(option), status != noErr {
delegate?.videoCodec(self, errorOccurred: .failedToSetOption(status: status, option: option))
}
}
}
var formatDescription: CMFormatDescription? {
didSet {
guard !CMFormatDescriptionEqual(formatDescription, otherFormatDescription: oldValue) else {
@ -255,14 +113,14 @@ public class VideoCodec {
for (key, value) in VideoCodec.defaultAttributes ?? [:] {
attributes[key] = value
}
attributes[kCVPixelBufferWidthKey] = NSNumber(value: width)
attributes[kCVPixelBufferHeightKey] = NSNumber(value: height)
attributes[kCVPixelBufferWidthKey] = NSNumber(value: settings.videoSize.width)
attributes[kCVPixelBufferHeightKey] = NSNumber(value: settings.videoSize.height)
return attributes
}
weak var delegate: VideoCodecDelegate?
private var lastImageBuffer: CVImageBuffer?
private var session: VTSessionConvertible? {
private(set) var session: VTSessionConvertible? {
didSet {
oldValue?.invalidate()
invalidateSession = false
@ -272,10 +130,6 @@ public class VideoCodec {
private var buffers: [CMSampleBuffer] = []
private var minimumGroupOfPictures: Int = VideoCodec.defaultMinimumGroupOfPictures
init() {
settings.observer = self
}
func inputBuffer(_ imageBuffer: CVImageBuffer, presentationTimeStamp: CMTime, duration: CMTime) {
guard isRunning.value else {
return
@ -360,32 +214,6 @@ public class VideoCodec {
}
}
/// Builds the full set of VTSession options used when creating the compression session.
func options() -> Set<VTSessionOption> {
// Baseline profiles get neither frame reordering (B-frames) nor CABAC entropy coding below.
let isBaseline = profileLevel.contains("Baseline")
var options = Set<VTSessionOption>([
.init(key: .realTime, value: kCFBooleanTrue),
.init(key: .profileLevel, value: profileLevel as NSObject),
.init(key: bitRateMode.key, value: NSNumber(value: bitrate)),
.init(key: .expectedFrameRate, value: NSNumber(value: expectedFrameRate)),
.init(key: .maxKeyFrameIntervalDuration, value: NSNumber(value: maxKeyFrameIntervalDuration)),
// When allowFrameReordering is nil, reordering defaults to on for non-Baseline profiles.
.init(key: .allowFrameReordering, value: (allowFrameReordering ?? !isBaseline) as NSObject),
.init(key: .pixelTransferProperties, value: [
"ScalingMode": scalingMode.rawValue
] as NSObject)
])
#if os(OSX)
// On macOS the hardware encoder must be requested explicitly.
if enabledHardwareEncoder {
options.insert(.init(key: .encoderID, value: VideoCodec.encoderName))
options.insert(.init(key: .enableHardwareAcceleratedVideoEncoder, value: kCFBooleanTrue))
options.insert(.init(key: .requireHardwareAcceleratedVideoEncoder, value: kCFBooleanTrue))
}
#endif
if !isBaseline {
options.insert(.init(key: .H264EntropyMode, value: kVTH264EntropyMode_CABAC))
}
return options
}
#if os(iOS)
@objc
private func applicationWillEnterForeground(_ notification: Notification) {

View File

@ -0,0 +1,101 @@
import Foundation
import VideoToolbox
/// The VideoCodecSettings class specifying video compression settings.
public struct VideoCodecSettings: Codable {
/// The default value.
public static let `default` = VideoCodecSettings()
/// Specifies the video size of encoding video.
public var videoSize: VideoSize
/// Specifies the bitrate.
public var bitRate: UInt32
/// Specifies the keyframe interval, in seconds.
public var maxKeyFrameIntervalDuration: Int32
/// Specifies the scalingMode.
public var scalingMode: ScalingMode
// swiftlint:disable discouraged_optional_boolean
/// Specifies the allowFrameReordering. When nil, reordering is enabled unless the profile is Baseline.
public var allowFrameReordering: Bool?
/// Specifies the bitRateMode.
public var bitRateMode: VideoCodec.BitRateMode
/// Specifies the H264 profileLevel.
public var profileLevel: String
/// Specifies the HardwareEncoder is enabled(TRUE), or not(FALSE) for macOS.
public var isHardwareEncoderEnabled = true
// Managed internally (IOVideoUnit mirrors its frameRate here); intentionally not part of the public initializer.
var expectedFrameRate: Float64 = 30
/// Creates a new VideoCodecSettings instance.
public init(
videoSize: VideoSize = .init(width: 480, height: 272),
profileLevel: String = kVTProfileLevel_H264_Baseline_3_1 as String,
bitRate: UInt32 = 160 * 1000,
maxKeyFrameIntervalDuration: Int32 = 2,
scalingMode: ScalingMode = .trim,
bitRateMode: VideoCodec.BitRateMode = .average,
allowFrameReordering: Bool? = nil,
isHardwareEncoderEnabled: Bool = true
) {
self.videoSize = videoSize
self.profileLevel = profileLevel
self.bitRate = bitRate
self.maxKeyFrameIntervalDuration = maxKeyFrameIntervalDuration
self.scalingMode = scalingMode
self.bitRateMode = bitRateMode
self.allowFrameReordering = allowFrameReordering
self.isHardwareEncoderEnabled = isHardwareEncoderEnabled
}
/// Returns true when the differences from rhs require tearing down and recreating the
/// compression session. bitRate and expectedFrameRate are deliberately excluded: they
/// can be pushed onto a live session via apply(_:rhs:).
func invalidateSession(_ rhs: VideoCodecSettings) -> Bool {
return !(videoSize == rhs.videoSize &&
maxKeyFrameIntervalDuration == rhs.maxKeyFrameIntervalDuration &&
scalingMode == rhs.scalingMode &&
allowFrameReordering == rhs.allowFrameReordering &&
bitRateMode == rhs.bitRateMode &&
profileLevel == rhs.profileLevel &&
isHardwareEncoderEnabled == rhs.isHardwareEncoderEnabled
)
}
/// Applies the properties that can change on a live session (expectedFrameRate and bitRate)
/// to codec, comparing against the previous settings rhs. Failures are reported through
/// the codec's delegate rather than thrown.
func apply(_ codec: VideoCodec, rhs: VideoCodecSettings) {
if expectedFrameRate != rhs.expectedFrameRate {
let option = VTSessionOption(key: .expectedFrameRate, value: NSNumber(value: expectedFrameRate))
if let status = codec.session?.setOption(option), status != noErr {
codec.delegate?.videoCodec(codec, errorOccurred: .failedToSetOption(status: status, option: option))
}
}
if bitRate != rhs.bitRate {
let option = VTSessionOption(key: bitRateMode.key, value: NSNumber(value: bitRate))
if let status = codec.session?.setOption(option), status != noErr {
codec.delegate?.videoCodec(codec, errorOccurred: .failedToSetOption(status: status, option: option))
}
}
}
/// Builds the complete set of VTSession options used when creating a compression session.
func options() -> Set<VTSessionOption> {
// Baseline profiles get neither frame reordering (B-frames) nor CABAC entropy coding below.
let isBaseline = profileLevel.contains("Baseline")
var options = Set<VTSessionOption>([
.init(key: .realTime, value: kCFBooleanTrue),
.init(key: .profileLevel, value: profileLevel as NSObject),
.init(key: bitRateMode.key, value: NSNumber(value: bitRate)),
.init(key: .expectedFrameRate, value: NSNumber(value: expectedFrameRate)),
.init(key: .maxKeyFrameIntervalDuration, value: NSNumber(value: maxKeyFrameIntervalDuration)),
.init(key: .allowFrameReordering, value: (allowFrameReordering ?? !isBaseline) as NSObject),
.init(key: .pixelTransferProperties, value: [
"ScalingMode": scalingMode.rawValue
] as NSObject)
])
#if os(macOS)
// On macOS the hardware encoder must be requested explicitly.
if isHardwareEncoderEnabled {
options.insert(.init(key: .encoderID, value: VideoCodec.encoderName))
options.insert(.init(key: .enableHardwareAcceleratedVideoEncoder, value: kCFBooleanTrue))
options.insert(.init(key: .requireHardwareAcceleratedVideoEncoder, value: kCFBooleanTrue))
}
#endif
if !isBaseline {
options.insert(.init(key: .H264EntropyMode, value: kVTH264EntropyMode_CABAC))
}
return options
}
}

View File

@ -0,0 +1,20 @@
import Foundation
/// A value describing the pixel dimensions (width and height) of a video frame.
public struct VideoSize: Equatable, Codable {
    /// The horizontal dimension, in pixels.
    public let width: Int32
    /// The vertical dimension, in pixels.
    public let height: Int32

    /// Creates a size from the given dimensions.
    public init(width: Int32, height: Int32) {
        self.width = width
        self.height = height
    }

    /// Returns a new size with the width and height exchanged (e.g. for orientation changes).
    public func swap() -> VideoSize {
        .init(width: height, height: width)
    }
}

View File

@ -15,7 +15,6 @@ enum ElementaryStreamType: UInt8 {
case h265 = 0x24
}
struct ElementaryStreamSpecificData {
static let fixedHeaderSize: Int = 5

View File

@ -265,8 +265,8 @@ extension IOMixer: Running {
#endif
guard let device = error.device,
let format = device.videoFormat(
width: sessionPreset.width ?? videoIO.codec.width,
height: sessionPreset.height ?? videoIO.codec.height,
width: sessionPreset.width ?? videoIO.codec.settings.videoSize.width,
height: sessionPreset.height ?? videoIO.codec.settings.videoSize.height,
isMultiCamSupported: isMultiCamSupported
), device.activeFormat != format else {
return

View File

@ -72,7 +72,7 @@ final class IOVideoUnit: NSObject, IOUnit {
guard frameRate != oldValue else {
return
}
codec.expectedFrameRate = frameRate
codec.settings.expectedFrameRate = frameRate
capture.setFrameRate(frameRate)
multiCamCapture.setFrameRate(frameRate)
}
@ -112,7 +112,7 @@ final class IOVideoUnit: NSObject, IOUnit {
private(set) var multiCamCapture: IOVideoCaptureUnit = .init()
#endif
var multiCamCaptureSettings: MultiCamCaptureSetting = .default
var multiCamCaptureSettings: MultiCamCaptureSettings = .default
private var pixelBuffer: CVPixelBuffer?
private var multiCamSampleBuffer: CMSampleBuffer?
@ -247,8 +247,8 @@ final class IOVideoUnit: NSObject, IOUnit {
regionOfInterest: multiCamCaptureSettings.regionOfInterest,
radius: multiCamCaptureSettings.cornerRadius
)
case .splitView(let direction):
buffer.split(multiCamPixelBuffer, direction: direction)
case .splitView:
buffer.split(multiCamPixelBuffer, direction: multiCamCaptureSettings.direction)
}
multiCamPixelBuffer.unlockBaseAddress()
}

View File

@ -1,7 +1,7 @@
import Foundation
/// The type of image transform direction.
public enum ImageTransform {
public enum ImageTransform: String, Codable {
/// The north direction.
case north
/// The south direction.

View File

@ -3,23 +3,24 @@ import CoreMedia
import Foundation
/// The MultiCamCaptureSetting represents the pip capture settings for the video capture.
public struct MultiCamCaptureSetting {
public struct MultiCamCaptureSettings: Codable {
/// The type of image display mode.
public enum Mode {
public enum Mode: String, Codable {
/// The picture in picture mode means video stream playing within an inset window, freeing the rest of the screen for other tasks.
case pip
/// The split view means video stream playing within two individual windows.
case splitView(direction: ImageTransform)
case splitView
}
/// The default setting for the stream.
public static let `default` = MultiCamCaptureSetting(
public static let `default` = MultiCamCaptureSettings(
mode: .pip,
cornerRadius: 16.0,
regionOfInterest: .init(
origin: CGPoint(x: 16, y: 16),
size: .init(width: 160, height: 160)
)
),
direction: .east
)
/// The image display mode.
@ -28,11 +29,14 @@ public struct MultiCamCaptureSetting {
public let cornerRadius: CGFloat
/// The region of the picture in picture image.
public let regionOfInterest: CGRect
/// The direction of the splitView position.
public let direction: ImageTransform
/// Create a new MultiCamCaptureSetting.
public init(mode: Mode, cornerRadius: CGFloat, regionOfInterest: CGRect) {
public init(mode: Mode, cornerRadius: CGFloat, regionOfInterest: CGRect, direction: ImageTransform) {
self.mode = mode
self.cornerRadius = cornerRadius
self.regionOfInterest = regionOfInterest
self.direction = direction
}
}

View File

@ -3,7 +3,7 @@
* - seealso: https://developer.apple.com/documentation/videotoolbox/kvtpixeltransferpropertykey_scalingmode
* - seealso: https://developer.apple.com/documentation/videotoolbox/vtpixeltransfersession/pixel_transfer_properties/scaling_mode_constants
*/
public enum ScalingMode: String {
public enum ScalingMode: String, Codable {
/// kVTScalingMode_Normal
case normal = "Normal"
/// kVTScalingMode_Letterbox:

View File

@ -90,7 +90,7 @@ open class NetStream: NSObject {
}
/// Specifies the multi camera capture properties.
public var multiCamCaptureSettings: MultiCamCaptureSetting {
public var multiCamCaptureSettings: MultiCamCaptureSettings {
get {
mixer.videoIO.multiCamCaptureSettings
}
@ -121,7 +121,7 @@ open class NetStream: NSObject {
}
/// Specifies the audio compression properties.
public var audioSettings: Setting<AudioCodec, AudioCodec.Option> {
public var audioSettings: AudioCodecSettings {
get {
mixer.audioIO.codec.settings
}
@ -131,7 +131,7 @@ open class NetStream: NSObject {
}
/// Specifies the video compression properties.
public var videoSettings: Setting<VideoCodec, VideoCodec.Option> {
public var videoSettings: VideoCodecSettings {
get {
mixer.videoIO.codec.settings
}

View File

@ -312,9 +312,8 @@ open class RTMPConnection: EventDispatcher {
var outputBufferSize: Int = 0
for stream in streams {
// in bytes.
outputBufferSize += Int(stream.mixer.videoIO.codec.bitrate + stream.mixer.audioIO.codec.bitrate) / 8
outputBufferSize += Int(stream.mixer.videoIO.codec.settings.bitRate + stream.mixer.audioIO.codec.settings.bitRate) / 8
}
print(outputBufferSize, socket.outputBufferSize)
if socket.outputBufferSize < outputBufferSize {
socket.outputBufferSize = outputBufferSize
}

View File

@ -183,11 +183,6 @@ open class RTMPStream: NetStream {
}
static let defaultID: UInt32 = 0
/// The default audio bitrate for RTMPStream.
public static let defaultAudioBitrate: UInt32 = AudioCodec.defaultBitrate
/// The default video bitrate for RTMPStream.
public static let defaultVideoBitrate: UInt32 = VideoCodec.defaultBitrate
/// Specifies the delegate of the RTMPStream.
public weak var delegate: RTMPStreamDelegate?
/// The NetStreamInfo object whose properties contain data.
@ -415,15 +410,15 @@ open class RTMPStream: NetStream {
var metadata: [String: Any] = [:]
#if os(iOS) || os(macOS)
if mixer.videoIO.capture.device != nil {
metadata["width"] = mixer.videoIO.codec.width
metadata["height"] = mixer.videoIO.codec.height
metadata["width"] = mixer.videoIO.codec.settings.videoSize.width
metadata["height"] = mixer.videoIO.codec.settings.videoSize.height
metadata["framerate"] = mixer.videoIO.frameRate
metadata["videocodecid"] = FLVVideoCodec.avc.rawValue
metadata["videodatarate"] = mixer.videoIO.codec.bitrate / 1000
metadata["videodatarate"] = mixer.videoIO.codec.settings.bitRate / 1000
}
if mixer.audioIO.capture.device != nil {
metadata["audiocodecid"] = FLVAudioCodec.aac.rawValue
metadata["audiodatarate"] = mixer.audioIO.codec.bitrate / 1000
metadata["audiodatarate"] = mixer.audioIO.codec.settings.bitRate / 1000
if let sampleRate = mixer.audioIO.codec.inSourceFormat?.mSampleRate {
metadata["audiosamplerate"] = sampleRate
}

View File

@ -1,170 +0,0 @@
import AVFoundation
import Foundation
/// A key type that maps each of its cases onto a key path, used as the Key of `Setting`.
public protocol KeyPathRepresentable: Hashable, CaseIterable {
/// The key path into the observed object that this key reads and writes.
var keyPath: AnyKeyPath { get }
}
/// A dictionary-literal settings proxy that forwards values through key paths onto a
/// weakly-held observer object. Values assigned before an observer is attached are
/// buffered and replayed when `observer` is set.
public class Setting<T: AnyObject, Key: KeyPathRepresentable>: ExpressibleByDictionaryLiteral {
public typealias Key = Key
public typealias Value = Any
// Attaching an observer replays any buffered (key, value) pairs, then clears the buffer.
weak var observer: T? {
didSet {
for (key, value) in elements {
self[key] = value
}
elements.removeAll()
}
}
// Values assigned via the dictionary literal before an observer exists.
private var elements: [(Key, Any)] = []
public required init(dictionaryLiteral elements: (Key, Any)...) {
self.elements = elements
}
/// Reads or writes the observer property addressed by `key`.
/// Writes go through a type switch because `AnyKeyPath` erases the value type;
/// keys whose value type is not handled below are silently ignored.
public subscript(key: Key) -> Any? {
get {
observer?[keyPath: key.keyPath]
}
set {
switch key.keyPath {
case let path as ReferenceWritableKeyPath<T, Bool>:
if let newValue = newValue as? Bool {
observer?[keyPath: path] = newValue
}
case let path as ReferenceWritableKeyPath<T, UInt32>:
if let newValue = toUInt32(value: newValue) {
observer?[keyPath: path] = newValue
}
case let path as ReferenceWritableKeyPath<T, Int32>:
if let newValue = toInt32(value: newValue) {
observer?[keyPath: path] = newValue
}
case let path as ReferenceWritableKeyPath<T, Double>:
if let newValue = toDouble(value: newValue) {
observer?[keyPath: path] = newValue
}
case let path as ReferenceWritableKeyPath<T, String>:
if let newValue = newValue as? String {
observer?[keyPath: path] = newValue
}
case let path as ReferenceWritableKeyPath<T, ScalingMode>:
if let newValue = newValue as? ScalingMode {
observer?[keyPath: path] = newValue
}
case let path as ReferenceWritableKeyPath<T, VideoCodec.BitRateMode>:
if let newValue = newValue as? VideoCodec.BitRateMode {
observer?[keyPath: path] = newValue
}
#if os(iOS)
case let path as ReferenceWritableKeyPath<T, AVCaptureVideoStabilizationMode>:
if let newValue = newValue as? AVCaptureVideoStabilizationMode {
observer?[keyPath: path] = newValue
}
#endif
#if !os(tvOS)
case let path as ReferenceWritableKeyPath<T, AVCaptureSession.Preset>:
if let newValue = newValue as? AVCaptureSession.Preset {
observer?[keyPath: path] = newValue
}
#endif
default:
return
}
}
}
// Lenient numeric coercion so dictionary literals may use any common numeric type.
// Returns nil for non-numeric input.
private func toDouble(value: Any?) -> Double? {
switch value {
case let value as Float:
return Double(value)
case let value as Double:
return value
case let value as Int:
return Double(value)
case let value as CGFloat:
return Double(value)
default:
return nil
}
}
// Coerces any common integer or floating-point type to UInt32; nil for non-numeric input.
private func toUInt32(value: Any?) -> UInt32? {
switch value {
case let value as Int:
return numericCast(value)
case let value as Int8:
return numericCast(value)
case let value as Int16:
return numericCast(value)
case let value as Int32:
return numericCast(value)
case let value as Int64:
return numericCast(value)
case let value as UInt:
return numericCast(value)
case let value as UInt8:
return numericCast(value)
case let value as UInt16:
return numericCast(value)
case let value as UInt32:
return value
case let value as UInt64:
return numericCast(value)
case let value as Double:
return UInt32(value)
case let value as Float:
return UInt32(value)
case let value as CGFloat:
return UInt32(value)
default:
return nil
}
}
// Coerces any common integer or floating-point type to Int32; nil for non-numeric input.
private func toInt32(value: Any?) -> Int32? {
switch value {
case let value as Int:
return numericCast(value)
case let value as Int8:
return numericCast(value)
case let value as Int16:
return numericCast(value)
case let value as Int32:
return value
case let value as Int64:
return numericCast(value)
case let value as UInt:
return numericCast(value)
case let value as UInt8:
return numericCast(value)
case let value as UInt16:
return numericCast(value)
case let value as UInt32:
return numericCast(value)
case let value as UInt64:
return numericCast(value)
case let value as Double:
return Int32(value)
case let value as Float:
return Int32(value)
case let value as CGFloat:
return Int32(value)
default:
return nil
}
}
}
extension Setting: CustomDebugStringConvertible {
// MARK: CustomDebugStringConvertible
/// A snapshot of every key's current value as read from the observer.
public var debugDescription: String {
var data: [Key: Any] = [:]
for key in Key.allCases {
data[key] = observer?[keyPath: key.keyPath]
}
return data.description
}
}

View File

@ -5,32 +5,5 @@ import AVFoundation
@testable import HaishinKit
final class VideoCodecTests: XCTestCase {
// NOTE(review): "testSettigs" is a typo for "testSettings"; renaming is a code change, left as-is.
// Verifies that the Setting subscript reads back the codec's stored properties and that
// writes coerce common numeric types (Int8, CGFloat, Float) onto the typed properties.
func testSettigs() {
let codec = VideoCodec()
XCTAssertEqual(codec.settings[.width] as? Int32, codec.width)
XCTAssertEqual(codec.settings[.height] as? Int32, codec.height)
XCTAssertEqual(codec.settings[.profileLevel] as? String, codec.profileLevel)
XCTAssertEqual(codec.settings[.scalingMode] as? ScalingMode, codec.scalingMode)
XCTAssertEqual(codec.settings[.maxKeyFrameIntervalDuration] as? Double, codec.maxKeyFrameIntervalDuration)
XCTAssertEqual(codec.settings[.bitRateMode] as? VideoCodec.BitRateMode, codec.bitRateMode)
// Int8 write is coerced to the Int32-typed width.
codec.settings[.width] = Int8(100)
XCTAssertEqual(100, codec.width)
// CGFloat write is coerced to the Int32-typed height.
let cgfloatHeight: CGFloat = 200
codec.settings[.height] = cgfloatHeight
XCTAssertEqual(200, codec.height)
codec.settings[.scalingMode] = ScalingMode.letterbox
XCTAssertEqual(codec.settings[.scalingMode] as? ScalingMode, ScalingMode.letterbox)
// Float write is coerced to the Double-typed keyframe interval.
codec.settings[.maxKeyFrameIntervalDuration] = Float(5.0)
XCTAssertEqual(5.0, codec.maxKeyFrameIntervalDuration)
if #available(iOS 16.0, *) {
codec.settings[.bitRateMode] = VideoCodec.BitRateMode.constant
XCTAssertEqual(VideoCodec.BitRateMode.constant, codec.bitRateMode)
}
}
}

View File

@ -8,8 +8,8 @@ final class IOMixerTests: XCTestCase {
weak var weakIOMixer: IOMixer?
_ = {
let mixer = IOMixer()
mixer.audioIO.codec.bitrate = 1000
mixer.videoIO.codec.bitrate = 1000
mixer.audioIO.codec.settings.bitRate = 100000
mixer.videoIO.codec.settings.bitRate = 100000
weakIOMixer = mixer
}()
XCTAssertNil(weakIOMixer)

View File

@ -4,17 +4,4 @@ import XCTest
@testable import HaishinKit
final class NetStreamTests: XCTestCase {
// Verifies that dictionary-literal video settings assigned to a NetStream read back
// through the settings subscript.
// NOTE(review): `3000 * 0000` evaluates to 0, so the bitrate assertion is vacuous —
// presumably `3000 * 1000` was intended. The profileLevel string "AudoLevel" also looks
// like a typo for "AutoLevel"; both are runtime values, left unchanged here.
func testVideoSettings() {
let stream = NetStream()
stream.videoSettings = [
.profileLevel: "H264_Main_AudoLevel",
.bitrate: 3000 * 0000,
.width: 700,
.height: 1400
]
XCTAssertEqual("H264_Main_AudoLevel", stream.videoSettings[.profileLevel] as? String)
XCTAssertEqual(3000 * 0000, stream.videoSettings[.bitrate] as? UInt32)
XCTAssertEqual(700, stream.videoSettings[.width] as? Int32)
XCTAssertEqual(1400, stream.videoSettings[.height] as? Int32)
}
}