Compare commits
180 Commits
Author | SHA1 | Date |
---|---|---|
![]() |
1d851afe32 | |
![]() |
74ca73795a | |
![]() |
b383042509 | |
![]() |
56e817669f | |
![]() |
442077e218 | |
![]() |
0d3e7b7572 | |
![]() |
7867c44a27 | |
![]() |
cfba9b6be2 | |
![]() |
c6901ba7a1 | |
![]() |
0994741b7c | |
![]() |
2310c2403c | |
![]() |
876ccf1699 | |
![]() |
f6489f866a | |
![]() |
b5c59a7424 | |
![]() |
dd1899c8e0 | |
![]() |
1b1d613950 | |
![]() |
52ad7d6ad4 | |
![]() |
1cebb9c91c | |
![]() |
591e090925 | |
![]() |
5c09825119 | |
![]() |
5d4787383a | |
![]() |
6cef14c59d | |
![]() |
eef9eb0cb5 | |
![]() |
8675b37473 | |
![]() |
b252405083 | |
![]() |
d72ae256da | |
![]() |
cd57781dd3 | |
![]() |
28da84c16c | |
![]() |
39c03c3e17 | |
![]() |
7b4f896c16 | |
![]() |
efa5c5e980 | |
![]() |
da0abce59d | |
![]() |
915758d0ec | |
![]() |
f7f6ee42b0 | |
![]() |
fdc9aa3272 | |
![]() |
263689ffaa | |
![]() |
434f6df596 | |
![]() |
3d0e6fe470 | |
![]() |
8dfdfd991c | |
![]() |
954a3c85f4 | |
![]() |
da2fd3c8df | |
![]() |
c84dce18ba | |
![]() |
0f543dddbd | |
![]() |
e14c6f0ecf | |
![]() |
d5fd16b334 | |
![]() |
7f3bb91ac4 | |
![]() |
ca931ec15e | |
![]() |
61c0715328 | |
![]() |
727a2af70f | |
![]() |
9913bbfea7 | |
![]() |
fc76b0886c | |
![]() |
3ad1f17e63 | |
![]() |
e44bf9187e | |
![]() |
534076025b | |
![]() |
252ab16434 | |
![]() |
42f060a8b3 | |
![]() |
8ef80f98fb | |
![]() |
dc06244df2 | |
![]() |
05897ff5e7 | |
![]() |
a3a9e85c89 | |
![]() |
7ca0185652 | |
![]() |
3449811213 | |
![]() |
08979c7dfd | |
![]() |
0951831663 | |
![]() |
39a489ca6a | |
![]() |
e96705580d | |
![]() |
d1f78c9e70 | |
![]() |
701cd70d76 | |
![]() |
a4ee619245 | |
![]() |
7494034938 | |
![]() |
bae06f303d | |
![]() |
69f0f1415f | |
![]() |
50ae37d59c | |
![]() |
986e6417b5 | |
![]() |
da030cc5f2 | |
![]() |
c07d57e343 | |
![]() |
897010e9ed | |
![]() |
28ae653209 | |
![]() |
b1c2c27534 | |
![]() |
0bcfe9aea1 | |
![]() |
fa9347379d | |
![]() |
7efcc6f4d7 | |
![]() |
e4651e0889 | |
![]() |
9b8b709a61 | |
![]() |
1158799a97 | |
![]() |
d470ada4cd | |
![]() |
0be9a79090 | |
![]() |
068308c6a2 | |
![]() |
bb54f7ada3 | |
![]() |
b4ae19fc2a | |
![]() |
1da2b6f5c6 | |
![]() |
f997eac673 | |
![]() |
74e167c4b2 | |
![]() |
37d3e41079 | |
![]() |
b219c27648 | |
![]() |
9dda67511e | |
![]() |
6222479abc | |
![]() |
622e67449d | |
![]() |
40d6d5c2b5 | |
![]() |
c05722ade2 | |
![]() |
29306a5e18 | |
![]() |
0ba6595cdb | |
![]() |
f223eaa71e | |
![]() |
edb94bf580 | |
![]() |
3c7b5b7147 | |
![]() |
a89613b840 | |
![]() |
5fe41172cd | |
![]() |
542c9026d3 | |
![]() |
357a16c358 | |
![]() |
909823740f | |
![]() |
fdd9a2a958 | |
![]() |
37d27f1f90 | |
![]() |
21e9d4bce9 | |
![]() |
76dc71fecb | |
![]() |
38e7440b04 | |
![]() |
cc7c578a3a | |
![]() |
a22a51080d | |
![]() |
513079bb5c | |
![]() |
1ae79f5b4e | |
![]() |
005eba1036 | |
![]() |
00843a3cf9 | |
![]() |
ad3d88a593 | |
![]() |
408b6b5886 | |
![]() |
ac09af04bf | |
![]() |
0715282a55 | |
![]() |
e2ef1ae0c0 | |
![]() |
d44d1187af | |
![]() |
0f8fd4569f | |
![]() |
83e55bee8e | |
![]() |
c2b37f416f | |
![]() |
67db4b55fd | |
![]() |
b9bcf572aa | |
![]() |
1e1ae9d97a | |
![]() |
cea6194b17 | |
![]() |
be347428a8 | |
![]() |
de39939755 | |
![]() |
18132ec3c3 | |
![]() |
0a17faddda | |
![]() |
a3fde220a7 | |
![]() |
26b8b1cdf6 | |
![]() |
0058acaebc | |
![]() |
b7d55c66f5 | |
![]() |
060129d8b7 | |
![]() |
912b5cafd6 | |
![]() |
9c3fe4a084 | |
![]() |
728bc444f7 | |
![]() |
b7bd39e501 | |
![]() |
6870656f4a | |
![]() |
7d03dd6829 | |
![]() |
00abb4cf23 | |
![]() |
1325264e4d | |
![]() |
66c3a0837b | |
![]() |
96bcff339e | |
![]() |
dcde99d9b4 | |
![]() |
dfe0d1a280 | |
![]() |
ec24c44b94 | |
![]() |
dbb57f225a | |
![]() |
94c98eea69 | |
![]() |
385c16a913 | |
![]() |
57eccf99e6 | |
![]() |
9c1d7a6838 | |
![]() |
c67564cb46 | |
![]() |
7c8726f65a | |
![]() |
07562eda58 | |
![]() |
d627d7f87b | |
![]() |
218eb51b10 | |
![]() |
73bedb5eec | |
![]() |
cdedd898b4 | |
![]() |
c1686fdccf | |
![]() |
be03719072 | |
![]() |
ec1f4f8216 | |
![]() |
4837a789c5 | |
![]() |
4734a5f5a8 | |
![]() |
a5894dd58e | |
![]() |
4df8fdc98a | |
![]() |
5a726c4d2f | |
![]() |
8aeca904df | |
![]() |
f8c43ba01e | |
![]() |
aafe0e40a0 | |
![]() |
38bf2afdce |
|
@ -1 +1 @@
|
|||
2.7.4
|
||||
3.0.6
|
||||
|
|
2
Cartfile
2
Cartfile
|
@ -1 +1 @@
|
|||
github "shogo4405/Logboard" ~> 2.3.0
|
||||
github "shogo4405/Logboard" ~> 2.3.1
|
||||
|
|
|
@ -26,13 +26,16 @@ final class LiveViewController: UIViewController {
|
|||
private var currentEffect: VideoEffect?
|
||||
private var currentPosition: AVCaptureDevice.Position = .back
|
||||
private var retryCount: Int = 0
|
||||
private var videoBitRate = VideoCodecSettings.default.bitRate
|
||||
|
||||
override func viewDidLoad() {
|
||||
super.viewDidLoad()
|
||||
|
||||
rtmpConnection.delegate = self
|
||||
|
||||
pipIntentView.layer.borderWidth = 1.0
|
||||
pipIntentView.layer.borderColor = UIColor.white.cgColor
|
||||
pipIntentView.bounds = MultiCamCaptureSetting.default.regionOfInterest
|
||||
pipIntentView.bounds = MultiCamCaptureSettings.default.regionOfInterest
|
||||
pipIntentView.isUserInteractionEnabled = true
|
||||
view.addSubview(pipIntentView)
|
||||
|
||||
|
@ -40,14 +43,25 @@ final class LiveViewController: UIViewController {
|
|||
if let orientation = DeviceUtil.videoOrientation(by: UIApplication.shared.statusBarOrientation) {
|
||||
rtmpStream.videoOrientation = orientation
|
||||
}
|
||||
rtmpStream.videoSettings = [
|
||||
.width: 720,
|
||||
.height: 1280
|
||||
]
|
||||
rtmpStream.mixer.recorder.delegate = self
|
||||
|
||||
videoBitrateSlider?.value = Float(RTMPStream.defaultVideoBitrate) / 1000
|
||||
audioBitrateSlider?.value = Float(RTMPStream.defaultAudioBitrate) / 1000
|
||||
rtmpStream.audioSettings = AudioCodecSettings(
|
||||
bitRate: 64 * 1000
|
||||
)
|
||||
|
||||
rtmpStream.videoSettings = VideoCodecSettings(
|
||||
videoSize: .init(width: 854, height: 480),
|
||||
profileLevel: kVTProfileLevel_H264_Baseline_3_1 as String,
|
||||
bitRate: 640 * 1000,
|
||||
maxKeyFrameIntervalDuration: 2,
|
||||
scalingMode: .trim,
|
||||
bitRateMode: .average,
|
||||
allowFrameReordering: nil,
|
||||
isHardwareEncoderEnabled: true
|
||||
)
|
||||
|
||||
rtmpStream.mixer.recorder.delegate = self
|
||||
videoBitrateSlider?.value = Float(VideoCodecSettings.default.bitRate) / 1000
|
||||
audioBitrateSlider?.value = Float(AudioCodecSettings.default.bitRate) / 1000
|
||||
|
||||
NotificationCenter.default.addObserver(self, selector: #selector(on(_:)), name: UIDevice.orientationDidChangeNotification, object: nil)
|
||||
}
|
||||
|
@ -64,6 +78,7 @@ final class LiveViewController: UIViewController {
|
|||
}
|
||||
if #available(iOS 13.0, *) {
|
||||
let front = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front)
|
||||
rtmpStream.videoCapture(for: 1)?.isVideoMirrored = true
|
||||
rtmpStream.attachMultiCamera(front)
|
||||
}
|
||||
rtmpStream.addObserver(self, forKeyPath: "currentFPS", options: .new, context: nil)
|
||||
|
@ -77,6 +92,11 @@ final class LiveViewController: UIViewController {
|
|||
super.viewWillDisappear(animated)
|
||||
rtmpStream.removeObserver(self, forKeyPath: "currentFPS")
|
||||
rtmpStream.close()
|
||||
rtmpStream.attachAudio(nil)
|
||||
rtmpStream.attachCamera(nil)
|
||||
if #available(iOS 13.0, *) {
|
||||
rtmpStream.attachMultiCamera(nil)
|
||||
}
|
||||
// swiftlint:disable notification_center_detachment
|
||||
NotificationCenter.default.removeObserver(self)
|
||||
}
|
||||
|
@ -101,10 +121,11 @@ final class LiveViewController: UIViewController {
|
|||
currentFrame.origin.x += deltaX
|
||||
currentFrame.origin.y += deltaY
|
||||
pipIntentView.frame = currentFrame
|
||||
rtmpStream.multiCamCaptureSettings = MultiCamCaptureSetting(
|
||||
rtmpStream.multiCamCaptureSettings = MultiCamCaptureSettings(
|
||||
mode: rtmpStream.multiCamCaptureSettings.mode,
|
||||
cornerRadius: 16.0,
|
||||
regionOfInterest: currentFrame
|
||||
regionOfInterest: currentFrame,
|
||||
direction: .east
|
||||
)
|
||||
}
|
||||
}
|
||||
|
@ -112,10 +133,12 @@ final class LiveViewController: UIViewController {
|
|||
@IBAction func rotateCamera(_ sender: UIButton) {
|
||||
logger.info("rotateCamera")
|
||||
let position: AVCaptureDevice.Position = currentPosition == .back ? .front : .back
|
||||
rtmpStream.videoCapture(for: 0)?.isVideoMirrored = position == .front
|
||||
rtmpStream.attachCamera(AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: position)) { error in
|
||||
logger.warn(error)
|
||||
}
|
||||
if #available(iOS 13.0, *) {
|
||||
rtmpStream.videoCapture(for: 1)?.isVideoMirrored = currentPosition == .front
|
||||
rtmpStream.attachMultiCamera(AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: currentPosition)) { error in
|
||||
logger.warn(error)
|
||||
}
|
||||
|
@ -130,11 +153,11 @@ final class LiveViewController: UIViewController {
|
|||
@IBAction func on(slider: UISlider) {
|
||||
if slider == audioBitrateSlider {
|
||||
audioBitrateLabel?.text = "audio \(Int(slider.value))/kbps"
|
||||
rtmpStream.audioSettings[.bitrate] = slider.value * 1000
|
||||
rtmpStream.audioSettings.bitRate = Int(slider.value * 1000)
|
||||
}
|
||||
if slider == videoBitrateSlider {
|
||||
videoBitrateLabel?.text = "video \(Int(slider.value))/kbps"
|
||||
rtmpStream.videoSettings[.bitrate] = slider.value * 1000
|
||||
rtmpStream.videoSettings.bitRate = UInt32(slider.value * 1000)
|
||||
}
|
||||
if slider == zoomSlider {
|
||||
let zoomFactor = CGFloat(slider.value)
|
||||
|
@ -273,6 +296,26 @@ final class LiveViewController: UIViewController {
|
|||
}
|
||||
}
|
||||
|
||||
extension LiveViewController: RTMPConnectionDelegate {
|
||||
func connection(_ connection: RTMPConnection, publishInsufficientBWOccured stream: RTMPStream) {
|
||||
// Adaptive bitrate streaming exsample. Please feedback me your good algorithm. :D
|
||||
videoBitRate -= 32 * 1000
|
||||
stream.videoSettings.bitRate = max(videoBitRate, 64 * 1000)
|
||||
}
|
||||
|
||||
func connection(_ connection: RTMPConnection, publishSufficientBWOccured stream: RTMPStream) {
|
||||
videoBitRate += 32 * 1000
|
||||
stream.videoSettings.bitRate = min(videoBitRate, VideoCodecSettings.default.bitRate)
|
||||
}
|
||||
|
||||
func connection(_ connection: RTMPConnection, updateStats stream: RTMPStream) {
|
||||
}
|
||||
|
||||
func connection(_ connection: RTMPConnection, didClear stream: RTMPStream) {
|
||||
videoBitRate = VideoCodecSettings.default.bitRate
|
||||
}
|
||||
}
|
||||
|
||||
extension LiveViewController: IORecorderDelegate {
|
||||
// MARK: IORecorderDelegate
|
||||
func recorder(_ recorder: IORecorder, errorOccured error: IORecorder.Error) {
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="20037" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES" initialViewController="49e-Tb-3d3">
|
||||
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="21701" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES" initialViewController="49e-Tb-3d3">
|
||||
<device id="retina6_1" orientation="portrait" appearance="light"/>
|
||||
<dependencies>
|
||||
<deployment identifier="iOS"/>
|
||||
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="20020"/>
|
||||
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="21678"/>
|
||||
<capability name="Safe area layout guides" minToolsVersion="9.0"/>
|
||||
<capability name="System colors in document resources" minToolsVersion="11.0"/>
|
||||
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
|
||||
|
@ -22,7 +22,7 @@
|
|||
<viewLayoutGuide key="safeArea" id="h8f-2Q-C5a"/>
|
||||
<color key="backgroundColor" red="0.0" green="0.0" blue="0.0" alpha="1" colorSpace="calibratedRGB"/>
|
||||
</view>
|
||||
<slider opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" value="32" minValue="15" maxValue="120" translatesAutoresizingMaskIntoConstraints="NO" id="aKS-oc-LrT">
|
||||
<slider opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" value="64" minValue="15" maxValue="120" translatesAutoresizingMaskIntoConstraints="NO" id="aKS-oc-LrT">
|
||||
<rect key="frame" x="14" y="775" width="340" height="31"/>
|
||||
<connections>
|
||||
<action selector="onSlider:" destination="9pv-A4-QxB" eventType="valueChanged" id="ICf-sz-Jsg"/>
|
||||
|
@ -30,14 +30,14 @@
|
|||
</connections>
|
||||
</slider>
|
||||
<button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="system" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="LTk-1V-jZa">
|
||||
<rect key="frame" x="259" y="44" width="54" height="30"/>
|
||||
<rect key="frame" x="259" y="48" width="54" height="30"/>
|
||||
<state key="normal" title="Camera"/>
|
||||
<connections>
|
||||
<action selector="rotateCamera:" destination="9pv-A4-QxB" eventType="touchDown" id="516-MC-1k2"/>
|
||||
</connections>
|
||||
</button>
|
||||
<segmentedControl opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="left" contentVerticalAlignment="top" segmentControlStyle="plain" selectedSegmentIndex="0" translatesAutoresizingMaskIntoConstraints="NO" id="2Sy-na-foy">
|
||||
<rect key="frame" x="206" y="82" width="200" height="32"/>
|
||||
<rect key="frame" x="206" y="86" width="200" height="32"/>
|
||||
<constraints>
|
||||
<constraint firstAttribute="width" constant="200" id="RrQ-qe-7IF"/>
|
||||
</constraints>
|
||||
|
@ -51,13 +51,13 @@
|
|||
</connections>
|
||||
</segmentedControl>
|
||||
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="FPS" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="YMl-Xb-JZb">
|
||||
<rect key="frame" x="8" y="44" width="31" height="21"/>
|
||||
<rect key="frame" x="8" y="48" width="31" height="21"/>
|
||||
<fontDescription key="fontDescription" type="system" pointSize="17"/>
|
||||
<color key="textColor" red="1" green="1" blue="1" alpha="1" colorSpace="calibratedRGB"/>
|
||||
<nil key="highlightedColor"/>
|
||||
</label>
|
||||
<segmentedControl opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="left" contentVerticalAlignment="top" segmentControlStyle="plain" selectedSegmentIndex="1" translatesAutoresizingMaskIntoConstraints="NO" id="fbC-rC-wNg">
|
||||
<rect key="frame" x="206" y="121" width="200" height="32"/>
|
||||
<rect key="frame" x="206" y="125" width="200" height="32"/>
|
||||
<constraints>
|
||||
<constraint firstAttribute="width" constant="200" id="BBl-Vi-PoJ"/>
|
||||
</constraints>
|
||||
|
@ -79,26 +79,26 @@
|
|||
<action selector="onSlider:" destination="9pv-A4-QxB" eventType="valueChanged" id="IS3-vj-jFX"/>
|
||||
</connections>
|
||||
</slider>
|
||||
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="video 160/kbps" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="dLf-ee-K3I">
|
||||
<rect key="frame" x="236" y="732" width="116" height="21"/>
|
||||
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="video 640/kbps" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="dLf-ee-K3I">
|
||||
<rect key="frame" x="233" y="732" width="119" height="21"/>
|
||||
<fontDescription key="fontDescription" type="system" pointSize="17"/>
|
||||
<color key="textColor" red="1" green="1" blue="1" alpha="1" colorSpace="calibratedRGB"/>
|
||||
<nil key="highlightedColor"/>
|
||||
</label>
|
||||
<slider opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" value="160" minValue="32" maxValue="1024" translatesAutoresizingMaskIntoConstraints="NO" id="4s5-OW-qAO">
|
||||
<slider opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" value="640" minValue="32" maxValue="2048" translatesAutoresizingMaskIntoConstraints="NO" id="4s5-OW-qAO">
|
||||
<rect key="frame" x="14" y="737" width="340" height="31"/>
|
||||
<connections>
|
||||
<action selector="onSlider:" destination="9pv-A4-QxB" eventType="valueChanged" id="Nm5-Xr-jcw"/>
|
||||
</connections>
|
||||
</slider>
|
||||
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="audio 32/kbps" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="gR3-9k-qhK">
|
||||
<rect key="frame" x="243.5" y="770" width="108.5" height="21"/>
|
||||
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="audio 64/kbps" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="gR3-9k-qhK">
|
||||
<rect key="frame" x="242.5" y="770" width="109.5" height="21"/>
|
||||
<fontDescription key="fontDescription" type="system" pointSize="17"/>
|
||||
<color key="textColor" red="1" green="1" blue="1" alpha="1" colorSpace="calibratedRGB"/>
|
||||
<nil key="highlightedColor"/>
|
||||
</label>
|
||||
<button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="system" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="oVn-9L-n2U">
|
||||
<rect key="frame" x="329" y="44" width="39" height="30"/>
|
||||
<rect key="frame" x="329" y="48" width="39" height="30"/>
|
||||
<state key="normal" title="Torch"/>
|
||||
<connections>
|
||||
<action selector="toggleTorch:" destination="9pv-A4-QxB" eventType="touchDown" id="gY1-x2-YlF"/>
|
||||
|
@ -123,7 +123,7 @@
|
|||
</connections>
|
||||
</button>
|
||||
<button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="system" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="yIo-MW-aK8">
|
||||
<rect key="frame" x="384" y="44" width="30" height="30"/>
|
||||
<rect key="frame" x="384" y="48" width="30" height="30"/>
|
||||
<state key="normal" title="❌"/>
|
||||
<connections>
|
||||
<action selector="onClose:" destination="9pv-A4-QxB" eventType="touchDown" id="d0Y-4e-dGf"/>
|
||||
|
@ -195,7 +195,7 @@
|
|||
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
|
||||
<subviews>
|
||||
<textField opaque="NO" clipsSubviews="YES" contentMode="scaleToFill" contentHorizontalAlignment="left" contentVerticalAlignment="center" borderStyle="roundedRect" textAlignment="natural" minimumFontSize="17" translatesAutoresizingMaskIntoConstraints="NO" id="Hiy-yh-Bwn">
|
||||
<rect key="frame" x="8" y="52" width="394" height="34"/>
|
||||
<rect key="frame" x="8" y="56" width="394" height="34"/>
|
||||
<fontDescription key="fontDescription" type="system" pointSize="14"/>
|
||||
<textInputTraits key="textInputTraits"/>
|
||||
<connections>
|
||||
|
@ -203,7 +203,7 @@
|
|||
</connections>
|
||||
</textField>
|
||||
<textField opaque="NO" clipsSubviews="YES" contentMode="scaleToFill" contentHorizontalAlignment="left" contentVerticalAlignment="center" borderStyle="roundedRect" textAlignment="natural" minimumFontSize="17" translatesAutoresizingMaskIntoConstraints="NO" id="A5Y-FA-epc">
|
||||
<rect key="frame" x="8" y="94" width="246" height="34"/>
|
||||
<rect key="frame" x="8" y="98" width="246" height="34"/>
|
||||
<fontDescription key="fontDescription" type="system" pointSize="14"/>
|
||||
<textInputTraits key="textInputTraits"/>
|
||||
<connections>
|
||||
|
@ -304,7 +304,7 @@
|
|||
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
|
||||
<subviews>
|
||||
<button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="system" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="p4J-1x-O1c">
|
||||
<rect key="frame" x="369" y="768" width="30" height="30"/>
|
||||
<rect key="frame" x="369" y="734" width="30" height="30"/>
|
||||
<color key="backgroundColor" red="0.0" green="0.0" blue="1" alpha="1" colorSpace="calibratedRGB"/>
|
||||
<constraints>
|
||||
<constraint firstAttribute="width" constant="30" id="8DG-lB-HSj"/>
|
||||
|
@ -317,7 +317,7 @@
|
|||
</connections>
|
||||
</button>
|
||||
<button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="system" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="Nie-97-pLL">
|
||||
<rect key="frame" x="369" y="723" width="30" height="30"/>
|
||||
<rect key="frame" x="369" y="689" width="30" height="30"/>
|
||||
<color key="backgroundColor" red="0.0" green="0.0" blue="1" alpha="1" colorSpace="calibratedRGB"/>
|
||||
<constraints>
|
||||
<constraint firstAttribute="height" constant="30" id="6VI-Zc-kOn"/>
|
||||
|
|
|
@ -21,19 +21,14 @@ final class PlaybackViewController: UIViewController {
|
|||
override func viewWillAppear(_ animated: Bool) {
|
||||
logger.info("viewWillAppear")
|
||||
super.viewWillAppear(animated)
|
||||
(view as? MTHKView)?.attachStream(rtmpStream)
|
||||
(view as? PiPHKView)?.attachStream(rtmpStream)
|
||||
NotificationCenter.default.addObserver(self, selector: #selector(didInterruptionNotification(_:)), name: AVAudioSession.interruptionNotification, object: nil)
|
||||
NotificationCenter.default.addObserver(self, selector: #selector(didRouteChangeNotification(_:)), name: AVAudioSession.routeChangeNotification, object: nil)
|
||||
if let layer = view.layer as? AVSampleBufferDisplayLayer, #available(iOS 15.0, *) {
|
||||
(view as? (any NetStreamDrawable))?.attachStream(rtmpStream)
|
||||
if #available(iOS 15.0, *), let layer = view.layer as? AVSampleBufferDisplayLayer {
|
||||
pictureInPictureController = AVPictureInPictureController(contentSource: .init(sampleBufferDisplayLayer: layer, playbackDelegate: self))
|
||||
}
|
||||
}
|
||||
|
||||
override func viewWillDisappear(_ animated: Bool) {
|
||||
logger.info("viewWillDisappear")
|
||||
// swiftlint:disable notification_center_detachment
|
||||
NotificationCenter.default.removeObserver(self)
|
||||
super.viewWillDisappear(animated)
|
||||
}
|
||||
|
||||
|
@ -61,7 +56,7 @@ final class PlaybackViewController: UIViewController {
|
|||
@objc
|
||||
private func rtmpStatusHandler(_ notification: Notification) {
|
||||
let e = Event.from(notification)
|
||||
guard let data: ASObject = e.data as? ASObject, let code: String = data["code"] as? String else {
|
||||
guard let data = e.data as? ASObject, let code = data["code"] as? String else {
|
||||
return
|
||||
}
|
||||
logger.info(code)
|
||||
|
@ -102,16 +97,6 @@ final class PlaybackViewController: UIViewController {
|
|||
rtmpStream.receiveVideo = true
|
||||
}
|
||||
}
|
||||
|
||||
@objc
|
||||
private func didInterruptionNotification(_ notification: Notification) {
|
||||
logger.info(notification)
|
||||
}
|
||||
|
||||
@objc
|
||||
private func didRouteChangeNotification(_ notification: Notification) {
|
||||
logger.info(notification)
|
||||
}
|
||||
}
|
||||
|
||||
extension PlaybackViewController: AVPictureInPictureSampleBufferPlaybackDelegate {
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
struct Preference {
|
||||
static var defaultInstance = Preference()
|
||||
|
||||
var uri: String? = "rtmp://192.168.1.10/live"
|
||||
var uri: String? = "rtmp://192.168.1.6/live"
|
||||
var streamName: String? = "live"
|
||||
}
|
||||
|
|
|
@ -43,21 +43,17 @@ open class SampleHandler: RPBroadcastSampleHandler {
|
|||
case .video:
|
||||
if let description = CMSampleBufferGetFormatDescription(sampleBuffer) {
|
||||
let dimensions = CMVideoFormatDescriptionGetDimensions(description)
|
||||
rtmpStream.videoSettings = [
|
||||
.width: dimensions.width,
|
||||
.height: dimensions.height,
|
||||
.profileLevel: kVTProfileLevel_H264_Baseline_AutoLevel
|
||||
]
|
||||
rtmpStream.videoSettings.videoSize = .init(width: dimensions.width, height: dimensions.height)
|
||||
}
|
||||
rtmpStream.appendSampleBuffer(sampleBuffer, withType: .video)
|
||||
rtmpStream.appendSampleBuffer(sampleBuffer)
|
||||
case .audioMic:
|
||||
isMirophoneOn = true
|
||||
if CMSampleBufferDataIsReady(sampleBuffer) {
|
||||
rtmpStream.appendSampleBuffer(sampleBuffer, withType: .audio)
|
||||
rtmpStream.appendSampleBuffer(sampleBuffer)
|
||||
}
|
||||
case .audioApp:
|
||||
if !isMirophoneOn && CMSampleBufferDataIsReady(sampleBuffer) {
|
||||
rtmpStream.appendSampleBuffer(sampleBuffer, withType: .audio)
|
||||
rtmpStream.appendSampleBuffer(sampleBuffer)
|
||||
}
|
||||
@unknown default:
|
||||
break
|
||||
|
|
|
@ -17,8 +17,8 @@ final class ViewModel: ObservableObject {
|
|||
private var retryCount: Int = 0
|
||||
@Published var published = false
|
||||
@Published var zoomLevel: CGFloat = 1.0
|
||||
@Published var videoRate: CGFloat = 160.0
|
||||
@Published var audioRate: CGFloat = 32.0
|
||||
@Published var videoRate = CGFloat(VideoCodecSettings.default.bitRate / 1000)
|
||||
@Published var audioRate = CGFloat(AudioCodecSettings.default.bitRate / 1000)
|
||||
@Published var fps: String = "FPS"
|
||||
private var nc = NotificationCenter.default
|
||||
|
||||
|
@ -65,10 +65,7 @@ final class ViewModel: ObservableObject {
|
|||
rtmpStream.videoOrientation = orientation
|
||||
}
|
||||
rtmpStream.sessionPreset = .hd1280x720
|
||||
rtmpStream.videoSettings = [
|
||||
.width: 720,
|
||||
.height: 1280
|
||||
]
|
||||
rtmpStream.videoSettings.videoSize = .init(width: 720, height: 1280)
|
||||
rtmpStream.mixer.recorder.delegate = self
|
||||
|
||||
nc.publisher(for: UIDevice.orientationDidChangeNotification, object: nil)
|
||||
|
@ -195,11 +192,11 @@ final class ViewModel: ObservableObject {
|
|||
}
|
||||
|
||||
func changeVideoRate(level: CGFloat) {
|
||||
rtmpStream.videoSettings[.bitrate] = level * 1000
|
||||
rtmpStream.videoSettings.bitRate = UInt32(level * 1000)
|
||||
}
|
||||
|
||||
func changeAudioRate(level: CGFloat) {
|
||||
rtmpStream.audioSettings[.bitrate] = level * 1000
|
||||
rtmpStream.audioSettings.bitRate = Int(level * 1000)
|
||||
}
|
||||
|
||||
@objc
|
||||
|
|
|
@ -1,7 +1,9 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<document type="com.apple.InterfaceBuilder3.Cocoa.Storyboard.XIB" version="3.0" toolsVersion="17701" targetRuntime="MacOSX.Cocoa" propertyAccessControl="none" useAutolayout="YES" initialViewController="B8D-0N-5wS">
|
||||
<document type="com.apple.InterfaceBuilder3.Cocoa.Storyboard.XIB" version="3.0" toolsVersion="21507" targetRuntime="MacOSX.Cocoa" propertyAccessControl="none" useAutolayout="YES" initialViewController="4Lp-xV-zxC">
|
||||
<dependencies>
|
||||
<plugIn identifier="com.apple.InterfaceBuilder.CocoaPlugin" version="17701"/>
|
||||
<deployment identifier="macosx"/>
|
||||
<plugIn identifier="com.apple.InterfaceBuilder.CocoaPlugin" version="21507"/>
|
||||
<capability name="NSView safe area layout guides" minToolsVersion="12.0"/>
|
||||
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
|
||||
</dependencies>
|
||||
<scenes>
|
||||
|
@ -34,31 +36,10 @@
|
|||
</objects>
|
||||
<point key="canvasLocation" x="75" y="0.0"/>
|
||||
</scene>
|
||||
<!--Window Controller-->
|
||||
<scene sceneID="R2V-B0-nI4">
|
||||
<objects>
|
||||
<windowController id="B8D-0N-5wS" customClass="MainWindowController" customModule="Example_macOS" customModuleProvider="target" sceneMemberID="viewController">
|
||||
<window key="window" title="Window" allowsToolTipsWhenApplicationIsInactive="NO" autorecalculatesKeyViewLoop="NO" releasedWhenClosed="NO" visibleAtLaunch="NO" animationBehavior="default" id="IQv-IB-iLA">
|
||||
<windowStyleMask key="styleMask" titled="YES" closable="YES" miniaturizable="YES" resizable="YES"/>
|
||||
<windowPositionMask key="initialPositionMask" leftStrut="YES" rightStrut="YES" topStrut="YES" bottomStrut="YES"/>
|
||||
<rect key="contentRect" x="196" y="240" width="480" height="270"/>
|
||||
<rect key="screenRect" x="0.0" y="0.0" width="1680" height="1027"/>
|
||||
<connections>
|
||||
<outlet property="delegate" destination="B8D-0N-5wS" id="Q4g-rY-pfC"/>
|
||||
</connections>
|
||||
</window>
|
||||
<connections>
|
||||
<segue destination="XfG-lQ-9wD" kind="relationship" relationship="window.shadowedContentViewController" id="cq2-FE-JQM"/>
|
||||
</connections>
|
||||
</windowController>
|
||||
<customObject id="Oky-zY-oP4" userLabel="First Responder" customClass="NSResponder" sceneMemberID="firstResponder"/>
|
||||
</objects>
|
||||
<point key="canvasLocation" x="75" y="250"/>
|
||||
</scene>
|
||||
<!--Main View Controller-->
|
||||
<!--Camera Publish View Controller-->
|
||||
<scene sceneID="hIz-AP-VOD">
|
||||
<objects>
|
||||
<viewController id="XfG-lQ-9wD" customClass="MainViewController" customModule="Example_macOS" customModuleProvider="target" sceneMemberID="viewController">
|
||||
<viewController storyboardIdentifier="CameraPublishViewController" id="XfG-lQ-9wD" customClass="CameraPublishViewController" customModule="Example_macOS" customModuleProvider="target" sceneMemberID="viewController">
|
||||
<view key="view" wantsLayer="YES" id="m2S-Jp-Qdl">
|
||||
<rect key="frame" x="0.0" y="0.0" width="480" height="270"/>
|
||||
<autoresizingMask key="autoresizingMask"/>
|
||||
|
@ -77,10 +58,7 @@
|
|||
</connections>
|
||||
</button>
|
||||
<popUpButton verticalHuggingPriority="750" translatesAutoresizingMaskIntoConstraints="NO" id="Hjj-Fo-QAo">
|
||||
<rect key="frame" x="257" y="226" width="207" height="25"/>
|
||||
<constraints>
|
||||
<constraint firstAttribute="width" constant="200" id="ufS-dM-mn2"/>
|
||||
</constraints>
|
||||
<rect key="frame" x="425" y="226" width="39" height="25"/>
|
||||
<popUpButtonCell key="cell" type="push" bezelStyle="rounded" alignment="left" lineBreakMode="truncatingTail" borderStyle="borderAndBezel" imageScaling="proportionallyDown" inset="2" id="lef-XS-nIm">
|
||||
<behavior key="behavior" lightByBackground="YES" lightByGray="YES"/>
|
||||
<font key="font" metaFont="menu"/>
|
||||
|
@ -91,10 +69,7 @@
|
|||
</connections>
|
||||
</popUpButton>
|
||||
<popUpButton verticalHuggingPriority="750" translatesAutoresizingMaskIntoConstraints="NO" id="vfl-SO-iw0">
|
||||
<rect key="frame" x="257" y="196" width="207" height="25"/>
|
||||
<constraints>
|
||||
<constraint firstAttribute="width" constant="200" id="Pi0-Q3-jXO"/>
|
||||
</constraints>
|
||||
<rect key="frame" x="425" y="196" width="39" height="25"/>
|
||||
<popUpButtonCell key="cell" type="push" bezelStyle="rounded" alignment="left" lineBreakMode="truncatingTail" borderStyle="borderAndBezel" imageScaling="proportionallyDown" inset="2" id="FVb-fk-AdX">
|
||||
<behavior key="behavior" lightByBackground="YES" lightByGray="YES"/>
|
||||
<font key="font" metaFont="menu"/>
|
||||
|
@ -154,22 +129,27 @@
|
|||
</subviews>
|
||||
<constraints>
|
||||
<constraint firstAttribute="bottom" secondItem="8aC-9s-bew" secondAttribute="bottom" constant="20" id="0Cc-JK-ooG"/>
|
||||
<constraint firstItem="8aC-9s-bew" firstAttribute="leading" relation="greaterThanOrEqual" secondItem="lB9-7R-daQ" secondAttribute="trailing" constant="8" symbolic="YES" id="2xn-8b-V4J"/>
|
||||
<constraint firstAttribute="trailing" secondItem="vfl-SO-iw0" secondAttribute="trailing" constant="20" id="9IB-O9-pG4"/>
|
||||
<constraint firstAttribute="trailing" secondItem="iLC-eL-Nn7" secondAttribute="trailing" constant="20" id="D3J-tH-bAk"/>
|
||||
<constraint firstItem="9g2-aW-5KE" firstAttribute="leading" secondItem="m2S-Jp-Qdl" secondAttribute="leading" id="GEH-yz-mrh"/>
|
||||
<constraint firstAttribute="trailing" secondItem="CIE-H2-55S" secondAttribute="trailing" constant="20" id="HAA-kt-OKg"/>
|
||||
<constraint firstItem="CIE-H2-55S" firstAttribute="top" secondItem="iLC-eL-Nn7" secondAttribute="bottom" constant="10" id="NCq-Mi-4Nc"/>
|
||||
<constraint firstItem="Wuc-0E-MpH" firstAttribute="leading" secondItem="m2S-Jp-Qdl" secondAttribute="leading" constant="20" id="RBg-L3-2bO"/>
|
||||
<constraint firstItem="vfl-SO-iw0" firstAttribute="leading" relation="greaterThanOrEqual" secondItem="m2S-Jp-Qdl" secondAttribute="leading" constant="20" symbolic="YES" id="akJ-Nz-JcV"/>
|
||||
<constraint firstItem="Hjj-Fo-QAo" firstAttribute="top" secondItem="m2S-Jp-Qdl" secondAttribute="top" constant="20" id="bUT-0e-MAJ"/>
|
||||
<constraint firstItem="Wuc-0E-MpH" firstAttribute="top" secondItem="m2S-Jp-Qdl" secondAttribute="top" constant="20" id="cJf-Im-eBM"/>
|
||||
<constraint firstItem="vfl-SO-iw0" firstAttribute="top" secondItem="Hjj-Fo-QAo" secondAttribute="bottom" constant="10" id="cV1-7j-UCY"/>
|
||||
<constraint firstItem="9g2-aW-5KE" firstAttribute="top" secondItem="m2S-Jp-Qdl" secondAttribute="top" id="cyh-4S-TRS"/>
|
||||
<constraint firstItem="Hjj-Fo-QAo" firstAttribute="leading" relation="greaterThanOrEqual" secondItem="Wuc-0E-MpH" secondAttribute="trailing" constant="8" symbolic="YES" id="f5z-js-iaQ"/>
|
||||
<constraint firstAttribute="trailing" secondItem="8aC-9s-bew" secondAttribute="trailing" constant="20" id="i6i-9B-SkI"/>
|
||||
<constraint firstAttribute="bottom" secondItem="lB9-7R-daQ" secondAttribute="bottom" constant="20" id="j60-Ve-mht"/>
|
||||
<constraint firstItem="iLC-eL-Nn7" firstAttribute="leading" relation="greaterThanOrEqual" secondItem="m2S-Jp-Qdl" secondAttribute="leading" constant="20" symbolic="YES" id="nUd-Vt-Mxs"/>
|
||||
<constraint firstItem="9g2-aW-5KE" firstAttribute="width" secondItem="m2S-Jp-Qdl" secondAttribute="width" id="nXs-El-9Z1"/>
|
||||
<constraint firstItem="8aC-9s-bew" firstAttribute="top" secondItem="CIE-H2-55S" secondAttribute="bottom" constant="10" id="oNA-aB-zP0"/>
|
||||
<constraint firstItem="9g2-aW-5KE" firstAttribute="height" secondItem="m2S-Jp-Qdl" secondAttribute="height" id="p40-XI-4o1"/>
|
||||
<constraint firstItem="lB9-7R-daQ" firstAttribute="leading" secondItem="m2S-Jp-Qdl" secondAttribute="leading" constant="20" id="sLw-PG-Hgd"/>
|
||||
<constraint firstItem="CIE-H2-55S" firstAttribute="leading" relation="greaterThanOrEqual" secondItem="m2S-Jp-Qdl" secondAttribute="leading" constant="20" symbolic="YES" id="vyF-uI-1Cp"/>
|
||||
<constraint firstAttribute="trailing" secondItem="Hjj-Fo-QAo" secondAttribute="trailing" constant="20" id="vzC-3x-4nE"/>
|
||||
</constraints>
|
||||
</view>
|
||||
|
@ -185,5 +165,340 @@
|
|||
</objects>
|
||||
<point key="canvasLocation" x="75" y="655"/>
|
||||
</scene>
|
||||
<!--Window Controller-->
|
||||
<scene sceneID="uIN-fj-SfU">
|
||||
<objects>
|
||||
<windowController id="4Lp-xV-zxC" sceneMemberID="viewController">
|
||||
<window key="window" title="Window" allowsToolTipsWhenApplicationIsInactive="NO" autorecalculatesKeyViewLoop="NO" releasedWhenClosed="NO" visibleAtLaunch="NO" frameAutosaveName="" animationBehavior="default" id="3N9-7c-j7V">
|
||||
<windowStyleMask key="styleMask" titled="YES" closable="YES" miniaturizable="YES" resizable="YES"/>
|
||||
<windowPositionMask key="initialPositionMask" leftStrut="YES" rightStrut="YES" topStrut="YES" bottomStrut="YES"/>
|
||||
<rect key="contentRect" x="211" y="267" width="480" height="270"/>
|
||||
<rect key="screenRect" x="0.0" y="0.0" width="1512" height="944"/>
|
||||
<view key="contentView" id="w23-Ay-0Ti">
|
||||
<rect key="frame" x="0.0" y="0.0" width="480" height="270"/>
|
||||
<autoresizingMask key="autoresizingMask"/>
|
||||
</view>
|
||||
<toolbar key="toolbar" implicitIdentifier="09D11707-F4A3-4FD5-970E-AC5832E91C2B" autosavesConfiguration="NO" displayMode="iconAndLabel" sizeMode="regular" id="Uxk-Q0-ROW">
|
||||
<allowedToolbarItems/>
|
||||
<defaultToolbarItems/>
|
||||
</toolbar>
|
||||
<connections>
|
||||
<outlet property="delegate" destination="4Lp-xV-zxC" id="vCA-of-aRI"/>
|
||||
</connections>
|
||||
</window>
|
||||
<connections>
|
||||
<segue destination="NF7-WS-c3B" kind="relationship" relationship="window.shadowedContentViewController" id="Xsj-HD-e4r"/>
|
||||
</connections>
|
||||
</windowController>
|
||||
<customObject id="qnS-t2-2hl" userLabel="First Responder" customClass="NSResponder" sceneMemberID="firstResponder"/>
|
||||
</objects>
|
||||
<point key="canvasLocation" x="-457" y="604"/>
|
||||
</scene>
|
||||
<!--Main Split View Controller-->
|
||||
<scene sceneID="ayw-Nc-hmj">
|
||||
<objects>
|
||||
<splitViewController id="NF7-WS-c3B" customClass="MainSplitViewController" customModule="Example_macOS" customModuleProvider="target" sceneMemberID="viewController">
|
||||
<splitViewItems>
|
||||
<splitViewItem canCollapse="YES" holdingPriority="260" behavior="sidebar" id="rWa-Cz-lZU"/>
|
||||
<splitViewItem id="Uyi-Rm-rQN"/>
|
||||
</splitViewItems>
|
||||
<splitView key="splitView" dividerStyle="thin" vertical="YES" id="KOg-Sx-jxE">
|
||||
<rect key="frame" x="0.0" y="0.0" width="450" height="300"/>
|
||||
<autoresizingMask key="autoresizingMask"/>
|
||||
</splitView>
|
||||
<connections>
|
||||
<segue destination="rX7-HP-XFe" kind="relationship" relationship="splitItems" id="L5L-nw-bp6"/>
|
||||
<segue destination="GyZ-hD-VHK" kind="relationship" relationship="splitItems" id="BVl-mY-rOQ"/>
|
||||
</connections>
|
||||
</splitViewController>
|
||||
<customObject id="YJG-VL-2Ch" userLabel="First Responder" customClass="NSResponder" sceneMemberID="firstResponder"/>
|
||||
</objects>
|
||||
<point key="canvasLocation" x="-974" y="329"/>
|
||||
</scene>
|
||||
<!--Menu View Controller-->
|
||||
<scene sceneID="YOR-fR-sUU">
|
||||
<objects>
|
||||
<viewController id="rX7-HP-XFe" customClass="MenuViewController" customModule="Example_macOS" customModuleProvider="target" sceneMemberID="viewController">
|
||||
<view key="view" misplaced="YES" id="sB5-Mo-WgH">
|
||||
<rect key="frame" x="0.0" y="0.0" width="154" height="1072"/>
|
||||
<autoresizingMask key="autoresizingMask"/>
|
||||
<subviews>
|
||||
<scrollView autohidesScrollers="YES" horizontalLineScroll="24" horizontalPageScroll="10" verticalLineScroll="24" verticalPageScroll="10" usesPredominantAxisScrolling="NO" translatesAutoresizingMaskIntoConstraints="NO" id="U7X-zB-Ct9">
|
||||
<rect key="frame" x="0.0" y="0.0" width="154" height="1684"/>
|
||||
<clipView key="contentView" id="TC0-20-xwt">
|
||||
<rect key="frame" x="1" y="1" width="152" height="1682"/>
|
||||
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
|
||||
<subviews>
|
||||
<tableView verticalHuggingPriority="750" allowsExpansionToolTips="YES" columnAutoresizingStyle="lastColumnOnly" multipleSelection="NO" autosaveColumns="NO" rowHeight="24" rowSizeStyle="automatic" viewBased="YES" id="Zf5-MB-jdh">
|
||||
<rect key="frame" x="0.0" y="0.0" width="152" height="1682"/>
|
||||
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
|
||||
<size key="intercellSpacing" width="17" height="0.0"/>
|
||||
<color key="backgroundColor" name="controlBackgroundColor" catalog="System" colorSpace="catalog"/>
|
||||
<color key="gridColor" name="gridColor" catalog="System" colorSpace="catalog"/>
|
||||
<tableColumns>
|
||||
<tableColumn identifier="AutomaticTableColumnIdentifier.0" width="140" minWidth="40" maxWidth="1000" id="RvV-a9-2Bt">
|
||||
<tableHeaderCell key="headerCell" lineBreakMode="truncatingTail" borderStyle="border">
|
||||
<color key="textColor" name="headerTextColor" catalog="System" colorSpace="catalog"/>
|
||||
<color key="backgroundColor" name="headerColor" catalog="System" colorSpace="catalog"/>
|
||||
</tableHeaderCell>
|
||||
<textFieldCell key="dataCell" lineBreakMode="truncatingTail" selectable="YES" editable="YES" title="Text Cell" id="gul-Xr-O4P">
|
||||
<font key="font" metaFont="system"/>
|
||||
<color key="textColor" name="controlTextColor" catalog="System" colorSpace="catalog"/>
|
||||
<color key="backgroundColor" name="controlBackgroundColor" catalog="System" colorSpace="catalog"/>
|
||||
</textFieldCell>
|
||||
<tableColumnResizingMask key="resizingMask" resizeWithTable="YES" userResizable="YES"/>
|
||||
<prototypeCellViews>
|
||||
<tableCellView id="TMq-ik-BLg">
|
||||
<rect key="frame" x="8" y="0.0" width="135" height="24"/>
|
||||
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
|
||||
<subviews>
|
||||
<textField horizontalHuggingPriority="251" verticalHuggingPriority="750" horizontalCompressionResistancePriority="250" translatesAutoresizingMaskIntoConstraints="NO" id="nKL-O8-0aU">
|
||||
<rect key="frame" x="0.0" y="4" width="135" height="16"/>
|
||||
<textFieldCell key="cell" lineBreakMode="truncatingTail" sendsActionOnEndEditing="YES" title="Table View Cell" id="Lf1-hH-841">
|
||||
<font key="font" usesAppearanceFont="YES"/>
|
||||
<color key="textColor" name="controlTextColor" catalog="System" colorSpace="catalog"/>
|
||||
<color key="backgroundColor" name="textBackgroundColor" catalog="System" colorSpace="catalog"/>
|
||||
</textFieldCell>
|
||||
</textField>
|
||||
</subviews>
|
||||
<constraints>
|
||||
<constraint firstItem="nKL-O8-0aU" firstAttribute="leading" secondItem="TMq-ik-BLg" secondAttribute="leading" constant="2" id="Aum-kK-h0z"/>
|
||||
<constraint firstItem="nKL-O8-0aU" firstAttribute="centerX" secondItem="TMq-ik-BLg" secondAttribute="centerX" id="NIe-lm-RIB"/>
|
||||
<constraint firstItem="nKL-O8-0aU" firstAttribute="centerY" secondItem="TMq-ik-BLg" secondAttribute="centerY" id="ct0-oa-BwY"/>
|
||||
</constraints>
|
||||
<connections>
|
||||
<outlet property="textField" destination="nKL-O8-0aU" id="4o7-5Z-5xG"/>
|
||||
</connections>
|
||||
</tableCellView>
|
||||
</prototypeCellViews>
|
||||
</tableColumn>
|
||||
</tableColumns>
|
||||
<connections>
|
||||
<outlet property="dataSource" destination="rX7-HP-XFe" id="oT0-DO-XfB"/>
|
||||
<outlet property="delegate" destination="rX7-HP-XFe" id="kak-CH-GFv"/>
|
||||
</connections>
|
||||
</tableView>
|
||||
</subviews>
|
||||
</clipView>
|
||||
<scroller key="horizontalScroller" hidden="YES" wantsLayer="YES" verticalHuggingPriority="750" horizontal="YES" id="5e8-aJ-U5N">
|
||||
<rect key="frame" x="1" y="1565" width="152" height="16"/>
|
||||
<autoresizingMask key="autoresizingMask"/>
|
||||
</scroller>
|
||||
<scroller key="verticalScroller" hidden="YES" wantsLayer="YES" verticalHuggingPriority="750" horizontal="NO" id="eRz-M7-DJS">
|
||||
<rect key="frame" x="224" y="17" width="15" height="102"/>
|
||||
<autoresizingMask key="autoresizingMask"/>
|
||||
</scroller>
|
||||
</scrollView>
|
||||
</subviews>
|
||||
<constraints>
|
||||
<constraint firstItem="U7X-zB-Ct9" firstAttribute="bottom" secondItem="9lo-11-SF8" secondAttribute="bottom" id="Efs-dl-fxX"/>
|
||||
<constraint firstItem="U7X-zB-Ct9" firstAttribute="top" secondItem="sB5-Mo-WgH" secondAttribute="top" id="I4r-iI-g6v"/>
|
||||
<constraint firstItem="U7X-zB-Ct9" firstAttribute="trailing" secondItem="9lo-11-SF8" secondAttribute="trailing" id="N0j-lF-qW4"/>
|
||||
<constraint firstItem="U7X-zB-Ct9" firstAttribute="trailing" secondItem="9lo-11-SF8" secondAttribute="trailing" id="Vbe-Eq-sRp"/>
|
||||
<constraint firstItem="U7X-zB-Ct9" firstAttribute="leading" secondItem="9lo-11-SF8" secondAttribute="leading" id="Z7z-AA-ydR"/>
|
||||
<constraint firstItem="U7X-zB-Ct9" firstAttribute="leading" secondItem="9lo-11-SF8" secondAttribute="leading" id="fRl-78-kln"/>
|
||||
<constraint firstItem="U7X-zB-Ct9" firstAttribute="trailing" secondItem="9lo-11-SF8" secondAttribute="trailing" id="gEO-m2-Qfk"/>
|
||||
<constraint firstItem="U7X-zB-Ct9" firstAttribute="leading" secondItem="9lo-11-SF8" secondAttribute="leading" id="gvA-u2-WJB"/>
|
||||
<constraint firstItem="U7X-zB-Ct9" firstAttribute="bottom" secondItem="9lo-11-SF8" secondAttribute="bottom" id="nzb-Ve-sGC"/>
|
||||
</constraints>
|
||||
<viewLayoutGuide key="safeArea" id="9lo-11-SF8"/>
|
||||
<viewLayoutGuide key="layoutMargins" id="wZT-hI-llD"/>
|
||||
</view>
|
||||
<connections>
|
||||
<outlet property="tableView" destination="Zf5-MB-jdh" id="KWE-xw-xM5"/>
|
||||
</connections>
|
||||
</viewController>
|
||||
<customObject id="dST-Xk-5EF" userLabel="First Responder" customClass="NSResponder" sceneMemberID="firstResponder"/>
|
||||
</objects>
|
||||
<point key="canvasLocation" x="442" y="329"/>
|
||||
</scene>
|
||||
<!--View Controller-->
|
||||
<scene sceneID="JZE-qq-5Gq">
|
||||
<objects>
|
||||
<viewController id="GyZ-hD-VHK" sceneMemberID="viewController">
|
||||
<view key="view" id="VCU-ot-Zd4">
|
||||
<rect key="frame" x="0.0" y="0.0" width="302" height="300"/>
|
||||
<autoresizingMask key="autoresizingMask"/>
|
||||
<viewLayoutGuide key="safeArea" id="40k-YB-9Sh"/>
|
||||
<viewLayoutGuide key="layoutMargins" id="JeG-zt-ieP"/>
|
||||
</view>
|
||||
</viewController>
|
||||
<customObject id="804-s7-rc2" userLabel="First Responder" customClass="NSResponder" sceneMemberID="firstResponder"/>
|
||||
</objects>
|
||||
<point key="canvasLocation" x="-42" y="-408"/>
|
||||
</scene>
|
||||
<!--Playback View Controller-->
|
||||
<scene sceneID="CUf-T6-3jm">
|
||||
<objects>
|
||||
<viewController storyboardIdentifier="RTMPPlaybackViewController" id="Lqg-9j-gZP" customClass="RTMPPlaybackViewController" customModule="Example_macOS" customModuleProvider="target" sceneMemberID="viewController">
|
||||
<view key="view" wantsLayer="YES" id="J9d-S9-Trt">
|
||||
<rect key="frame" x="0.0" y="0.0" width="480" height="270"/>
|
||||
<autoresizingMask key="autoresizingMask"/>
|
||||
<subviews>
|
||||
<openGLView wantsLayer="YES" useAuxiliaryDepthBufferStencil="NO" allowOffline="YES" wantsBestResolutionOpenGLSurface="YES" translatesAutoresizingMaskIntoConstraints="NO" id="Yr3-Li-WPD" customClass="MTHKView" customModule="HaishinKit">
|
||||
<rect key="frame" x="0.0" y="0.0" width="480" height="270"/>
|
||||
</openGLView>
|
||||
<button verticalHuggingPriority="750" translatesAutoresizingMaskIntoConstraints="NO" id="6iZ-Ur-gur">
|
||||
<rect key="frame" x="378" y="13" width="89" height="32"/>
|
||||
<buttonCell key="cell" type="push" title="Playback" bezelStyle="rounded" alignment="center" borderStyle="border" imageScaling="proportionallyDown" inset="2" id="W1x-8U-Phb">
|
||||
<behavior key="behavior" pushIn="YES" lightByBackground="YES" lightByGray="YES"/>
|
||||
<font key="font" metaFont="system"/>
|
||||
</buttonCell>
|
||||
<connections>
|
||||
<action selector="didTappedPlayback:" target="Lqg-9j-gZP" id="ngd-dz-DdL"/>
|
||||
</connections>
|
||||
</button>
|
||||
</subviews>
|
||||
<constraints>
|
||||
<constraint firstItem="Yr3-Li-WPD" firstAttribute="leading" secondItem="J9d-S9-Trt" secondAttribute="leading" id="Ah4-lE-cQH"/>
|
||||
<constraint firstAttribute="trailing" secondItem="6iZ-Ur-gur" secondAttribute="trailing" constant="20" id="JfI-gL-PM3"/>
|
||||
<constraint firstItem="Yr3-Li-WPD" firstAttribute="width" secondItem="J9d-S9-Trt" secondAttribute="width" id="og8-2D-xo1"/>
|
||||
<constraint firstItem="Yr3-Li-WPD" firstAttribute="top" secondItem="J9d-S9-Trt" secondAttribute="top" id="rLV-Q5-UcE"/>
|
||||
<constraint firstItem="Yr3-Li-WPD" firstAttribute="height" secondItem="J9d-S9-Trt" secondAttribute="height" id="xmd-Zw-bSA"/>
|
||||
<constraint firstAttribute="bottom" secondItem="6iZ-Ur-gur" secondAttribute="bottom" constant="20" id="zwB-ve-FoP"/>
|
||||
</constraints>
|
||||
</view>
|
||||
<connections>
|
||||
<outlet property="lfView" destination="Yr3-Li-WPD" id="Kfd-7R-psD"/>
|
||||
</connections>
|
||||
</viewController>
|
||||
<customObject id="51v-jx-wcj" userLabel="First Responder" customClass="NSResponder" sceneMemberID="firstResponder"/>
|
||||
</objects>
|
||||
<point key="canvasLocation" x="75" y="655"/>
|
||||
</scene>
|
||||
<!--Stream Publish View Controller-->
|
||||
<scene sceneID="qMT-Br-6MQ">
|
||||
<objects>
|
||||
<viewController storyboardIdentifier="SCStreamPublishViewController" id="dhX-nT-Doa" customClass="SCStreamPublishViewController" customModule="Example_macOS" customModuleProvider="target" sceneMemberID="viewController">
|
||||
<view key="view" wantsLayer="YES" id="fUT-bB-KBi">
|
||||
<rect key="frame" x="0.0" y="0.0" width="480" height="270"/>
|
||||
<autoresizingMask key="autoresizingMask"/>
|
||||
<subviews>
|
||||
<button verticalHuggingPriority="750" translatesAutoresizingMaskIntoConstraints="NO" id="Ko8-3C-ims">
|
||||
<rect key="frame" x="388" y="13" width="79" height="32"/>
|
||||
<buttonCell key="cell" type="push" title="Publish" bezelStyle="rounded" alignment="center" borderStyle="border" imageScaling="proportionallyDown" inset="2" id="PyO-G6-Sfr">
|
||||
<behavior key="behavior" pushIn="YES" lightByBackground="YES" lightByGray="YES"/>
|
||||
<font key="font" metaFont="system"/>
|
||||
</buttonCell>
|
||||
<connections>
|
||||
<action selector="publishOrStop:" target="dhX-nT-Doa" id="X3P-mQ-808"/>
|
||||
</connections>
|
||||
</button>
|
||||
<popUpButton verticalHuggingPriority="750" translatesAutoresizingMaskIntoConstraints="NO" id="8gp-R8-K0u">
|
||||
<rect key="frame" x="425" y="226" width="39" height="25"/>
|
||||
<popUpButtonCell key="cell" type="push" bezelStyle="rounded" alignment="left" lineBreakMode="truncatingTail" borderStyle="borderAndBezel" imageScaling="proportionallyDown" inset="2" id="beY-wH-sGF">
|
||||
<behavior key="behavior" lightByBackground="YES" lightByGray="YES"/>
|
||||
<font key="font" metaFont="menu"/>
|
||||
<menu key="menu" id="45T-aX-cgp"/>
|
||||
</popUpButtonCell>
|
||||
<connections>
|
||||
<action selector="selectCamera:" target="dhX-nT-Doa" id="F16-Nj-MQi"/>
|
||||
</connections>
|
||||
</popUpButton>
|
||||
<textField verticalHuggingPriority="750" translatesAutoresizingMaskIntoConstraints="NO" id="EUe-gL-Kfw">
|
||||
<rect key="frame" x="20" y="20" width="300" height="21"/>
|
||||
<constraints>
|
||||
<constraint firstAttribute="width" constant="300" id="zUM-Oo-BRX"/>
|
||||
</constraints>
|
||||
<textFieldCell key="cell" scrollable="YES" lineBreakMode="clipping" selectable="YES" editable="YES" sendsActionOnEndEditing="YES" state="on" borderStyle="bezel" drawsBackground="YES" id="nJx-eh-DMW">
|
||||
<font key="font" metaFont="system"/>
|
||||
<color key="textColor" name="controlTextColor" catalog="System" colorSpace="catalog"/>
|
||||
<color key="backgroundColor" name="textBackgroundColor" catalog="System" colorSpace="catalog"/>
|
||||
</textFieldCell>
|
||||
</textField>
|
||||
</subviews>
|
||||
<constraints>
|
||||
<constraint firstAttribute="trailing" secondItem="Ko8-3C-ims" secondAttribute="trailing" constant="20" id="3qF-0K-uUh"/>
|
||||
<constraint firstAttribute="trailing" secondItem="8gp-R8-K0u" secondAttribute="trailing" constant="20" id="gxy-va-ciW"/>
|
||||
<constraint firstAttribute="bottom" secondItem="Ko8-3C-ims" secondAttribute="bottom" constant="20" id="jvL-RJ-ajb"/>
|
||||
<constraint firstItem="Ko8-3C-ims" firstAttribute="leading" relation="greaterThanOrEqual" secondItem="EUe-gL-Kfw" secondAttribute="trailing" constant="8" symbolic="YES" id="nK8-te-3iT"/>
|
||||
<constraint firstItem="EUe-gL-Kfw" firstAttribute="leading" secondItem="fUT-bB-KBi" secondAttribute="leading" constant="20" id="pPn-nM-lBg"/>
|
||||
<constraint firstAttribute="bottom" secondItem="EUe-gL-Kfw" secondAttribute="bottom" constant="20" id="pW0-88-rWT"/>
|
||||
<constraint firstItem="8gp-R8-K0u" firstAttribute="top" secondItem="fUT-bB-KBi" secondAttribute="top" constant="20" id="rAd-HY-7d6"/>
|
||||
</constraints>
|
||||
</view>
|
||||
<connections>
|
||||
<outlet property="cameraPopUpButton" destination="8gp-R8-K0u" id="Y2e-oa-Q5i"/>
|
||||
<outlet property="urlField" destination="EUe-gL-Kfw" id="v14-Ru-i7A"/>
|
||||
</connections>
|
||||
</viewController>
|
||||
<customObject id="ZWx-by-4If" userLabel="First Responder" customClass="NSResponder" sceneMemberID="firstResponder"/>
|
||||
</objects>
|
||||
<point key="canvasLocation" x="75" y="655"/>
|
||||
</scene>
|
||||
<!--Preference View Controller-->
|
||||
<scene sceneID="R4z-ix-pWI">
|
||||
<objects>
|
||||
<viewController storyboardIdentifier="PreferenceViewController" id="9vk-iW-BZX" customClass="PreferenceViewController" customModule="Example_macOS" customModuleProvider="target" sceneMemberID="viewController">
|
||||
<view key="view" wantsLayer="YES" id="OQz-nx-Hf5">
|
||||
<rect key="frame" x="0.0" y="0.0" width="480" height="480"/>
|
||||
<autoresizingMask key="autoresizingMask"/>
|
||||
<subviews>
|
||||
<textField verticalHuggingPriority="750" translatesAutoresizingMaskIntoConstraints="NO" id="muK-Fu-lNp">
|
||||
<rect key="frame" x="16" y="419" width="300" height="21"/>
|
||||
<constraints>
|
||||
<constraint firstAttribute="width" constant="300" id="onn-Pa-e0H"/>
|
||||
</constraints>
|
||||
<textFieldCell key="cell" scrollable="YES" lineBreakMode="clipping" selectable="YES" editable="YES" sendsActionOnEndEditing="YES" state="on" borderStyle="bezel" drawsBackground="YES" id="kM2-u6-oC5">
|
||||
<font key="font" metaFont="system"/>
|
||||
<color key="textColor" name="controlTextColor" catalog="System" colorSpace="catalog"/>
|
||||
<color key="backgroundColor" name="textBackgroundColor" catalog="System" colorSpace="catalog"/>
|
||||
</textFieldCell>
|
||||
<connections>
|
||||
<outlet property="delegate" destination="9vk-iW-BZX" id="TTT-Ka-3O4"/>
|
||||
</connections>
|
||||
</textField>
|
||||
<textField horizontalHuggingPriority="251" verticalHuggingPriority="750" translatesAutoresizingMaskIntoConstraints="NO" id="y43-vh-quB">
|
||||
<rect key="frame" x="14" y="448" width="73" height="16"/>
|
||||
<textFieldCell key="cell" lineBreakMode="clipping" title="RTMP URL:" id="a0O-iB-hpy">
|
||||
<font key="font" metaFont="system"/>
|
||||
<color key="textColor" name="labelColor" catalog="System" colorSpace="catalog"/>
|
||||
<color key="backgroundColor" name="textBackgroundColor" catalog="System" colorSpace="catalog"/>
|
||||
</textFieldCell>
|
||||
</textField>
|
||||
<textField horizontalHuggingPriority="251" verticalHuggingPriority="750" translatesAutoresizingMaskIntoConstraints="NO" id="WSq-ak-6dS">
|
||||
<rect key="frame" x="14" y="395" width="87" height="16"/>
|
||||
<textFieldCell key="cell" lineBreakMode="clipping" title="StreamName:" id="2cu-1b-UVj">
|
||||
<font key="font" metaFont="system"/>
|
||||
<color key="textColor" name="labelColor" catalog="System" colorSpace="catalog"/>
|
||||
<color key="backgroundColor" name="textBackgroundColor" catalog="System" colorSpace="catalog"/>
|
||||
</textFieldCell>
|
||||
</textField>
|
||||
<textField verticalHuggingPriority="750" translatesAutoresizingMaskIntoConstraints="NO" id="YII-qB-iiW">
|
||||
<rect key="frame" x="16" y="366" width="300" height="21"/>
|
||||
<constraints>
|
||||
<constraint firstAttribute="width" constant="300" id="03D-Ul-Uui"/>
|
||||
</constraints>
|
||||
<textFieldCell key="cell" scrollable="YES" lineBreakMode="clipping" selectable="YES" editable="YES" sendsActionOnEndEditing="YES" state="on" borderStyle="bezel" drawsBackground="YES" id="Jsz-le-a4U">
|
||||
<font key="font" metaFont="system"/>
|
||||
<color key="textColor" name="controlTextColor" catalog="System" colorSpace="catalog"/>
|
||||
<color key="backgroundColor" name="textBackgroundColor" catalog="System" colorSpace="catalog"/>
|
||||
</textFieldCell>
|
||||
<connections>
|
||||
<outlet property="delegate" destination="9vk-iW-BZX" id="tw1-fJ-VcE"/>
|
||||
</connections>
|
||||
</textField>
|
||||
</subviews>
|
||||
<constraints>
|
||||
<constraint firstItem="y43-vh-quB" firstAttribute="top" secondItem="OQz-nx-Hf5" secondAttribute="top" constant="16" id="14X-aF-lac"/>
|
||||
<constraint firstItem="YII-qB-iiW" firstAttribute="top" secondItem="WSq-ak-6dS" secondAttribute="bottom" constant="8" id="MBC-BB-Xt7"/>
|
||||
<constraint firstItem="WSq-ak-6dS" firstAttribute="leading" secondItem="OQz-nx-Hf5" secondAttribute="leading" constant="16" id="Nk6-TS-ym8"/>
|
||||
<constraint firstItem="WSq-ak-6dS" firstAttribute="top" secondItem="muK-Fu-lNp" secondAttribute="bottom" constant="8" id="VIc-O8-hqJ"/>
|
||||
<constraint firstItem="muK-Fu-lNp" firstAttribute="top" secondItem="y43-vh-quB" secondAttribute="bottom" constant="8" id="Vri-fn-xBI"/>
|
||||
<constraint firstItem="muK-Fu-lNp" firstAttribute="leading" secondItem="OQz-nx-Hf5" secondAttribute="leading" constant="16" id="WQs-OA-h3N"/>
|
||||
<constraint firstItem="YII-qB-iiW" firstAttribute="leading" secondItem="OQz-nx-Hf5" secondAttribute="leading" constant="16" id="ZX6-KR-rH8"/>
|
||||
<constraint firstItem="y43-vh-quB" firstAttribute="leading" secondItem="OQz-nx-Hf5" secondAttribute="leading" constant="16" id="aNe-eB-guK"/>
|
||||
<constraint firstAttribute="trailing" relation="greaterThanOrEqual" secondItem="y43-vh-quB" secondAttribute="trailing" constant="20" symbolic="YES" id="keo-Ad-GB2"/>
|
||||
</constraints>
|
||||
</view>
|
||||
<connections>
|
||||
<outlet property="streamNameField" destination="YII-qB-iiW" id="uPq-i5-fmx"/>
|
||||
<outlet property="urlField" destination="muK-Fu-lNp" id="3hV-RH-2Gx"/>
|
||||
</connections>
|
||||
</viewController>
|
||||
<customObject id="ARY-aJ-9uG" userLabel="First Responder" customClass="NSResponder" sceneMemberID="firstResponder"/>
|
||||
</objects>
|
||||
<point key="canvasLocation" x="-131" y="109"/>
|
||||
</scene>
|
||||
</scenes>
|
||||
</document>
|
||||
|
|
|
@ -12,28 +12,38 @@ extension NSPopUpButton {
|
|||
}
|
||||
}
|
||||
|
||||
final class MainViewController: NSViewController {
|
||||
var rtmpConnection = RTMPConnection()
|
||||
var rtmpStream: RTMPStream!
|
||||
|
||||
var httpService = HLSService(
|
||||
domain: "local", type: HTTPService.type, name: "", port: HTTPService.defaultPort
|
||||
)
|
||||
var httpStream = HTTPStream()
|
||||
|
||||
final class CameraPublishViewController: NSViewController {
|
||||
@IBOutlet private weak var lfView: MTHKView!
|
||||
@IBOutlet private weak var audioPopUpButton: NSPopUpButton!
|
||||
@IBOutlet private weak var cameraPopUpButton: NSPopUpButton!
|
||||
@IBOutlet private weak var urlField: NSTextField!
|
||||
@IBOutlet private weak var segmentedControl: NSSegmentedControl!
|
||||
|
||||
private var currentStream: NetStream? {
|
||||
willSet {
|
||||
currentStream?.attachCamera(nil)
|
||||
currentStream?.attachMultiCamera(nil)
|
||||
currentStream?.attachAudio(nil)
|
||||
}
|
||||
didSet {
|
||||
currentStream?.attachCamera(DeviceUtil.device(withLocalizedName: cameraPopUpButton.titleOfSelectedItem!, mediaType: .video))
|
||||
currentStream?.attachAudio(DeviceUtil.device(withLocalizedName: audioPopUpButton.titleOfSelectedItem!, mediaType: .audio))
|
||||
}
|
||||
}
|
||||
private var rtmpConnection = RTMPConnection()
|
||||
private lazy var rtmpStream: RTMPStream = {
|
||||
let rtmpStream = RTMPStream(connection: rtmpConnection)
|
||||
rtmpStream.addObserver(self, forKeyPath: "currentFPS", options: .new, context: nil)
|
||||
return rtmpStream
|
||||
}()
|
||||
private var httpService = HLSService(
|
||||
domain: "local", type: HTTPService.type, name: "", port: HTTPService.defaultPort
|
||||
)
|
||||
private var httpStream = HTTPStream()
|
||||
|
||||
override func viewDidLoad() {
|
||||
super.viewDidLoad()
|
||||
rtmpStream = RTMPStream(connection: rtmpConnection)
|
||||
rtmpStream.addObserver(self, forKeyPath: "currentFPS", options: .new, context: nil)
|
||||
|
||||
urlField.stringValue = Preference.defaultInstance.uri ?? ""
|
||||
|
||||
audioPopUpButton?.present(mediaType: .audio)
|
||||
cameraPopUpButton?.present(mediaType: .video)
|
||||
}
|
||||
|
@ -42,7 +52,13 @@ final class MainViewController: NSViewController {
|
|||
super.viewWillAppear()
|
||||
rtmpStream.attachAudio(DeviceUtil.device(withLocalizedName: audioPopUpButton.titleOfSelectedItem!, mediaType: .audio))
|
||||
rtmpStream.attachCamera(DeviceUtil.device(withLocalizedName: cameraPopUpButton.titleOfSelectedItem!, mediaType: .video))
|
||||
var devices = AVCaptureDevice.devices(for: .video)
|
||||
devices.removeFirst()
|
||||
if let device = devices.first {
|
||||
rtmpStream.attachMultiCamera(device)
|
||||
}
|
||||
lfView?.attachStream(rtmpStream)
|
||||
currentStream = rtmpStream
|
||||
}
|
||||
|
||||
// swiftlint:disable block_based_kvo
|
||||
|
@ -62,14 +78,12 @@ final class MainViewController: NSViewController {
|
|||
// Publish
|
||||
if sender.title == "Publish" {
|
||||
sender.title = "Stop"
|
||||
|
||||
// Optional. If you don't specify; the frame size will be the current H264Encoder default of 480x272
|
||||
// rtmpStream.videoSettings = [
|
||||
// .profileLevel: kVTProfileLevel_H264_High_AutoLevel,
|
||||
// .width: 1920,
|
||||
// .height: 1280,
|
||||
// ]
|
||||
|
||||
segmentedControl.isEnabled = false
|
||||
switch segmentedControl.selectedSegment {
|
||||
case 0:
|
||||
|
@ -106,51 +120,27 @@ final class MainViewController: NSViewController {
|
|||
}
|
||||
|
||||
@IBAction private func mirror(_ sender: AnyObject) {
|
||||
rtmpStream.videoCapture(for: 0)?.isVideoMirrored.toggle()
|
||||
currentStream?.videoCapture(for: 0)?.isVideoMirrored.toggle()
|
||||
}
|
||||
|
||||
@IBAction private func selectAudio(_ sender: AnyObject) {
|
||||
let device: AVCaptureDevice? = DeviceUtil.device(withLocalizedName: audioPopUpButton.titleOfSelectedItem!, mediaType: .audio)
|
||||
switch segmentedControl.selectedSegment {
|
||||
case 0:
|
||||
rtmpStream.attachAudio(device)
|
||||
httpStream.attachAudio(nil)
|
||||
case 1:
|
||||
rtmpStream.attachAudio(nil)
|
||||
httpStream.attachAudio(device)
|
||||
default:
|
||||
break
|
||||
}
|
||||
let device = DeviceUtil.device(withLocalizedName: audioPopUpButton.titleOfSelectedItem!, mediaType: .audio)
|
||||
currentStream?.attachAudio(device)
|
||||
}
|
||||
|
||||
@IBAction private func selectCamera(_ sender: AnyObject) {
|
||||
let device: AVCaptureDevice? = DeviceUtil.device(withLocalizedName: cameraPopUpButton.titleOfSelectedItem!, mediaType: .video)
|
||||
switch segmentedControl.selectedSegment {
|
||||
case 0:
|
||||
rtmpStream.attachCamera(device)
|
||||
httpStream.attachCamera(nil)
|
||||
case 1:
|
||||
rtmpStream.attachCamera(nil)
|
||||
httpStream.attachCamera(device)
|
||||
default:
|
||||
break
|
||||
}
|
||||
let device = DeviceUtil.device(withLocalizedName: cameraPopUpButton.titleOfSelectedItem!, mediaType: .video)
|
||||
currentStream?.attachCamera(device)
|
||||
}
|
||||
|
||||
@IBAction private func modeChanged(_ sender: NSSegmentedControl) {
|
||||
switch sender.selectedSegment {
|
||||
case 0:
|
||||
httpStream.attachAudio(nil)
|
||||
httpStream.attachCamera(nil)
|
||||
rtmpStream.attachAudio(DeviceUtil.device(withLocalizedName: audioPopUpButton.titleOfSelectedItem!, mediaType: .audio))
|
||||
rtmpStream.attachCamera(DeviceUtil.device(withLocalizedName: cameraPopUpButton.titleOfSelectedItem!, mediaType: .video))
|
||||
currentStream = rtmpStream
|
||||
lfView.attachStream(rtmpStream)
|
||||
urlField.stringValue = Preference.defaultInstance.uri ?? ""
|
||||
case 1:
|
||||
rtmpStream.attachAudio(nil)
|
||||
rtmpStream.attachCamera(nil)
|
||||
httpStream.attachAudio(DeviceUtil.device(withLocalizedName: audioPopUpButton.titleOfSelectedItem!, mediaType: .audio))
|
||||
httpStream.attachCamera(DeviceUtil.device(withLocalizedName: cameraPopUpButton.titleOfSelectedItem!, mediaType: .video))
|
||||
currentStream = httpStream
|
||||
lfView.attachStream(httpStream)
|
||||
urlField.stringValue = "http://{ipAddress}:8080/hello/playlist.m3u8"
|
||||
default:
|
||||
|
@ -169,7 +159,7 @@ final class MainViewController: NSViewController {
|
|||
logger.info(data)
|
||||
switch code {
|
||||
case RTMPConnection.Code.connectSuccess.rawValue:
|
||||
rtmpStream?.publish(Preference.defaultInstance.streamName)
|
||||
rtmpStream.publish(Preference.defaultInstance.streamName)
|
||||
default:
|
||||
break
|
||||
}
|
|
@ -0,0 +1,7 @@
|
|||
import Foundation
|
||||
|
||||
extension NSObject {
|
||||
class var className: String {
|
||||
return "\(self)"
|
||||
}
|
||||
}
|
|
@ -0,0 +1,6 @@
|
|||
import AppKit
|
||||
import Foundation
|
||||
|
||||
extension NSStoryboard.Name {
|
||||
static let main: NSStoryboard.Name = "Main"
|
||||
}
|
|
@ -0,0 +1,13 @@
|
|||
import AppKit
|
||||
import Foundation
|
||||
|
||||
extension NSViewController {
|
||||
class var identifier: NSStoryboard.SceneIdentifier {
|
||||
return className
|
||||
}
|
||||
|
||||
class func getUIViewController() -> NSViewController {
|
||||
let storyboard = NSStoryboard(name: .main, bundle: Bundle.main)
|
||||
return storyboard.instantiateController(withIdentifier: identifier) as! NSViewController
|
||||
}
|
||||
}
|
|
@ -0,0 +1,8 @@
|
|||
import AppKit
|
||||
import Foundation
|
||||
|
||||
final class MainSplitViewController: NSSplitViewController {
|
||||
override func viewDidLoad() {
|
||||
super.viewDidLoad()
|
||||
}
|
||||
}
|
|
@ -0,0 +1,56 @@
|
|||
import AppKit
|
||||
import Foundation
|
||||
|
||||
final class MenuViewController: NSViewController {
|
||||
@IBOutlet private weak var tableView: NSTableView! {
|
||||
didSet {
|
||||
}
|
||||
}
|
||||
|
||||
struct Menu {
|
||||
let title: String
|
||||
let factory: () -> NSViewController
|
||||
}
|
||||
|
||||
private lazy var menus: [Menu] = {
|
||||
var menus: [Menu] = [
|
||||
.init(title: "Publish Test", factory: { CameraPublishViewController.getUIViewController() }),
|
||||
.init(title: "RTMP Playback Test", factory: { RTMPPlaybackViewController.getUIViewController() })
|
||||
]
|
||||
menus.append(.init(title: "SCStream Publish Test", factory: { SCStreamPublishViewController.getUIViewController() }))
|
||||
menus.append(.init(title: "Preference", factory: { PreferenceViewController.getUIViewController() }))
|
||||
return menus
|
||||
}()
|
||||
|
||||
override func viewDidAppear() {
|
||||
super.viewDidAppear()
|
||||
let indexSet = NSIndexSet(index: 0)
|
||||
tableView.selectRowIndexes(indexSet as IndexSet, byExtendingSelection: false)
|
||||
}
|
||||
}
|
||||
|
||||
extension MenuViewController: NSTableViewDataSource {
|
||||
func numberOfRows(in tableView: NSTableView) -> Int {
|
||||
return menus.count
|
||||
}
|
||||
|
||||
func tableViewSelectionDidChange(_ notification: Notification) {
|
||||
guard tableView.selectedRow != -1 else {
|
||||
return
|
||||
}
|
||||
guard let splitViewController = parent as? NSSplitViewController else {
|
||||
return
|
||||
}
|
||||
splitViewController.splitViewItems[1] = NSSplitViewItem(viewController: menus[tableView.selectedRow].factory())
|
||||
}
|
||||
}
|
||||
|
||||
extension MenuViewController: NSTableViewDelegate {
|
||||
func tableView(_ tableView: NSTableView, viewFor tableColumn: NSTableColumn?, row: Int) -> NSView? {
|
||||
guard let identifier = tableColumn?.identifier, let cellView = tableView.makeView(withIdentifier: identifier, owner: self) as? NSTableCellView else {
|
||||
return nil
|
||||
}
|
||||
cellView.textField?.stringValue = menus[row].title
|
||||
return cellView
|
||||
}
|
||||
}
|
|
@ -0,0 +1,27 @@
|
|||
import AppKit
|
||||
import Foundation
|
||||
|
||||
final class PreferenceViewController: NSViewController {
|
||||
@IBOutlet private weak var urlField: NSTextField!
|
||||
@IBOutlet private weak var streamNameField: NSTextField!
|
||||
|
||||
override func viewDidLoad() {
|
||||
super.viewDidLoad()
|
||||
urlField.stringValue = Preference.defaultInstance.uri ?? ""
|
||||
streamNameField.stringValue = Preference.defaultInstance.streamName ?? ""
|
||||
}
|
||||
}
|
||||
|
||||
extension PreferenceViewController: NSTextFieldDelegate {
|
||||
func controlTextDidChange(_ obj: Notification) {
|
||||
guard let textFile = obj.object as? NSTextField else {
|
||||
return
|
||||
}
|
||||
if textFile == urlField {
|
||||
Preference.defaultInstance.uri = textFile.stringValue
|
||||
}
|
||||
if textFile == streamNameField {
|
||||
Preference.defaultInstance.streamName = textFile.stringValue
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,45 @@
|
|||
import AppKit
|
||||
import Foundation
|
||||
import HaishinKit
|
||||
|
||||
final class RTMPPlaybackViewController: NSViewController {
|
||||
@IBOutlet private weak var lfView: MTHKView!
|
||||
private var rtmpConnection = RTMPConnection()
|
||||
private var rtmpStream: RTMPStream!
|
||||
|
||||
override func viewDidLoad() {
|
||||
super.viewDidLoad()
|
||||
rtmpConnection.addEventListener(.rtmpStatus, selector: #selector(rtmpStatusHandler), observer: self)
|
||||
rtmpStream = RTMPStream(connection: rtmpConnection)
|
||||
lfView.attachStream(rtmpStream)
|
||||
}
|
||||
|
||||
@IBAction private func didTappedPlayback(_ button: NSButton) {
|
||||
if button.title == "Stop" {
|
||||
rtmpConnection.close()
|
||||
button.title = "Playback"
|
||||
} else {
|
||||
if let uri = Preference.defaultInstance.uri {
|
||||
rtmpConnection.connect(uri)
|
||||
button.title = "Stop"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@objc
|
||||
private func rtmpStatusHandler(_ notification: Notification) {
|
||||
let e = Event.from(notification)
|
||||
guard
|
||||
let data: ASObject = e.data as? ASObject,
|
||||
let code: String = data["code"] as? String else {
|
||||
return
|
||||
}
|
||||
logger.info(data)
|
||||
switch code {
|
||||
case RTMPConnection.Code.connectSuccess.rawValue:
|
||||
rtmpStream?.play(Preference.defaultInstance.streamName)
|
||||
default:
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,102 @@
|
|||
import AppKit
|
||||
import Foundation
|
||||
import HaishinKit
|
||||
#if canImport(ScreenCaptureKit)
|
||||
import ScreenCaptureKit
|
||||
#endif
|
||||
|
||||
class SCStreamPublishViewController: NSViewController {
|
||||
@IBOutlet private weak var cameraPopUpButton: NSPopUpButton!
|
||||
@IBOutlet private weak var urlField: NSTextField!
|
||||
|
||||
private var currentStream: NetStream?
|
||||
private var rtmpConnection = RTMPConnection()
|
||||
private lazy var rtmpStream: RTMPStream = {
|
||||
let rtmpStream = RTMPStream(connection: rtmpConnection)
|
||||
return rtmpStream
|
||||
}()
|
||||
|
||||
private var _stream: Any?
|
||||
|
||||
@available(macOS 12.3, *)
|
||||
private var stream: SCStream? {
|
||||
get {
|
||||
_stream as? SCStream
|
||||
}
|
||||
set {
|
||||
_stream = newValue
|
||||
Task {
|
||||
try? newValue?.addStreamOutput(rtmpStream, type: .screen, sampleHandlerQueue: DispatchQueue.main)
|
||||
if #available(macOS 13.0, *) {
|
||||
try? newValue?.addStreamOutput(rtmpStream, type: .audio, sampleHandlerQueue: DispatchQueue.main)
|
||||
}
|
||||
try? await newValue?.startCapture()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
override func viewDidLoad() {
|
||||
super.viewDidLoad()
|
||||
urlField.stringValue = Preference.defaultInstance.uri ?? ""
|
||||
if #available(macOS 12.3, *) {
|
||||
Task {
|
||||
try await SCShareableContent.current.windows.forEach {
|
||||
cameraPopUpButton.addItem(withTitle: $0.owningApplication?.applicationName ?? "")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
override func viewWillAppear() {
|
||||
super.viewWillAppear()
|
||||
currentStream = rtmpStream
|
||||
}
|
||||
|
||||
@IBAction private func selectCamera(_ sender: AnyObject) {
|
||||
if #available(macOS 12.3, *) {
|
||||
Task {
|
||||
guard let window = try? await SCShareableContent.current.windows.first(where: { $0.owningApplication?.applicationName == cameraPopUpButton.title }) else {
|
||||
return
|
||||
}
|
||||
let filter = SCContentFilter(desktopIndependentWindow: window)
|
||||
let configuration = SCStreamConfiguration()
|
||||
configuration.width = Int(window.frame.width)
|
||||
configuration.height = Int(window.frame.height)
|
||||
configuration.showsCursor = true
|
||||
self.stream = SCStream(filter: filter, configuration: configuration, delegate: nil)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@IBAction private func publishOrStop(_ sender: NSButton) {
|
||||
// Publish
|
||||
if sender.title == "Publish" {
|
||||
sender.title = "Stop"
|
||||
rtmpConnection.addEventListener(.rtmpStatus, selector: #selector(rtmpStatusHandler), observer: self)
|
||||
rtmpConnection.connect(Preference.defaultInstance.uri ?? "")
|
||||
return
|
||||
}
|
||||
// Stop
|
||||
sender.title = "Publish"
|
||||
rtmpConnection.removeEventListener(.rtmpStatus, selector: #selector(rtmpStatusHandler), observer: self)
|
||||
rtmpConnection.close()
|
||||
return
|
||||
}
|
||||
|
||||
@objc
|
||||
private func rtmpStatusHandler(_ notification: Notification) {
|
||||
let e = Event.from(notification)
|
||||
guard
|
||||
let data: ASObject = e.data as? ASObject,
|
||||
let code: String = data["code"] as? String else {
|
||||
return
|
||||
}
|
||||
logger.info(data)
|
||||
switch code {
|
||||
case RTMPConnection.Code.connectSuccess.rawValue:
|
||||
rtmpStream.publish(Preference.defaultInstance.streamName)
|
||||
default:
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
1
Gemfile
1
Gemfile
|
@ -2,6 +2,5 @@ source 'https://rubygems.org'
|
|||
|
||||
gem 'cocoapods'
|
||||
gem 'fastlane'
|
||||
gem 'jazzy'
|
||||
gem 'synx'
|
||||
|
||||
|
|
100
Gemfile.lock
100
Gemfile.lock
|
@ -1,15 +1,14 @@
|
|||
GEM
|
||||
remote: https://rubygems.org/
|
||||
specs:
|
||||
CFPropertyList (3.0.5)
|
||||
CFPropertyList (3.0.6)
|
||||
rexml
|
||||
activesupport (6.1.7)
|
||||
activesupport (7.0.4.3)
|
||||
concurrent-ruby (~> 1.0, >= 1.0.2)
|
||||
i18n (>= 1.6, < 2)
|
||||
minitest (>= 5.1)
|
||||
tzinfo (~> 2.0)
|
||||
zeitwerk (~> 2.3)
|
||||
addressable (2.8.1)
|
||||
addressable (2.8.4)
|
||||
public_suffix (>= 2.0.2, < 6.0)
|
||||
algoliasearch (1.27.5)
|
||||
httpclient (~> 2.8, >= 2.8.3)
|
||||
|
@ -17,16 +16,16 @@ GEM
|
|||
artifactory (3.0.15)
|
||||
atomos (0.1.3)
|
||||
aws-eventstream (1.2.0)
|
||||
aws-partitions (1.684.0)
|
||||
aws-sdk-core (3.168.4)
|
||||
aws-partitions (1.771.0)
|
||||
aws-sdk-core (3.173.1)
|
||||
aws-eventstream (~> 1, >= 1.0.2)
|
||||
aws-partitions (~> 1, >= 1.651.0)
|
||||
aws-sigv4 (~> 1.5)
|
||||
jmespath (~> 1, >= 1.6.1)
|
||||
aws-sdk-kms (1.61.0)
|
||||
aws-sdk-kms (1.64.0)
|
||||
aws-sdk-core (~> 3, >= 3.165.0)
|
||||
aws-sigv4 (~> 1.1)
|
||||
aws-sdk-s3 (1.117.2)
|
||||
aws-sdk-s3 (1.122.0)
|
||||
aws-sdk-core (~> 3, >= 3.165.0)
|
||||
aws-sdk-kms (~> 1)
|
||||
aws-sigv4 (~> 1.4)
|
||||
|
@ -35,15 +34,15 @@ GEM
|
|||
babosa (1.0.4)
|
||||
claide (1.1.0)
|
||||
clamp (0.6.5)
|
||||
cocoapods (1.11.3)
|
||||
cocoapods (1.12.1)
|
||||
addressable (~> 2.8)
|
||||
claide (>= 1.0.2, < 2.0)
|
||||
cocoapods-core (= 1.11.3)
|
||||
cocoapods-core (= 1.12.1)
|
||||
cocoapods-deintegrate (>= 1.0.3, < 2.0)
|
||||
cocoapods-downloader (>= 1.4.0, < 2.0)
|
||||
cocoapods-downloader (>= 1.6.0, < 2.0)
|
||||
cocoapods-plugins (>= 1.0.0, < 2.0)
|
||||
cocoapods-search (>= 1.0.0, < 2.0)
|
||||
cocoapods-trunk (>= 1.4.0, < 2.0)
|
||||
cocoapods-trunk (>= 1.6.0, < 2.0)
|
||||
cocoapods-try (>= 1.1.0, < 2.0)
|
||||
colored2 (~> 3.1)
|
||||
escape (~> 0.0.4)
|
||||
|
@ -51,10 +50,10 @@ GEM
|
|||
gh_inspector (~> 1.0)
|
||||
molinillo (~> 0.8.0)
|
||||
nap (~> 1.0)
|
||||
ruby-macho (>= 1.0, < 3.0)
|
||||
ruby-macho (>= 2.3.0, < 3.0)
|
||||
xcodeproj (>= 1.21.0, < 2.0)
|
||||
cocoapods-core (1.11.3)
|
||||
activesupport (>= 5.0, < 7)
|
||||
cocoapods-core (1.12.1)
|
||||
activesupport (>= 5.0, < 8)
|
||||
addressable (~> 2.8)
|
||||
algoliasearch (~> 1.0)
|
||||
concurrent-ruby (~> 1.1)
|
||||
|
@ -77,7 +76,7 @@ GEM
|
|||
colorize (0.8.1)
|
||||
commander (4.6.0)
|
||||
highline (~> 2.0.0)
|
||||
concurrent-ruby (1.1.10)
|
||||
concurrent-ruby (1.2.2)
|
||||
declarative (0.0.20)
|
||||
digest-crc (0.6.4)
|
||||
rake (>= 12.0.0, < 14.0.0)
|
||||
|
@ -88,8 +87,8 @@ GEM
|
|||
escape (0.0.4)
|
||||
ethon (0.16.0)
|
||||
ffi (>= 1.15.0)
|
||||
excon (0.95.0)
|
||||
faraday (1.10.2)
|
||||
excon (0.99.0)
|
||||
faraday (1.10.3)
|
||||
faraday-em_http (~> 1.0)
|
||||
faraday-em_synchrony (~> 1.0)
|
||||
faraday-excon (~> 1.1)
|
||||
|
@ -117,8 +116,8 @@ GEM
|
|||
faraday-retry (1.0.3)
|
||||
faraday_middleware (1.2.0)
|
||||
faraday (~> 1.0)
|
||||
fastimage (2.2.6)
|
||||
fastlane (2.211.0)
|
||||
fastimage (2.2.7)
|
||||
fastlane (2.213.0)
|
||||
CFPropertyList (>= 2.3, < 4.0.0)
|
||||
addressable (>= 2.8, < 3.0.0)
|
||||
artifactory (~> 3.0)
|
||||
|
@ -142,7 +141,7 @@ GEM
|
|||
json (< 3.0.0)
|
||||
jwt (>= 2.1.0, < 3)
|
||||
mini_magick (>= 4.9.4, < 5.0.0)
|
||||
multipart-post (~> 2.0.0)
|
||||
multipart-post (>= 2.0.0, < 3.0.0)
|
||||
naturally (~> 2.2)
|
||||
optparse (~> 0.1.1)
|
||||
plist (>= 3.1.0, < 4.0.0)
|
||||
|
@ -161,9 +160,9 @@ GEM
|
|||
fourflusher (2.3.1)
|
||||
fuzzy_match (2.0.4)
|
||||
gh_inspector (1.1.3)
|
||||
google-apis-androidpublisher_v3 (0.32.0)
|
||||
google-apis-core (>= 0.9.1, < 2.a)
|
||||
google-apis-core (0.9.2)
|
||||
google-apis-androidpublisher_v3 (0.42.0)
|
||||
google-apis-core (>= 0.11.0, < 2.a)
|
||||
google-apis-core (0.11.0)
|
||||
addressable (~> 2.5, >= 2.5.1)
|
||||
googleauth (>= 0.16.2, < 2.a)
|
||||
httpclient (>= 2.8.1, < 3.a)
|
||||
|
@ -172,10 +171,10 @@ GEM
|
|||
retriable (>= 2.0, < 4.a)
|
||||
rexml
|
||||
webrick
|
||||
google-apis-iamcredentials_v1 (0.16.0)
|
||||
google-apis-core (>= 0.9.1, < 2.a)
|
||||
google-apis-playcustomapp_v1 (0.12.0)
|
||||
google-apis-core (>= 0.9.1, < 2.a)
|
||||
google-apis-iamcredentials_v1 (0.17.0)
|
||||
google-apis-core (>= 0.11.0, < 2.a)
|
||||
google-apis-playcustomapp_v1 (0.13.0)
|
||||
google-apis-core (>= 0.11.0, < 2.a)
|
||||
google-apis-storage_v1 (0.19.0)
|
||||
google-apis-core (>= 0.9.0, < 2.a)
|
||||
google-cloud-core (1.6.0)
|
||||
|
@ -183,7 +182,7 @@ GEM
|
|||
google-cloud-errors (~> 1.0)
|
||||
google-cloud-env (1.6.0)
|
||||
faraday (>= 0.17.3, < 3.0)
|
||||
google-cloud-errors (1.3.0)
|
||||
google-cloud-errors (1.3.1)
|
||||
google-cloud-storage (1.44.0)
|
||||
addressable (~> 2.8)
|
||||
digest-crc (~> 0.4)
|
||||
|
@ -192,7 +191,7 @@ GEM
|
|||
google-cloud-core (~> 1.6)
|
||||
googleauth (>= 0.16.2, < 2.a)
|
||||
mini_mime (~> 1.0)
|
||||
googleauth (1.3.0)
|
||||
googleauth (1.5.2)
|
||||
faraday (>= 0.17.3, < 3.a)
|
||||
jwt (>= 1.4, < 3.0)
|
||||
memoist (~> 0.16)
|
||||
|
@ -203,42 +202,27 @@ GEM
|
|||
http-cookie (1.0.5)
|
||||
domain_name (~> 0.5)
|
||||
httpclient (2.8.3)
|
||||
i18n (1.12.0)
|
||||
i18n (1.13.0)
|
||||
concurrent-ruby (~> 1.0)
|
||||
jazzy (0.14.3)
|
||||
cocoapods (~> 1.5)
|
||||
mustache (~> 1.1)
|
||||
open4 (~> 1.3)
|
||||
redcarpet (~> 3.4)
|
||||
rexml (~> 3.2)
|
||||
rouge (>= 2.0.6, < 4.0)
|
||||
sassc (~> 2.1)
|
||||
sqlite3 (~> 1.3)
|
||||
xcinvoke (~> 0.3.0)
|
||||
jmespath (1.6.2)
|
||||
json (2.6.3)
|
||||
jwt (2.6.0)
|
||||
liferaft (0.0.6)
|
||||
jwt (2.7.0)
|
||||
memoist (0.16.2)
|
||||
mini_magick (4.12.0)
|
||||
mini_mime (1.1.2)
|
||||
mini_portile2 (2.8.1)
|
||||
minitest (5.16.3)
|
||||
minitest (5.18.0)
|
||||
molinillo (0.8.0)
|
||||
multi_json (1.15.0)
|
||||
multipart-post (2.0.0)
|
||||
mustache (1.1.1)
|
||||
multipart-post (2.3.0)
|
||||
nanaimo (0.3.0)
|
||||
nap (1.1.0)
|
||||
naturally (2.2.1)
|
||||
netrc (0.11.0)
|
||||
open4 (1.3.4)
|
||||
optparse (0.1.1)
|
||||
os (1.1.4)
|
||||
plist (3.6.0)
|
||||
plist (3.7.0)
|
||||
public_suffix (4.0.7)
|
||||
rake (13.0.6)
|
||||
redcarpet (3.5.1)
|
||||
representable (3.2.0)
|
||||
declarative (< 0.1.0)
|
||||
trailblazer-option (>= 0.1.1, < 0.2.0)
|
||||
|
@ -249,19 +233,15 @@ GEM
|
|||
ruby-macho (2.5.1)
|
||||
ruby2_keywords (0.0.5)
|
||||
rubyzip (2.3.2)
|
||||
sassc (2.4.0)
|
||||
ffi (~> 1.9)
|
||||
security (0.1.3)
|
||||
signet (0.17.0)
|
||||
addressable (~> 2.8)
|
||||
faraday (>= 0.17.5, < 3.a)
|
||||
jwt (>= 1.5, < 3.0)
|
||||
multi_json (~> 1.10)
|
||||
simctl (1.6.8)
|
||||
simctl (1.6.10)
|
||||
CFPropertyList
|
||||
naturally
|
||||
sqlite3 (1.5.4)
|
||||
mini_portile2 (~> 2.8.0)
|
||||
synx (0.2.1)
|
||||
clamp (~> 0.6)
|
||||
colorize (~> 0.7)
|
||||
|
@ -276,17 +256,15 @@ GEM
|
|||
tty-cursor (~> 0.7)
|
||||
typhoeus (1.4.0)
|
||||
ethon (>= 0.9.0)
|
||||
tzinfo (2.0.5)
|
||||
tzinfo (2.0.6)
|
||||
concurrent-ruby (~> 1.0)
|
||||
uber (0.1.0)
|
||||
unf (0.1.4)
|
||||
unf_ext
|
||||
unf_ext (0.0.8.2)
|
||||
unicode-display_width (1.8.0)
|
||||
webrick (1.7.0)
|
||||
webrick (1.8.1)
|
||||
word_wrap (1.0.0)
|
||||
xcinvoke (0.3.0)
|
||||
liferaft (~> 0.0.6)
|
||||
xcodeproj (1.22.0)
|
||||
CFPropertyList (>= 2.3.3, < 4.0)
|
||||
atomos (~> 0.1.3)
|
||||
|
@ -298,7 +276,6 @@ GEM
|
|||
rouge (~> 2.0.7)
|
||||
xcpretty-travis-formatter (1.0.1)
|
||||
xcpretty (~> 0.2, >= 0.0.7)
|
||||
zeitwerk (2.6.6)
|
||||
|
||||
PLATFORMS
|
||||
ruby
|
||||
|
@ -306,8 +283,7 @@ PLATFORMS
|
|||
DEPENDENCIES
|
||||
cocoapods
|
||||
fastlane
|
||||
jazzy
|
||||
synx
|
||||
|
||||
BUNDLED WITH
|
||||
2.1.4
|
||||
2.2.33
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
Pod::Spec.new do |s|
|
||||
|
||||
s.name = "HaishinKit"
|
||||
s.version = "1.4.0"
|
||||
s.version = "1.5.2"
|
||||
s.summary = "Camera and Microphone streaming library via RTMP, HLS for iOS, macOS and tvOS."
|
||||
s.swift_version = "5.7"
|
||||
|
||||
|
@ -25,6 +25,6 @@ Pod::Spec.new do |s|
|
|||
s.tvos.source_files = "Platforms/tvOS/*.{h,swift}"
|
||||
|
||||
s.source_files = "Sources/**/*.swift"
|
||||
s.dependency 'Logboard', '~> 2.3.0'
|
||||
s.dependency 'Logboard', '~> 2.3.1'
|
||||
|
||||
end
|
||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -39,6 +39,7 @@
|
|||
ignoresPersistentStateOnLaunch = "NO"
|
||||
debugDocumentVersioning = "YES"
|
||||
debugServiceExtension = "internal"
|
||||
enableGPUValidationMode = "1"
|
||||
allowLocationSimulation = "YES">
|
||||
<BuildableProductRunnable
|
||||
runnableDebuggingMode = "0">
|
||||
|
|
|
@ -39,6 +39,7 @@
|
|||
ignoresPersistentStateOnLaunch = "NO"
|
||||
debugDocumentVersioning = "YES"
|
||||
debugServiceExtension = "internal"
|
||||
enableGPUValidationMode = "1"
|
||||
allowLocationSimulation = "YES">
|
||||
<BuildableProductRunnable
|
||||
runnableDebuggingMode = "0">
|
||||
|
|
|
@ -0,0 +1,78 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<Scheme
|
||||
LastUpgradeVersion = "1420"
|
||||
version = "1.3">
|
||||
<BuildAction
|
||||
parallelizeBuildables = "YES"
|
||||
buildImplicitDependencies = "YES">
|
||||
<BuildActionEntries>
|
||||
<BuildActionEntry
|
||||
buildForTesting = "YES"
|
||||
buildForRunning = "YES"
|
||||
buildForProfiling = "YES"
|
||||
buildForArchiving = "YES"
|
||||
buildForAnalyzing = "YES">
|
||||
<BuildableReference
|
||||
BuildableIdentifier = "primary"
|
||||
BlueprintIdentifier = "29C932931CD76FD300283FC5"
|
||||
BuildableName = "Example macOS.app"
|
||||
BlueprintName = "Example macOS"
|
||||
ReferencedContainer = "container:HaishinKit.xcodeproj">
|
||||
</BuildableReference>
|
||||
</BuildActionEntry>
|
||||
</BuildActionEntries>
|
||||
</BuildAction>
|
||||
<TestAction
|
||||
buildConfiguration = "Debug"
|
||||
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
|
||||
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
|
||||
shouldUseLaunchSchemeArgsEnv = "YES">
|
||||
<Testables>
|
||||
</Testables>
|
||||
</TestAction>
|
||||
<LaunchAction
|
||||
buildConfiguration = "Debug"
|
||||
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
|
||||
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
|
||||
launchStyle = "0"
|
||||
useCustomWorkingDirectory = "NO"
|
||||
ignoresPersistentStateOnLaunch = "NO"
|
||||
debugDocumentVersioning = "YES"
|
||||
debugServiceExtension = "internal"
|
||||
allowLocationSimulation = "YES">
|
||||
<BuildableProductRunnable
|
||||
runnableDebuggingMode = "0">
|
||||
<BuildableReference
|
||||
BuildableIdentifier = "primary"
|
||||
BlueprintIdentifier = "29C932931CD76FD300283FC5"
|
||||
BuildableName = "Example macOS.app"
|
||||
BlueprintName = "Example macOS"
|
||||
ReferencedContainer = "container:HaishinKit.xcodeproj">
|
||||
</BuildableReference>
|
||||
</BuildableProductRunnable>
|
||||
</LaunchAction>
|
||||
<ProfileAction
|
||||
buildConfiguration = "Release"
|
||||
shouldUseLaunchSchemeArgsEnv = "YES"
|
||||
savedToolIdentifier = ""
|
||||
useCustomWorkingDirectory = "NO"
|
||||
debugDocumentVersioning = "YES">
|
||||
<BuildableProductRunnable
|
||||
runnableDebuggingMode = "0">
|
||||
<BuildableReference
|
||||
BuildableIdentifier = "primary"
|
||||
BlueprintIdentifier = "29C932931CD76FD300283FC5"
|
||||
BuildableName = "Example macOS.app"
|
||||
BlueprintName = "Example macOS"
|
||||
ReferencedContainer = "container:HaishinKit.xcodeproj">
|
||||
</BuildableReference>
|
||||
</BuildableProductRunnable>
|
||||
</ProfileAction>
|
||||
<AnalyzeAction
|
||||
buildConfiguration = "Debug">
|
||||
</AnalyzeAction>
|
||||
<ArchiveAction
|
||||
buildConfiguration = "Release"
|
||||
revealArchiveInOrganizer = "YES">
|
||||
</ArchiveAction>
|
||||
</Scheme>
|
|
@ -26,14 +26,24 @@
|
|||
</TestAction>
|
||||
<LaunchAction
|
||||
buildConfiguration = "Debug"
|
||||
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
|
||||
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
|
||||
selectedDebuggerIdentifier = ""
|
||||
selectedLauncherIdentifier = "Xcode.IDEFoundation.Launcher.PosixSpawn"
|
||||
launchStyle = "0"
|
||||
useCustomWorkingDirectory = "NO"
|
||||
ignoresPersistentStateOnLaunch = "NO"
|
||||
debugDocumentVersioning = "YES"
|
||||
debugServiceExtension = "internal"
|
||||
allowLocationSimulation = "YES">
|
||||
<BuildableProductRunnable
|
||||
runnableDebuggingMode = "0">
|
||||
<BuildableReference
|
||||
BuildableIdentifier = "primary"
|
||||
BlueprintIdentifier = "29C9327D1CD76FB800283FC5"
|
||||
BuildableName = "Example iOS.app"
|
||||
BlueprintName = "Example iOS"
|
||||
ReferencedContainer = "container:HaishinKit.xcodeproj">
|
||||
</BuildableReference>
|
||||
</BuildableProductRunnable>
|
||||
</LaunchAction>
|
||||
<ProfileAction
|
||||
buildConfiguration = "Release"
|
||||
|
@ -41,6 +51,15 @@
|
|||
savedToolIdentifier = ""
|
||||
useCustomWorkingDirectory = "NO"
|
||||
debugDocumentVersioning = "YES">
|
||||
<MacroExpansion>
|
||||
<BuildableReference
|
||||
BuildableIdentifier = "primary"
|
||||
BlueprintIdentifier = "29C9327D1CD76FB800283FC5"
|
||||
BuildableName = "Example iOS.app"
|
||||
BlueprintName = "Example iOS"
|
||||
ReferencedContainer = "container:HaishinKit.xcodeproj">
|
||||
</BuildableReference>
|
||||
</MacroExpansion>
|
||||
</ProfileAction>
|
||||
<AnalyzeAction
|
||||
buildConfiguration = "Debug">
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
// swift-tools-version:5.1
|
||||
// swift-tools-version:5.5
|
||||
// The swift-tools-version declares the minimum version of Swift required to build this package.
|
||||
import PackageDescription
|
||||
|
||||
|
@ -7,32 +7,36 @@ let package = Package(
|
|||
platforms: [
|
||||
.iOS(.v11),
|
||||
.tvOS(.v11),
|
||||
.macOS(.v10_13)
|
||||
.macOS(.v10_13),
|
||||
.macCatalyst(.v14)
|
||||
],
|
||||
products: [
|
||||
.library(name: "HaishinKit", targets: ["HaishinKit"])
|
||||
],
|
||||
dependencies: [
|
||||
.package(url: "https://github.com/shogo4405/Logboard.git", from: "2.3.0")
|
||||
.package(url: "https://github.com/shogo4405/Logboard.git", from: "2.3.1")
|
||||
],
|
||||
targets: [
|
||||
.target(name: "SwiftPMSupport"),
|
||||
.target(name: "HaishinKit",
|
||||
dependencies: ["Logboard", "SwiftPMSupport"],
|
||||
path: "Sources",
|
||||
exclude: [
|
||||
"Platforms/iOS/Info.plist",
|
||||
"Platforms/macOS/Info.plist",
|
||||
"Platforms/tvOS/Info.plist"
|
||||
],
|
||||
sources: [
|
||||
"Codec",
|
||||
"Extension",
|
||||
"FLV",
|
||||
"HTTP",
|
||||
"ISO",
|
||||
"Media",
|
||||
"MP4",
|
||||
"MPEG",
|
||||
"Net",
|
||||
"RTMP",
|
||||
"Util",
|
||||
"Platforms",
|
||||
"TS"
|
||||
"Platforms"
|
||||
])
|
||||
]
|
||||
)
|
||||
|
|
|
@ -102,6 +102,9 @@ extension HKView: NetStreamDrawable {
|
|||
|
||||
stream.lockQueue.async {
|
||||
stream.mixer.videoIO.drawable = self
|
||||
DispatchQueue.main.async {
|
||||
self.layer.session = stream.mixer.session
|
||||
}
|
||||
self.currentStream = stream
|
||||
stream.mixer.startRunning()
|
||||
}
|
||||
|
|
|
@ -3,9 +3,7 @@
|
|||
import AppKit
|
||||
import AVFoundation
|
||||
|
||||
/**
|
||||
* A view that displays a video content of a NetStream object which uses AVCaptureVideoPreviewLayer.
|
||||
*/
|
||||
/// A view that displays a video content of a NetStream object which uses AVCaptureVideoPreviewLayer.
|
||||
public class HKView: NSView {
|
||||
/// The view’s background color.
|
||||
public static var defaultBackgroundColor: NSColor = .black
|
||||
|
|
213
README.md
213
README.md
|
@ -1,11 +1,13 @@
|
|||
# HaishinKit for iOS, macOS, tvOS, and [Android](https://github.com/shogo4405/HaishinKit.kt).
|
||||
[](http://cocoapods.org/pods/HaishinKit)
|
||||

|
||||
[](http://cocoapods.org/pods/HaishinKit)
|
||||
[](https://github.com/shogo4405/HaishinKit.swift/stargazers)
|
||||
[](https://github.com/shogo4405/HaishinKit.swift/releases/latest)
|
||||
[](https://swiftpackageindex.com/shogo4405/HaishinKit.swift)
|
||||
[](https://swiftpackageindex.com/shogo4405/HaishinKit.swift)
|
||||
[](https://raw.githubusercontent.com/shogo4405/HaishinKit.swift/master/LICENSE.md)
|
||||
|
||||
* Camera and Microphone streaming library via RTMP, HLS for iOS, macOS, tvOS.
|
||||
* [API Documentation](https://shogo4405.github.io/HaishinKit.swift/)
|
||||
* README.md contains unreleased content, which can be tested on the main branch.
|
||||
* [API Documentation](https://shogo4405.github.io/HaishinKit.swift/documentation/haishinkit)
|
||||
|
||||
<p align="center">
|
||||
<strong>Sponsored with 💖 by</strong><br />
|
||||
|
@ -26,21 +28,27 @@ Enterprise Grade APIs for Feeds & Chat. <a href="https://getstream.io/tutorials/
|
|||
* If you want to support e-mail based communication without GitHub.
|
||||
- Consulting fee is [$50](https://www.paypal.me/shogo4405/50USD)/1 incident. I'm able to response a few days.
|
||||
* [Discord chatroom](https://discord.com/invite/8nkshPnanr).
|
||||
* 日本語が分かる方は日本語でお願いします!
|
||||
* 日本語が分かる方は、日本語でのコミニケーションをお願いします!
|
||||
|
||||
## 💖 Sponsors
|
||||
<p align="center">
|
||||
<a href="https://streamlabs.com/" target="_blank"><img src="https://user-images.githubusercontent.com/810189/206836172-9c360977-ab6b-4eff-860b-82d0e7b06318.png" width="350px" alt="Streamlabs" /></a>
|
||||
</p>
|
||||
|
||||
## 🌏 Related projects
|
||||
Project name |Notes |License
|
||||
----------------|------------|--------------
|
||||
[SRTHaishinKit for iOS.](https://github.com/shogo4405/SRTHaishinKit.swift)|Camera and Microphone streaming library via SRT.|[BSD 3-Clause "New" or "Revised" License](https://github.com/shogo4405/SRTHaishinKit.swift/blob/master/LICENSE.md)
|
||||
[HaishinKit for Android.](https://github.com/shogo4405/HaishinKit.kt)|Camera and Microphone streaming library via RTMP for Android.|[BSD 3-Clause "New" or "Revised" License](https://github.com/shogo4405/HaishinKit.kt/blob/master/LICENSE.md)
|
||||
[HaishinKit for Flutter.](https://github.com/shogo4405/HaishinKit.dart)|Camera and Microphone streaming library via RTMP for Flutter.|[BSD 3-Clause "New" or "Revised" License](https://github.com/shogo4405/HaishinKit.dart/blob/master/LICENSE.md)
|
||||
|
||||
## 🎨 Features
|
||||
### RTMP
|
||||
- [x] Authentication
|
||||
- [x] Publish and Recording (H264/AAC)
|
||||
- [x] Publish and Recording
|
||||
- [x] _Playback (Beta)_
|
||||
- [x] Adaptive bitrate streaming
|
||||
- [x] Handling (see also [#126](/../../issues/126))
|
||||
- [x] Automatic drop frames
|
||||
- [x] Handling (see also [#1153](/../../issues/1153))
|
||||
- [ ] Action Message Format
|
||||
- [x] AMF0
|
||||
- [ ] AMF3
|
||||
|
@ -50,22 +58,50 @@ Enterprise Grade APIs for Feeds & Chat. <a href="https://getstream.io/tutorials/
|
|||
- [x] _Tunneled (RTMPT over SSL/TLS) (Technical Preview)_
|
||||
- [x] _RTMPT (Technical Preview)_
|
||||
- [x] ReplayKit Live as a Broadcast Upload Extension
|
||||
- [x] Supported codec
|
||||
- Audio
|
||||
- [x] AAC
|
||||
- Video
|
||||
- [x] H264/AVC
|
||||
- ex: `stream.videoSettings.profileLevel = kVTProfileLevel_H264_Baseline_3_1 as String`
|
||||
- [x] H265/HEVC ([Server-side support is required.](https://github.com/veovera/enhanced-rtmp/blob/main/enhanced-rtmp-v1.pdf))
|
||||
- ex: `stream.videoSettings.profileLevel = kVTProfileLevel_HEVC_Main_AutoLevel as String`
|
||||
|
||||
### HLS
|
||||
- [x] HTTPService
|
||||
- [x] HLS Publish
|
||||
|
||||
### Multi Camera
|
||||
Supports two camera video sources. A picture-in-picture display that shows the image of the secondary camera of the primary camera. Supports camera split display that displays horizontally and vertically.
|
||||
|
||||
|Picture-In-Picture|Split|
|
||||
|:-:|:-:|
|
||||
|<img width="1382" alt="" src="https://user-images.githubusercontent.com/810189/210043421-ceb18cb7-9b50-43fa-a0a2-8b92b78d9df1.png">|<img width="1382" alt="" src="https://user-images.githubusercontent.com/810189/210043687-a99f21b6-28b2-4170-96de-6c814debd84d.png">|
|
||||
|
||||
```swift
|
||||
let back = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back)
|
||||
stream.attachCamera(back)
|
||||
|
||||
if #available(iOS 13.0, *) {
|
||||
let front = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front)
|
||||
stream.attachMultiCamera(front)
|
||||
}
|
||||
```
|
||||
|
||||
### Rendering
|
||||
|-|[HKView](https://shogo4405.github.io/HaishinKit.swift/Classes/HKView.html)|[PiPHKView](https://shogo4405.github.io/HaishinKit.swift/Classes/PiPHKView.html)|[MTHKView](https://shogo4405.github.io/HaishinKit.swift/Classes/MTHKView.html)|
|
||||
|Features|[HKView](https://shogo4405.github.io/HaishinKit.swift/Classes/HKView.html)|[PiPHKView](https://shogo4405.github.io/HaishinKit.swift/Classes/PiPHKView.html)|[MTHKView](https://shogo4405.github.io/HaishinKit.swift/Classes/MTHKView.html)|
|
||||
|-|:---:|:---:|:---:|
|
||||
|Engine|AVCaptureVideoPreviewLayer|AVSampleBufferDisplayLayer|Metal|
|
||||
|Publish|○|◯|○|
|
||||
|Playback|×|◯|○|
|
||||
|VisualEffect|×|◯|○|
|
||||
|Publish|✔|✔|✔|
|
||||
|Playback|<br />|✔|✔|
|
||||
|VisualEffect|<br />|✔|✔|
|
||||
|PictureInPicture|<br />|✔|<br />|
|
||||
|MultiCamera|<br />|✔|✔|
|
||||
|
||||
### Others
|
||||
- [x] [Support multitasking camera access.](https://developer.apple.com/documentation/avfoundation/capture_setup/accessing_the_camera_while_multitasking)
|
||||
- [x] _Support tvOS 11.0+ (Technical Preview)_
|
||||
- tvOS can't publish Camera and Microphone. Available playback feature.
|
||||
- tvOS can't use camera and microphone devices.
|
||||
- [x] Hardware acceleration for H264 video encoding, AAC audio encoding
|
||||
- [x] Support "Allow app extension API only" option
|
||||
- [ ] ~~Support GPUImage framework (~> 0.5.12)~~
|
||||
|
@ -75,9 +111,8 @@ Enterprise Grade APIs for Feeds & Chat. <a href="https://getstream.io/tutorials/
|
|||
## 🌏 Requirements
|
||||
|-|iOS|OSX|tvOS|Xcode|Swift|
|
||||
|:----:|:----:|:----:|:----:|:----:|:----:|
|
||||
|1.5.0+|11.0+|10.13+|10.2+|14.3+|5.7+|
|
||||
|1.4.0+|11.0+|10.13+|10.2+|14.0+|5.7+|
|
||||
|1.3.0+|11.0+|10.13+|10.2+|14.0+|5.7+|
|
||||
|1.2.0+|9.0+|10.11+|10.2+|13.0+|5.5+|
|
||||
|
||||
## 🐾 Examples
|
||||
Examples project are available for iOS with UIKit, iOS with SwiftUI, macOS and tvOS.
|
||||
|
@ -108,7 +143,7 @@ source 'https://github.com/CocoaPods/Specs.git'
|
|||
use_frameworks!
|
||||
|
||||
def import_pods
|
||||
pod 'HaishinKit', '~> 1.4.0
|
||||
pod 'HaishinKit', '~> 1.5.2
|
||||
end
|
||||
|
||||
target 'Your Target' do
|
||||
|
@ -118,21 +153,15 @@ end
|
|||
```
|
||||
### Carthage
|
||||
```
|
||||
github "shogo4405/HaishinKit.swift" ~> 1.4.0
|
||||
github "shogo4405/HaishinKit.swift" ~> 1.5.2
|
||||
```
|
||||
### Swift Package Manager
|
||||
```
|
||||
https://github.com/shogo4405/HaishinKit.swift
|
||||
```
|
||||
|
||||
## 💠 Donation
|
||||
- GitHub Sponsors
|
||||
- https://github.com/sponsors/shogo4405
|
||||
- Paypal
|
||||
- https://www.paypal.me/shogo4405
|
||||
|
||||
## 🔧 Prerequisites
|
||||
Make sure you setup and activate your AVAudioSession.
|
||||
Make sure you setup and activate your AVAudioSession iOS.
|
||||
```swift
|
||||
import AVFoundation
|
||||
let session = AVAudioSession.sharedInstance()
|
||||
|
@ -147,26 +176,24 @@ do {
|
|||
## 📓 RTMP Usage
|
||||
Real Time Messaging Protocol (RTMP).
|
||||
```swift
|
||||
let rtmpConnection = RTMPConnection()
|
||||
let rtmpStream = RTMPStream(connection: rtmpConnection)
|
||||
rtmpStream.attachAudio(AVCaptureDevice.default(for: .audio)) { error in
|
||||
let connection = RTMPConnection()
|
||||
let stream = RTMPStream(connection: rtmpConnection)
|
||||
stream.attachAudio(AVCaptureDevice.default(for: .audio)) { error in
|
||||
// print(error)
|
||||
}
|
||||
rtmpStream.attachCamera(AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back)) { error in
|
||||
stream.attachCamera(AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back)) { error in
|
||||
// print(error)
|
||||
}
|
||||
|
||||
let hkView = HKView(frame: view.bounds)
|
||||
let hkView = MTHKView(frame: view.bounds)
|
||||
hkView.videoGravity = AVLayerVideoGravity.resizeAspectFill
|
||||
hkView.attachStream(rtmpStream)
|
||||
hkView.attachStream(stream)
|
||||
|
||||
// add ViewController#view
|
||||
view.addSubview(hkView)
|
||||
|
||||
rtmpConnection.connect("rtmp://localhost/appName/instanceName")
|
||||
rtmpStream.publish("streamName")
|
||||
// if you want to record a stream.
|
||||
// rtmpStream.publish("streamName", type: .localRecord)
|
||||
connection.connect("rtmp://localhost/appName/instanceName")
|
||||
stream.publish("streamName")
|
||||
```
|
||||
|
||||
### RTMP URL Format
|
||||
|
@ -184,29 +211,35 @@ rtmpStream.publish("streamName")
|
|||
|
||||
### Settings
|
||||
```swift
|
||||
var rtmpStream = RTMPStream(connection: rtmpConnection)
|
||||
var stream = RTMPStream(connection: rtmpConnection)
|
||||
|
||||
rtmpStream.captureSettings = [
|
||||
.fps: 30, // FPS
|
||||
.sessionPreset: AVCaptureSession.Preset.medium, // input video width/height
|
||||
// .isVideoMirrored: false,
|
||||
// .continuousAutofocus: false, // use camera autofocus mode
|
||||
// .continuousExposure: false, // use camera exposure mode
|
||||
// .preferredVideoStabilizationMode: AVCaptureVideoStabilizationMode.auto
|
||||
]
|
||||
rtmpStream.audioSettings = [
|
||||
.muted: false, // mute audio
|
||||
.bitrate: 32 * 1000,
|
||||
]
|
||||
rtmpStream.videoSettings = [
|
||||
.width: 640, // video output width
|
||||
.height: 360, // video output height
|
||||
.bitrate: 160 * 1000, // video output bitrate
|
||||
.profileLevel: kVTProfileLevel_H264_Baseline_3_1, // H264 Profile require "import VideoToolbox"
|
||||
.maxKeyFrameIntervalDuration: 2, // key frame / sec
|
||||
]
|
||||
// "0" means the same of input
|
||||
rtmpStream.recorderSettings = [
|
||||
stream.frameRate = 30
|
||||
stream.sessionPreset = AVCaptureSession.Preset.medium
|
||||
|
||||
/// Specifies the video capture settings.
|
||||
stream.videoCapture(for: 0).isVideoMirrored = false
|
||||
stream.videoCapture(for: 0).preferredVideoStabilizationMode = .auto
|
||||
// rtmpStream.videoCapture(for: 1).isVideoMirrored = false
|
||||
|
||||
// Specifies the audio codec settings.
|
||||
stream.audioSettings = AudioCodecSettings(
|
||||
bitRate: 64 * 1000
|
||||
)
|
||||
|
||||
// Specifies the video codec settings.
|
||||
stream.videoSettings = VideoCodecSettings(
|
||||
videoSize: .init(width: 854, height: 480),
|
||||
profileLevel: kVTProfileLevel_H264_Baseline_3_1 as String,
|
||||
bitRate: 640 * 1000,
|
||||
maxKeyFrameIntervalDuration: 2,
|
||||
scalingMode: .trim,
|
||||
bitRateMode: .average,
|
||||
allowFrameReordering: nil,
|
||||
isHardwareEncoderEnabled: true
|
||||
)
|
||||
|
||||
// Specifies the recording settings. 0" means the same of input.
|
||||
stream.startRecording([
|
||||
AVMediaType.audio: [
|
||||
AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
|
||||
AVSampleRateKey: 0,
|
||||
|
@ -224,49 +257,93 @@ rtmpStream.recorderSettings = [
|
|||
AVVideoAverageBitRateKey: 512000
|
||||
]
|
||||
*/
|
||||
],
|
||||
]
|
||||
])
|
||||
|
||||
// 2nd arguemnt set false
|
||||
rtmpStream.attachAudio(AVCaptureDevice.default(for: .audio), automaticallyConfiguresApplicationAudioSession: false)
|
||||
stream.attachAudio(AVCaptureDevice.default(for: .audio), automaticallyConfiguresApplicationAudioSession: false)
|
||||
```
|
||||
|
||||
```swift
|
||||
// picrure in picrure settings.
|
||||
stream.multiCamCaptureSettings = MultiCamCaptureSetting(
|
||||
mode: .pip,
|
||||
cornerRadius: 16.0,
|
||||
regionOfInterest: .init(
|
||||
origin: CGPoint(x: 16, y: 16),
|
||||
size: .init(width: 160, height: 160)
|
||||
)
|
||||
)
|
||||
```
|
||||
|
||||
```swift
|
||||
// split settings.
|
||||
stream.multiCamCaptureSettings = MultiCamCaptureSetting(
|
||||
mode: .split(direction: .east),
|
||||
cornerRadius: 0.0,
|
||||
regionOfInterest: .init(
|
||||
origin: .zero,
|
||||
size: .zero
|
||||
)
|
||||
)
|
||||
```
|
||||
### Authentication
|
||||
```swift
|
||||
var rtmpConnection = RTMPConnection()
|
||||
rtmpConnection.connect("rtmp://username:password@localhost/appName/instanceName")
|
||||
var connection = RTMPConnection()
|
||||
connection.connect("rtmp://username:password@localhost/appName/instanceName")
|
||||
```
|
||||
|
||||
### Screen Capture
|
||||
```swift
|
||||
// iOS
|
||||
let screen = IOUIScreenCaptureUnit(shared: UIApplication.shared)
|
||||
screen.delegate = rtmpStream
|
||||
screen.delegate = stream
|
||||
screen.startRunning()
|
||||
|
||||
// macOS
|
||||
rtmpStream.attachScreen(AVCaptureScreenInput(displayID: CGMainDisplayID()))
|
||||
stream.attachScreen(AVCaptureScreenInput(displayID: CGMainDisplayID()))
|
||||
```
|
||||
|
||||
## 📓 HTTP Usage
|
||||
HTTP Live Streaming (HLS). Your iPhone/Mac become a IP Camera. Basic snipet. You can see http://ip.address:8080/hello/playlist.m3u8
|
||||
```swift
|
||||
var httpStream = HTTPStream()
|
||||
httpStream.attachCamera(AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back))
|
||||
httpStream.attachAudio(AVCaptureDevice.default(for: .audio))
|
||||
httpStream.publish("hello")
|
||||
var stream = HTTPStream()
|
||||
stream.attachCamera(AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back))
|
||||
stream.attachAudio(AVCaptureDevice.default(for: .audio))
|
||||
stream.publish("hello")
|
||||
|
||||
var hkView = HKView(frame: view.bounds)
|
||||
var hkView = MTHKView(frame: view.bounds)
|
||||
hkView.attachStream(httpStream)
|
||||
|
||||
var httpService = HLSService(domain: "", type: "_http._tcp", name: "HaishinKit", port: 8080)
|
||||
httpService.addHTTPStream(stream)
|
||||
httpService.startRunning()
|
||||
httpService.addHTTPStream(httpStream)
|
||||
|
||||
// add ViewController#view
|
||||
view.addSubview(hkView)
|
||||
```
|
||||
|
||||
## 💠 Sponsorship
|
||||
Looking for sponsors. Sponsoring I will enable us to:
|
||||
- Purchase smartphones or peripheral devices for testing purposes.
|
||||
- Pay for testing on a specific streaming service or for testing on mobile lines.
|
||||
- Potentially private use to continue the OSS development
|
||||
|
||||
If you use any of our libraries for work, see if your employers would be interested in sponsorship. I have some special offers. I would greatly appreciate. Thank you.
|
||||
- If you request I will note your name product our README.
|
||||
- If you mention on a discussion, an issue or pull request that you are sponsoring us I will prioritise helping you even higher.
|
||||
|
||||
スポンサーを募集しています。利用用途としては、
|
||||
- テスト目的で、スマートフォンの購入や周辺機器の購入を行います。
|
||||
- 特定のストリーミングサービスへのテストの支払いや、モバイル回線でのテストの支払いに利用します。
|
||||
- 著書のOSS開発を継続的に行う為に私的に利用する可能性もあります。
|
||||
|
||||
このライブラリーを仕事で継続的に利用している場合は、ぜひ。雇用主に、スポンサーに興味がないか確認いただけると幸いです。いくつか特典を用意しています。
|
||||
- README.mdへの企業ロゴの掲載
|
||||
- IssueやPull Requestの優先的な対応
|
||||
|
||||
[Sponsorship](https://github.com/sponsors/shogo4405)
|
||||
|
||||
## 📖 Reference
|
||||
* Adobe’s Real Time Messaging Protocol
|
||||
* http://www.adobe.com/content/dam/Adobe/en/devnet/rtmp/pdf/rtmp_specification_1.0.pdf
|
||||
|
|
|
@ -4,10 +4,12 @@ import AVFoundation
|
|||
* The interface a AudioCodec uses to inform its delegate.
|
||||
*/
|
||||
public protocol AudioCodecDelegate: AnyObject {
|
||||
/// Tells the receiver to set a formatDescription.
|
||||
func audioCodec(_ codec: AudioCodec, didSet formatDescription: CMFormatDescription?)
|
||||
/// Tells the receiver to output a encoded or decoded sampleBuffer.
|
||||
func audioCodec(_ codec: AudioCodec, didOutput sample: UnsafeMutableAudioBufferListPointer, presentationTimeStamp: CMTime)
|
||||
/// Tells the receiver to output an AVAudioFormat.
|
||||
func audioCodec(_ codec: AudioCodec, didOutput audioFormat: AVAudioFormat)
|
||||
/// Tells the receiver to output an encoded or decoded CMSampleBuffer.
|
||||
func audioCodec(_ codec: AudioCodec, didOutput audioBuffer: AVAudioBuffer, presentationTimeStamp: CMTime)
|
||||
/// Tells the receiver to occured an error.
|
||||
func audioCodec(_ codec: AudioCodec, errorOccurred error: AudioCodec.Error)
|
||||
}
|
||||
|
||||
// MARK: -
|
||||
|
@ -16,302 +18,149 @@ public protocol AudioCodecDelegate: AnyObject {
|
|||
* - seealso: https://developer.apple.com/library/ios/technotes/tn2236/_index.html
|
||||
*/
|
||||
public class AudioCodec {
|
||||
enum Error: Swift.Error {
|
||||
case setPropertyError(id: AudioConverterPropertyID, status: OSStatus)
|
||||
/// The AudioCodec error domain codes.
|
||||
public enum Error: Swift.Error {
|
||||
case failedToCreate(from: AVAudioFormat, to: AVAudioFormat)
|
||||
case failedToConvert(error: NSError)
|
||||
}
|
||||
|
||||
/**
|
||||
* The audio encoding or decoding options.
|
||||
*/
|
||||
public enum Option: String, KeyPathRepresentable {
|
||||
/// Specifies the bitRate of audio output.
|
||||
case bitrate
|
||||
/// Specifies the sampleRate of audio output.
|
||||
case sampleRate
|
||||
/// The bitRate of audio output.
|
||||
case actualBitrate
|
||||
|
||||
public var keyPath: AnyKeyPath {
|
||||
switch self {
|
||||
case .bitrate:
|
||||
return \AudioCodec.bitrate
|
||||
case .sampleRate:
|
||||
return \AudioCodec.sampleRate
|
||||
case .actualBitrate:
|
||||
return \AudioCodec.actualBitrate
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// The default minimum bitrate for an AudioCodec, value is 8000.
|
||||
public static let minimumBitrate: UInt32 = 8 * 1000
|
||||
/// The default bitrate for an AudioCidec, the value is 32000.
|
||||
public static let defaultBitrate: UInt32 = 32 * 1000
|
||||
/// The default channels for an AudioCodec, the value is 0 means according to a input source.
|
||||
public static let defaultChannels: UInt32 = 0
|
||||
/// The default sampleRate for an AudioCodec, the value is 0 means according to a input source.
|
||||
public static let defaultSampleRate: Double = 0
|
||||
/// The default mamimu buffers for an AudioCodec.
|
||||
public static let defaultMaximumBuffers: Int = 1
|
||||
|
||||
/// Specifies the output format.
|
||||
public var destination: AudioCodecFormat = .aac
|
||||
/// Specifies the delegate.
|
||||
public weak var delegate: AudioCodecDelegate?
|
||||
public weak var delegate: (any AudioCodecDelegate)?
|
||||
/// This instance is running to process(true) or not(false).
|
||||
public private(set) var isRunning: Atomic<Bool> = .init(false)
|
||||
/// Specifies the settings for audio codec.
|
||||
public var settings: Setting<AudioCodec, Option> = [:] {
|
||||
public var settings: AudioCodecSettings = .default {
|
||||
didSet {
|
||||
settings.observer = self
|
||||
}
|
||||
}
|
||||
private static let numSamples: Int = 1024
|
||||
|
||||
var bitrate: UInt32 = AudioCodec.defaultBitrate {
|
||||
didSet {
|
||||
guard bitrate != oldValue else {
|
||||
return
|
||||
}
|
||||
lockQueue.async {
|
||||
if let format = self._inDestinationFormat {
|
||||
self.setBitrateUntilNoErr(self.bitrate * format.mChannelsPerFrame)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
var sampleRate: Double = AudioCodec.defaultSampleRate
|
||||
var actualBitrate: UInt32 = AudioCodec.defaultBitrate {
|
||||
didSet {
|
||||
logger.info(actualBitrate)
|
||||
}
|
||||
}
|
||||
var channels: UInt32 = AudioCodec.defaultChannels
|
||||
var formatDescription: CMFormatDescription? {
|
||||
didSet {
|
||||
guard !CMFormatDescriptionEqual(formatDescription, otherFormatDescription: oldValue) else {
|
||||
return
|
||||
}
|
||||
logger.info(formatDescription.debugDescription)
|
||||
delegate?.audioCodec(self, didSet: formatDescription)
|
||||
}
|
||||
}
|
||||
var lockQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.AudioConverter.lock")
|
||||
var inSourceFormat: AudioStreamBasicDescription? {
|
||||
didSet {
|
||||
guard let inSourceFormat = inSourceFormat, inSourceFormat != oldValue else {
|
||||
return
|
||||
}
|
||||
_converter = nil
|
||||
formatDescription = nil
|
||||
_inDestinationFormat = nil
|
||||
logger.info("\(String(describing: inSourceFormat))")
|
||||
let nonInterleaved = inSourceFormat.mFormatFlags & kAudioFormatFlagIsNonInterleaved != 0
|
||||
maximumBuffers = nonInterleaved ? Int(inSourceFormat.mChannelsPerFrame) : AudioCodec.defaultMaximumBuffers
|
||||
currentAudioBuffer = AudioCodecBuffer(inSourceFormat, numSamples: AudioCodec.numSamples)
|
||||
settings.apply(audioConverter, oldValue: oldValue)
|
||||
}
|
||||
}
|
||||
var effects: Set<AudioEffect> = []
|
||||
private let numSamples = AudioCodec.numSamples
|
||||
private var maximumBuffers: Int = AudioCodec.defaultMaximumBuffers
|
||||
private var currentAudioBuffer = AudioCodecBuffer(AudioStreamBasicDescription(mSampleRate: 0, mFormatID: 0, mFormatFlags: 0, mBytesPerPacket: 0, mFramesPerPacket: 0, mBytesPerFrame: 0, mChannelsPerFrame: 1, mBitsPerChannel: 0, mReserved: 0))
|
||||
private var _inDestinationFormat: AudioStreamBasicDescription?
|
||||
private var inDestinationFormat: AudioStreamBasicDescription {
|
||||
get {
|
||||
if _inDestinationFormat == nil {
|
||||
_inDestinationFormat = destination.audioStreamBasicDescription(inSourceFormat, sampleRate: sampleRate, channels: channels)
|
||||
CMAudioFormatDescriptionCreate(
|
||||
allocator: kCFAllocatorDefault,
|
||||
asbd: &_inDestinationFormat!,
|
||||
layoutSize: 0,
|
||||
layout: nil,
|
||||
magicCookieSize: 0,
|
||||
magicCookie: nil,
|
||||
extensions: nil,
|
||||
formatDescriptionOut: &formatDescription
|
||||
)
|
||||
var lockQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.AudioCodec.lock")
|
||||
var inSourceFormat: AudioStreamBasicDescription? {
|
||||
didSet {
|
||||
guard var inSourceFormat, inSourceFormat != oldValue else {
|
||||
return
|
||||
}
|
||||
return _inDestinationFormat!
|
||||
}
|
||||
set {
|
||||
_inDestinationFormat = newValue
|
||||
outputBuffers.removeAll()
|
||||
ringBuffer = .init(&inSourceFormat)
|
||||
audioConverter = makeAudioConverter(&inSourceFormat)
|
||||
}
|
||||
}
|
||||
private var ringBuffer: AudioCodecRingBuffer?
|
||||
private var outputBuffers: [AVAudioBuffer] = []
|
||||
private var audioConverter: AVAudioConverter?
|
||||
|
||||
private var audioStreamPacketDescription = AudioStreamPacketDescription(mStartOffset: 0, mVariableFramesInPacket: 0, mDataByteSize: 0)
|
||||
private let inputDataProc: AudioConverterComplexInputDataProc = {(_: AudioConverterRef, ioNumberDataPackets: UnsafeMutablePointer<UInt32>, ioData: UnsafeMutablePointer<AudioBufferList>, outDataPacketDescription: UnsafeMutablePointer<UnsafeMutablePointer<AudioStreamPacketDescription>?>?, inUserData: UnsafeMutableRawPointer?) in
|
||||
Unmanaged<AudioCodec>.fromOpaque(inUserData!).takeUnretainedValue().onInputDataForAudioConverter(
|
||||
ioNumberDataPackets,
|
||||
ioData: ioData,
|
||||
outDataPacketDescription: outDataPacketDescription
|
||||
)
|
||||
}
|
||||
|
||||
/// Create an AudioCodec instance.
|
||||
public init() {
|
||||
settings.observer = self
|
||||
}
|
||||
|
||||
private var _converter: AudioConverterRef?
|
||||
private var converter: AudioConverterRef {
|
||||
var status: OSStatus = noErr
|
||||
if _converter == nil {
|
||||
var inClassDescriptions = destination.inClassDescriptions
|
||||
status = AudioConverterNewSpecific(
|
||||
&inSourceFormat!,
|
||||
&inDestinationFormat,
|
||||
UInt32(inClassDescriptions.count),
|
||||
&inClassDescriptions,
|
||||
&_converter
|
||||
)
|
||||
setBitrateUntilNoErr(bitrate * inDestinationFormat.mChannelsPerFrame)
|
||||
}
|
||||
if status != noErr {
|
||||
logger.warn("\(status)")
|
||||
}
|
||||
return _converter!
|
||||
}
|
||||
|
||||
/// Encodes bytes data.
|
||||
public func encodeBytes(_ bytes: UnsafeMutableRawPointer?, count: Int, presentationTimeStamp: CMTime) {
|
||||
/// Append a CMSampleBuffer.
|
||||
public func appendSampleBuffer(_ sampleBuffer: CMSampleBuffer, offset: Int = 0) {
|
||||
guard isRunning.value else {
|
||||
currentAudioBuffer.clear()
|
||||
return
|
||||
}
|
||||
currentAudioBuffer.write(bytes, count: count, presentationTimeStamp: presentationTimeStamp)
|
||||
convert(numSamples * Int(destination.bytesPerFrame), presentationTimeStamp: presentationTimeStamp)
|
||||
}
|
||||
|
||||
/// Encodes a CMSampleBuffer.
|
||||
public func encodeSampleBuffer(_ sampleBuffer: CMSampleBuffer, offset: Int = 0) {
|
||||
guard let format = sampleBuffer.formatDescription, CMSampleBufferDataIsReady(sampleBuffer) && isRunning.value else {
|
||||
currentAudioBuffer.clear()
|
||||
switch settings.format {
|
||||
case .aac:
|
||||
guard let audioConverter, let ringBuffer else {
|
||||
return
|
||||
}
|
||||
let numSamples = ringBuffer.appendSampleBuffer(sampleBuffer, offset: offset)
|
||||
if ringBuffer.isReady {
|
||||
guard let buffer = getOutputBuffer() else {
|
||||
return
|
||||
}
|
||||
|
||||
inSourceFormat = format.streamBasicDescription?.pointee
|
||||
|
||||
do {
|
||||
let numSamples = try currentAudioBuffer.write(sampleBuffer, offset: offset)
|
||||
if currentAudioBuffer.isReady {
|
||||
for effect in effects {
|
||||
effect.execute(currentAudioBuffer.input, format: inSourceFormat)
|
||||
effect.execute(ringBuffer.current, presentationTimeStamp: ringBuffer.presentationTimeStamp)
|
||||
}
|
||||
convert(currentAudioBuffer.maxLength, presentationTimeStamp: currentAudioBuffer.presentationTimeStamp)
|
||||
var error: NSError?
|
||||
audioConverter.convert(to: buffer, error: &error) { _, status in
|
||||
status.pointee = .haveData
|
||||
return ringBuffer.current
|
||||
}
|
||||
if let error {
|
||||
delegate?.audioCodec(self, errorOccurred: .failedToConvert(error: error))
|
||||
} else {
|
||||
delegate?.audioCodec(self, didOutput: buffer, presentationTimeStamp: ringBuffer.presentationTimeStamp)
|
||||
}
|
||||
ringBuffer.next()
|
||||
}
|
||||
if offset + numSamples < sampleBuffer.numSamples {
|
||||
encodeSampleBuffer(sampleBuffer, offset: offset + numSamples)
|
||||
appendSampleBuffer(sampleBuffer, offset: offset + numSamples)
|
||||
}
|
||||
} catch {
|
||||
logger.error(error)
|
||||
case .pcm:
|
||||
var offset = 0
|
||||
var presentationTimeStamp = sampleBuffer.presentationTimeStamp
|
||||
for i in 0..<sampleBuffer.numSamples {
|
||||
guard let buffer = makeInputBuffer() as? AVAudioCompressedBuffer else {
|
||||
continue
|
||||
}
|
||||
let sampleSize = CMSampleBufferGetSampleSize(sampleBuffer, at: i)
|
||||
let byteCount = sampleSize - ADTSHeader.size
|
||||
buffer.packetDescriptions?.pointee = AudioStreamPacketDescription(mStartOffset: 0, mVariableFramesInPacket: 0, mDataByteSize: UInt32(byteCount))
|
||||
buffer.packetCount = 1
|
||||
buffer.byteLength = UInt32(byteCount)
|
||||
if let blockBuffer = sampleBuffer.dataBuffer {
|
||||
CMBlockBufferCopyDataBytes(blockBuffer, atOffset: offset + ADTSHeader.size, dataLength: byteCount, destination: buffer.data)
|
||||
appendAudioBuffer(buffer, presentationTimeStamp: presentationTimeStamp)
|
||||
presentationTimeStamp = CMTimeAdd(presentationTimeStamp, CMTime(value: CMTimeValue(1024), timescale: sampleBuffer.presentationTimeStamp.timescale))
|
||||
offset += sampleSize
|
||||
}
|
||||
|
||||
@inline(__always)
|
||||
private func convert(_ dataBytesSize: Int, presentationTimeStamp: CMTime) {
|
||||
var finished = false
|
||||
repeat {
|
||||
var ioOutputDataPacketSize: UInt32 = destination.packetSize
|
||||
|
||||
let maximumBuffers = destination.maximumBuffers((channels == 0) ? inSourceFormat?.mChannelsPerFrame ?? 1 : channels)
|
||||
let outOutputData: UnsafeMutableAudioBufferListPointer = AudioBufferList.allocate(maximumBuffers: maximumBuffers)
|
||||
for i in 0..<maximumBuffers {
|
||||
outOutputData[i].mNumberChannels = inDestinationFormat.mChannelsPerFrame
|
||||
outOutputData[i].mDataByteSize = UInt32(dataBytesSize)
|
||||
outOutputData[i].mData = UnsafeMutableRawPointer.allocate(byteCount: dataBytesSize, alignment: 0)
|
||||
}
|
||||
|
||||
let status = AudioConverterFillComplexBuffer(
|
||||
converter,
|
||||
inputDataProc,
|
||||
Unmanaged.passUnretained(self).toOpaque(),
|
||||
&ioOutputDataPacketSize,
|
||||
outOutputData.unsafeMutablePointer,
|
||||
nil
|
||||
)
|
||||
|
||||
switch status {
|
||||
// kAudioConverterErr_InvalidInputSize: perhaps mistake. but can support macOS BuiltIn Mic #61
|
||||
case noErr, kAudioConverterErr_InvalidInputSize:
|
||||
delegate?.audioCodec(self, didOutput: outOutputData, presentationTimeStamp: presentationTimeStamp)
|
||||
case -1:
|
||||
if destination == .pcm {
|
||||
delegate?.audioCodec(self, didOutput: outOutputData, presentationTimeStamp: presentationTimeStamp)
|
||||
}
|
||||
finished = true
|
||||
default:
|
||||
finished = true
|
||||
}
|
||||
|
||||
for i in 0..<outOutputData.count {
|
||||
if let mData = outOutputData[i].mData {
|
||||
free(mData)
|
||||
}
|
||||
}
|
||||
|
||||
free(outOutputData.unsafeMutablePointer)
|
||||
} while !finished
|
||||
}
|
||||
|
||||
func invalidate() {
|
||||
lockQueue.async {
|
||||
self.inSourceFormat = nil
|
||||
self._inDestinationFormat = nil
|
||||
if let converter: AudioConverterRef = self._converter {
|
||||
AudioConverterDispose(converter)
|
||||
}
|
||||
self._converter = nil
|
||||
}
|
||||
}
|
||||
|
||||
func onInputDataForAudioConverter(
|
||||
_ ioNumberDataPackets: UnsafeMutablePointer<UInt32>,
|
||||
ioData: UnsafeMutablePointer<AudioBufferList>,
|
||||
outDataPacketDescription: UnsafeMutablePointer<UnsafeMutablePointer<AudioStreamPacketDescription>?>?) -> OSStatus {
|
||||
guard currentAudioBuffer.isReady else {
|
||||
ioNumberDataPackets.pointee = 0
|
||||
return -1
|
||||
}
|
||||
|
||||
memcpy(ioData, currentAudioBuffer.input.unsafePointer, currentAudioBuffer.listSize)
|
||||
if destination == .pcm {
|
||||
ioNumberDataPackets.pointee = 1
|
||||
} else {
|
||||
ioNumberDataPackets.pointee = UInt32(numSamples)
|
||||
}
|
||||
|
||||
if destination == .pcm && outDataPacketDescription != nil {
|
||||
audioStreamPacketDescription.mDataByteSize = currentAudioBuffer.input.unsafePointer.pointee.mBuffers.mDataByteSize
|
||||
outDataPacketDescription?.pointee = withUnsafeMutablePointer(to: &audioStreamPacketDescription) { $0 }
|
||||
}
|
||||
|
||||
currentAudioBuffer.clear()
|
||||
|
||||
return noErr
|
||||
}
|
||||
|
||||
private func setBitrateUntilNoErr(_ bitrate: UInt32) {
|
||||
do {
|
||||
try setProperty(id: kAudioConverterEncodeBitRate, data: bitrate * inDestinationFormat.mChannelsPerFrame)
|
||||
actualBitrate = bitrate
|
||||
} catch {
|
||||
if Self.minimumBitrate < bitrate {
|
||||
setBitrateUntilNoErr(bitrate - Self.minimumBitrate)
|
||||
} else {
|
||||
actualBitrate = Self.minimumBitrate
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private func setProperty<T>(id: AudioConverterPropertyID, data: T) throws {
|
||||
guard let converter: AudioConverterRef = _converter else {
|
||||
func appendAudioBuffer(_ audioBuffer: AVAudioBuffer, presentationTimeStamp: CMTime) {
|
||||
guard isRunning.value, let audioConverter, let buffer = getOutputBuffer() else {
|
||||
return
|
||||
}
|
||||
let size = UInt32(MemoryLayout<T>.size)
|
||||
var buffer = data
|
||||
let status = AudioConverterSetProperty(converter, id, size, &buffer)
|
||||
guard status == 0 else {
|
||||
throw Error.setPropertyError(id: id, status: status)
|
||||
var error: NSError?
|
||||
audioConverter.convert(to: buffer, error: &error) { _, status in
|
||||
status.pointee = .haveData
|
||||
return audioBuffer
|
||||
}
|
||||
if let error {
|
||||
delegate?.audioCodec(self, errorOccurred: .failedToConvert(error: error))
|
||||
} else {
|
||||
delegate?.audioCodec(self, didOutput: buffer, presentationTimeStamp: presentationTimeStamp)
|
||||
}
|
||||
}
|
||||
|
||||
func makeInputBuffer() -> AVAudioBuffer? {
|
||||
guard let inputFormat = audioConverter?.inputFormat else {
|
||||
return nil
|
||||
}
|
||||
switch inSourceFormat?.mFormatID {
|
||||
case kAudioFormatLinearPCM:
|
||||
return AVAudioPCMBuffer(pcmFormat: inputFormat, frameCapacity: 1024)
|
||||
default:
|
||||
return AVAudioCompressedBuffer(format: inputFormat, packetCapacity: 1, maximumPacketSize: 1024)
|
||||
}
|
||||
}
|
||||
|
||||
func releaseOutputBuffer(_ buffer: AVAudioBuffer) {
|
||||
outputBuffers.append(buffer)
|
||||
}
|
||||
|
||||
private func getOutputBuffer() -> AVAudioBuffer? {
|
||||
guard let outputFormat = audioConverter?.outputFormat else {
|
||||
return nil
|
||||
}
|
||||
if outputBuffers.isEmpty {
|
||||
return settings.format.makeAudioBuffer(outputFormat)
|
||||
}
|
||||
return outputBuffers.removeFirst()
|
||||
}
|
||||
|
||||
private func makeAudioConverter(_ inSourceFormat: inout AudioStreamBasicDescription) -> AVAudioConverter? {
|
||||
guard
|
||||
let inputFormat = AVAudioFormat(streamDescription: &inSourceFormat),
|
||||
let outputFormat = settings.format.makeAudioFormat(inSourceFormat) else {
|
||||
return nil
|
||||
}
|
||||
let converter = AVAudioConverter(from: inputFormat, to: outputFormat)
|
||||
settings.apply(converter, oldValue: nil)
|
||||
if converter == nil {
|
||||
delegate?.audioCodec(self, errorOccurred: .failedToCreate(from: inputFormat, to: outputFormat))
|
||||
} else {
|
||||
delegate?.audioCodec(self, didOutput: outputFormat)
|
||||
}
|
||||
return converter
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -319,20 +168,24 @@ extension AudioCodec: Running {
|
|||
// MARK: Running
|
||||
public func startRunning() {
|
||||
lockQueue.async {
|
||||
guard !self.isRunning.value else {
|
||||
return
|
||||
}
|
||||
if let audioConverter = self.audioConverter {
|
||||
self.delegate?.audioCodec(self, didOutput: audioConverter.outputFormat)
|
||||
}
|
||||
self.isRunning.mutate { $0 = true }
|
||||
}
|
||||
}
|
||||
|
||||
public func stopRunning() {
|
||||
lockQueue.async {
|
||||
if let convert: AudioQueueRef = self._converter {
|
||||
AudioConverterDispose(convert)
|
||||
self._converter = nil
|
||||
guard self.isRunning.value else {
|
||||
return
|
||||
}
|
||||
self.currentAudioBuffer.clear()
|
||||
self.inSourceFormat = nil
|
||||
self.formatDescription = nil
|
||||
self._inDestinationFormat = nil
|
||||
self.audioConverter = nil
|
||||
self.ringBuffer = nil
|
||||
self.isRunning.mutate { $0 = false }
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,126 +0,0 @@
|
|||
import AVFoundation
|
||||
import Foundation
|
||||
|
||||
final class AudioCodecBuffer {
|
||||
enum Error: Swift.Error {
|
||||
case isReady
|
||||
case noBlockBuffer
|
||||
}
|
||||
|
||||
static let numSamples = 1024
|
||||
|
||||
let input: UnsafeMutableAudioBufferListPointer
|
||||
|
||||
var isReady: Bool {
|
||||
numSamples == index
|
||||
}
|
||||
|
||||
var maxLength: Int {
|
||||
numSamples * bytesPerFrame * numberChannels * maximumBuffers
|
||||
}
|
||||
|
||||
let listSize: Int
|
||||
|
||||
private var index = 0
|
||||
private var buffers: [Data]
|
||||
private var numSamples: Int
|
||||
private let bytesPerFrame: Int
|
||||
private let maximumBuffers: Int
|
||||
private let numberChannels: Int
|
||||
private let bufferList: UnsafeMutableAudioBufferListPointer
|
||||
private(set) var presentationTimeStamp: CMTime = .invalid
|
||||
|
||||
deinit {
|
||||
input.unsafeMutablePointer.deallocate()
|
||||
bufferList.unsafeMutablePointer.deallocate()
|
||||
}
|
||||
|
||||
init(_ inSourceFormat: AudioStreamBasicDescription, numSamples: Int = AudioCodecBuffer.numSamples) {
|
||||
self.numSamples = numSamples
|
||||
let nonInterleaved = inSourceFormat.mFormatFlags & kAudioFormatFlagIsNonInterleaved != 0
|
||||
bytesPerFrame = Int(inSourceFormat.mBytesPerFrame)
|
||||
maximumBuffers = nonInterleaved ? Int(inSourceFormat.mChannelsPerFrame) : 1
|
||||
listSize = AudioBufferList.sizeInBytes(maximumBuffers: maximumBuffers)
|
||||
input = AudioBufferList.allocate(maximumBuffers: maximumBuffers)
|
||||
bufferList = AudioBufferList.allocate(maximumBuffers: maximumBuffers)
|
||||
numberChannels = nonInterleaved ? 1 : Int(inSourceFormat.mChannelsPerFrame)
|
||||
let dataByteSize = numSamples * bytesPerFrame
|
||||
buffers = .init(repeating: .init(repeating: 0, count: numSamples * bytesPerFrame), count: maximumBuffers)
|
||||
input.unsafeMutablePointer.pointee.mNumberBuffers = UInt32(maximumBuffers)
|
||||
for i in 0..<maximumBuffers {
|
||||
input[i].mNumberChannels = UInt32(numberChannels)
|
||||
buffers[i].withUnsafeMutableBytes { pointer in
|
||||
input[i].mData = pointer.baseAddress
|
||||
}
|
||||
input[i].mDataByteSize = UInt32(dataByteSize)
|
||||
}
|
||||
}
|
||||
|
||||
func write(_ bytes: UnsafeMutableRawPointer?, count: Int, presentationTimeStamp: CMTime) {
|
||||
numSamples = count
|
||||
index = count
|
||||
input.unsafeMutablePointer.pointee.mBuffers.mNumberChannels = 1
|
||||
input.unsafeMutablePointer.pointee.mBuffers.mData = bytes
|
||||
input.unsafeMutablePointer.pointee.mBuffers.mDataByteSize = UInt32(count)
|
||||
}
|
||||
|
||||
func write(_ sampleBuffer: CMSampleBuffer, offset: Int) throws -> Int {
|
||||
guard !isReady else {
|
||||
throw Error.isReady
|
||||
}
|
||||
|
||||
if presentationTimeStamp == .invalid {
|
||||
let offsetTimeStamp: CMTime = offset == 0 ? .zero : CMTime(value: CMTimeValue(offset), timescale: sampleBuffer.presentationTimeStamp.timescale)
|
||||
presentationTimeStamp = CMTimeAdd(sampleBuffer.presentationTimeStamp, offsetTimeStamp)
|
||||
}
|
||||
|
||||
var blockBuffer: CMBlockBuffer?
|
||||
CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
|
||||
sampleBuffer,
|
||||
bufferListSizeNeededOut: nil,
|
||||
bufferListOut: bufferList.unsafeMutablePointer,
|
||||
bufferListSize: listSize,
|
||||
blockBufferAllocator: kCFAllocatorDefault,
|
||||
blockBufferMemoryAllocator: kCFAllocatorDefault,
|
||||
flags: 0,
|
||||
blockBufferOut: &blockBuffer
|
||||
)
|
||||
|
||||
guard blockBuffer != nil else {
|
||||
throw Error.noBlockBuffer
|
||||
}
|
||||
|
||||
let numSamples = min(self.numSamples - index, sampleBuffer.numSamples - offset)
|
||||
for i in 0..<maximumBuffers {
|
||||
guard let data = bufferList[i].mData else {
|
||||
continue
|
||||
}
|
||||
buffers[i].replaceSubrange(
|
||||
index * bytesPerFrame..<index * bytesPerFrame + numSamples * bytesPerFrame,
|
||||
with: data.advanced(by: offset * bytesPerFrame),
|
||||
count: numSamples * bytesPerFrame
|
||||
)
|
||||
}
|
||||
index += numSamples
|
||||
|
||||
return numSamples
|
||||
}
|
||||
|
||||
func muted() {
|
||||
for i in 0..<maximumBuffers {
|
||||
buffers[i].resetBytes(in: 0...)
|
||||
}
|
||||
}
|
||||
|
||||
func clear() {
|
||||
presentationTimeStamp = .invalid
|
||||
index = 0
|
||||
}
|
||||
}
|
||||
|
||||
extension AudioCodecBuffer: CustomDebugStringConvertible {
    // MARK: CustomDebugStringConvertible
    /// A reflection-based dump of the buffer's stored properties.
    var debugDescription: String {
        let mirror = Mirror(reflecting: self)
        return mirror.debugDescription
    }
}
|
|
@ -1,115 +0,0 @@
|
|||
import AudioToolbox
|
||||
|
||||
/// The type of the AudioCodec supports format.
public enum AudioCodecFormat {
    /// The AAC format.
    case aac
    /// The PCM format.
    case pcm

    /// The CoreAudio format identifier for this case.
    var formatID: AudioFormatID {
        self == .aac ? kAudioFormatMPEG4AAC : kAudioFormatLinearPCM
    }

    /// Format-specific flags: the AAC object type, or the LPCM sample layout flags.
    var formatFlags: UInt32 {
        switch self {
        case .aac:
            return UInt32(MPEG4ObjectID.AAC_LC.rawValue)
        case .pcm:
            return kAudioFormatFlagIsNonInterleaved | kAudioFormatFlagIsPacked | kAudioFormatFlagIsFloat
        }
    }

    /// The number of frames carried by one packet (1024 for AAC, 1 for LPCM).
    var framesPerPacket: UInt32 {
        self == .aac ? 1024 : 1
    }

    /// The packet count used when sizing conversion buffers.
    var packetSize: UInt32 {
        self == .aac ? 1 : 1024
    }

    /// Bits per channel; 0 for compressed (AAC) data per CoreAudio convention.
    var bitsPerChannel: UInt32 {
        self == .aac ? 0 : 32
    }

    /// Bytes per packet; 0 for compressed (AAC) data per CoreAudio convention.
    var bytesPerPacket: UInt32 {
        self == .aac ? 0 : bitsPerChannel / 8
    }

    /// Bytes per frame; 0 for compressed (AAC) data per CoreAudio convention.
    var bytesPerFrame: UInt32 {
        self == .aac ? 0 : bitsPerChannel / 8
    }

    /// Encoder class descriptions handed to the AudioConverter (iOS AAC only).
    var inClassDescriptions: [AudioClassDescription] {
        guard self == .aac else {
            return []
        }
        #if os(iOS)
        return [
            AudioClassDescription(mType: kAudioEncoderComponentType, mSubType: kAudioFormatMPEG4AAC, mManufacturer: kAppleSoftwareAudioCodecManufacturer),
            AudioClassDescription(mType: kAudioEncoderComponentType, mSubType: kAudioFormatMPEG4AAC, mManufacturer: kAppleHardwareAudioCodecManufacturer)
        ]
        #else
        return []
        #endif
    }

    /// The number of AudioBuffers needed: 1 for AAC, one per channel for
    /// non-interleaved PCM.
    func maximumBuffers(_ channel: UInt32) -> Int {
        self == .aac ? 1 : Int(channel)
    }

    /// Builds the destination stream description for a conversion from
    /// `inSourceFormat`, falling back to the source's sample rate / channel
    /// count when `sampleRate` or `channels` is 0.
    /// - Returns: nil when no source format is available yet.
    func audioStreamBasicDescription(_ inSourceFormat: AudioStreamBasicDescription?, sampleRate: Double, channels: UInt32) -> AudioStreamBasicDescription? {
        guard let sourceFormat = inSourceFormat else {
            return nil
        }
        let destinationChannels: UInt32 = channels == 0 ? sourceFormat.mChannelsPerFrame : channels
        return AudioStreamBasicDescription(
            mSampleRate: sampleRate == 0 ? sourceFormat.mSampleRate : sampleRate,
            mFormatID: formatID,
            mFormatFlags: formatFlags,
            mBytesPerPacket: bytesPerPacket,
            mFramesPerPacket: framesPerPacket,
            mBytesPerFrame: bytesPerFrame,
            mChannelsPerFrame: destinationChannels,
            mBitsPerChannel: bitsPerChannel,
            mReserved: 0
        )
    }
}
|
|
@ -0,0 +1,147 @@
|
|||
import AVFoundation
|
||||
import Foundation
|
||||
|
||||
/// A fixed-size ring of AVAudioPCMBuffers that regroups incoming sample
/// buffers into uniform `numSamples`-frame chunks for the audio encoder.
final class AudioCodecRingBuffer {
    enum Error: Swift.Error {
        case isReady
        case noBlockBuffer
    }

    static let numSamples: UInt32 = 1024
    static let maxBuffers: Int = 6

    /// True when the current slot holds a full `numSamples` frames.
    var isReady: Bool {
        numSamples == index
    }

    /// The ring slot currently being filled.
    var current: AVAudioPCMBuffer {
        return buffers[cursor]
    }

    /// The timestamp of the first frame written into the current slot.
    private(set) var presentationTimeStamp: CMTime = .invalid
    // Number of frames already written into the current slot.
    private var index: Int = 0
    // Capacity, in frames, of each slot.
    private var numSamples: Int
    private var format: AVAudioFormat
    private var buffers: [AVAudioPCMBuffer] = []
    private var cursor: Int = 0
    // Scratch buffer an incoming CMSampleBuffer is copied into before slicing.
    private var workingBuffer: AVAudioPCMBuffer
    private var maxBuffers: Int = AudioCodecRingBuffer.maxBuffers

    /// Creates a ring buffer for the given linear-PCM source format.
    /// Fails for non-PCM formats or when any buffer allocation fails.
    init?(_ inSourceFormat: inout AudioStreamBasicDescription, numSamples: UInt32 = AudioCodecRingBuffer.numSamples) {
        guard
            inSourceFormat.mFormatID == kAudioFormatLinearPCM,
            let format = AVAudioFormat(streamDescription: &inSourceFormat),
            let workingBuffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: numSamples) else {
            return nil
        }
        for _ in 0..<maxBuffers {
            guard let buffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: numSamples) else {
                return nil
            }
            buffer.frameLength = numSamples
            self.buffers.append(buffer)
        }
        self.format = format
        self.workingBuffer = workingBuffer
        self.numSamples = Int(numSamples)
    }

    /// Copies frames from `sampleBuffer`, starting at frame `offset`, into the
    /// current slot. Callers re-invoke with an advanced `offset` until the
    /// whole sample buffer is consumed.
    /// - Returns: the number of frames consumed, or -1 when the slot is full.
    func appendSampleBuffer(_ sampleBuffer: CMSampleBuffer, offset: Int) -> Int {
        if isReady {
            return -1
        }
        if presentationTimeStamp == .invalid {
            // Shift the PTS by the frames already consumed from this sample buffer.
            let offsetTimeStamp: CMTime = offset == 0 ? .zero : CMTime(value: CMTimeValue(offset), timescale: sampleBuffer.presentationTimeStamp.timescale)
            presentationTimeStamp = CMTimeAdd(sampleBuffer.presentationTimeStamp, offsetTimeStamp)
        }
        if offset == 0 {
            // First pass over this sample buffer: stage its PCM data in workingBuffer,
            // growing the scratch buffer when the incoming buffer is larger.
            if workingBuffer.frameLength < sampleBuffer.numSamples {
                if let buffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: AVAudioFrameCount(sampleBuffer.numSamples)) {
                    self.workingBuffer = buffer
                }
            }
            workingBuffer.frameLength = AVAudioFrameCount(sampleBuffer.numSamples)
            CMSampleBufferCopyPCMDataIntoAudioBufferList(
                sampleBuffer,
                at: 0,
                frameCount: Int32(sampleBuffer.numSamples),
                into: workingBuffer.mutableAudioBufferList
            )
        }
        let numSamples = min(self.numSamples - index, Int(sampleBuffer.numSamples) - offset)
        if format.isInterleaved {
            // Interleaved: all channels share channelData[0], and one frame is
            // `channelCount` consecutive elements — so frame offsets must be
            // scaled by the channel count when advancing the element pointers
            // (the byte counts below already are). The previous code advanced
            // by raw frame counts, landing at the wrong position for stereo.
            let channelCount = Int(format.channelCount)
            switch format.commonFormat {
            case .pcmFormatInt16:
                memcpy(current.int16ChannelData?[0].advanced(by: index * channelCount), workingBuffer.int16ChannelData?[0].advanced(by: offset * channelCount), numSamples * 2 * channelCount)
            case .pcmFormatInt32:
                memcpy(current.int32ChannelData?[0].advanced(by: index * channelCount), workingBuffer.int32ChannelData?[0].advanced(by: offset * channelCount), numSamples * 4 * channelCount)
            case .pcmFormatFloat32:
                memcpy(current.floatChannelData?[0].advanced(by: index * channelCount), workingBuffer.floatChannelData?[0].advanced(by: offset * channelCount), numSamples * 4 * channelCount)
            default:
                break
            }
        } else {
            // Deinterleaved: one buffer per channel, offsets are in frames.
            for i in 0..<Int(format.channelCount) {
                switch format.commonFormat {
                case .pcmFormatInt16:
                    memcpy(current.int16ChannelData?[i].advanced(by: index), workingBuffer.int16ChannelData?[i].advanced(by: offset), numSamples * 2)
                case .pcmFormatInt32:
                    memcpy(current.int32ChannelData?[i].advanced(by: index), workingBuffer.int32ChannelData?[i].advanced(by: offset), numSamples * 4)
                case .pcmFormatFloat32:
                    memcpy(current.floatChannelData?[i].advanced(by: index), workingBuffer.floatChannelData?[i].advanced(by: offset), numSamples * 4)
                default:
                    break
                }
            }
        }
        index += numSamples

        return numSamples
    }

    /// Zero-fills the current slot, silencing its contents.
    func muted() {
        if format.isInterleaved {
            let channelCount = Int(format.channelCount)
            switch format.commonFormat {
            case .pcmFormatInt16:
                current.int16ChannelData?[0].assign(repeating: 0, count: numSamples * channelCount)
            case .pcmFormatInt32:
                current.int32ChannelData?[0].assign(repeating: 0, count: numSamples * channelCount)
            case .pcmFormatFloat32:
                current.floatChannelData?[0].assign(repeating: 0, count: numSamples * channelCount)
            default:
                break
            }
        } else {
            for i in 0..<Int(format.channelCount) {
                switch format.commonFormat {
                case .pcmFormatInt16:
                    current.int16ChannelData?[i].assign(repeating: 0, count: numSamples)
                case .pcmFormatInt32:
                    current.int32ChannelData?[i].assign(repeating: 0, count: numSamples)
                case .pcmFormatFloat32:
                    current.floatChannelData?[i].assign(repeating: 0, count: numSamples)
                default:
                    break
                }
            }
        }
    }

    /// Advances the ring to the next slot, wrapping around, and resets the
    /// per-slot state so the new slot starts filling from frame 0.
    func next() {
        presentationTimeStamp = .invalid
        index = 0
        cursor += 1
        if cursor == buffers.count {
            cursor = 0
        }
    }
}
|
||||
|
||||
extension AudioCodecRingBuffer: CustomDebugStringConvertible {
    // MARK: CustomDebugStringConvertible
    /// A reflection-based dump of the ring buffer's stored properties.
    var debugDescription: String {
        let mirror = Mirror(reflecting: self)
        return mirror.debugDescription
    }
}
|
|
@ -0,0 +1,146 @@
|
|||
import AVFAudio
|
||||
import Foundation
|
||||
|
||||
/// The AudioCodecSettings class specifying audio compression settings.
public struct AudioCodecSettings: Codable {
    /// The default value.
    public static let `default` = AudioCodecSettings()

    /// The type of the AudioCodec supports format.
    public enum Format: Codable {
        /// The AAC format.
        case aac
        /// The PCM format.
        case pcm

        /// The CoreAudio format identifier for this case.
        var formatID: AudioFormatID {
            switch self {
            case .aac:
                return kAudioFormatMPEG4AAC
            case .pcm:
                return kAudioFormatLinearPCM
            }
        }

        /// Format-specific flags: the AAC object type, or the LPCM sample layout flags.
        var formatFlags: UInt32 {
            switch self {
            case .aac:
                return UInt32(MPEG4ObjectID.AAC_LC.rawValue)
            case .pcm:
                return kAudioFormatFlagIsNonInterleaved | kAudioFormatFlagIsPacked | kAudioFormatFlagIsFloat
            }
        }

        /// The number of frames carried by one packet (1024 for AAC, 1 for LPCM).
        var framesPerPacket: UInt32 {
            switch self {
            case .aac:
                return 1024
            case .pcm:
                return 1
            }
        }

        /// The packet count used when sizing conversion buffers.
        var packetSize: UInt32 {
            switch self {
            case .aac:
                return 1
            case .pcm:
                return 1024
            }
        }

        /// Bits per channel; 0 for compressed (AAC) data per CoreAudio convention.
        var bitsPerChannel: UInt32 {
            switch self {
            case .aac:
                return 0
            case .pcm:
                return 32
            }
        }

        /// Bytes per packet; 0 for compressed (AAC) data per CoreAudio convention.
        var bytesPerPacket: UInt32 {
            switch self {
            case .aac:
                return 0
            case .pcm:
                return (bitsPerChannel / 8)
            }
        }

        /// Bytes per frame; 0 for compressed (AAC) data per CoreAudio convention.
        var bytesPerFrame: UInt32 {
            switch self {
            case .aac:
                return 0
            case .pcm:
                return (bitsPerChannel / 8)
            }
        }

        /// Makes an output buffer suited to receive converter output in this format.
        func makeAudioBuffer(_ format: AVAudioFormat) -> AVAudioBuffer? {
            switch self {
            case .aac:
                return AVAudioCompressedBuffer(format: format, packetCapacity: 1, maximumPacketSize: 1024)
            case .pcm:
                return AVAudioPCMBuffer(pcmFormat: format, frameCapacity: 1024)
            }
        }

        /// Makes the destination AVAudioFormat for converting from `inSourceFormat`,
        /// preserving the source's sample rate and channel count.
        /// - Returns: nil when no source format is available yet.
        func makeAudioFormat(_ inSourceFormat: AudioStreamBasicDescription?) -> AVAudioFormat? {
            guard let inSourceFormat else {
                return nil
            }
            switch self {
            case .aac:
                var streamDescription = AudioStreamBasicDescription(
                    mSampleRate: inSourceFormat.mSampleRate,
                    mFormatID: formatID,
                    mFormatFlags: formatFlags,
                    mBytesPerPacket: bytesPerPacket,
                    mFramesPerPacket: framesPerPacket,
                    mBytesPerFrame: bytesPerFrame,
                    mChannelsPerFrame: inSourceFormat.mChannelsPerFrame,
                    mBitsPerChannel: bitsPerChannel,
                    mReserved: 0
                )
                return AVAudioFormat(streamDescription: &streamDescription)
            case .pcm:
                return AVAudioFormat(
                    commonFormat: .pcmFormatFloat32,
                    sampleRate: inSourceFormat.mSampleRate,
                    channels: inSourceFormat.mChannelsPerFrame,
                    interleaved: true
                )
            }
        }
    }

    /// Specifies the bitRate of audio output.
    public var bitRate: Int

    /// Specifies the output format.
    public var format: AudioCodecSettings.Format

    /// Create an new AudioCodecSettings instance.
    public init(
        bitRate: Int = 64 * 1000,
        format: AudioCodecSettings.Format = .aac
    ) {
        self.bitRate = bitRate
        self.format = format
    }

    /// Applies these settings to `converter`, clamping `bitRate` to the
    /// encoder's applicable range when it changed from `oldValue`.
    func apply(_ converter: AVAudioConverter?, oldValue: AudioCodecSettings?) {
        guard let converter else {
            return
        }
        if bitRate != oldValue?.bitRate {
            // Clamp to [min, max] of the encoder's supported bit rates; when the
            // converter reports none, fall back to the requested rate unchanged.
            let minAvailableBitRate = converter.applicableEncodeBitRates?.min(by: { a, b in
                return a.intValue < b.intValue
            })?.intValue ?? bitRate
            let maxAvailableBitRate = converter.applicableEncodeBitRates?.max(by: { a, b in
                return a.intValue < b.intValue
            })?.intValue ?? bitRate
            converter.bitRate = min(maxAvailableBitRate, max(minAvailableBitRate, bitRate))
        }
    }
}
|
|
@ -6,8 +6,8 @@ protocol VTSessionConvertible {
|
|||
func setOption(_ option: VTSessionOption) -> OSStatus
|
||||
func setOptions(_ options: Set<VTSessionOption>) -> OSStatus
|
||||
func copySupportedPropertyDictionary() -> [AnyHashable: Any]
|
||||
func inputBuffer(_ imageBuffer: CVImageBuffer, presentationTimeStamp: CMTime, duration: CMTime, outputHandler: @escaping VTCompressionOutputHandler)
|
||||
func inputBuffer(_ sampleBuffer: CMSampleBuffer, outputHandler: @escaping VTDecompressionOutputHandler)
|
||||
func encodeFrame(_ imageBuffer: CVImageBuffer, presentationTimeStamp: CMTime, duration: CMTime, outputHandler: @escaping VTCompressionOutputHandler) -> OSStatus
|
||||
func decodeFrame(_ sampleBuffer: CMSampleBuffer, outputHandler: @escaping VTDecompressionOutputHandler) -> OSStatus
|
||||
func invalidate()
|
||||
}
|
||||
|
||||
|
|
|
@ -5,15 +5,15 @@ enum VTSessionMode {
|
|||
case compression
|
||||
case decompression
|
||||
|
||||
func makeSession(_ videoCodec: VideoCodec) -> VTSessionConvertible? {
|
||||
func makeSession(_ videoCodec: VideoCodec) -> (any VTSessionConvertible)? {
|
||||
switch self {
|
||||
case .compression:
|
||||
var session: VTCompressionSession?
|
||||
var status = VTCompressionSessionCreate(
|
||||
allocator: kCFAllocatorDefault,
|
||||
width: videoCodec.width,
|
||||
height: videoCodec.height,
|
||||
codecType: kCMVideoCodecType_H264,
|
||||
width: videoCodec.settings.videoSize.width,
|
||||
height: videoCodec.settings.videoSize.height,
|
||||
codecType: videoCodec.settings.format.codecType,
|
||||
encoderSpecification: nil,
|
||||
imageBufferAttributes: videoCodec.attributes as CFDictionary?,
|
||||
compressedDataAllocator: nil,
|
||||
|
@ -25,7 +25,7 @@ enum VTSessionMode {
|
|||
videoCodec.delegate?.videoCodec(videoCodec, errorOccurred: .failedToCreate(status: status))
|
||||
return nil
|
||||
}
|
||||
status = session.setOptions(videoCodec.options())
|
||||
status = session.setOptions(videoCodec.settings.options())
|
||||
guard status == noErr else {
|
||||
videoCodec.delegate?.videoCodec(videoCodec, errorOccurred: .failedToPrepare(status: status))
|
||||
return nil
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
import Foundation
|
||||
|
||||
/// A structure that represents Key-Value-Object for the VideoToolbox option.
|
||||
public struct VTSessionOption {
|
||||
let key: VTSessionOptionKey
|
||||
let value: AnyObject
|
||||
|
|
|
@ -42,6 +42,8 @@ struct VTSessionOptionKey {
|
|||
static let maxH264SliceBytes = VTSessionOptionKey(CFString: kVTCompressionPropertyKey_MaxH264SliceBytes)
|
||||
static let maxFrameDelayCount = VTSessionOptionKey(CFString: kVTCompressionPropertyKey_MaxFrameDelayCount)
|
||||
static let encoderID = VTSessionOptionKey(CFString: kVTVideoEncoderSpecification_EncoderID)
|
||||
@available(iOS 16.0, tvOS 16.0, macOS 13.0, *)
|
||||
static let constantBitRate = VTSessionOptionKey(CFString: kVTCompressionPropertyKey_ConstantBitRate)
|
||||
|
||||
let CFString: CFString
|
||||
}
|
||||
|
|
|
@ -11,11 +11,13 @@ import UIKit
|
|||
*/
|
||||
public protocol VideoCodecDelegate: AnyObject {
|
||||
/// Tells the receiver to set a formatDescription.
|
||||
func videoCodec(_ codec: VideoCodec, didSet formatDescription: CMFormatDescription?)
|
||||
func videoCodec(_ codec: VideoCodec, didOutput formatDescription: CMFormatDescription?)
|
||||
/// Tells the receiver to output an encoded or decoded sampleBuffer.
|
||||
func videoCodec(_ codec: VideoCodec, didOutput sampleBuffer: CMSampleBuffer)
|
||||
/// Tells the receiver to occured an error.
|
||||
func videoCodec(_ codec: VideoCodec, errorOccurred error: VideoCodec.Error)
|
||||
/// Tells the receiver to drop frame.
|
||||
func videoCodecWillDropFame(_ codec: VideoCodec) -> Bool
|
||||
}
|
||||
|
||||
// MARK: -
|
||||
|
@ -23,16 +25,6 @@ public protocol VideoCodecDelegate: AnyObject {
|
|||
* The VideoCodec class provides methods for encode or decode for video.
|
||||
*/
|
||||
public class VideoCodec {
|
||||
static let defaultMinimumGroupOfPictures: Int = 12
|
||||
|
||||
#if os(OSX)
|
||||
#if arch(arm64)
|
||||
static let encoderName = NSString(string: "com.apple.videotoolbox.videoencoder.ave.avc")
|
||||
#else
|
||||
static let encoderName = NSString(string: "com.apple.videotoolbox.videoencoder.h264.gva")
|
||||
#endif
|
||||
#endif
|
||||
|
||||
/**
|
||||
* The VideoCodec error domain codes.
|
||||
*/
|
||||
|
@ -47,173 +39,37 @@ public class VideoCodec {
|
|||
case failedToSetOption(status: OSStatus, option: VTSessionOption)
|
||||
}
|
||||
|
||||
/**
|
||||
* The video encoding or decoding options.
|
||||
*/
|
||||
public enum Option: String, KeyPathRepresentable, CaseIterable {
|
||||
/// Specifies the width of video.
|
||||
case width
|
||||
/// Specifies the height of video.
|
||||
case height
|
||||
/// Specifies the bitrate.
|
||||
case bitrate
|
||||
/// Specifies the H264 profile level.
|
||||
case profileLevel
|
||||
#if os(macOS)
|
||||
/// Specifies the HardwareEncoder is enabled(TRUE), or not(FALSE).
|
||||
case enabledHardwareEncoder
|
||||
#endif
|
||||
/// Specifies the keyframeInterval.
|
||||
case maxKeyFrameIntervalDuration
|
||||
/// Specifies the scalingMode.
|
||||
case scalingMode
|
||||
case allowFrameReordering
|
||||
|
||||
public var keyPath: AnyKeyPath {
|
||||
switch self {
|
||||
case .width:
|
||||
return \VideoCodec.width
|
||||
case .height:
|
||||
return \VideoCodec.height
|
||||
case .bitrate:
|
||||
return \VideoCodec.bitrate
|
||||
#if os(macOS)
|
||||
case .enabledHardwareEncoder:
|
||||
return \VideoCodec.enabledHardwareEncoder
|
||||
#endif
|
||||
case .maxKeyFrameIntervalDuration:
|
||||
return \VideoCodec.maxKeyFrameIntervalDuration
|
||||
case .scalingMode:
|
||||
return \VideoCodec.scalingMode
|
||||
case .profileLevel:
|
||||
return \VideoCodec.profileLevel
|
||||
case .allowFrameReordering:
|
||||
return \VideoCodec.allowFrameReordering
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// The videoCodec's width value. The default value is 480.
|
||||
public static let defaultWidth: Int32 = 480
|
||||
/// The videoCodec's height value. The default value is 272.
|
||||
public static let defaultHeight: Int32 = 272
|
||||
/// The videoCodec's bitrate value. The default value is 160,000.
|
||||
public static let defaultBitrate: UInt32 = 160 * 1000
|
||||
/// The videoCodec's scalingMode value. The default value is trim.
|
||||
public static let defaultScalingMode: ScalingMode = .trim
|
||||
/// The videoCodec's attributes value.
|
||||
public static var defaultAttributes: [NSString: AnyObject]? = [
|
||||
kCVPixelBufferIOSurfacePropertiesKey: [:] as AnyObject,
|
||||
kCVPixelBufferIOSurfacePropertiesKey: NSDictionary(),
|
||||
kCVPixelBufferMetalCompatibilityKey: kCFBooleanTrue
|
||||
]
|
||||
|
||||
/// Specifies the settings for a VideoCodec.
|
||||
public var settings: Setting<VideoCodec, Option> = [:] {
|
||||
public var settings: VideoCodecSettings = .default {
|
||||
didSet {
|
||||
settings.observer = self
|
||||
let invalidateSession = settings.invalidateSession(oldValue)
|
||||
if invalidateSession {
|
||||
self.invalidateSession = invalidateSession
|
||||
} else {
|
||||
settings.apply(self, rhs: oldValue)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// The running value indicating whether the VideoCodec is running.
|
||||
public private(set) var isRunning: Atomic<Bool> = .init(false)
|
||||
|
||||
var scalingMode = VideoCodec.defaultScalingMode {
|
||||
didSet {
|
||||
guard scalingMode != oldValue else {
|
||||
return
|
||||
}
|
||||
invalidateSession = true
|
||||
}
|
||||
}
|
||||
|
||||
var width = VideoCodec.defaultWidth {
|
||||
didSet {
|
||||
guard width != oldValue else {
|
||||
return
|
||||
}
|
||||
invalidateSession = true
|
||||
}
|
||||
}
|
||||
var height = VideoCodec.defaultHeight {
|
||||
didSet {
|
||||
guard height != oldValue else {
|
||||
return
|
||||
}
|
||||
invalidateSession = true
|
||||
}
|
||||
}
|
||||
#if os(macOS)
|
||||
var enabledHardwareEncoder = true {
|
||||
didSet {
|
||||
guard enabledHardwareEncoder != oldValue else {
|
||||
return
|
||||
}
|
||||
invalidateSession = true
|
||||
}
|
||||
}
|
||||
#endif
|
||||
var bitrate = VideoCodec.defaultBitrate {
|
||||
didSet {
|
||||
guard bitrate != oldValue else {
|
||||
return
|
||||
}
|
||||
let option = VTSessionOption(key: .averageBitRate, value: NSNumber(value: bitrate))
|
||||
if let status = session?.setOption(option), status != noErr {
|
||||
delegate?.videoCodec(self, errorOccurred: .failedToSetOption(status: status, option: option))
|
||||
}
|
||||
}
|
||||
}
|
||||
var profileLevel = kVTProfileLevel_H264_Baseline_3_1 as String {
|
||||
didSet {
|
||||
guard profileLevel != oldValue else {
|
||||
return
|
||||
}
|
||||
invalidateSession = true
|
||||
}
|
||||
}
|
||||
var maxKeyFrameIntervalDuration = 2.0 {
|
||||
didSet {
|
||||
guard maxKeyFrameIntervalDuration != oldValue else {
|
||||
return
|
||||
}
|
||||
invalidateSession = true
|
||||
}
|
||||
}
|
||||
// swiftlint:disable discouraged_optional_boolean
|
||||
var allowFrameReordering: Bool? = false {
|
||||
didSet {
|
||||
guard allowFrameReordering != oldValue else {
|
||||
return
|
||||
}
|
||||
invalidateSession = true
|
||||
}
|
||||
}
|
||||
var locked: UInt32 = 0
|
||||
var lockQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.VideoCodec.lock")
|
||||
var expectedFrameRate = IOMixer.defaultFrameRate {
|
||||
didSet {
|
||||
guard expectedFrameRate != oldValue else {
|
||||
return
|
||||
}
|
||||
let option = VTSessionOption(key: .expectedFrameRate, value: NSNumber(value: expectedFrameRate))
|
||||
if let status = session?.setOption(option), status != noErr {
|
||||
delegate?.videoCodec(self, errorOccurred: .failedToSetOption(status: status, option: option))
|
||||
}
|
||||
}
|
||||
}
|
||||
var formatDescription: CMFormatDescription? {
|
||||
didSet {
|
||||
guard !CMFormatDescriptionEqual(formatDescription, otherFormatDescription: oldValue) else {
|
||||
return
|
||||
}
|
||||
if let atoms: [String: AnyObject] = formatDescription?.`extension`(by: "SampleDescriptionExtensionAtoms"), let avcC: Data = atoms["avcC"] as? Data {
|
||||
let config = AVCConfigurationRecord(data: avcC)
|
||||
isBaseline = config.AVCProfileIndication == 66
|
||||
}
|
||||
delegate?.videoCodec(self, didSet: formatDescription)
|
||||
delegate?.videoCodec(self, didOutput: formatDescription)
|
||||
}
|
||||
}
|
||||
var needsSync: Atomic<Bool> = .init(true)
|
||||
var isBaseline = true
|
||||
var attributes: [NSString: AnyObject]? {
|
||||
guard VideoCodec.defaultAttributes != nil else {
|
||||
return nil
|
||||
|
@ -222,49 +78,44 @@ public class VideoCodec {
|
|||
for (key, value) in VideoCodec.defaultAttributes ?? [:] {
|
||||
attributes[key] = value
|
||||
}
|
||||
attributes[kCVPixelBufferWidthKey] = NSNumber(value: width)
|
||||
attributes[kCVPixelBufferHeightKey] = NSNumber(value: height)
|
||||
attributes[kCVPixelBufferWidthKey] = NSNumber(value: settings.videoSize.width)
|
||||
attributes[kCVPixelBufferHeightKey] = NSNumber(value: settings.videoSize.height)
|
||||
return attributes
|
||||
}
|
||||
weak var delegate: VideoCodecDelegate?
|
||||
|
||||
private var lastImageBuffer: CVImageBuffer?
|
||||
private var session: VTSessionConvertible? {
|
||||
weak var delegate: (any VideoCodecDelegate)?
|
||||
private(set) var session: (any VTSessionConvertible)? {
|
||||
didSet {
|
||||
oldValue?.invalidate()
|
||||
invalidateSession = false
|
||||
}
|
||||
}
|
||||
private var invalidateSession = true
|
||||
private var buffers: [CMSampleBuffer] = []
|
||||
private var minimumGroupOfPictures: Int = VideoCodec.defaultMinimumGroupOfPictures
|
||||
|
||||
init() {
|
||||
settings.observer = self
|
||||
}
|
||||
|
||||
func inputBuffer(_ imageBuffer: CVImageBuffer, presentationTimeStamp: CMTime, duration: CMTime) {
|
||||
guard isRunning.value && locked == 0 else {
|
||||
func appendImageBuffer(_ imageBuffer: CVImageBuffer, presentationTimeStamp: CMTime, duration: CMTime) {
|
||||
guard isRunning.value, !(delegate?.videoCodecWillDropFame(self) ?? false) else {
|
||||
return
|
||||
}
|
||||
if invalidateSession {
|
||||
session = VTSessionMode.compression.makeSession(self)
|
||||
}
|
||||
session?.inputBuffer(
|
||||
_ = session?.encodeFrame(
|
||||
imageBuffer,
|
||||
presentationTimeStamp: presentationTimeStamp,
|
||||
duration: duration
|
||||
) { [unowned self] status, _, sampleBuffer in
|
||||
guard let sampleBuffer, status == noErr else {
|
||||
self.delegate?.videoCodec(self, errorOccurred: .failedToFlame(status: status))
|
||||
delegate?.videoCodec(self, errorOccurred: .failedToFlame(status: status))
|
||||
return
|
||||
}
|
||||
self.formatDescription = sampleBuffer.formatDescription
|
||||
self.delegate?.videoCodec(self, didOutput: sampleBuffer)
|
||||
formatDescription = sampleBuffer.formatDescription
|
||||
delegate?.videoCodec(self, didOutput: sampleBuffer)
|
||||
}
|
||||
}
|
||||
|
||||
func inputBuffer(_ sampleBuffer: CMSampleBuffer) {
|
||||
func appendSampleBuffer(_ sampleBuffer: CMSampleBuffer) {
|
||||
guard isRunning.value else {
|
||||
return
|
||||
}
|
||||
if invalidateSession {
|
||||
session = VTSessionMode.decompression.makeSession(self)
|
||||
needsSync.mutate { $0 = true }
|
||||
|
@ -272,30 +123,26 @@ public class VideoCodec {
|
|||
if !sampleBuffer.isNotSync {
|
||||
needsSync.mutate { $0 = false }
|
||||
}
|
||||
session?.inputBuffer(sampleBuffer) { [unowned self] status, _, imageBuffer, presentationTimeStamp, duration in
|
||||
guard let imageBuffer = imageBuffer, status == noErr else {
|
||||
_ = session?.decodeFrame(sampleBuffer) { [unowned self] status, _, imageBuffer, presentationTimeStamp, duration in
|
||||
guard let imageBuffer, status == noErr else {
|
||||
self.delegate?.videoCodec(self, errorOccurred: .failedToFlame(status: status))
|
||||
return
|
||||
}
|
||||
|
||||
var timingInfo = CMSampleTimingInfo(
|
||||
duration: duration,
|
||||
presentationTimeStamp: presentationTimeStamp,
|
||||
decodeTimeStamp: .invalid
|
||||
decodeTimeStamp: sampleBuffer.decodeTimeStamp
|
||||
)
|
||||
|
||||
var videoFormatDescription: CMVideoFormatDescription?
|
||||
var status = CMVideoFormatDescriptionCreateForImageBuffer(
|
||||
allocator: kCFAllocatorDefault,
|
||||
imageBuffer: imageBuffer,
|
||||
formatDescriptionOut: &videoFormatDescription
|
||||
)
|
||||
|
||||
guard status == noErr else {
|
||||
self.delegate?.videoCodec(self, errorOccurred: .failedToFlame(status: status))
|
||||
delegate?.videoCodec(self, errorOccurred: .failedToFlame(status: status))
|
||||
return
|
||||
}
|
||||
|
||||
var sampleBuffer: CMSampleBuffer?
|
||||
status = CMSampleBufferCreateForImageBuffer(
|
||||
allocator: kCFAllocatorDefault,
|
||||
|
@ -307,50 +154,12 @@ public class VideoCodec {
|
|||
sampleTiming: &timingInfo,
|
||||
sampleBufferOut: &sampleBuffer
|
||||
)
|
||||
|
||||
guard let buffer = sampleBuffer, status == noErr else {
|
||||
self.delegate?.videoCodec(self, errorOccurred: .failedToFlame(status: status))
|
||||
delegate?.videoCodec(self, errorOccurred: .failedToFlame(status: status))
|
||||
return
|
||||
}
|
||||
|
||||
if self.isBaseline {
|
||||
self.delegate?.videoCodec(self, didOutput: buffer)
|
||||
} else {
|
||||
self.buffers.append(buffer)
|
||||
self.buffers.sort {
|
||||
$0.presentationTimeStamp < $1.presentationTimeStamp
|
||||
delegate?.videoCodec(self, didOutput: buffer)
|
||||
}
|
||||
if self.minimumGroupOfPictures <= buffers.count {
|
||||
self.delegate?.videoCodec(self, didOutput: buffer)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func options() -> Set<VTSessionOption> {
|
||||
let isBaseline = profileLevel.contains("Baseline")
|
||||
var options = Set<VTSessionOption>([
|
||||
.init(key: .realTime, value: kCFBooleanTrue),
|
||||
.init(key: .profileLevel, value: profileLevel as NSObject),
|
||||
.init(key: .averageBitRate, value: NSNumber(value: bitrate)),
|
||||
.init(key: .expectedFrameRate, value: NSNumber(value: expectedFrameRate)),
|
||||
.init(key: .maxKeyFrameIntervalDuration, value: NSNumber(value: maxKeyFrameIntervalDuration)),
|
||||
.init(key: .allowFrameReordering, value: (allowFrameReordering ?? !isBaseline) as NSObject),
|
||||
.init(key: .pixelTransferProperties, value: [
|
||||
"ScalingMode": scalingMode.rawValue
|
||||
] as NSObject)
|
||||
])
|
||||
#if os(OSX)
|
||||
if enabledHardwareEncoder {
|
||||
options.insert(.init(key: .encoderID, value: VideoCodec.encoderName))
|
||||
options.insert(.init(key: .enableHardwareAcceleratedVideoEncoder, value: kCFBooleanTrue))
|
||||
options.insert(.init(key: .requireHardwareAcceleratedVideoEncoder, value: kCFBooleanTrue))
|
||||
}
|
||||
#endif
|
||||
if !isBaseline {
|
||||
options.insert(.init(key: .H264EntropyMode, value: kVTH264EntropyMode_CABAC))
|
||||
}
|
||||
return options
|
||||
}
|
||||
|
||||
#if os(iOS)
|
||||
|
@ -382,7 +191,6 @@ extension VideoCodec: Running {
|
|||
public func startRunning() {
|
||||
lockQueue.async {
|
||||
self.isRunning.mutate { $0 = true }
|
||||
OSAtomicAnd32Barrier(0, &self.locked)
|
||||
#if os(iOS)
|
||||
NotificationCenter.default.addObserver(
|
||||
self,
|
||||
|
@ -405,8 +213,6 @@ extension VideoCodec: Running {
|
|||
self.session = nil
|
||||
self.invalidateSession = true
|
||||
self.needsSync.mutate { $0 = true }
|
||||
self.buffers.removeAll()
|
||||
self.lastImageBuffer = nil
|
||||
self.formatDescription = nil
|
||||
#if os(iOS)
|
||||
NotificationCenter.default.removeObserver(self, name: AVAudioSession.interruptionNotification, object: nil)
|
||||
|
|
|
@ -0,0 +1,175 @@
|
|||
import Foundation
|
||||
import VideoToolbox
|
||||
|
||||
/// The VideoCodecSettings class specifying video compression settings.
|
||||
public struct VideoCodecSettings: Codable {
|
||||
/// The defulat value.
|
||||
public static let `default` = VideoCodecSettings()
|
||||
|
||||
/// A bitRate mode that affectes how to encode the video source.
|
||||
public enum BitRateMode: String, Codable {
|
||||
/// The average bit rate.
|
||||
case average
|
||||
/// The constant bit rate.
|
||||
@available(iOS 16.0, tvOS 16.0, macOS 13.0, *)
|
||||
case constant
|
||||
|
||||
var key: VTSessionOptionKey {
|
||||
if #available(iOS 16.0, tvOS 16.0, macOS 13.0, *) {
|
||||
switch self {
|
||||
case .average:
|
||||
return .averageBitRate
|
||||
case .constant:
|
||||
return .constantBitRate
|
||||
}
|
||||
}
|
||||
return .averageBitRate
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* The scaling mode.
|
||||
* - seealso: https://developer.apple.com/documentation/videotoolbox/kvtpixeltransferpropertykey_scalingmode
|
||||
* - seealso: https://developer.apple.com/documentation/videotoolbox/vtpixeltransfersession/pixel_transfer_properties/scaling_mode_constants
|
||||
*/
|
||||
public enum ScalingMode: String, Codable {
|
||||
/// kVTScalingMode_Normal
|
||||
case normal = "Normal"
|
||||
/// kVTScalingMode_Letterbox
|
||||
case letterbox = "Letterbox"
|
||||
/// kVTScalingMode_CropSourceToCleanAperture
|
||||
case cropSourceToCleanAperture = "CropSourceToCleanAperture"
|
||||
/// kVTScalingMode_Trim
|
||||
case trim = "Trim"
|
||||
}
|
||||
|
||||
/// The type of the VideoCodec supports format.
|
||||
enum Format: Codable {
|
||||
case h264
|
||||
case hevc
|
||||
|
||||
#if os(macOS)
|
||||
var encoderID: NSString {
|
||||
switch self {
|
||||
case .h264:
|
||||
#if arch(arm64)
|
||||
return NSString(string: "com.apple.videotoolbox.videoencoder.ave.avc")
|
||||
#else
|
||||
return NSString(string: "com.apple.videotoolbox.videoencoder.h264.gva")
|
||||
#endif
|
||||
case .hevc:
|
||||
return NSString(string: "com.apple.videotoolbox.videoencoder.ave.hevc")
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
var codecType: UInt32 {
|
||||
switch self {
|
||||
case .h264:
|
||||
return kCMVideoCodecType_H264
|
||||
case .hevc:
|
||||
return kCMVideoCodecType_HEVC
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Specifies the video size of encoding video.
|
||||
public var videoSize: VideoSize
|
||||
/// Specifies the bitrate.
|
||||
public var bitRate: UInt32
|
||||
/// Specifies the keyframeInterval.
|
||||
public var maxKeyFrameIntervalDuration: Int32
|
||||
/// Specifies the scalingMode.
|
||||
public var scalingMode: ScalingMode
|
||||
// swiftlint:disable discouraged_optional_boolean
|
||||
/// Specifies the allowFrameRecording.
|
||||
public var allowFrameReordering: Bool?
|
||||
/// Specifies the bitRateMode.
|
||||
public var bitRateMode: BitRateMode
|
||||
/// Specifies the H264 profileLevel.
|
||||
public var profileLevel: String {
|
||||
didSet {
|
||||
if profileLevel.contains("HEVC") {
|
||||
format = .hevc
|
||||
} else {
|
||||
format = .h264
|
||||
}
|
||||
}
|
||||
}
|
||||
/// Specifies the HardwareEncoder is enabled(TRUE), or not(FALSE) for macOS.
|
||||
public var isHardwareEncoderEnabled = true
|
||||
|
||||
var format: Format = .h264
|
||||
var expectedFrameRate: Float64 = IOMixer.defaultFrameRate
|
||||
|
||||
/// Creates a new VideoCodecSettings instance.
|
||||
public init(
|
||||
videoSize: VideoSize = .init(width: 854, height: 480),
|
||||
profileLevel: String = kVTProfileLevel_H264_Baseline_3_1 as String,
|
||||
bitRate: UInt32 = 640 * 1000,
|
||||
maxKeyFrameIntervalDuration: Int32 = 2,
|
||||
scalingMode: ScalingMode = .trim,
|
||||
bitRateMode: BitRateMode = .average,
|
||||
allowFrameReordering: Bool? = nil,
|
||||
isHardwareEncoderEnabled: Bool = true
|
||||
) {
|
||||
self.videoSize = videoSize
|
||||
self.profileLevel = profileLevel
|
||||
self.bitRate = bitRate
|
||||
self.maxKeyFrameIntervalDuration = maxKeyFrameIntervalDuration
|
||||
self.scalingMode = scalingMode
|
||||
self.bitRateMode = bitRateMode
|
||||
self.allowFrameReordering = allowFrameReordering
|
||||
self.isHardwareEncoderEnabled = isHardwareEncoderEnabled
|
||||
if profileLevel.contains("HEVC") {
|
||||
self.format = .hevc
|
||||
}
|
||||
}
|
||||
|
||||
func invalidateSession(_ rhs: VideoCodecSettings) -> Bool {
|
||||
return !(videoSize == rhs.videoSize &&
|
||||
maxKeyFrameIntervalDuration == rhs.maxKeyFrameIntervalDuration &&
|
||||
scalingMode == rhs.scalingMode &&
|
||||
allowFrameReordering == rhs.allowFrameReordering &&
|
||||
bitRateMode == rhs.bitRateMode &&
|
||||
profileLevel == rhs.profileLevel &&
|
||||
isHardwareEncoderEnabled == rhs.isHardwareEncoderEnabled
|
||||
)
|
||||
}
|
||||
|
||||
func apply(_ codec: VideoCodec, rhs: VideoCodecSettings) {
|
||||
if bitRate != rhs.bitRate {
|
||||
let option = VTSessionOption(key: bitRateMode.key, value: NSNumber(value: bitRate))
|
||||
if let status = codec.session?.setOption(option), status != noErr {
|
||||
codec.delegate?.videoCodec(codec, errorOccurred: .failedToSetOption(status: status, option: option))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func options() -> Set<VTSessionOption> {
|
||||
let isBaseline = profileLevel.contains("Baseline")
|
||||
var options = Set<VTSessionOption>([
|
||||
.init(key: .realTime, value: kCFBooleanTrue),
|
||||
.init(key: .profileLevel, value: profileLevel as NSObject),
|
||||
.init(key: bitRateMode.key, value: NSNumber(value: bitRate)),
|
||||
// It seemes that VT supports the range 0 to 30.
|
||||
.init(key: .expectedFrameRate, value: NSNumber(value: (expectedFrameRate <= 30) ? expectedFrameRate : 0)),
|
||||
.init(key: .maxKeyFrameIntervalDuration, value: NSNumber(value: maxKeyFrameIntervalDuration)),
|
||||
.init(key: .allowFrameReordering, value: (allowFrameReordering ?? !isBaseline) as NSObject),
|
||||
.init(key: .pixelTransferProperties, value: [
|
||||
"ScalingMode": scalingMode.rawValue
|
||||
] as NSObject)
|
||||
])
|
||||
#if os(macOS)
|
||||
if isHardwareEncoderEnabled {
|
||||
options.insert(.init(key: .encoderID, value: format.encoderID))
|
||||
options.insert(.init(key: .enableHardwareAcceleratedVideoEncoder, value: kCFBooleanTrue))
|
||||
options.insert(.init(key: .requireHardwareAcceleratedVideoEncoder, value: kCFBooleanTrue))
|
||||
}
|
||||
#endif
|
||||
if !isBaseline {
|
||||
options.insert(.init(key: .H264EntropyMode, value: kVTH264EntropyMode_CABAC))
|
||||
}
|
||||
return options
|
||||
}
|
||||
}
|
|
@ -0,0 +1,20 @@
|
|||
import Foundation
|
||||
|
||||
/// The VideoSize class represents video width and height.
|
||||
public struct VideoSize: Equatable, Codable {
|
||||
/// The video width.
|
||||
public let width: Int32
|
||||
/// The video height.
|
||||
public let height: Int32
|
||||
|
||||
/// Creates a VideoSize object.
|
||||
public init(width: Int32, height: Int32) {
|
||||
self.width = width
|
||||
self.height = height
|
||||
}
|
||||
|
||||
/// Swap width for height.
|
||||
public func swap() -> VideoSize {
|
||||
return VideoSize(width: height, height: width)
|
||||
}
|
||||
}
|
|
@ -1,34 +1,22 @@
|
|||
import AVFoundation
|
||||
import Foundation
|
||||
|
||||
#if os(iOS)
|
||||
#if os(iOS) || os(macOS)
|
||||
extension AVCaptureDevice {
|
||||
func videoFormat(width: Int32, height: Int32, isMultiCamSupported: Bool) -> AVCaptureDevice.Format? {
|
||||
if #available(iOS 13.0, *), isMultiCamSupported {
|
||||
func videoFormat(width: Int32, height: Int32, frameRate: Float64, isMultiCamSupported: Bool) -> AVCaptureDevice.Format? {
|
||||
if isMultiCamSupported {
|
||||
return formats.first {
|
||||
$0.isMultiCamSupported && width <= $0.formatDescription.dimensions.width && height <= $0.formatDescription.dimensions.height
|
||||
$0.isMultiCamSupported && $0.isFrameRateSupported(frameRate) && width <= $0.formatDescription.dimensions.width && height <= $0.formatDescription.dimensions.height
|
||||
} ?? formats.last {
|
||||
$0.isMultiCamSupported && $0.formatDescription.dimensions.width < width && $0.formatDescription.dimensions.height < height
|
||||
$0.isMultiCamSupported && $0.isFrameRateSupported(frameRate) && $0.formatDescription.dimensions.width < width && $0.formatDescription.dimensions.height < height
|
||||
}
|
||||
} else {
|
||||
return formats.first {
|
||||
width <= $0.formatDescription.dimensions.width && height <= $0.formatDescription.dimensions.height
|
||||
$0.isFrameRateSupported(frameRate) && width <= $0.formatDescription.dimensions.width && height <= $0.formatDescription.dimensions.height
|
||||
} ?? formats.last {
|
||||
$0.formatDescription.dimensions.width < width && $0.formatDescription.dimensions.height < height
|
||||
$0.isFrameRateSupported(frameRate) && $0.formatDescription.dimensions.width < width && $0.formatDescription.dimensions.height < height
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
#if os(macOS)
|
||||
extension AVCaptureDevice {
|
||||
func videoFormat(width: Int32, height: Int32, isMultiCamSupported: Bool) -> AVCaptureDevice.Format? {
|
||||
return formats.first {
|
||||
width <= $0.formatDescription.dimensions.width && height <= $0.formatDescription.dimensions.height
|
||||
} ?? formats.last {
|
||||
$0.formatDescription.dimensions.width < width && $0.formatDescription.dimensions.height < height
|
||||
}
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
|
|
@ -0,0 +1,26 @@
|
|||
import AVFoundation
|
||||
import Foundation
|
||||
|
||||
#if targetEnvironment(macCatalyst)
|
||||
extension AVCaptureSession {
|
||||
var isMultitaskingCameraAccessSupported: Bool {
|
||||
get {
|
||||
false
|
||||
}
|
||||
// swiftlint:disable unused_setter_value
|
||||
set {
|
||||
logger.warn("isMultitaskingCameraAccessSupported is unavailabled in Mac Catalyst.")
|
||||
}
|
||||
}
|
||||
|
||||
var isMultitaskingCameraAccessEnabled: Bool {
|
||||
get {
|
||||
false
|
||||
}
|
||||
// swiftlint:disable unused_setter_value
|
||||
set {
|
||||
logger.warn("isMultitaskingCameraAccessEnabled is unavailabled in Mac Catalyst.")
|
||||
}
|
||||
}
|
||||
}
|
||||
#endif
|
|
@ -16,4 +16,9 @@ extension CMBlockBuffer {
|
|||
}
|
||||
return Data(bytes: buffer!, count: length)
|
||||
}
|
||||
|
||||
@discardableResult
|
||||
func copyDataBytes(to buffer: UnsafeMutableRawPointer) -> OSStatus {
|
||||
return CMBlockBufferCopyDataBytes(self, atOffset: 0, dataLength: dataLength, destination: buffer)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -2,7 +2,7 @@ import CoreMedia
|
|||
import Foundation
|
||||
|
||||
extension CMFormatDescription {
|
||||
func `extension`(by key: String) -> [String: AnyObject]? {
|
||||
CMFormatDescriptionGetExtension(self, extensionKey: key as CFString) as? [String: AnyObject]
|
||||
var _mediaType: CMMediaType {
|
||||
CMFormatDescriptionGetMediaType(self)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
import Accelerate
|
||||
import AVFoundation
|
||||
import CoreMedia
|
||||
|
||||
extension CMSampleBuffer {
|
||||
|
|
|
@ -8,21 +8,4 @@ extension CMVideoFormatDescription {
|
|||
var dimensions: CMVideoDimensions {
|
||||
CMVideoFormatDescriptionGetDimensions(self)
|
||||
}
|
||||
|
||||
static func create(pixelBuffer: CVPixelBuffer) -> CMVideoFormatDescription? {
|
||||
var formatDescription: CMFormatDescription?
|
||||
let status: OSStatus = CMVideoFormatDescriptionCreate(
|
||||
allocator: kCFAllocatorDefault,
|
||||
codecType: kCMVideoCodecType_422YpCbCr8,
|
||||
width: Int32(pixelBuffer.width),
|
||||
height: Int32(pixelBuffer.height),
|
||||
extensions: nil,
|
||||
formatDescriptionOut: &formatDescription
|
||||
)
|
||||
guard status == noErr else {
|
||||
logger.warn("\(status)")
|
||||
return nil
|
||||
}
|
||||
return formatDescription
|
||||
}
|
||||
}
|
||||
|
|
|
@ -42,7 +42,8 @@ extension CVPixelBuffer {
|
|||
let yScale = Float(roi.height) / Float(inputImageBuffer.height)
|
||||
let scaleFactor = (xScale < yScale) ? xScale : yScale
|
||||
var scaledInputImageBuffer = inputImageBuffer.scale(scaleFactor)
|
||||
scaledInputImageBuffer.cornerRadius(radius)
|
||||
var shape = ShapeFactory.shared.cornerRadius(CGSize(width: CGFloat(scaledInputImageBuffer.width), height: CGFloat(scaledInputImageBuffer.height)), cornerRadius: radius)
|
||||
vImageSelectChannels_ARGB8888(&shape, &scaledInputImageBuffer, &scaledInputImageBuffer, 0x8, vImage_Flags(kvImageNoFlags))
|
||||
defer {
|
||||
scaledInputImageBuffer.free()
|
||||
}
|
||||
|
@ -52,7 +53,7 @@ extension CVPixelBuffer {
|
|||
}
|
||||
|
||||
@discardableResult
|
||||
func split(_ pixelBuffer: CVPixelBuffer?, direction: vImage_Buffer.TransformDirection) -> Self {
|
||||
func split(_ pixelBuffer: CVPixelBuffer?, direction: ImageTransform) -> Self {
|
||||
guard var inputImageBuffer = try? pixelBuffer?.makevImage_Buffer(format: &Self.format) else {
|
||||
return self
|
||||
}
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
import CoreMedia
|
||||
import Foundation
|
||||
|
||||
extension Data {
|
||||
|
@ -9,4 +10,37 @@ extension Data {
|
|||
return [UInt8](UnsafeBufferPointer(start: pointer, count: count))
|
||||
}
|
||||
}
|
||||
|
||||
func makeBlockBuffer(advancedBy: Int = 0) -> CMBlockBuffer? {
|
||||
var blockBuffer: CMBlockBuffer?
|
||||
let length = count - advancedBy
|
||||
return withUnsafeBytes { (buffer: UnsafeRawBufferPointer) -> CMBlockBuffer? in
|
||||
guard let baseAddress = buffer.baseAddress else {
|
||||
return nil
|
||||
}
|
||||
guard CMBlockBufferCreateWithMemoryBlock(
|
||||
allocator: kCFAllocatorDefault,
|
||||
memoryBlock: nil,
|
||||
blockLength: length,
|
||||
blockAllocator: nil,
|
||||
customBlockSource: nil,
|
||||
offsetToData: 0,
|
||||
dataLength: length,
|
||||
flags: 0,
|
||||
blockBufferOut: &blockBuffer) == noErr else {
|
||||
return nil
|
||||
}
|
||||
guard let blockBuffer else {
|
||||
return nil
|
||||
}
|
||||
guard CMBlockBufferReplaceDataBytes(
|
||||
with: baseAddress.advanced(by: advancedBy),
|
||||
blockBuffer: blockBuffer,
|
||||
offsetIntoDestination: 0,
|
||||
dataLength: length) == noErr else {
|
||||
return nil
|
||||
}
|
||||
return blockBuffer
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -9,9 +9,11 @@ extension VTCompressionSession {
|
|||
|
||||
extension VTCompressionSession: VTSessionConvertible {
|
||||
// MARK: VTSessionConvertible
|
||||
func inputBuffer(_ imageBuffer: CVImageBuffer, presentationTimeStamp: CMTime, duration: CMTime, outputHandler: @escaping VTCompressionOutputHandler) {
|
||||
@discardableResult
|
||||
@inline(__always)
|
||||
func encodeFrame(_ imageBuffer: CVImageBuffer, presentationTimeStamp: CMTime, duration: CMTime, outputHandler: @escaping VTCompressionOutputHandler) -> OSStatus {
|
||||
var flags: VTEncodeInfoFlags = []
|
||||
VTCompressionSessionEncodeFrame(
|
||||
return VTCompressionSessionEncodeFrame(
|
||||
self,
|
||||
imageBuffer: imageBuffer,
|
||||
presentationTimeStamp: presentationTimeStamp,
|
||||
|
@ -22,7 +24,10 @@ extension VTCompressionSession: VTSessionConvertible {
|
|||
)
|
||||
}
|
||||
|
||||
func inputBuffer(_ sampleBuffer: CMSampleBuffer, outputHandler: @escaping VTDecompressionOutputHandler) {
|
||||
@discardableResult
|
||||
@inline(__always)
|
||||
func decodeFrame(_ sampleBuffer: CMSampleBuffer, outputHandler: @escaping VTDecompressionOutputHandler) -> OSStatus {
|
||||
return noErr
|
||||
}
|
||||
|
||||
func invalidate() {
|
||||
|
|
|
@ -7,12 +7,17 @@ extension VTDecompressionSession: VTSessionConvertible {
|
|||
._EnableTemporalProcessing
|
||||
]
|
||||
|
||||
func inputBuffer(_ imageBuffer: CVImageBuffer, presentationTimeStamp: CMTime, duration: CMTime, outputHandler: @escaping VTCompressionOutputHandler) {
|
||||
@discardableResult
|
||||
@inline(__always)
|
||||
func encodeFrame(_ imageBuffer: CVImageBuffer, presentationTimeStamp: CMTime, duration: CMTime, outputHandler: @escaping VTCompressionOutputHandler) -> OSStatus {
|
||||
return noErr
|
||||
}
|
||||
|
||||
func inputBuffer(_ sampleBuffer: CMSampleBuffer, outputHandler: @escaping VTDecompressionOutputHandler) {
|
||||
@discardableResult
|
||||
@inline(__always)
|
||||
func decodeFrame(_ sampleBuffer: CMSampleBuffer, outputHandler: @escaping VTDecompressionOutputHandler) -> OSStatus {
|
||||
var flagsOut: VTDecodeInfoFlags = []
|
||||
VTDecompressionSessionDecodeFrame(
|
||||
return VTDecompressionSessionDecodeFrame(
|
||||
self,
|
||||
sampleBuffer: sampleBuffer,
|
||||
flags: Self.defaultDecodeFlags,
|
||||
|
|
|
@ -1,54 +1,9 @@
|
|||
import Accelerate
|
||||
import CoreMedia
|
||||
import CoreVideo
|
||||
import Foundation
|
||||
|
||||
extension vImage_Buffer {
|
||||
enum TransformDirection {
|
||||
case north
|
||||
case south
|
||||
case east
|
||||
case west
|
||||
|
||||
var opposite: TransformDirection {
|
||||
switch self {
|
||||
case .north:
|
||||
return .south
|
||||
case .south:
|
||||
return .north
|
||||
case .east:
|
||||
return .west
|
||||
case .west:
|
||||
return .east
|
||||
}
|
||||
}
|
||||
|
||||
func tx(_ width: Double) -> Double {
|
||||
switch self {
|
||||
case .north:
|
||||
return 0.0
|
||||
case .south:
|
||||
return Double.leastNonzeroMagnitude
|
||||
case .east:
|
||||
return width / 2
|
||||
case .west:
|
||||
return -(width / 2)
|
||||
}
|
||||
}
|
||||
|
||||
func ty(_ height: Double) -> Double {
|
||||
switch self {
|
||||
case .north:
|
||||
return height / 2
|
||||
case .south:
|
||||
return -(height / 2)
|
||||
case .east:
|
||||
return Double.leastNonzeroMagnitude
|
||||
case .west:
|
||||
return 0.0
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
init?(height: vImagePixelCount, width: vImagePixelCount, pixelBits: UInt32, flags: vImage_Flags) {
|
||||
self.init()
|
||||
guard vImageBuffer_Init(
|
||||
|
@ -65,6 +20,11 @@ extension vImage_Buffer {
|
|||
mutating func copy(to cvPixelBuffer: CVPixelBuffer, format: inout vImage_CGImageFormat) -> vImage_Error {
|
||||
let cvImageFormat = vImageCVImageFormat_CreateWithCVPixelBuffer(cvPixelBuffer).takeRetainedValue()
|
||||
vImageCVImageFormat_SetColorSpace(cvImageFormat, CGColorSpaceCreateDeviceRGB())
|
||||
defer {
|
||||
if let dictionary = CVBufferGetAttachments(cvPixelBuffer, .shouldNotPropagate) {
|
||||
CVBufferSetAttachments(cvPixelBuffer, dictionary, .shouldPropagate)
|
||||
}
|
||||
}
|
||||
return vImageBuffer_CopyToCVPixelBuffer(
|
||||
&self,
|
||||
&format,
|
||||
|
@ -95,34 +55,6 @@ extension vImage_Buffer {
|
|||
return imageBuffer
|
||||
}
|
||||
|
||||
@discardableResult
|
||||
mutating func cornerRadius(_ radius: CGFloat) -> Self {
|
||||
guard 0 < radius else {
|
||||
return self
|
||||
}
|
||||
let buffer = data.assumingMemoryBound(to: Pixel_8.self)
|
||||
for x in 0 ..< Int(width) {
|
||||
for y in 0 ..< Int(height) {
|
||||
let index = y * rowBytes + x * 4
|
||||
var dx = CGFloat(min(x, Int(width) - x))
|
||||
var dy = CGFloat(min(y, Int(height) - y))
|
||||
if dx == 0 && dy == 0 {
|
||||
buffer[index] = 0
|
||||
continue
|
||||
}
|
||||
if radius < dx || radius < dy {
|
||||
continue
|
||||
}
|
||||
dx = radius - dx
|
||||
dy = radius - dy
|
||||
if radius < round(sqrt(dx * dx + dy * dy)) {
|
||||
buffer[index] = 0
|
||||
}
|
||||
}
|
||||
}
|
||||
return self
|
||||
}
|
||||
|
||||
@discardableResult
|
||||
mutating func over(_ src: inout vImage_Buffer, origin: CGPoint = .zero) -> Self {
|
||||
let start = Int(origin.y) * rowBytes + Int(origin.x) * 4
|
||||
|
@ -144,13 +76,15 @@ extension vImage_Buffer {
|
|||
}
|
||||
|
||||
@discardableResult
|
||||
mutating func split(_ buffer: inout vImage_Buffer, direction: TransformDirection) -> Self {
|
||||
mutating func split(_ buffer: inout vImage_Buffer, direction: ImageTransform) -> Self {
|
||||
buffer.transform(direction.opposite)
|
||||
var shape = ShapeFactory.shared.split(CGSize(width: CGFloat(width), height: CGFloat(height)), direction: direction.opposite)
|
||||
vImageSelectChannels_ARGB8888(&shape, &buffer, &buffer, 0x8, vImage_Flags(kvImageNoFlags))
|
||||
transform(direction)
|
||||
guard vImageAlphaBlend_ARGB8888(
|
||||
&self,
|
||||
&buffer,
|
||||
&self,
|
||||
&self,
|
||||
vImage_Flags(kvImageDoNotTile)
|
||||
) == kvImageNoError else {
|
||||
return self
|
||||
|
@ -158,7 +92,7 @@ extension vImage_Buffer {
|
|||
return self
|
||||
}
|
||||
|
||||
private mutating func transform(_ direction: TransformDirection) {
|
||||
private mutating func transform(_ direction: ImageTransform) {
|
||||
let backgroundColor: [Pixel_8] = [0, 255, 255, 255]
|
||||
var vImageTransform = vImage_CGAffineTransform(
|
||||
a: 1,
|
||||
|
|
|
@ -0,0 +1,22 @@
|
|||
import Accelerate
|
||||
import Foundation
|
||||
|
||||
extension vImage_CGImageFormat {
|
||||
@available(iOS, obsoleted: 13.0)
|
||||
@available(tvOS, obsoleted: 13.0)
|
||||
@available(macOS, obsoleted: 10.15)
|
||||
init?(cgImage: CGImage) {
|
||||
guard
|
||||
let colorSpace = cgImage.colorSpace else {
|
||||
return nil
|
||||
}
|
||||
self = vImage_CGImageFormat(
|
||||
bitsPerComponent: UInt32(cgImage.bitsPerComponent),
|
||||
bitsPerPixel: UInt32(cgImage.bitsPerPixel),
|
||||
colorSpace: Unmanaged.passRetained(colorSpace),
|
||||
bitmapInfo: cgImage.bitmapInfo,
|
||||
version: 0,
|
||||
decode: nil,
|
||||
renderingIntent: cgImage.renderingIntent)
|
||||
}
|
||||
}
|
|
@ -1,5 +1,5 @@
|
|||
/// The type of flv supports aac packet types.
|
||||
public enum FLVAACPacketType: UInt8 {
|
||||
enum FLVAACPacketType: UInt8 {
|
||||
/// The sequence data.
|
||||
case seq = 0
|
||||
/// The raw data.
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
/// The type of flv supports avc packet types.
|
||||
public enum FLVAVCPacketType: UInt8 {
|
||||
enum FLVAVCPacketType: UInt8 {
|
||||
/// The sequence data.
|
||||
case seq = 0
|
||||
/// The NAL unit data.
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
import AVFoundation
|
||||
|
||||
/// The type of flv supports audio codecs.
|
||||
public enum FLVAudioCodec: UInt8 {
|
||||
enum FLVAudioCodec: UInt8 {
|
||||
/// The PCM codec.
|
||||
case pcm = 0
|
||||
/// The ADPCM codec.
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
/// The type of flv supports video frame types.
|
||||
public enum FLVFrameType: UInt8 {
|
||||
enum FLVFrameType: UInt8 {
|
||||
/// The keyframe.
|
||||
case key = 1
|
||||
/// The inter frame.
|
||||
|
|
|
@ -1,62 +0,0 @@
|
|||
import AVFoundation
|
||||
|
||||
/// The FLVReader is used to read the contents of a FLV file.
|
||||
public final class FLVReader {
|
||||
/// The header of a FLV.
|
||||
public static let header = Data([0x46, 0x4C, 0x56, 1])
|
||||
|
||||
/// The headerSize of a FLV.
|
||||
static let headerSize: Int = 11
|
||||
|
||||
/// The url of a FLV file.
|
||||
public let url: URL
|
||||
private var currentOffSet: UInt64 = 0
|
||||
private var fileHandle: FileHandle?
|
||||
|
||||
/// Initializes and returns a newly allocated reader.
|
||||
public init(url: URL) {
|
||||
do {
|
||||
self.url = url
|
||||
fileHandle = try FileHandle(forReadingFrom: url)
|
||||
fileHandle?.seek(toFileOffset: 13)
|
||||
currentOffSet = 13
|
||||
} catch {
|
||||
logger.error(error)
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns data by FLVTag.
|
||||
public func getData(_ tag: FLVTag) -> Data? {
|
||||
fileHandle?.seek(toFileOffset: tag.offset)
|
||||
return fileHandle?.readData(ofLength: Int(UInt64(tag.dataSize)))
|
||||
}
|
||||
}
|
||||
|
||||
extension FLVReader: IteratorProtocol {
|
||||
// MARK: IteratorProtocol
|
||||
public func next() -> FLVTag? {
|
||||
guard let fileHandle: FileHandle = fileHandle else {
|
||||
return nil
|
||||
}
|
||||
var tag: FLVTag!
|
||||
fileHandle.seek(toFileOffset: currentOffSet)
|
||||
let data: Data = fileHandle.readData(ofLength: FLVReader.headerSize)
|
||||
guard !data.isEmpty else {
|
||||
return nil
|
||||
}
|
||||
switch data[0] {
|
||||
case 8:
|
||||
tag = FLVAudioTag(data: data)
|
||||
case 9:
|
||||
tag = FLVVideoTag(data: data)
|
||||
case 18:
|
||||
tag = FLVDataTag(data: data)
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
tag.readData(fileHandle)
|
||||
tag.offset = currentOffSet + UInt64(FLVReader.headerSize)
|
||||
currentOffSet += UInt64(FLVReader.headerSize) + UInt64(tag.dataSize) + 4
|
||||
return tag
|
||||
}
|
||||
}
|
|
@ -1,5 +1,5 @@
|
|||
/// The type of flv supports audio sound rates.
|
||||
public enum FLVSoundRate: UInt8 {
|
||||
enum FLVSoundRate: UInt8 {
|
||||
/// The sound rate of 5,500.0kHz.
|
||||
case kHz5_5 = 0
|
||||
/// Ths sound rate of 11,000.0kHz.
|
||||
|
@ -10,7 +10,7 @@ public enum FLVSoundRate: UInt8 {
|
|||
case kHz44 = 3
|
||||
|
||||
/// The float typed value.
|
||||
public var floatValue: Float64 {
|
||||
var floatValue: Float64 {
|
||||
switch self {
|
||||
case .kHz5_5:
|
||||
return 5500
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
/// The type of flv supports audio sound size.
|
||||
public enum FLVSoundSize: UInt8 {
|
||||
enum FLVSoundSize: UInt8 {
|
||||
/// The 8bit sound.
|
||||
case snd8bit = 0
|
||||
/// The 16bit sound.
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
/// The type of flv supports audio sound channel type..
|
||||
public enum FLVSoundType: UInt8 {
|
||||
enum FLVSoundType: UInt8 {
|
||||
/// The mono sound.
|
||||
case mono = 0
|
||||
/// The stereo sound.
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
import Foundation
|
||||
|
||||
/// The type of flv tag.
|
||||
public enum FLVTagType: UInt8 {
|
||||
enum FLVTagType: UInt8 {
|
||||
/// The Audio tag,
|
||||
case audio = 8
|
||||
/// The Video tag.
|
||||
|
@ -29,127 +29,3 @@ public enum FLVTagType: UInt8 {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
// MARK: -
|
||||
/// The interface of FLV tag.
|
||||
public protocol FLVTag: CustomDebugStringConvertible {
|
||||
/// The type of this tag.
|
||||
var tagType: FLVTagType { get set }
|
||||
/// The length of data int the field.
|
||||
var dataSize: UInt32 { get set }
|
||||
/// The timestamp in milliseconds.
|
||||
var timestamp: UInt32 { get set }
|
||||
/// The extension of the timestamp.
|
||||
var timestampExtended: UInt8 { get set }
|
||||
/// The streamId, always 0.
|
||||
var streamId: UInt32 { get set }
|
||||
/// The data offset of a flv file.
|
||||
var offset: UInt64 { get set }
|
||||
|
||||
/// Initialize a new object.
|
||||
init()
|
||||
/// Read data of fileHandler.
|
||||
mutating func readData(_ fileHandler: FileHandle)
|
||||
}
|
||||
|
||||
extension FLVTag {
|
||||
var headerSize: Int {
|
||||
tagType.headerSize
|
||||
}
|
||||
|
||||
init?(data: Data) {
|
||||
self.init()
|
||||
let buffer = ByteArray(data: data)
|
||||
do {
|
||||
tagType = FLVTagType(rawValue: try buffer.readUInt8()) ?? .data
|
||||
dataSize = try buffer.readUInt24()
|
||||
timestamp = try buffer.readUInt24()
|
||||
timestampExtended = try buffer.readUInt8()
|
||||
streamId = try buffer.readUInt24()
|
||||
buffer.clear()
|
||||
} catch {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// MARK: CustomDebugStringConvertible
|
||||
public var debugDescription: String {
|
||||
Mirror(reflecting: self).debugDescription
|
||||
}
|
||||
}
|
||||
|
||||
// MARK: -
|
||||
/// A structure that defines the FLVTag of Data.
|
||||
public struct FLVDataTag: FLVTag {
|
||||
public var tagType: FLVTagType = .data
|
||||
public var dataSize: UInt32 = 0
|
||||
public var timestamp: UInt32 = 0
|
||||
public var timestampExtended: UInt8 = 0
|
||||
public var streamId: UInt32 = 0
|
||||
public var offset: UInt64 = 0
|
||||
|
||||
public init() {
|
||||
}
|
||||
|
||||
public mutating func readData(_ fileHandler: FileHandle) {
|
||||
}
|
||||
}
|
||||
|
||||
// MARK: -
|
||||
/// A structure that defines the FLVTag of an audio.
|
||||
public struct FLVAudioTag: FLVTag {
|
||||
public var tagType: FLVTagType = .audio
|
||||
public var dataSize: UInt32 = 0
|
||||
public var timestamp: UInt32 = 0
|
||||
public var timestampExtended: UInt8 = 0
|
||||
public var streamId: UInt32 = 0
|
||||
public var offset: UInt64 = 0
|
||||
/// Specifies the codec of audio.
|
||||
public var codec: FLVAudioCodec = .unknown
|
||||
/// Specifies the sound of rate.
|
||||
public var soundRate: FLVSoundRate = .kHz5_5
|
||||
/// Specifies the sound of size.
|
||||
public var soundSize: FLVSoundSize = .snd8bit
|
||||
/// Specifies the sound of type.
|
||||
public var soundType: FLVSoundType = .mono
|
||||
|
||||
public init() {
|
||||
}
|
||||
|
||||
public mutating func readData(_ fileHandler: FileHandle) {
|
||||
let data: Data = fileHandler.readData(ofLength: headerSize)
|
||||
codec = FLVAudioCodec(rawValue: data[0] >> 4) ?? .unknown
|
||||
soundRate = FLVSoundRate(rawValue: (data[0] & 0b00001100) >> 2) ?? .kHz5_5
|
||||
soundSize = FLVSoundSize(rawValue: (data[0] & 0b00000010) >> 1) ?? .snd8bit
|
||||
soundType = FLVSoundType(rawValue: data[0] & 0b00000001) ?? .mono
|
||||
}
|
||||
}
|
||||
|
||||
// MARK: -
|
||||
/// A structure that defines the FLVTag of am video.
|
||||
public struct FLVVideoTag: FLVTag {
|
||||
public var tagType: FLVTagType = .video
|
||||
public var dataSize: UInt32 = 0
|
||||
public var timestamp: UInt32 = 0
|
||||
public var timestampExtended: UInt8 = 0
|
||||
public var streamId: UInt32 = 0
|
||||
public var offset: UInt64 = 0
|
||||
/// Specifies the frame type of video.
|
||||
public var frameType: FLVFrameType = .command
|
||||
/// Specifies the codec of video.
|
||||
public var codec: FLVVideoCodec = .unknown
|
||||
/// Specifies the avc packet type.
|
||||
public var avcPacketType: FLVAVCPacketType = .eos
|
||||
/// Specifies the composition time.
|
||||
public var compositionTime: Int32 = 0
|
||||
|
||||
public init() {
|
||||
}
|
||||
|
||||
public mutating func readData(_ fileHandler: FileHandle) {
|
||||
let data: Data = fileHandler.readData(ofLength: headerSize)
|
||||
frameType = FLVFrameType(rawValue: data[0] >> 4) ?? .command
|
||||
codec = FLVVideoCodec(rawValue: data[0] & 0b00001111) ?? .unknown
|
||||
avcPacketType = FLVAVCPacketType(rawValue: data[1]) ?? .eos
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
import Foundation
|
||||
|
||||
/// The type of flv supports video codecs.
|
||||
public enum FLVVideoCodec: UInt8 {
|
||||
enum FLVVideoCodec: UInt8 {
|
||||
/// The JPEG codec.
|
||||
case jpeg = 1
|
||||
/// The Sorenson H263 codec.
|
||||
|
|
|
@ -0,0 +1,18 @@
|
|||
import Foundation
|
||||
|
||||
enum FLVVideoFourCC: UInt32 {
|
||||
case av1 = 0x61763031 // { 'a', 'v', '0', '1' }
|
||||
case vp9 = 0x76703039 // { 'v', 'p', '0', '9' }
|
||||
case hevc = 0x68766331 // { 'h', 'v', 'c', '1' }
|
||||
|
||||
var isSupported: Bool {
|
||||
switch self {
|
||||
case .av1:
|
||||
return false
|
||||
case .vp9:
|
||||
return false
|
||||
case .hevc:
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,10 @@
|
|||
import Foundation
|
||||
|
||||
enum FLVVideoPacketType: UInt8 {
|
||||
case sequenceStart = 0
|
||||
case codedFrames = 1
|
||||
case sequenceEnd = 2
|
||||
case codedFramesX = 3
|
||||
case metadata = 4
|
||||
case mpeg2TSSequenceStart = 5
|
||||
}
|
|
@ -1,36 +0,0 @@
|
|||
import Foundation
|
||||
|
||||
protocol BaseDescriptor: Equatable, DataConvertible, CustomDebugStringConvertible {
|
||||
var tag: UInt8 { get }
|
||||
var size: UInt32 { get }
|
||||
}
|
||||
|
||||
extension BaseDescriptor {
|
||||
func writeSize(_ byteArray: ByteArray) {
|
||||
let bytes = UInt32(byteArray.position - 5).bigEndian.data.bytes
|
||||
byteArray.position = 1
|
||||
for i in 0..<bytes.count - 1 {
|
||||
byteArray.writeUInt8(bytes[i] | 0x80)
|
||||
}
|
||||
if let last = bytes.last {
|
||||
byteArray.writeUInt8(last)
|
||||
}
|
||||
}
|
||||
|
||||
func readSize(_ byteArray: ByteArray) throws -> UInt32 {
|
||||
var size: UInt32 = 0
|
||||
var length: UInt8 = 0
|
||||
repeat {
|
||||
length = try byteArray.readUInt8()
|
||||
size += size << 7 | UInt32(length & 0x7F)
|
||||
} while ((length & 0x80) != 0)
|
||||
return size
|
||||
}
|
||||
}
|
||||
|
||||
extension BaseDescriptor {
|
||||
// MARK: CustomDebugStringConvertible
|
||||
var debugDescription: String {
|
||||
Mirror(reflecting: self).debugDescription
|
||||
}
|
||||
}
|
|
@ -1,57 +0,0 @@
|
|||
import Foundation
|
||||
|
||||
struct DecoderConfigDescriptor: BaseDescriptor {
|
||||
static let tag: UInt8 = 0x04
|
||||
// MARK: BaseDescriptor
|
||||
let tag: UInt8 = Self.tag
|
||||
var size: UInt32 = 0
|
||||
// MARK: DecoderConfigDescriptor
|
||||
var objectTypeIndication: UInt8 = 0
|
||||
var streamType: UInt8 = 0
|
||||
var upStream = false
|
||||
var bufferSizeDB: UInt32 = 0
|
||||
var maxBitrate: UInt32 = 0
|
||||
var avgBitrate: UInt32 = 0
|
||||
var decSpecificInfo = DecoderSpecificInfo()
|
||||
var profileLevelIndicationIndexDescriptor = ProfileLevelIndicationIndexDescriptor()
|
||||
}
|
||||
|
||||
extension DecoderConfigDescriptor: DataConvertible {
|
||||
// MARK: DataConvertible
|
||||
var data: Data {
|
||||
get {
|
||||
let buffer = ByteArray()
|
||||
.writeUInt8(tag)
|
||||
.writeUInt32(0)
|
||||
.writeUInt8(objectTypeIndication)
|
||||
.writeUInt8(streamType << 2 | (upStream ? 1 : 0) << 1 | 1)
|
||||
.writeUInt24(bufferSizeDB)
|
||||
.writeUInt32(maxBitrate)
|
||||
.writeUInt32(avgBitrate)
|
||||
.writeBytes(decSpecificInfo.data)
|
||||
.writeBytes(profileLevelIndicationIndexDescriptor.data)
|
||||
writeSize(buffer)
|
||||
return buffer.data
|
||||
}
|
||||
set {
|
||||
do {
|
||||
let buffer = ByteArray(data: newValue)
|
||||
_ = try buffer.readUInt8()
|
||||
size = try readSize(buffer)
|
||||
objectTypeIndication = try buffer.readUInt8()
|
||||
let first = try buffer.readUInt8()
|
||||
streamType = (first >> 2)
|
||||
upStream = (first & 2) != 0
|
||||
bufferSizeDB = try buffer.readUInt24()
|
||||
maxBitrate = try buffer.readUInt32()
|
||||
avgBitrate = try buffer.readUInt32()
|
||||
let position = buffer.position
|
||||
decSpecificInfo.data = try buffer.readBytes(buffer.bytesAvailable)
|
||||
buffer.position = position + Int(decSpecificInfo.size) + 5
|
||||
profileLevelIndicationIndexDescriptor.data = try buffer.readBytes(buffer.bytesAvailable)
|
||||
} catch {
|
||||
logger.error(error)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,33 +0,0 @@
|
|||
import Foundation
|
||||
|
||||
struct DecoderSpecificInfo: BaseDescriptor {
|
||||
static let tag: UInt8 = 0x05
|
||||
// MARK: BaseDescriptor
|
||||
let tag: UInt8 = Self.tag
|
||||
var size: UInt32 = 0
|
||||
// MARK: DecoderConfigDescriptor
|
||||
private var _data = Data()
|
||||
}
|
||||
|
||||
extension DecoderSpecificInfo: DataConvertible {
|
||||
var data: Data {
|
||||
get {
|
||||
let buffer = ByteArray()
|
||||
.writeUInt8(tag)
|
||||
.writeUInt32(0)
|
||||
.writeBytes(_data)
|
||||
writeSize(buffer)
|
||||
return buffer.data
|
||||
}
|
||||
set {
|
||||
do {
|
||||
let buffer = ByteArray(data: newValue)
|
||||
_ = try buffer.readUInt8()
|
||||
size = try readSize(buffer)
|
||||
_data = try buffer.readBytes(Int(size))
|
||||
} catch {
|
||||
logger.error(error)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,77 +0,0 @@
|
|||
import Foundation
|
||||
|
||||
struct ESDescriptor: BaseDescriptor {
|
||||
static let tag: UInt8 = 0x03
|
||||
// MARK: BaseDescriptor
|
||||
let tag: UInt8 = Self.tag
|
||||
var size: UInt32 = 0
|
||||
// MARK: ESDescriptor
|
||||
var ES_ID: UInt16 = 0
|
||||
var streamDependenceFlag = false
|
||||
var URLFlag = false
|
||||
var OCRstreamFlag = false
|
||||
var streamPriority: UInt8 = 0
|
||||
var dependsOn_ES_ID: UInt16 = 0
|
||||
var URLLength: UInt8 = 0
|
||||
var URLstring: String = ""
|
||||
var OCR_ES_Id: UInt16 = 0
|
||||
var decConfigDescr = DecoderConfigDescriptor()
|
||||
var slConfigDescr = SLConfigDescriptor()
|
||||
}
|
||||
|
||||
extension ESDescriptor: DataConvertible {
|
||||
// MARK: DataConvertible
|
||||
var data: Data {
|
||||
get {
|
||||
let buffer = ByteArray()
|
||||
.writeUInt8(tag)
|
||||
.writeUInt32(0)
|
||||
.writeUInt16(ES_ID)
|
||||
.writeUInt8((streamDependenceFlag ? 1 : 0) << 7 | (URLFlag ? 1 : 0) << 6 | streamPriority)
|
||||
if streamDependenceFlag {
|
||||
buffer.writeUInt16(dependsOn_ES_ID)
|
||||
}
|
||||
if URLFlag {
|
||||
buffer
|
||||
.writeUInt8(URLLength)
|
||||
.writeUTF8Bytes(URLstring)
|
||||
}
|
||||
if OCRstreamFlag {
|
||||
buffer.writeUInt16(OCR_ES_Id)
|
||||
}
|
||||
buffer.writeBytes(decConfigDescr.data)
|
||||
buffer.writeBytes(slConfigDescr.data)
|
||||
writeSize(buffer)
|
||||
return buffer.data
|
||||
}
|
||||
set {
|
||||
do {
|
||||
let buffer = ByteArray(data: newValue)
|
||||
_ = try buffer.readUInt8()
|
||||
size = try readSize(buffer)
|
||||
ES_ID = try buffer.readUInt16()
|
||||
let first = try buffer.readUInt8()
|
||||
streamDependenceFlag = (first & 0x80) != 0
|
||||
URLFlag = (first & 0x40) != 0
|
||||
streamPriority = (first & 0x1F)
|
||||
if streamDependenceFlag {
|
||||
dependsOn_ES_ID = try buffer.readUInt16()
|
||||
}
|
||||
if URLFlag {
|
||||
URLLength = try buffer.readUInt8()
|
||||
URLstring = try buffer.readUTF8Bytes(Int(URLLength))
|
||||
}
|
||||
if OCRstreamFlag {
|
||||
OCR_ES_Id = try buffer.readUInt16()
|
||||
}
|
||||
var position = buffer.position
|
||||
decConfigDescr.data = try buffer.readBytes(buffer.bytesAvailable)
|
||||
position += 5 + Int(decConfigDescr.size)
|
||||
buffer.position = position
|
||||
slConfigDescr.data = try buffer.readBytes(buffer.bytesAvailable)
|
||||
} catch {
|
||||
logger.error(error)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,49 +0,0 @@
|
|||
import Foundation
|
||||
|
||||
struct ElementaryStreamSpecificData {
|
||||
static let fixedHeaderSize: Int = 5
|
||||
|
||||
var streamType: UInt8 = 0
|
||||
var elementaryPID: UInt16 = 0
|
||||
var ESInfoLength: UInt16 = 0
|
||||
var ESDescriptors = Data()
|
||||
|
||||
init() {
|
||||
}
|
||||
|
||||
init?(_ data: Data) {
|
||||
self.data = data
|
||||
}
|
||||
}
|
||||
|
||||
extension ElementaryStreamSpecificData: DataConvertible {
|
||||
// MARK: DataConvertible
|
||||
var data: Data {
|
||||
get {
|
||||
ByteArray()
|
||||
.writeUInt8(streamType)
|
||||
.writeUInt16(elementaryPID | 0xe000)
|
||||
.writeUInt16(ESInfoLength | 0xf000)
|
||||
.writeBytes(ESDescriptors)
|
||||
.data
|
||||
}
|
||||
set {
|
||||
let buffer = ByteArray(data: newValue)
|
||||
do {
|
||||
streamType = try buffer.readUInt8()
|
||||
elementaryPID = try buffer.readUInt16() & 0x0fff
|
||||
ESInfoLength = try buffer.readUInt16() & 0x01ff
|
||||
ESDescriptors = try buffer.readBytes(Int(ESInfoLength))
|
||||
} catch {
|
||||
logger.error("\(buffer)")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
extension ElementaryStreamSpecificData: CustomDebugStringConvertible {
|
||||
// MARK: CustomDebugStringConvertible
|
||||
var debugDescription: String {
|
||||
Mirror(reflecting: self).debugDescription
|
||||
}
|
||||
}
|
|
@ -1,16 +0,0 @@
|
|||
import Foundation
|
||||
|
||||
enum ElementaryStreamType: UInt8 {
|
||||
case mpeg1Video = 0x01
|
||||
case mpeg2Video = 0x02
|
||||
case mpeg1Audio = 0x03
|
||||
case mpeg2Audio = 0x04
|
||||
case mpeg2TabledData = 0x05
|
||||
case mpeg2PacketizedData = 0x06
|
||||
|
||||
case adtsaac = 0x0F
|
||||
case h263 = 0x10
|
||||
|
||||
case h264 = 0x1B
|
||||
case h265 = 0x24
|
||||
}
|
|
@ -1,48 +0,0 @@
|
|||
import AVFoundation
|
||||
import VideoToolbox
|
||||
|
||||
enum NALType: UInt8 {
|
||||
case unspec = 0
|
||||
case slice = 1 // P frame
|
||||
case dpa = 2
|
||||
case dpb = 3
|
||||
case dpc = 4
|
||||
case idr = 5 // I frame
|
||||
case sei = 6
|
||||
case sps = 7
|
||||
case pps = 8
|
||||
case aud = 9
|
||||
case eoseq = 10
|
||||
case eostream = 11
|
||||
case fill = 12
|
||||
}
|
||||
|
||||
// MARK: -
|
||||
struct NALUnit {
|
||||
var refIdc: UInt8 = 0
|
||||
var type: NALType = .unspec
|
||||
var payload = Data()
|
||||
}
|
||||
|
||||
extension NALUnit: DataConvertible {
|
||||
// MARK: DataConvertible
|
||||
var data: Data {
|
||||
get {
|
||||
ByteArray()
|
||||
.writeUInt8(refIdc << 5 | type.rawValue)
|
||||
.writeBytes(payload)
|
||||
.data
|
||||
}
|
||||
set {
|
||||
let buffer = ByteArray(data: newValue)
|
||||
do {
|
||||
let byte: UInt8 = try buffer.readUInt8()
|
||||
refIdc = byte & 0x60 >> 5
|
||||
type = NALType(rawValue: byte & 0x31) ?? .unspec
|
||||
payload = try buffer.readBytes(buffer.bytesAvailable)
|
||||
} catch {
|
||||
logger.error("\(buffer)")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,34 +0,0 @@
|
|||
import Foundation
|
||||
|
||||
struct ProfileLevelIndicationIndexDescriptor: BaseDescriptor {
|
||||
static let tag: UInt8 = 0x14
|
||||
// MARK: BaseDescriptor
|
||||
let tag: UInt8 = Self.tag
|
||||
var size: UInt32 = 0
|
||||
// MARK: ProfileLevelIndicationIndexDescriptor
|
||||
var profileLevelIndicationIndex: UInt8 = 0
|
||||
}
|
||||
|
||||
extension ProfileLevelIndicationIndexDescriptor: DataConvertible {
|
||||
// MARK: DataConvertible
|
||||
var data: Data {
|
||||
get {
|
||||
let buffer = ByteArray()
|
||||
.writeUInt8(tag)
|
||||
.writeUInt32(0)
|
||||
.writeUInt8(profileLevelIndicationIndex)
|
||||
writeSize(buffer)
|
||||
return buffer.data
|
||||
}
|
||||
set {
|
||||
do {
|
||||
let buffer = ByteArray(data: newValue)
|
||||
_ = try buffer.readUInt8()
|
||||
size = try readSize(buffer)
|
||||
profileLevelIndicationIndex = try buffer.readUInt8()
|
||||
} catch {
|
||||
logger.error(error)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,33 +0,0 @@
|
|||
import Foundation
|
||||
|
||||
struct SLConfigDescriptor: BaseDescriptor {
|
||||
// MARK: BaseDescriptor
|
||||
let tag: UInt8 = 0x06
|
||||
var size: UInt32 = 0
|
||||
// MARK: SLConfigDescriptor
|
||||
var predefined: UInt8 = 0
|
||||
}
|
||||
|
||||
extension SLConfigDescriptor: DataConvertible {
|
||||
// MARK: DataConvertible
|
||||
var data: Data {
|
||||
get {
|
||||
let buffer = ByteArray()
|
||||
.writeUInt8(tag)
|
||||
.writeUInt32(0)
|
||||
.writeUInt8(predefined)
|
||||
writeSize(buffer)
|
||||
return buffer.data
|
||||
}
|
||||
set {
|
||||
do {
|
||||
let buffer = ByteArray(data: newValue)
|
||||
_ = try buffer.readUInt8()
|
||||
size = try readSize(buffer)
|
||||
predefined = try buffer.readUInt8()
|
||||
} catch {
|
||||
logger.error(error)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,41 +0,0 @@
|
|||
import Foundation
|
||||
|
||||
/// ISO/IEC 14496-15 5.3.4.1.2
|
||||
struct MP4AVCConfigurationBox: MP4BoxConvertible {
|
||||
// MARK: MP4BoxConvertible
|
||||
var size: UInt32 = 0
|
||||
let type: String = "avcC"
|
||||
var offset: UInt64 = 0
|
||||
var children: [MP4BoxConvertible] = []
|
||||
// MARK: MP4AVCConfigurationBox
|
||||
var config = AVCConfigurationRecord()
|
||||
}
|
||||
|
||||
extension MP4AVCConfigurationBox: DataConvertible {
|
||||
var data: Data {
|
||||
get {
|
||||
let buffer = ByteArray()
|
||||
.writeUInt32(size)
|
||||
.writeUTF8Bytes(type)
|
||||
.writeBytes(config.data)
|
||||
let size = buffer.position
|
||||
buffer.position = 0
|
||||
buffer.writeUInt32(UInt32(size))
|
||||
return buffer.data
|
||||
}
|
||||
set {
|
||||
do {
|
||||
let buffer = ByteArray(data: newValue)
|
||||
size = try buffer.readUInt32()
|
||||
_ = try buffer.readUTF8Bytes(4)
|
||||
config = AVCConfigurationRecord(data: try buffer.readBytes(buffer.bytesAvailable))
|
||||
} catch {
|
||||
logger.error(error)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
extension MP4Box.Names {
|
||||
static let avcC = MP4Box.Name<MP4PixelAspectRatioBox>(rawValue: "avcC")
|
||||
}
|
|
@ -1,72 +0,0 @@
|
|||
import Foundation
|
||||
|
||||
struct MP4AudioSampleEntry: MP4SampleEntry {
|
||||
static let channelCount: UInt16 = 2
|
||||
static let sampleSize: UInt16 = 16
|
||||
// MARK: MP4SampleEntry
|
||||
var size: UInt32 = 0
|
||||
var type: String = ""
|
||||
var offset: UInt64 = 0
|
||||
var children: [MP4BoxConvertible] = []
|
||||
var dataReferenceIndex: UInt16 = 0
|
||||
// MARK: MP4AudioSampleEntry
|
||||
var channelCount: UInt16 = Self.channelCount
|
||||
var sampleSize: UInt16 = Self.sampleSize
|
||||
var sampleRate: UInt32 = 0
|
||||
}
|
||||
|
||||
extension MP4AudioSampleEntry: DataConvertible {
|
||||
var data: Data {
|
||||
get {
|
||||
let buffer = ByteArray()
|
||||
.writeUInt32(size)
|
||||
.writeUTF8Bytes(type)
|
||||
.writeBytes(.init(repeating: 0, count: 6)) // const unsigned int(8)[6] reserved = 0
|
||||
.writeUInt16(dataReferenceIndex)
|
||||
.writeUInt32(0)
|
||||
.writeUInt32(0) // const unsigned int(32)[2] reserved = 0
|
||||
.writeUInt16(channelCount)
|
||||
.writeUInt16(sampleSize)
|
||||
.writeUInt16(0) // unsigned int(16) pre_defined = 0
|
||||
.writeUInt16(0) // const unsigned int(16) reserved = 0
|
||||
.writeUInt32(sampleRate << 16)
|
||||
for child in children {
|
||||
buffer.writeBytes(child.data)
|
||||
}
|
||||
let size = buffer.position
|
||||
buffer.position = 0
|
||||
buffer.writeUInt32(UInt32(size))
|
||||
return buffer.data
|
||||
}
|
||||
set {
|
||||
do {
|
||||
let buffer = ByteArray(data: newValue)
|
||||
size = try buffer.readUInt32()
|
||||
type = try buffer.readUTF8Bytes(4)
|
||||
buffer.position += 6
|
||||
dataReferenceIndex = try buffer.readUInt16()
|
||||
buffer.position += 8
|
||||
channelCount = try buffer.readUInt16()
|
||||
sampleSize = try buffer.readUInt16()
|
||||
buffer.position += 4
|
||||
sampleRate = try buffer.readUInt32() >> 16
|
||||
children.removeAll()
|
||||
while 0 < buffer.bytesAvailable {
|
||||
let size = try buffer.readUInt32()
|
||||
_ = try buffer.readUTF8Bytes(4)
|
||||
buffer.position -= 8
|
||||
var entry = MP4Box()
|
||||
entry.data = try buffer.readBytes(Int(size))
|
||||
children.append(entry)
|
||||
}
|
||||
} catch {
|
||||
logger.error(error)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
extension MP4Box.Names {
|
||||
static let mp4a = MP4Box.Name<MP4AudioSampleEntry>(rawValue: "mp4a")
|
||||
static let mlpa = MP4Box.Name<MP4AudioSampleEntry>(rawValue: "mlpa")
|
||||
}
|
|
@ -1,84 +0,0 @@
|
|||
import Foundation
|
||||
|
||||
struct MP4Box: MP4BoxConvertible {
|
||||
static let containers: Set<String> = [
|
||||
"cmov",
|
||||
"ctts",
|
||||
"edts",
|
||||
"iods",
|
||||
"junk",
|
||||
"mdia",
|
||||
"minf",
|
||||
"moov",
|
||||
"pict",
|
||||
"pnot",
|
||||
"rmda",
|
||||
"rmra",
|
||||
"skip",
|
||||
"stbl",
|
||||
"trak",
|
||||
"uuid",
|
||||
"wide",
|
||||
"moof",
|
||||
"traf"
|
||||
]
|
||||
|
||||
class Names {
|
||||
}
|
||||
|
||||
final class Name<T: MP4BoxConvertible>: Names, Hashable, RawRepresentable {
|
||||
let rawValue: String
|
||||
// swiftlint:disable nesting
|
||||
typealias RawValue = String
|
||||
|
||||
init(rawValue: String) {
|
||||
self.rawValue = rawValue
|
||||
}
|
||||
}
|
||||
|
||||
var size: UInt32 = 0
|
||||
var type: String = ""
|
||||
var offset: UInt64 = 0
|
||||
var children: [MP4BoxConvertible] = []
|
||||
private var _data = Data()
|
||||
}
|
||||
|
||||
extension MP4Box: DataConvertible {
|
||||
var data: Data {
|
||||
get {
|
||||
_data
|
||||
}
|
||||
set {
|
||||
do {
|
||||
_data = newValue
|
||||
let buffer = ByteArray(data: newValue)
|
||||
size = try buffer.readUInt32()
|
||||
type = try buffer.readUTF8Bytes(4)
|
||||
if Self.containers.contains(type) {
|
||||
children.removeAll()
|
||||
while 0 < buffer.bytesAvailable {
|
||||
let size = try buffer.readInt32()
|
||||
_ = try buffer.readBytes(4)
|
||||
buffer.position -= 8
|
||||
var child = MP4Box()
|
||||
child.data = try buffer.readBytes(Int(size))
|
||||
children.append(child)
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
logger.error(error)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
extension MP4Box.Names {
|
||||
static let trak = MP4Box.Name<MP4Box>(rawValue: "trak")
|
||||
}
|
||||
|
||||
extension MP4Box: CustomDebugStringConvertible {
|
||||
// MARK: CustomDebugStringConvertible
|
||||
var debugDescription: String {
|
||||
Mirror(reflecting: self).debugDescription
|
||||
}
|
||||
}
|
|
@ -1,43 +0,0 @@
|
|||
import Foundation
|
||||
|
||||
protocol MP4BoxConvertible: DataConvertible, CustomXmlStringConvertible {
|
||||
var size: UInt32 { get }
|
||||
var type: String { get }
|
||||
var offset: UInt64 { get set }
|
||||
var children: [MP4BoxConvertible] { get }
|
||||
|
||||
init()
|
||||
func getBoxes<T>(by name: MP4Box.Name<T>) -> [T]
|
||||
}
|
||||
|
||||
extension MP4BoxConvertible {
|
||||
var xmlString: String {
|
||||
guard !children.isEmpty else {
|
||||
return "<\(type) size=\"\(size)\" offset=\"\(offset)\" />"
|
||||
}
|
||||
var tags: [String] = []
|
||||
for child in children {
|
||||
tags.append(child.xmlString)
|
||||
}
|
||||
return "<\(type) size=\"\(size)\" offset=\"\(offset)\">\(tags.joined())</\(type)>"
|
||||
}
|
||||
|
||||
func getBoxes<T>(by name: MP4Box.Name<T>) -> [T] {
|
||||
var list: [T] = []
|
||||
for child in children {
|
||||
if name.rawValue == child.type {
|
||||
if let box = child as? T {
|
||||
list.append(box)
|
||||
} else {
|
||||
var box = T()
|
||||
box.data = child.data
|
||||
list.append(box)
|
||||
}
|
||||
}
|
||||
if !child.children.isEmpty {
|
||||
list += child.getBoxes(by: name)
|
||||
}
|
||||
}
|
||||
return list
|
||||
}
|
||||
}
|
|
@ -1,56 +0,0 @@
|
|||
import Foundation
|
||||
|
||||
struct MP4ChunkOffsetBox: MP4FullBox {
|
||||
static let version: UInt8 = 0
|
||||
static let flags: UInt32 = 0
|
||||
// MARK: MP4FullBox
|
||||
var size: UInt32 = 0
|
||||
let type: String = "stco"
|
||||
var offset: UInt64 = 0
|
||||
var children: [MP4BoxConvertible] = []
|
||||
var version: UInt8 = Self.version
|
||||
var flags: UInt32 = Self.flags
|
||||
// MARK: MP4ChunkOffsetBox
|
||||
var entries: [UInt32] = []
|
||||
}
|
||||
|
||||
extension MP4ChunkOffsetBox: DataConvertible {
|
||||
var data: Data {
|
||||
get {
|
||||
let buffer = ByteArray()
|
||||
.writeUInt32(size)
|
||||
.writeUTF8Bytes(type)
|
||||
.writeUInt8(version)
|
||||
.writeUInt24(flags)
|
||||
.writeUInt32(UInt32(entries.count))
|
||||
for entry in entries {
|
||||
buffer
|
||||
.writeUInt32(entry)
|
||||
}
|
||||
let size = buffer.position
|
||||
buffer.position = 0
|
||||
buffer.writeUInt32(UInt32(size))
|
||||
return buffer.data
|
||||
}
|
||||
set {
|
||||
do {
|
||||
let buffer = ByteArray(data: newValue)
|
||||
size = try buffer.readUInt32()
|
||||
_ = try buffer.readUTF8Bytes(4)
|
||||
version = try buffer.readUInt8()
|
||||
flags = try buffer.readUInt24()
|
||||
let numberOfEntries = try buffer.readUInt32()
|
||||
entries.removeAll()
|
||||
for _ in 0..<numberOfEntries {
|
||||
entries.append(try buffer.readUInt32())
|
||||
}
|
||||
} catch {
|
||||
logger.error(error)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
extension MP4Box.Names {
|
||||
static let stco = MP4Box.Name<MP4ChunkOffsetBox>(rawValue: "stco")
|
||||
}
|
|
@ -1,50 +0,0 @@
|
|||
import Foundation
|
||||
|
||||
/// ISO/IEC 14496-12 5th 8.7.2.2
|
||||
struct MP4DataEntryUrlBox: MP4FullBox {
|
||||
static let version: UInt8 = 0
|
||||
static let flags: UInt32 = 0
|
||||
// MARK: MP4FullBox
|
||||
var size: UInt32 = 0
|
||||
let type: String = "url "
|
||||
var offset: UInt64 = 0
|
||||
var version: UInt8 = Self.version
|
||||
var flags: UInt32 = Self.flags
|
||||
var children: [MP4BoxConvertible] = []
|
||||
// MARK: MP4DataEntryUrlBox
|
||||
var location: String = ""
|
||||
}
|
||||
|
||||
extension MP4DataEntryUrlBox: DataConvertible {
|
||||
// MARK: DataConvertible
|
||||
var data: Data {
|
||||
get {
|
||||
let buffer = ByteArray()
|
||||
.writeUInt32(size)
|
||||
.writeUTF8Bytes(type)
|
||||
.writeUInt8(version)
|
||||
.writeUInt24(flags)
|
||||
.writeUTF8Bytes(location)
|
||||
let size = buffer.position
|
||||
buffer.position = 0
|
||||
buffer.writeUInt32(UInt32(size))
|
||||
return buffer.data
|
||||
}
|
||||
set {
|
||||
do {
|
||||
let buffer = ByteArray(data: newValue)
|
||||
size = try buffer.readUInt32()
|
||||
_ = try buffer.readUTF8Bytes(4)
|
||||
version = try buffer.readUInt8()
|
||||
flags = try buffer.readUInt24()
|
||||
location = try buffer.readUTF8Bytes(buffer.bytesAvailable)
|
||||
} catch {
|
||||
logger.error(error)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
extension MP4Box.Names {
|
||||
static let url = MP4Box.Name<MP4DataEntryUrlBox>(rawValue: "url ")
|
||||
}
|
|
@ -1,92 +0,0 @@
|
|||
import Foundation
|
||||
|
||||
struct MP4EditListBox: MP4FullBox {
|
||||
static let flags: UInt32 = 0
|
||||
|
||||
struct Entry: Equatable, CustomDebugStringConvertible {
|
||||
let segmentDuration: UInt64
|
||||
let mediaTime: UInt64
|
||||
let mediaRateInteger: Int16
|
||||
let mediaRateFraction: Int16
|
||||
|
||||
var debugDescription: String {
|
||||
Mirror(reflecting: self).debugDescription
|
||||
}
|
||||
}
|
||||
|
||||
// MARK: MP4FullBox
|
||||
var size: UInt32 = 0
|
||||
let type: String = "elst"
|
||||
var offset: UInt64 = 0
|
||||
var children: [MP4BoxConvertible] = []
|
||||
var version: UInt8 = 0
|
||||
var flags: UInt32 = Self.flags
|
||||
// MARK: MP4EditListBox
|
||||
var entries: [Entry] = []
|
||||
}
|
||||
|
||||
extension MP4EditListBox: DataConvertible {
|
||||
var data: Data {
|
||||
get {
|
||||
let buffer = ByteArray()
|
||||
.writeUInt32(size)
|
||||
.writeUTF8Bytes(type)
|
||||
.writeUInt8(version)
|
||||
.writeUInt24(flags)
|
||||
.writeUInt32(UInt32(entries.count))
|
||||
for entry in entries {
|
||||
if version == 0 {
|
||||
buffer
|
||||
.writeUInt32(UInt32(entry.segmentDuration))
|
||||
.writeUInt32(UInt32(entry.mediaTime))
|
||||
.writeInt16(entry.mediaRateInteger)
|
||||
.writeInt16(entry.mediaRateFraction)
|
||||
} else {
|
||||
buffer
|
||||
.writeUInt64(entry.segmentDuration)
|
||||
.writeUInt64(entry.mediaTime)
|
||||
.writeInt16(entry.mediaRateInteger)
|
||||
.writeInt16(entry.mediaRateFraction)
|
||||
}
|
||||
}
|
||||
let size = buffer.position
|
||||
buffer.position = 0
|
||||
buffer.writeUInt32(UInt32(size))
|
||||
return buffer.data
|
||||
}
|
||||
set {
|
||||
do {
|
||||
let buffer = ByteArray(data: newValue)
|
||||
size = try buffer.readUInt32()
|
||||
_ = try buffer.readUTF8Bytes(4)
|
||||
version = try buffer.readUInt8()
|
||||
flags = try buffer.readUInt24()
|
||||
let numberOfEntries = try buffer.readUInt32()
|
||||
entries.removeAll()
|
||||
for _ in 0..<numberOfEntries {
|
||||
if version == 0 {
|
||||
entries.append(Entry(
|
||||
segmentDuration: UInt64(try buffer.readUInt32()),
|
||||
mediaTime: UInt64(try buffer.readUInt32()),
|
||||
mediaRateInteger: try buffer.readInt16(),
|
||||
mediaRateFraction: try buffer.readInt16()
|
||||
))
|
||||
} else {
|
||||
entries.append(Entry(
|
||||
segmentDuration: try buffer.readUInt64(),
|
||||
mediaTime: try buffer.readUInt64(),
|
||||
mediaRateInteger: try buffer.readInt16(),
|
||||
mediaRateFraction: try buffer.readInt16()
|
||||
))
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
logger.error(error)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
extension MP4Box.Names {
|
||||
static let elst = MP4Box.Name<MP4EditListBox>(rawValue: "elst")
|
||||
}
|
|
@ -1,49 +0,0 @@
|
|||
import Foundation
|
||||
|
||||
/**
|
||||
- seealso: https://developer.apple.com/library/archive/documentation/QuickTime/QTFF/QTFFChap3/qtff3.html#//apple_ref/doc/uid/TP40000939-CH205-124774
|
||||
*/
|
||||
struct MP4ElementaryStreamDescriptorBox: MP4FullBox {
|
||||
// MARK: MP4FullBox
|
||||
var size: UInt32 = 0
|
||||
let type: String = "esds"
|
||||
var offset: UInt64 = 0
|
||||
var children: [MP4BoxConvertible] = []
|
||||
var version: UInt8 = 0
|
||||
var flags: UInt32 = 0
|
||||
// MARK: MP4FullBox
|
||||
var descriptor = ESDescriptor()
|
||||
}
|
||||
|
||||
extension MP4ElementaryStreamDescriptorBox: DataConvertible {
|
||||
var data: Data {
|
||||
get {
|
||||
let buffer = ByteArray()
|
||||
.writeUInt32(size)
|
||||
.writeUTF8Bytes(type)
|
||||
.writeUInt8(version)
|
||||
.writeUInt24(flags)
|
||||
.writeBytes(descriptor.data)
|
||||
let size = buffer.position
|
||||
buffer.position = 0
|
||||
buffer.writeUInt32(UInt32(size))
|
||||
return buffer.data
|
||||
}
|
||||
set {
|
||||
do {
|
||||
let buffer = ByteArray(data: newValue)
|
||||
size = try buffer.readUInt32()
|
||||
_ = try buffer.readUTF8Bytes(4)
|
||||
version = try buffer.readUInt8()
|
||||
flags = try buffer.readUInt24()
|
||||
descriptor.data = try buffer.readBytes(buffer.bytesAvailable)
|
||||
} catch {
|
||||
logger.error(error)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
extension MP4Box.Names {
|
||||
static let esds = MP4Box.Name<MP4ElementaryStreamDescriptorBox>(rawValue: "esds")
|
||||
}
|
|
@ -1,29 +0,0 @@
|
|||
import Foundation
|
||||
|
||||
enum MP4File {
|
||||
struct Builder {
|
||||
private var ftyp: MP4FileTypeBox?
|
||||
private var moov: MP4Box?
|
||||
|
||||
mutating func setFileTypeBox(_ ftyp: MP4FileTypeBox?) -> Self {
|
||||
self.ftyp = ftyp
|
||||
return self
|
||||
}
|
||||
|
||||
mutating func setMoovieBox(_ moov: MP4Box?) -> Self {
|
||||
self.moov = moov
|
||||
return self
|
||||
}
|
||||
|
||||
func build() -> MP4Box {
|
||||
var box = MP4Box()
|
||||
if let ftyp = ftyp {
|
||||
box.children.append(ftyp)
|
||||
}
|
||||
if let moov = moov {
|
||||
box.children.append(moov)
|
||||
}
|
||||
return box
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,53 +0,0 @@
|
|||
import Foundation
|
||||
|
||||
final class MP4FileReader: MP4ReaderConvertible {
|
||||
var fileType: MP4FileTypeBox {
|
||||
root.getBoxes(by: .ftyp).first ?? MP4FileTypeBox()
|
||||
}
|
||||
var tracks: [MP4TrackReader] = []
|
||||
|
||||
private var root = MP4Box()
|
||||
private let fileHandle: FileHandle
|
||||
|
||||
init(forReadingFrom url: URL) throws {
|
||||
fileHandle = try FileHandle(forReadingFrom: url)
|
||||
}
|
||||
|
||||
func execute() -> Self {
|
||||
do {
|
||||
var currentOffset = root.offset
|
||||
let length = fileHandle.seekToEndOfFile()
|
||||
root.children.removeAll()
|
||||
repeat {
|
||||
fileHandle.seek(toFileOffset: currentOffset)
|
||||
let buffer = ByteArray(data: fileHandle.readData(ofLength: 8))
|
||||
let size = try buffer.readUInt32()
|
||||
_ = try buffer.readUTF8Bytes(4)
|
||||
fileHandle.seek(toFileOffset: currentOffset)
|
||||
var child = MP4Box()
|
||||
child.data = fileHandle.readData(ofLength: Int(size))
|
||||
root.children.append(child)
|
||||
currentOffset += UInt64(size)
|
||||
} while currentOffset < length
|
||||
} catch {
|
||||
logger.error(error)
|
||||
}
|
||||
return self
|
||||
}
|
||||
|
||||
func getBoxes<T: MP4BoxConvertible>(by name: MP4Box.Name<T>) -> [T] {
|
||||
return root.getBoxes(by: name)
|
||||
}
|
||||
}
|
||||
|
||||
extension MP4FileReader: CustomDebugStringConvertible {
|
||||
var debugDescription: String {
|
||||
return root.debugDescription
|
||||
}
|
||||
}
|
||||
|
||||
extension MP4FileReader: CustomXmlStringConvertible {
|
||||
var xmlString: String {
|
||||
return root.xmlString
|
||||
}
|
||||
}
|
|
@ -1,49 +0,0 @@
|
|||
import Foundation
|
||||
|
||||
struct MP4FileTypeBox: MP4BoxConvertible {
|
||||
// MARK: MP4BoxConvertible
|
||||
var size: UInt32 = 0
|
||||
var type: String = ""
|
||||
var offset: UInt64 = 0
|
||||
var children: [MP4BoxConvertible] = []
|
||||
// MARK: MP4MediaHeaderBox
|
||||
var majorBrand: UInt32 = 0
|
||||
var minorVersion: UInt32 = 0
|
||||
var compatibleBrands: [UInt32] = []
|
||||
}
|
||||
|
||||
extension MP4FileTypeBox: DataConvertible {
|
||||
var data: Data {
|
||||
get {
|
||||
let buffer = ByteArray()
|
||||
.writeUInt32(size)
|
||||
.writeUTF8Bytes(type)
|
||||
.writeUInt32(majorBrand)
|
||||
.writeUInt32(minorVersion)
|
||||
for brand in compatibleBrands {
|
||||
buffer.writeUInt32(brand)
|
||||
}
|
||||
return buffer.data
|
||||
}
|
||||
set {
|
||||
do {
|
||||
let buffer = ByteArray(data: newValue)
|
||||
size = try buffer.readUInt32()
|
||||
type = try buffer.readUTF8Bytes(4)
|
||||
majorBrand = try buffer.readUInt32()
|
||||
minorVersion = try buffer.readUInt32()
|
||||
compatibleBrands.removeAll()
|
||||
while 0 < buffer.bytesAvailable {
|
||||
compatibleBrands.append(try buffer.readUInt32())
|
||||
}
|
||||
} catch {
|
||||
logger.error(error)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
extension MP4Box.Names {
|
||||
static let styp = MP4Box.Name<MP4FileTypeBox>(rawValue: "styp")
|
||||
static let ftyp = MP4Box.Name<MP4FileTypeBox>(rawValue: "ftyp")
|
||||
}
|
|
@ -1,31 +0,0 @@
|
|||
import AVFoundation
|
||||
import Foundation
|
||||
|
||||
protocol MP4FragmentedWriterDelegate: AnyObject {
|
||||
func writer(_ writer: MP4FragmentedWriter, didSegmentChanged segment: MP4Box)
|
||||
}
|
||||
|
||||
final class MP4FragmentedWriter: MP4WriterConvertible {
|
||||
private var segment = MP4Box()
|
||||
private(set) var mapping = MP4Box()
|
||||
|
||||
private var audio = MP4FragmentedTrafWriter()
|
||||
private var video = MP4FragmentedTrafWriter()
|
||||
|
||||
weak var delegate: MP4FragmentedWriterDelegate?
|
||||
}
|
||||
|
||||
extension MP4FragmentedWriter: AudioCodecDelegate {
|
||||
// MARK: AudioCodecDelegate
|
||||
func audioCodec(_ codec: AudioCodec, didSet formatDescription: CMFormatDescription?) {
|
||||
}
|
||||
|
||||
func audioCodec(_ codec: AudioCodec, didOutput sample: UnsafeMutableAudioBufferListPointer, presentationTimeStamp: CMTime) {
|
||||
}
|
||||
}
|
||||
|
||||
final class MP4FragmentedTrafWriter {
|
||||
private var tkhd = MP4TrackFragmentHeaderBox()
|
||||
private var trun = MP4TrackRunBox()
|
||||
private var tfdt = MP4TrackRunBox()
|
||||
}
|
|
@ -1,6 +0,0 @@
|
|||
import Foundation
|
||||
|
||||
protocol MP4FullBox: MP4BoxConvertible {
|
||||
var version: UInt8 { get }
|
||||
var flags: UInt32 { get }
|
||||
}
|
|
@ -1,62 +0,0 @@
|
|||
import Foundation
|
||||
|
||||
/// ISO/IEC 14496-12 5th 8.4.3.2
|
||||
struct MP4HandlerBox: MP4FullBox {
|
||||
static let version: UInt8 = 0
|
||||
static let flags: UInt32 = 0
|
||||
// MARK: MP4FullBox
|
||||
var size: UInt32 = 0
|
||||
let type: String = "hdlr"
|
||||
var offset: UInt64 = 0
|
||||
var version: UInt8 = Self.version
|
||||
var flags: UInt32 = Self.flags
|
||||
var children: [MP4BoxConvertible] = []
|
||||
// MARK: MP4HandlerBox
|
||||
var handlerType: UInt32 = 0
|
||||
var name: String = ""
|
||||
}
|
||||
|
||||
extension MP4HandlerBox: DataConvertible {
|
||||
// MARK: DataConvertible
|
||||
var data: Data {
|
||||
get {
|
||||
let buffer = ByteArray()
|
||||
.writeUInt32(size)
|
||||
.writeUTF8Bytes(type)
|
||||
.writeUInt8(version)
|
||||
.writeUInt24(flags)
|
||||
.writeUInt32(0) // pre_defined
|
||||
.writeUInt32(handlerType)
|
||||
.writeUInt32(0) // reserved
|
||||
.writeUInt32(0) // reserved
|
||||
.writeUInt32(0) // reserved
|
||||
.writeUTF8Bytes(name)
|
||||
.writeUTF8Bytes("\0")
|
||||
let size = buffer.position
|
||||
buffer.position = 0
|
||||
buffer.writeUInt32(UInt32(size))
|
||||
return buffer.data
|
||||
}
|
||||
set {
|
||||
do {
|
||||
let buffer = ByteArray(data: newValue)
|
||||
size = try buffer.readUInt32()
|
||||
_ = try buffer.readUTF8Bytes(4)
|
||||
version = try buffer.readUInt8()
|
||||
flags = try buffer.readUInt24()
|
||||
buffer.position += 4 // pre_defined
|
||||
handlerType = try buffer.readUInt32()
|
||||
buffer.position += 4 // reserved
|
||||
buffer.position += 4 // reserved
|
||||
buffer.position += 4 // reserved
|
||||
name = try buffer.readUTF8Bytes(buffer.bytesAvailable - 1)
|
||||
} catch {
|
||||
logger.error(error)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
extension MP4Box.Names {
|
||||
static let hdlr = MP4Box.Name<MP4HandlerBox>(rawValue: "hdlr")
|
||||
}
|
|
@ -1,82 +0,0 @@
|
|||
import Foundation
|
||||
|
||||
struct MP4MediaHeaderBox: MP4FullBox {
|
||||
// MARK: MP4FullBox
|
||||
var size: UInt32 = 0
|
||||
let type: String = "mdhd"
|
||||
var offset: UInt64 = 0
|
||||
var children: [MP4BoxConvertible] = []
|
||||
var version: UInt8 = 0
|
||||
var flags: UInt32 = 0
|
||||
// MARK: MP4MediaHeaderBox
|
||||
var creationTime: UInt64 = 0
|
||||
var modificationTime: UInt64 = 0
|
||||
var timeScale: UInt32 = 0
|
||||
var duration: UInt64 = 0
|
||||
var language: [UInt8] = [0, 0, 0]
|
||||
}
|
||||
|
||||
extension MP4MediaHeaderBox: DataConvertible {
|
||||
var data: Data {
|
||||
get {
|
||||
let buffer = ByteArray()
|
||||
.writeUInt32(size)
|
||||
.writeUTF8Bytes(type)
|
||||
.writeUInt8(version)
|
||||
.writeUInt24(flags)
|
||||
if version == 0 {
|
||||
buffer
|
||||
.writeUInt32(UInt32(creationTime))
|
||||
.writeUInt32(UInt32(modificationTime))
|
||||
.writeUInt32(timeScale)
|
||||
.writeUInt32(UInt32(duration))
|
||||
} else {
|
||||
buffer
|
||||
.writeUInt64(creationTime)
|
||||
.writeUInt64(modificationTime)
|
||||
.writeUInt32(timeScale)
|
||||
.writeUInt64(duration)
|
||||
}
|
||||
buffer
|
||||
.writeUInt16(
|
||||
UInt16(language[0]) << 10 |
|
||||
UInt16(language[1]) << 5 |
|
||||
UInt16(language[2])
|
||||
)
|
||||
.writeUInt16(0) // pre_defined = 0
|
||||
return buffer.data
|
||||
}
|
||||
set {
|
||||
do {
|
||||
let buffer = ByteArray(data: newValue)
|
||||
size = try buffer.readUInt32()
|
||||
_ = try buffer.readUTF8Bytes(4)
|
||||
version = try buffer.readUInt8()
|
||||
flags = try buffer.readUInt24()
|
||||
if version == 0 {
|
||||
creationTime = UInt64(try buffer.readUInt32())
|
||||
modificationTime = UInt64(try buffer.readUInt32())
|
||||
timeScale = try buffer.readUInt32()
|
||||
duration = UInt64(try buffer.readUInt32())
|
||||
} else {
|
||||
creationTime = try buffer.readUInt64()
|
||||
modificationTime = try buffer.readUInt64()
|
||||
timeScale = try buffer.readUInt32()
|
||||
duration = try buffer.readUInt64()
|
||||
}
|
||||
let lang = try buffer.readUInt16()
|
||||
language = [
|
||||
UInt8((lang & 0x7C00) >> 10),
|
||||
UInt8((lang & 0x3E0) >> 5),
|
||||
UInt8(lang & 0x1F)
|
||||
]
|
||||
} catch {
|
||||
logger.error(error)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
extension MP4Box.Names {
|
||||
static let mdhd = MP4Box.Name<MP4MediaHeaderBox>(rawValue: "mdhd")
|
||||
}
|
|
@ -1,32 +0,0 @@
|
|||
import Foundation
|
||||
|
||||
enum MP4MovieFragmentBox {
    /// Incrementally assembles a movie fragment box from a header and
    /// track fragment children.
    struct Builder {
        private var mfhd: MP4MovieFragmentHeaderBox?
        private var traf: [MP4Box] = []

        /// Stores the movie fragment header ("mfhd") box.
        /// Returns self so calls can be chained; the result may be ignored.
        @discardableResult
        mutating func setMovieFragmentHeaderBox(_ mfhd: MP4MovieFragmentHeaderBox?) -> Self {
            self.mfhd = mfhd
            return self
        }

        /// Appends a track fragment ("traf") box; a nil argument is ignored.
        /// Returns self so calls can be chained; the result may be ignored.
        @discardableResult
        mutating func addTrackFragmentBox(_ traf: MP4Box?) -> Self {
            guard let traf = traf else {
                return self
            }
            self.traf.append(traf)
            return self
        }

        /// Builds a box whose children are the header (when set) followed
        /// by the accumulated track fragments.
        func build() -> MP4Box {
            var box = MP4Box()
            if let mfhd = mfhd {
                box.children.append(mfhd)
            }
            for t in traf {
                box.children.append(t)
            }
            return box
        }
    }
}
|
|
@ -1,47 +0,0 @@
|
|||
import Foundation
|
||||
|
||||
/// Movie fragment header ("mfhd") box: carries the fragment sequence number.
struct MP4MovieFragmentHeaderBox: MP4FullBox {
    // Version and flags are fixed at 0 for this box type.
    static let version: UInt8 = 0
    static let flags: UInt32 = 0
    // MARK: MP4FullBox
    var size: UInt32 = 0
    let type: String = "mfhd"
    var offset: UInt64 = 0
    var children: [MP4BoxConvertible] = []
    let version: UInt8 = Self.version
    let flags: UInt32 = Self.flags
    // MARK: MP4MovieFragmentHeaderBox
    // Ordinal of this fragment within the movie.
    var sequenceNumber: UInt32 = 0
}
|
||||
|
||||
extension MP4MovieFragmentHeaderBox: DataConvertible {
    // Serialized layout: size(4) | "mfhd"(4) | version(1) | flags(3) | sequence_number(4)
    var data: Data {
        get {
            let writer = ByteArray()
            writer
                .writeUInt32(size)
                .writeUTF8Bytes(type)
                .writeUInt8(version)
                .writeUInt24(flags)
                .writeUInt32(sequenceNumber)
            // Back-patch the leading size field with the actual byte count.
            let byteCount = writer.position
            writer.position = 0
            writer.writeUInt32(UInt32(byteCount))
            return writer.data
        }
        set {
            do {
                let reader = ByteArray(data: newValue)
                size = try reader.readUInt32()
                _ = try reader.readUTF8Bytes(4) // box type; fixed, discarded
                // version and flags are compile-time constants on this box,
                // so the 4 header bytes are skipped rather than stored.
                reader.position += 4
                sequenceNumber = try reader.readUInt32()
            } catch {
                logger.error(error)
            }
        }
    }
}
|
||||
|
||||
extension MP4Box.Names {
    /// Typed lookup name for the movie fragment header ("mfhd") box.
    static let mfhd = MP4Box.Name<MP4MovieFragmentHeaderBox>(rawValue: "mfhd")
}
|
|
@ -1,104 +0,0 @@
|
|||
import Foundation
|
||||
|
||||
/// Movie header ("mvhd") box: overall presentation timing, rate, volume,
/// transformation matrix, and the next available track ID.
struct MP4MovieHeaderBox: MP4FullBox {
    // Default playback rate, 16.16 fixed point (1.0).
    static let rate: Int32 = 0x00010000
    // Default playback volume, 8.8 fixed point (1.0).
    static let volume: Int16 = 0x0100
    // MARK: MP4FullBox
    var size: UInt32 = 0
    let type: String = "mvhd"
    var offset: UInt64 = 0
    var children: [MP4BoxConvertible] = []
    var version: UInt8 = 0
    var flags: UInt32 = 0
    // MARK: MP4MovieHeaderBox
    var creationTime: UInt64 = 0
    var modificationTime: UInt64 = 0
    var timeScale: UInt32 = 0
    var duration: UInt64 = 0
    var rate: Int32 = Self.rate
    var volume: Int16 = Self.volume
    // 3x3 transformation matrix; the setter below reads exactly 9 entries.
    var matrix: [Int32] = []
    var nextTrackID: UInt32 = 0
}
|
||||
|
||||
extension MP4MovieHeaderBox: DataConvertible {
    // Serialized "mvhd" layout: FullBox header, timing fields (32- or 64-bit
    // by version), rate/volume, reserved bytes, 3x3 matrix, pre_defined[6],
    // next_track_ID.
    var data: Data {
        get {
            let buffer = ByteArray()
                .writeUInt32(size)
                .writeUTF8Bytes(type)
                .writeUInt8(version)
                .writeUInt24(flags)
            if version == 0 {
                // Version 0 stores the timing fields as 32-bit values.
                buffer
                    .writeUInt32(UInt32(creationTime))
                    .writeUInt32(UInt32(modificationTime))
                    .writeUInt32(timeScale)
                    .writeUInt32(UInt32(duration))
            } else {
                // Version 1 widens creation/modification time and duration to 64-bit.
                buffer
                    .writeUInt64(creationTime)
                    .writeUInt64(modificationTime)
                    .writeUInt32(timeScale)
                    .writeUInt64(duration)
            }
            buffer
                .writeInt32(rate)
                .writeInt16(volume)
                .writeInt16(0)  // const bit(16) reserved = 0
                .writeUInt32(0) // const unsigned int(32)[2] reserved = 0
                .writeUInt32(0)
            // NOTE(review): the setter reads exactly 9 matrix entries, but
            // `matrix` defaults to [] — callers appear responsible for
            // populating it before serializing; confirm at the call sites.
            for m in matrix {
                buffer.writeInt32(m)
            }
            buffer
                .writeInt32(0) // bit(32)[6] pre_defined = 0
                .writeInt32(0)
                .writeInt32(0)
                .writeInt32(0)
                .writeInt32(0)
                .writeInt32(0)
                .writeUInt32(nextTrackID)
            // Back-patch the leading size field with the actual byte count.
            let size = buffer.position
            buffer.position = 0
            buffer.writeUInt32(UInt32(size))
            return buffer.data
        }
        set {
            do {
                let buffer = ByteArray(data: newValue)
                size = try buffer.readUInt32()
                _ = try buffer.readUTF8Bytes(4) // box type ("mvhd"); fixed, discarded
                version = try buffer.readUInt8()
                flags = try buffer.readUInt24()
                if version == 0 {
                    creationTime = UInt64(try buffer.readUInt32())
                    modificationTime = UInt64(try buffer.readUInt32())
                    timeScale = try buffer.readUInt32()
                    duration = UInt64(try buffer.readUInt32())
                } else {
                    creationTime = try buffer.readUInt64()
                    modificationTime = try buffer.readUInt64()
                    timeScale = try buffer.readUInt32()
                    duration = try buffer.readUInt64()
                }
                rate = try buffer.readInt32()
                volume = try buffer.readInt16()
                buffer.position += 2 // const bit(16) reserved
                buffer.position += 8 // const unsigned int(32)[2] reserved
                matrix.removeAll()
                for _ in 0..<9 {
                    matrix.append(try buffer.readInt32())
                }
                buffer.position += 24 // bit(32)[6] pre_defined = 0
                nextTrackID = try buffer.readUInt32()
            } catch {
                logger.error(error)
            }
        }
    }
}
|
||||
|
||||
extension MP4Box.Names {
    /// Typed lookup name for the movie header ("mvhd") box.
    static let mvhd = MP4Box.Name<MP4MovieHeaderBox>(rawValue: "mvhd")
}
|
|
@ -1,44 +0,0 @@
|
|||
import Foundation
|
||||
|
||||
/// Pixel aspect ratio ("pasp") box.
///
/// ISO/IEC 14496-12 5th 12.1.4.2
struct MP4PixelAspectRatioBox: MP4BoxConvertible {
    // MARK: MP4BoxConvertible
    var size: UInt32 = 0
    let type: String = "pasp"
    var offset: UInt64 = 0
    var children: [MP4BoxConvertible] = []
    // MARK: MP4PixelAspectRatioBox
    // Horizontal spacing of a pixel (relative units).
    var hSpacing: UInt32 = 0
    // Vertical spacing of a pixel (relative units).
    var vSpacing: UInt32 = 0
}
|
||||
|
||||
extension MP4PixelAspectRatioBox: DataConvertible {
    // Serialized layout: size(4) | "pasp"(4) | hSpacing(4) | vSpacing(4)
    var data: Data {
        get {
            let writer = ByteArray()
            writer
                .writeUInt32(size)
                .writeUTF8Bytes(type)
                .writeUInt32(hSpacing)
                .writeUInt32(vSpacing)
            // Back-patch the leading size field with the actual byte count.
            let byteCount = writer.position
            writer.position = 0
            writer.writeUInt32(UInt32(byteCount))
            return writer.data
        }
        set {
            do {
                let reader = ByteArray(data: newValue)
                size = try reader.readUInt32()
                _ = try reader.readUTF8Bytes(4) // box type; fixed, discarded
                hSpacing = try reader.readUInt32()
                vSpacing = try reader.readUInt32()
            } catch {
                logger.error(error)
            }
        }
    }
}
|
||||
|
||||
extension MP4Box.Names {
    /// Typed lookup name for the pixel aspect ratio ("pasp") box.
    static let pasp = MP4Box.Name<MP4PixelAspectRatioBox>(rawValue: "pasp")
}
|
|
@ -1,33 +0,0 @@
|
|||
import AVFoundation
|
||||
|
||||
/// Groups the parsed parts of an MP4 file: its file-type box and one
/// reader per track.
final class MP4Reader: MP4ReaderConvertible {
    let fileType: MP4FileTypeBox
    let tracks: [MP4TrackReader]

    init(fileType: MP4FileTypeBox, tracks: [MP4TrackReader]) {
        self.fileType = fileType
        self.tracks = tracks
    }
}
|
||||
|
||||
final class MP4TrackReader {
    /// Byte iterator over a track's sample data.
    struct MP4SampleIterator: IteratorProtocol {
        // swiftlint:disable nesting
        typealias Element = UInt8

        // Read position within the track; never advanced yet (see next()).
        private var cursor: Int = 0
        private let reader: MP4TrackReader

        init(reader: MP4TrackReader) {
            self.reader = reader
        }

        // NOTE(review): stub — always returns nil, so iteration ends
        // immediately. Presumably sample reading is not implemented yet;
        // confirm before relying on this iterator.
        mutating func next() -> Element? {
            return nil
        }
    }

    /// Creates an iterator over this track's sample bytes.
    func makeIterator() -> MP4SampleIterator {
        return MP4SampleIterator(reader: self)
    }
}
|
|
@ -1,14 +0,0 @@
|
|||
import Foundation
|
||||
|
||||
/// A reference type exposing a parsed MP4 file's file-type box and tracks.
protocol MP4ReaderConvertible: AnyObject {
    var fileType: MP4FileTypeBox { get }
    var tracks: [MP4TrackReader] { get }

    // Returns the reader itself; see the default implementation below.
    func execute() -> Self
}
|
||||
|
||||
extension MP4ReaderConvertible {
    // Default implementation: a no-op that returns the receiver.
    func execute() -> Self {
        return self
    }
}
|
|
@ -1,74 +0,0 @@
|
|||
import Foundation
|
||||
|
||||
/// Sample description ("stsd") box: holds the codec-specific sample entries
/// for a track.
struct MP4SampleDescriptionBox: MP4FullBox {
    /// Four-character codes handled as audio sample entries.
    static let audio: Set<String> = ["mp4a"]
    /// Four-character codes handled as video sample entries.
    static let video: Set<String> = ["mp4v", "s263", "avc1"]

    /// Creates an empty sample entry for the given four-character code,
    /// or nil when the code is not recognized.
    static func makeEntry(by type: String) -> MP4SampleEntry? {
        // Plain conditionals instead of the `switch true` anti-pattern.
        if video.contains(type) {
            return MP4VisualSampleEntry()
        }
        if audio.contains(type) {
            return MP4AudioSampleEntry()
        }
        return nil
    }

    // Flags are fixed at 0 for this box type.
    static let flags: UInt32 = 0
    // MARK: MP4FullBox
    var size: UInt32 = 0
    let type: String = "stsd"
    var offset: UInt64 = 0
    var version: UInt8 = 0
    var flags: UInt32 = Self.flags
    // MARK: MP4SampleDescriptionBox
    var children: [MP4BoxConvertible] = []
}
|
||||
|
||||
extension MP4SampleDescriptionBox: DataConvertible {
    // Serialized "stsd" layout: FullBox header, entry_count, then each
    // sample entry's own serialized bytes.
    var data: Data {
        get {
            let buffer = ByteArray()
                .writeUInt32(size)
                .writeUTF8Bytes(type)
                .writeUInt8(version)
                .writeUInt24(flags)
                .writeUInt32(UInt32(children.count)) // entry_count
            for child in children {
                buffer.writeBytes(child.data)
            }
            // Back-patch the leading size field with the actual byte count.
            let size = buffer.position
            buffer.position = 0
            buffer.writeUInt32(UInt32(size))
            return buffer.data
        }
        set {
            do {
                let buffer = ByteArray(data: newValue)
                size = try buffer.readUInt32()
                _ = try buffer.readUTF8Bytes(4) // box type ("stsd"); fixed, discarded
                version = try buffer.readUInt8()
                flags = try buffer.readUInt24()
                let numberOfEntries = try buffer.readUInt32()
                children.removeAll()
                for _ in 0..<numberOfEntries {
                    // Peek at the entry's size and four-character type...
                    let size = try buffer.readUInt32()
                    let type = try buffer.readUTF8Bytes(4)
                    // ...then rewind 8 bytes so the entry parses its own
                    // complete header from the start.
                    buffer.position -= 8
                    var entry = Self.makeEntry(by: type)
                    entry?.data = try buffer.readBytes(Int(size))
                    // Unrecognized entry types are skipped (entry is nil),
                    // but their bytes were still consumed above.
                    if let entry = entry {
                        children.append(entry)
                    }
                }
            } catch {
                logger.error(error)
            }
        }
    }
}
|
||||
|
||||
extension MP4Box.Names {
    /// Typed lookup name for the sample description ("stsd") box.
    static let stsd = MP4Box.Name<MP4SampleDescriptionBox>(rawValue: "stsd")
}
|
|
@ -1,5 +0,0 @@
|
|||
import Foundation
|
||||
|
||||
/// A codec sample entry stored inside a sample description ("stsd") box.
protocol MP4SampleEntry: MP4BoxConvertible {
    var dataReferenceIndex: UInt16 { get }
}
|
|
@ -1,62 +0,0 @@
|
|||
import Foundation
|
||||
|
||||
/// Sample size ("stsz") box.
///
/// ISO/IEC 14496-12 5th 8.7.3.2.1
struct MP4SampleSizeBox: MP4FullBox {
    // MARK: MP4FullBox
    var size: UInt32 = 0
    let type: String = "stsz"
    var offset: UInt64 = 0
    var children: [MP4BoxConvertible] = []
    var version: UInt8 = 0
    var flags: UInt32 = 0
    // MARK: MP4SampleSizeBox
    // Constant size shared by all samples; 0 means sizes vary per sample
    // and are listed in `entries` (see the DataConvertible setter).
    var sampleSize: UInt32 = 0
    var entries: [UInt32] = []
}
|
||||
|
||||
extension MP4SampleSizeBox: DataConvertible {
    // Serialized "stsz" layout: FullBox header, sample_size, sample_count,
    // then one 32-bit size per sample when sample_size == 0.
    var data: Data {
        get {
            let writer = ByteArray()
            writer
                .writeUInt32(size)
                .writeUTF8Bytes(type)
                .writeUInt8(version)
                .writeUInt24(flags)
                .writeUInt32(sampleSize)
                .writeUInt32(UInt32(entries.count))
            for entrySize in entries {
                writer.writeUInt32(entrySize)
            }
            // Back-patch the leading size field with the actual byte count.
            let byteCount = writer.position
            writer.position = 0
            writer.writeUInt32(UInt32(byteCount))
            return writer.data
        }
        set {
            do {
                let reader = ByteArray(data: newValue)
                size = try reader.readUInt32()
                _ = try reader.readUTF8Bytes(4) // box type; fixed, discarded
                version = try reader.readUInt8()
                flags = try reader.readUInt24()
                sampleSize = try reader.readUInt32()
                entries.removeAll()
                let sampleCount = try reader.readUInt32()
                guard sampleSize == 0 else {
                    // Constant-size case: a single entry carries the shared size.
                    entries.append(sampleSize)
                    return
                }
                // Variable-size case: one explicit size per sample.
                for _ in 0..<sampleCount {
                    entries.append(try reader.readUInt32())
                }
            } catch {
                logger.error(error)
            }
        }
    }
}
|
||||
|
||||
extension MP4Box.Names {
    /// Typed lookup name for the sample size ("stsz") box.
    static let stsz = MP4Box.Name<MP4SampleSizeBox>(rawValue: "stsz")
}
|
|
@ -1,69 +0,0 @@
|
|||
import Foundation
|
||||
|
||||
/// Sample-to-chunk ("stsc") box.
///
/// ISO/IEC 14496-12 5th 8.7.4.2
struct MP4SampleToChunkBox: MP4FullBox {
    /// One run of chunks sharing the same samples-per-chunk mapping.
    struct Entry: Equatable, CustomDebugStringConvertible {
        let firstChunk: UInt32
        let samplesPerChunk: UInt32
        let sampleDescriptionIndex: UInt32

        var debugDescription: String {
            Mirror(reflecting: self).debugDescription
        }
    }
    // MARK: MP4FullBox
    var size: UInt32 = 0
    let type: String = "stsc"
    var offset: UInt64 = 0
    var children: [MP4BoxConvertible] = []
    var version: UInt8 = 0
    var flags: UInt32 = 0
    // MARK: MP4SampleToChunkBox
    var entries: [Entry] = []
}
|
||||
|
||||
extension MP4SampleToChunkBox: DataConvertible {
    // Serialized "stsc" layout: FullBox header, entry_count, then
    // (first_chunk, samples_per_chunk, sample_description_index) per entry.
    var data: Data {
        get {
            let buffer = ByteArray()
                .writeUInt32(size)
                .writeUTF8Bytes(type)
                .writeUInt8(version)
                .writeUInt24(flags)
                .writeUInt32(UInt32(entries.count))
            for entry in entries {
                buffer
                    .writeUInt32(entry.firstChunk)
                    .writeUInt32(entry.samplesPerChunk)
                    .writeUInt32(entry.sampleDescriptionIndex)
            }
            // Back-patch the leading size field with the actual byte count.
            let size = buffer.position
            buffer.position = 0
            buffer.writeUInt32(UInt32(size))
            return buffer.data
        }
        set {
            do {
                let buffer = ByteArray(data: newValue)
                size = try buffer.readUInt32()
                _ = try buffer.readUTF8Bytes(4) // box type ("stsc"); fixed, discarded
                // Read version and flags into the mutable properties instead
                // of skipping them, matching the sibling boxes (mdhd, mvhd,
                // stsd, stsz); the previous `position += 4` left stale values.
                version = try buffer.readUInt8()
                flags = try buffer.readUInt24()
                let numberOfEntries: UInt32 = try buffer.readUInt32()
                entries.removeAll()
                entries.reserveCapacity(Int(numberOfEntries))
                for _ in 0..<numberOfEntries {
                    entries.append(Entry(
                        firstChunk: try buffer.readUInt32(),
                        samplesPerChunk: try buffer.readUInt32(),
                        sampleDescriptionIndex: try buffer.readUInt32()
                    ))
                }
            } catch {
                logger.error(error)
            }
        }
    }
}
|
||||
|
||||
extension MP4Box.Names {
    /// Typed lookup name for the sample-to-chunk ("stsc") box.
    static let stsc = MP4Box.Name<MP4SampleToChunkBox>(rawValue: "stsc")
}
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue