Compare commits

...

180 Commits
1.4.0 ... main

Author SHA1 Message Date
shogo4405 1d851afe32 Bump to up 1.5.2. 2023-06-16 02:43:30 +09:00
shogo4405 74ca73795a fix tvOS compile error 2023-06-14 22:55:23 +09:00
shogo4405 b383042509
Merge pull request #1216 from shogo4405/feature/capture-session-other-thread
fixed #1209 [AVCaptureSession startRunning] should be called from background thread.
2023-06-12 01:44:01 +09:00
shogo4405 56e817669f fixed #1209 2023-06-12 01:27:37 +09:00
shogo4405 442077e218
Merge pull request #1214 from shogo4405/feature/fix-crash-audio-samplebuffer
Fix Rare crash of makeSampleBuffer
2023-06-10 01:09:38 +09:00
shogo4405 0d3e7b7572
Merge pull request #1215 from shogo4405/feature/Improved-compatibility-rtmp
Improved RTMPDataMessage.
2023-06-08 00:24:48 +09:00
shogo4405 7867c44a27 Fix crash unsupported AVAudioFormat. 2023-06-08 00:22:47 +09:00
shogo4405 cfba9b6be2 Improved RTMPDataMessage. 2023-06-07 21:42:41 +09:00
shogo4405 c6901ba7a1 Fix can't compile next Xcode. 2023-06-07 20:26:52 +09:00
shogo4405 0994741b7c Remove no code. 2023-06-05 22:40:00 +09:00
shogo4405 2310c2403c
Merge pull request #1213 from shogo4405/feature/muted-properties
fixed "When I set pause and resume, mute will not work" bug
2023-05-31 00:15:25 +09:00
shogo4405 876ccf1699 fixed #998 2023-05-31 00:12:37 +09:00
shogo4405 f6489f866a
Merge pull request #1211 from shogo4405/dependabot/bundler/fastlane-2.213.0
Bump fastlane from 2.212.2 to 2.213.0
2023-05-30 20:02:16 +09:00
dependabot[bot] b5c59a7424
Bump fastlane from 2.212.2 to 2.213.0
Bumps [fastlane](https://github.com/fastlane/fastlane) from 2.212.2 to 2.213.0.
- [Release notes](https://github.com/fastlane/fastlane/releases)
- [Commits](https://github.com/fastlane/fastlane/compare/fastlane/2.212.2...fastlane/2.213.0)

---
updated-dependencies:
- dependency-name: fastlane
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-05-28 00:57:38 +00:00
shogo4405 dd1899c8e0
Merge pull request #1207 from shogo4405/feature/dettach-camera
Detach camera device stream#attachCamera(nil)
2023-05-27 12:56:03 +09:00
shogo4405 1b1d613950 fixed #1205 2023-05-25 21:39:06 +09:00
shogo4405 52ad7d6ad4
Update README.md 2023-05-21 20:47:28 +09:00
shogo4405 1cebb9c91c Bump to up 1.5.1 2023-05-21 15:25:25 +09:00
shogo4405 591e090925
Merge pull request #1201 from shogo4405/feature/fix-recording
fixed #1198 fix can't save local recording.
2023-05-14 21:49:12 +09:00
shogo4405 5c09825119 fix failed test. 2023-05-14 21:33:15 +09:00
shogo4405 5d4787383a fixed #1198 fix can't save local recording. 2023-05-14 20:49:30 +09:00
shogo4405 6cef14c59d validate videocodecid 2023-05-13 14:46:53 +09:00
shogo4405 eef9eb0cb5 ExistentialAny 2023-05-07 21:27:41 +09:00
shogo4405 8675b37473 Smoothing rtmp playback. 2023-05-07 14:37:12 +09:00
shogo4405 b252405083
Update README.md 2023-05-06 02:30:30 +09:00
shogo4405 d72ae256da
Merge pull request #1195 from shogo4405/feature/rtmp-h265
Support RTMP with HEVC Enhancing RTMP, FLV.
2023-05-06 02:19:10 +09:00
shogo4405 cd57781dd3 Support RTMP with HEVC Enhancing RTMP, FLV. 2023-05-06 01:53:29 +09:00
shogo4405 28da84c16c
Merge pull request #1196 from shogo4405/fix/video-codec-drop-frame
Fix dropframe delegate.
2023-05-05 20:28:21 +09:00
shogo4405 39c03c3e17 Fix dropframe delegate. 2023-05-05 20:27:03 +09:00
shogo4405 7b4f896c16 Suppress warnings. 2023-05-05 20:09:22 +09:00
shogo4405 efa5c5e980 Remove RTMPReaderTests case. 2023-05-05 16:38:12 +09:00
shogo4405 da0abce59d Remove FLVReader class. 2023-05-04 15:49:02 +09:00
shogo4405 915758d0ec Bump to up 1.5.0. 2023-05-03 16:51:52 +09:00
shogo4405 f7f6ee42b0 Relocate where the nested type is defined. 2023-05-03 16:25:01 +09:00
shogo4405 fdc9aa3272 Update README.md 2023-04-30 15:03:02 +09:00
shogo4405 263689ffaa Swift-DocC 2023-04-29 16:11:21 +09:00
shogo4405 434f6df596 Merge branch '1.4.x' 2023-04-29 14:50:04 +09:00
shogo4405 3d0e6fe470 Bump to up 1.4.6. 2023-04-29 14:45:18 +09:00
shogo4405 8dfdfd991c fix tvOS compile error. 2023-04-26 23:33:56 +09:00
shogo4405 954a3c85f4 add document lane. 2023-04-26 23:30:58 +09:00
shogo4405 da2fd3c8df fix tvOS compile error. 2023-04-26 22:42:56 +09:00
shogo4405 c84dce18ba fixed #1177 2023-04-26 21:48:57 +09:00
shogo4405 0f543dddbd add drop frame mechanism. 2023-04-26 20:27:48 +09:00
shogo4405 e14c6f0ecf
Merge pull request #1192 from shogo4405/feature/cancel-connections
RTMPNWSocket will be gracefully closed.
2023-04-26 18:29:32 +09:00
shogo4405 d5fd16b334 RTMPNWSocket will be gracefully closed. 2023-04-24 00:04:42 +09:00
shogo4405 7f3bb91ac4
Merge pull request #1188 from shogo4405/macos-catalyst
Support MacCatalyst for SPM.
2023-04-23 15:21:01 +09:00
shogo4405 ca931ec15e Support Mac Catalyst. 2023-04-23 15:08:28 +09:00
shogo4405 61c0715328
Merge pull request #1190 from shogo4405/dependabot/bundler/fastlane-2.212.2
Bump fastlane from 2.212.1 to 2.212.2
2023-04-23 14:12:09 +09:00
shogo4405 727a2af70f
Merge pull request #1191 from shogo4405/dependabot/bundler/cocoapods-1.12.1
Bump cocoapods from 1.12.0 to 1.12.1
2023-04-23 14:12:00 +09:00
dependabot[bot] 9913bbfea7
Bump cocoapods from 1.12.0 to 1.12.1
Bumps [cocoapods](https://github.com/CocoaPods/CocoaPods) from 1.12.0 to 1.12.1.
- [Release notes](https://github.com/CocoaPods/CocoaPods/releases)
- [Changelog](https://github.com/CocoaPods/CocoaPods/blob/master/CHANGELOG.md)
- [Commits](https://github.com/CocoaPods/CocoaPods/compare/1.12.0...1.12.1)

---
updated-dependencies:
- dependency-name: cocoapods
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-04-23 00:57:32 +00:00
dependabot[bot] fc76b0886c
Bump fastlane from 2.212.1 to 2.212.2
Bumps [fastlane](https://github.com/fastlane/fastlane) from 2.212.1 to 2.212.2.
- [Release notes](https://github.com/fastlane/fastlane/releases)
- [Commits](https://github.com/fastlane/fastlane/compare/fastlane/2.212.1...fastlane/2.212.2)

---
updated-dependencies:
- dependency-name: fastlane
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-04-23 00:57:02 +00:00
shogo4405 3ad1f17e63 Migration frameRate 60 VideoCodecSettings. 2023-04-20 22:54:06 +09:00
shogo4405 e44bf9187e Merge branch '1.4.x' 2023-04-20 22:50:12 +09:00
shogo4405 534076025b
Merge pull request #1186 from shogo4405/feature/resume-capture-session
Resume AVAudioSession.
2023-04-19 23:00:53 +09:00
shogo4405 252ab16434 refs #1184 Resume AVCaptureSession if possible. 2023-04-19 12:33:30 +09:00
shogo4405 42f060a8b3
Merge pull request #1182 from shogo4405/feature/fix-set-framerate-60
fixed #1157 Support 60/fps.
2023-04-16 01:17:54 +09:00
shogo4405 8ef80f98fb fixed #1157 Support 60/fps. 2023-04-16 01:05:03 +09:00
shogo4405 dc06244df2 Update Example project. 2023-04-15 21:45:50 +09:00
shogo4405 05897ff5e7
Merge pull request #1179 from shogo4405/feature/use-network-framework
Switched to using the Network.framework.
2023-04-15 16:38:34 +09:00
shogo4405 a3a9e85c89
Merge pull request #1180 from shogo4405/feature/update-default-codec-sizez
Change default size. It's 2023!
2023-04-15 16:38:24 +09:00
shogo4405 7ca0185652 refs #1178 fix crash RTMPURL without "/" 2023-04-15 16:35:47 +09:00
shogo4405 3449811213 Change default size. It's 2023! 2023-04-15 16:00:28 +09:00
shogo4405 08979c7dfd Switched to using the Network.framework. 2023-04-15 15:35:22 +09:00
shogo4405 0951831663 Revert "Suppress warnings."
This reverts commit 69f0f1415f.
2023-04-14 00:06:47 +09:00
shogo4405 39a489ca6a Update README.md 2023-04-13 23:15:36 +09:00
shogo4405 e96705580d Suppress warnings. 2023-04-05 23:30:21 +09:00
shogo4405 d1f78c9e70 fixed #1171 2023-04-05 23:20:47 +09:00
shogo4405 701cd70d76 Merge branch '1.4.x' 2023-04-01 18:48:13 +09:00
shogo4405 a4ee619245 Bump to up 1.4.5. 2023-04-01 18:35:41 +09:00
shogo4405 7494034938
Merge pull request #1169 from shogo4405/feature/update-logboard
Bump to Logboard up 2.3.1
2023-04-01 16:36:10 +09:00
shogo4405 bae06f303d Bump to Logboard up 2.3.1 2023-04-01 16:28:28 +09:00
shogo4405 69f0f1415f Suppress warnings. 2023-04-01 14:31:34 +09:00
shogo4405 50ae37d59c fixed #1168 2023-03-31 00:38:17 +09:00
shogo4405 986e6417b5
Merge pull request #1162 from shogo4405/feature/fix-audio-out-of-sync
fix audio desynchronization on camera switch
2023-03-28 22:00:50 +09:00
shogo4405 da030cc5f2
Merge pull request #1164 from shogo4405/feature/audio-background-mode
Supports background audio mode framework level.
2023-03-26 15:17:36 +09:00
shogo4405 c07d57e343 Supports background audio mode. 2023-03-24 00:10:29 +09:00
shogo4405 897010e9ed fix audio desynchronization on camera switch 2023-03-23 23:57:58 +09:00
shogo4405 28ae653209 fix can't compile SwiftPMSupport. 2023-03-23 23:48:30 +09:00
shogo4405 b1c2c27534 Merge branch '1.4.x' 2023-03-23 23:23:03 +09:00
shogo4405 0bcfe9aea1
Merge pull request #1163 from shogo4405/revert-1161-feature/fix-audio-background
Revert "Support publish with audio in background iOS."
2023-03-23 23:09:16 +09:00
shogo4405 fa9347379d
Revert "Support publish with audio in background iOS." 2023-03-23 23:08:43 +09:00
shogo4405 7efcc6f4d7 Remove unused property. 2023-03-22 01:58:20 +09:00
shogo4405 e4651e0889 Refactor IOMixer#audioEngineHolder. 2023-03-21 22:05:16 +09:00
shogo4405 9b8b709a61
Merge pull request #1161 from shogo4405/feature/fix-audio-background
Support publish with audio in background iOS.
2023-03-21 21:32:26 +09:00
shogo4405 1158799a97 Support publish with audio in background iOS. 2023-03-21 21:18:00 +09:00
shogo4405 d470ada4cd refs bb54f7ad 2023-03-21 13:43:00 +09:00
shogo4405 0be9a79090 fix forced to mono. 2023-03-20 19:13:19 +09:00
shogo4405 068308c6a2
Merge pull request #1160 from shogo4405/feature/net-stream-delegate
Redesign RTMPConnectionDelegate and NetStreamDelegate.
2023-03-20 18:12:14 +09:00
shogo4405 bb54f7ada3 Redesign RTMPConnectionDelegate and RTMPStreamDelegate. 2023-03-20 17:45:00 +09:00
shogo4405 b4ae19fc2a Reuse AVAudioCompressedBuffer. 2023-03-20 17:13:23 +09:00
shogo4405 1da2b6f5c6 Signature change VideoCodecDelegate. 2023-03-19 17:54:25 +09:00
shogo4405 f997eac673 Fix AudioCodec race condition. 2023-03-19 17:39:58 +09:00
shogo4405 74e167c4b2
Merge pull request #1159 from shogo4405/feature/suppress-warnings
Suppress warnings CMVideoFormatDescriptionCreateFromH264ParameterSets.
2023-03-19 16:11:36 +09:00
shogo4405 37d3e41079
Merge pull request #1158 from shogo4405/feature/change-method-signature-samplebuffer
Change method signature appendSampleBuffer.
2023-03-19 15:53:06 +09:00
shogo4405 b219c27648 Suppress warnings CMVideoFormatDescriptionCreateFromH264ParameterSets. 2023-03-19 15:44:38 +09:00
shogo4405 9dda67511e Update ADTSReader infinity loop. 2023-03-19 15:41:17 +09:00
shogo4405 6222479abc Change method signature appendSampleBuffer. 2023-03-19 14:39:57 +09:00
shogo4405 622e67449d Add ADTSReader. 2023-03-17 01:20:06 +09:00
shogo4405 40d6d5c2b5
Merge pull request #1155 from shogo4405/dependabot/bundler/activesupport-7.0.4.3
Bump activesupport from 7.0.4.2 to 7.0.4.3
2023-03-16 22:55:58 +09:00
dependabot[bot] c05722ade2
Bump activesupport from 7.0.4.2 to 7.0.4.3
Bumps [activesupport](https://github.com/rails/rails) from 7.0.4.2 to 7.0.4.3.
- [Release notes](https://github.com/rails/rails/releases)
- [Changelog](https://github.com/rails/rails/blob/v7.0.4.3/activesupport/CHANGELOG.md)
- [Commits](https://github.com/rails/rails/compare/v7.0.4.2...v7.0.4.3)

---
updated-dependencies:
- dependency-name: activesupport
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-03-16 01:36:45 +00:00
shogo4405 29306a5e18 fix AudioCodec cracked sound. 2023-03-15 22:18:53 +09:00
shogo4405 0ba6595cdb
Update README.md 2023-03-14 22:31:28 +09:00
shogo4405 f223eaa71e
Merge pull request #1153 from shogo4405/feature/update-readme
Added handling sample of Adaptive bitrate streaming.
2023-03-14 22:06:40 +09:00
shogo4405 edb94bf580 Added handling sample of Adaptive bitrate streaming. 2023-03-14 22:04:22 +09:00
shogo4405 3c7b5b7147 Merge branch '1.4.x' 2023-03-14 00:15:22 +09:00
shogo4405 a89613b840 Bump to up 1.4.4. 2023-03-13 23:55:54 +09:00
shogo4405 5fe41172cd fix TSReader NALFileFormating. 2023-03-12 15:44:33 +09:00
shogo4405 542c9026d3 fix typo 2023-03-12 01:23:55 +09:00
shogo4405 357a16c358 Advanced AudioCodec. 2023-03-12 00:24:03 +09:00
shogo4405 909823740f Add IOMixer playback feature. 2023-03-11 18:35:54 +09:00
shogo4405 fdd9a2a958 Support startCode length = 3. 2023-03-08 22:32:20 +09:00
shogo4405 37d27f1f90
Merge pull request #1150 from shogo4405/dependabot/bundler/cocoapods-1.12.0
Bump cocoapods from 1.11.3 to 1.12.0
2023-03-05 12:25:07 +09:00
dependabot[bot] 21e9d4bce9
Bump cocoapods from 1.11.3 to 1.12.0
Bumps [cocoapods](https://github.com/CocoaPods/CocoaPods) from 1.11.3 to 1.12.0.
- [Release notes](https://github.com/CocoaPods/CocoaPods/releases)
- [Changelog](https://github.com/CocoaPods/CocoaPods/blob/master/CHANGELOG.md)
- [Commits](https://github.com/CocoaPods/CocoaPods/compare/1.11.3...1.12.0)

---
updated-dependencies:
- dependency-name: cocoapods
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-03-05 00:56:56 +00:00
shogo4405 76dc71fecb Update TSReader. 2023-03-05 01:26:49 +09:00
shogo4405 38e7440b04 Merge branch '1.4.x' 2023-03-03 22:39:59 +09:00
shogo4405 cc7c578a3a
Merge pull request #1148 from shogo4405/feature/audio-codec-2
Refactor AudioCodec with the AVAudioConverter.
2023-03-02 00:02:14 +09:00
shogo4405 a22a51080d Refactor AudioCodec with the AVAudioConverter. 2023-03-01 23:42:51 +09:00
shogo4405 513079bb5c
Merge pull request #1146 from shogo4405/feature/fix-ignore-fps
Set a device frameRate when unsupportedDeviceActiveFormat.
2023-02-26 19:15:21 +09:00
shogo4405 1ae79f5b4e Set a device frameRate when unsupportedDeviceActiveFormat. 2023-02-26 18:51:19 +09:00
shogo4405 005eba1036
Merge pull request #1145 from shogo4405/feature/fix-cant-restart-playback
Can't playback rtmpStream.close() after rtmpStream.play().
2023-02-26 17:22:20 +09:00
shogo4405 00843a3cf9 Can't playback rtmpStream.close() after rtmpStream.play(). 2023-02-26 17:05:10 +09:00
shogo4405 ad3d88a593 Refactor RTMPVideoMessage. 2023-02-26 16:10:00 +09:00
shogo4405 408b6b5886
Merge pull request #1144 from shogo4405/dependabot/bundler/fastlane-2.212.1
Bump fastlane from 2.211.0 to 2.212.1
2023-02-26 11:21:58 +09:00
dependabot[bot] ac09af04bf
Bump fastlane from 2.211.0 to 2.212.1
Bumps [fastlane](https://github.com/fastlane/fastlane) from 2.211.0 to 2.212.1.
- [Release notes](https://github.com/fastlane/fastlane/releases)
- [Commits](https://github.com/fastlane/fastlane/compare/fastlane/2.211.0...fastlane/2.212.1)

---
updated-dependencies:
- dependency-name: fastlane
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-02-26 00:57:24 +00:00
shogo4405 0715282a55 camelize properties. 2023-02-25 19:02:54 +09:00
shogo4405 e2ef1ae0c0 Naming rule RTMPSocketDelegate. 2023-02-25 17:07:48 +09:00
shogo4405 d44d1187af Make public to TSReader. 2023-02-25 15:55:50 +09:00
shogo4405 0f8fd4569f Rename Responder to RTMPResponder. 2023-02-18 16:28:54 +09:00
shogo4405 83e55bee8e
Merge pull request #1140 from shogo4405/feature/remove-settings
Compliant codable for VideoCodecSettings and AudioCodecSettings.
2023-02-18 15:51:47 +09:00
shogo4405 c2b37f416f Compliant codable for VideoCodecSettings and AudioCodecSettings. 2023-02-18 15:36:51 +09:00
shogo4405 67db4b55fd Bump to up 1.4.3. 2023-02-16 21:00:01 +09:00
shogo4405 b9bcf572aa Rename Groups and Files. 2023-02-16 08:17:05 +09:00
shogo4405 1e1ae9d97a
Merge pull request #1139 from shogo4405/feature/remove-unnecessary-codes
Remove MP4 classes.
2023-02-16 06:42:44 +09:00
shogo4405 cea6194b17 Remove MP4 classes. 2023-02-16 05:42:41 +09:00
shogo4405 be347428a8
Merge pull request #1137 from shogo4405/feature/meta-data
fixed #1103 Add audiosamplerate.
2023-02-12 23:12:05 +09:00
shogo4405 de39939755 fixed #1103 Add audiosamplerate. 2023-02-12 22:25:40 +09:00
shogo4405 18132ec3c3
Merge pull request #1136 from shogo4405/feature/inheritance-iovideo-unit
fixed #1130 Single instance IOVideoUnit and IOAudioUnit.
2023-02-11 18:26:00 +09:00
shogo4405 0a17faddda fixed #1130 Single instance IOVideoUnit and IOAudioUnit. 2023-02-11 18:03:00 +09:00
shogo4405 a3fde220a7 add H264Profile definition. 2023-02-09 23:21:54 +09:00
shogo4405 26b8b1cdf6 refs #1135 add TestCase. 2023-02-09 20:38:34 +09:00
shogo4405 0058acaebc
Merge pull request #1129 from shogo4405/feature/feature-constant-bit-rate
Support VideoCode constantBitRate mode.
2023-01-31 22:11:15 +09:00
shogo4405 b7d55c66f5 Support VideoCode constantBitRate mode. 2023-01-31 21:34:35 +09:00
shogo4405 060129d8b7 Add IOMixerTests. 2023-01-29 18:24:59 +09:00
shogo4405 912b5cafd6 fix Can't free RTMPConnection. 2023-01-29 18:06:11 +09:00
shogo4405 9c3fe4a084
Merge pull request #1128 from shogo4405/feature/support-tls-options
Support "rtmps" RTMPNWSocket without setParameter option.
2023-01-29 17:40:01 +09:00
shogo4405 728bc444f7 fix can't work GitHub Action. 2023-01-29 17:12:41 +09:00
shogo4405 b7bd39e501 Support "rtmps" RTMPNWSocket without setParameter option. 2023-01-29 16:49:08 +09:00
shogo4405 6870656f4a Remove VideoCodec#lock property. 2023-01-29 15:46:03 +09:00
shogo4405 7d03dd6829 refs #1077 Support NetStream#outputBufferSize property. 2023-01-29 14:56:54 +09:00
shogo4405 00abb4cf23 Update Test suite. 2023-01-29 10:26:05 +09:00
shogo4405 1325264e4d Remove unnecessary classes. 2023-01-29 09:17:42 +09:00
shogo4405 66c3a0837b refs #1080 Turn it off "Metal API Validation". 2023-01-28 15:18:20 +09:00
shogo4405 96bcff339e Rename XXXBuffers. 2023-01-23 01:15:56 +09:00
shogo4405 dcde99d9b4
Merge pull request #1126 from shogo4405/dependabot/bundler/activesupport-6.1.7.1
Bump activesupport from 6.1.7 to 6.1.7.1
2023-01-21 13:38:00 +09:00
dependabot[bot] dfe0d1a280
Bump activesupport from 6.1.7 to 6.1.7.1
Bumps [activesupport](https://github.com/rails/rails) from 6.1.7 to 6.1.7.1.
- [Release notes](https://github.com/rails/rails/releases)
- [Changelog](https://github.com/rails/rails/blob/v7.0.4.1/activesupport/CHANGELOG.md)
- [Commits](https://github.com/rails/rails/compare/v6.1.7...v6.1.7.1)

---
updated-dependencies:
- dependency-name: activesupport
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-01-21 04:01:27 +00:00
shogo4405 ec24c44b94 Bump to up 1.4.2. 2023-01-16 03:42:39 +09:00
shogo4405 dbb57f225a fix MemoryLeak when playback with main or high profile. 2023-01-16 02:27:04 +09:00
shogo4405 94c98eea69 Improved compatibility playback. 2023-01-15 08:05:18 +09:00
shogo4405 385c16a913
Update README.md 2023-01-14 16:16:46 +09:00
shogo4405 57eccf99e6 Update API documents. 2023-01-14 03:52:32 +09:00
shogo4405 9c1d7a6838
Merge pull request #1123 from shogo4405/feature/add-screencapturekit
Support ScreenCaptureKit on macOS
2023-01-13 23:12:07 +09:00
shogo4405 c67564cb46 add ScreenCaptureKit feature. 2023-01-13 22:33:34 +09:00
shogo4405 7c8726f65a
Merge pull request #1122 from shogo4405/featre/adjustment-split-view
Adjust MultiCamCapture splitView position.
2023-01-12 23:23:12 +09:00
shogo4405 07562eda58 Adjust MultiCam CMSampleBuffer. 2023-01-12 23:05:27 +09:00
shogo4405 d627d7f87b
Merge pull request #1121 from shogo4405/revert-1106-patch-1
Revert "Fix: add audio sample rate"
2023-01-11 01:10:10 +09:00
shogo4405 218eb51b10
Revert "Fix: add audio sample rate" 2023-01-11 01:07:39 +09:00
shogo4405 73bedb5eec Fix same image pip on macOS 2023-01-09 22:21:20 +09:00
shogo4405 cdedd898b4 Update macOS example project. 2023-01-09 21:54:22 +09:00
shogo4405 c1686fdccf Refactor ShapeFactory. 2023-01-09 15:24:42 +09:00
shogo4405 be03719072 Update README.md 2023-01-09 15:24:42 +09:00
shogo4405 ec1f4f8216
Merge pull request #1116 from shogo4405/feature/enhanced-corner-radius
Improved performance PiP mode for the MultiCamCaptureSetting.
2023-01-03 17:28:33 +09:00
shogo4405 4837a789c5 Improved performance PiP mode for the MultiCamCaptureSetting. 2023-01-03 17:22:48 +09:00
shogo4405 4734a5f5a8 Decrease InputPorts 2022-12-31 19:22:31 +09:00
shogo4405 a5894dd58e
Merge pull request #1114 from shogo4405/feature/delegate-session
Add AVCaptureSession delegate methods.
2022-12-31 18:33:32 +09:00
shogo4405 4df8fdc98a Add AVCaptureSession delegate methods. 2022-12-31 17:58:31 +09:00
shogo4405 5a726c4d2f Bump to up 1.4.1. 2022-12-30 18:09:52 +09:00
shogo4405 8aeca904df
Merge pull request #1112 from shogo4405/feature/fix-spm-compile-error
Fix spm compile error 1.4.0
2022-12-30 17:57:14 +09:00
shogo4405 f8c43ba01e fix spm compile error. 2022-12-30 17:49:08 +09:00
shogo4405 aafe0e40a0 Recover the AVCaptureSession when mediaServicesWereReset. 2022-12-30 16:53:39 +09:00
shogo4405 38bf2afdce Update README.md 2022-12-30 16:09:37 +09:00
1769 changed files with 6852 additions and 115963 deletions

View File

@@ -1 +1 @@
-2.7.4
+3.0.6

View File

@@ -1 +1 @@
-github "shogo4405/Logboard" ~> 2.3.0
+github "shogo4405/Logboard" ~> 2.3.1

View File

@@ -26,13 +26,16 @@ final class LiveViewController: UIViewController {
     private var currentEffect: VideoEffect?
     private var currentPosition: AVCaptureDevice.Position = .back
     private var retryCount: Int = 0
+    private var videoBitRate = VideoCodecSettings.default.bitRate

     override func viewDidLoad() {
         super.viewDidLoad()
+        rtmpConnection.delegate = self
         pipIntentView.layer.borderWidth = 1.0
         pipIntentView.layer.borderColor = UIColor.white.cgColor
-        pipIntentView.bounds = MultiCamCaptureSetting.default.regionOfInterest
+        pipIntentView.bounds = MultiCamCaptureSettings.default.regionOfInterest
         pipIntentView.isUserInteractionEnabled = true
         view.addSubview(pipIntentView)
@@ -40,14 +43,25 @@ final class LiveViewController: UIViewController {
         if let orientation = DeviceUtil.videoOrientation(by: UIApplication.shared.statusBarOrientation) {
             rtmpStream.videoOrientation = orientation
         }
-        rtmpStream.videoSettings = [
-            .width: 720,
-            .height: 1280
-        ]
-        rtmpStream.mixer.recorder.delegate = self
-        videoBitrateSlider?.value = Float(RTMPStream.defaultVideoBitrate) / 1000
-        audioBitrateSlider?.value = Float(RTMPStream.defaultAudioBitrate) / 1000
+        rtmpStream.audioSettings = AudioCodecSettings(
+            bitRate: 64 * 1000
+        )
+        rtmpStream.videoSettings = VideoCodecSettings(
+            videoSize: .init(width: 854, height: 480),
+            profileLevel: kVTProfileLevel_H264_Baseline_3_1 as String,
+            bitRate: 640 * 1000,
+            maxKeyFrameIntervalDuration: 2,
+            scalingMode: .trim,
+            bitRateMode: .average,
+            allowFrameReordering: nil,
+            isHardwareEncoderEnabled: true
+        )
+        rtmpStream.mixer.recorder.delegate = self
+        videoBitrateSlider?.value = Float(VideoCodecSettings.default.bitRate) / 1000
+        audioBitrateSlider?.value = Float(AudioCodecSettings.default.bitRate) / 1000
         NotificationCenter.default.addObserver(self, selector: #selector(on(_:)), name: UIDevice.orientationDidChangeNotification, object: nil)
     }
@@ -64,6 +78,7 @@ final class LiveViewController: UIViewController {
         }
         if #available(iOS 13.0, *) {
             let front = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front)
+            rtmpStream.videoCapture(for: 1)?.isVideoMirrored = true
             rtmpStream.attachMultiCamera(front)
         }
         rtmpStream.addObserver(self, forKeyPath: "currentFPS", options: .new, context: nil)
@@ -77,6 +92,11 @@ final class LiveViewController: UIViewController {
         super.viewWillDisappear(animated)
         rtmpStream.removeObserver(self, forKeyPath: "currentFPS")
         rtmpStream.close()
+        rtmpStream.attachAudio(nil)
+        rtmpStream.attachCamera(nil)
+        if #available(iOS 13.0, *) {
+            rtmpStream.attachMultiCamera(nil)
+        }
         // swiftlint:disable notification_center_detachment
         NotificationCenter.default.removeObserver(self)
     }
@@ -101,10 +121,11 @@ final class LiveViewController: UIViewController {
             currentFrame.origin.x += deltaX
             currentFrame.origin.y += deltaY
             pipIntentView.frame = currentFrame
-            rtmpStream.multiCamCaptureSettings = MultiCamCaptureSetting(
+            rtmpStream.multiCamCaptureSettings = MultiCamCaptureSettings(
                 mode: rtmpStream.multiCamCaptureSettings.mode,
                 cornerRadius: 16.0,
-                regionOfInterest: currentFrame
+                regionOfInterest: currentFrame,
+                direction: .east
             )
         }
     }
@@ -112,10 +133,12 @@ final class LiveViewController: UIViewController {
     @IBAction func rotateCamera(_ sender: UIButton) {
         logger.info("rotateCamera")
         let position: AVCaptureDevice.Position = currentPosition == .back ? .front : .back
+        rtmpStream.videoCapture(for: 0)?.isVideoMirrored = position == .front
         rtmpStream.attachCamera(AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: position)) { error in
             logger.warn(error)
         }
         if #available(iOS 13.0, *) {
+            rtmpStream.videoCapture(for: 1)?.isVideoMirrored = currentPosition == .front
             rtmpStream.attachMultiCamera(AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: currentPosition)) { error in
                 logger.warn(error)
             }
@@ -130,11 +153,11 @@ final class LiveViewController: UIViewController {
     @IBAction func on(slider: UISlider) {
         if slider == audioBitrateSlider {
             audioBitrateLabel?.text = "audio \(Int(slider.value))/kbps"
-            rtmpStream.audioSettings[.bitrate] = slider.value * 1000
+            rtmpStream.audioSettings.bitRate = Int(slider.value * 1000)
         }
         if slider == videoBitrateSlider {
             videoBitrateLabel?.text = "video \(Int(slider.value))/kbps"
-            rtmpStream.videoSettings[.bitrate] = slider.value * 1000
+            rtmpStream.videoSettings.bitRate = UInt32(slider.value * 1000)
         }
         if slider == zoomSlider {
             let zoomFactor = CGFloat(slider.value)
@@ -273,6 +296,26 @@ final class LiveViewController: UIViewController {
     }
 }

+extension LiveViewController: RTMPConnectionDelegate {
+    func connection(_ connection: RTMPConnection, publishInsufficientBWOccured stream: RTMPStream) {
+        // Adaptive bitrate streaming example. Please share your good algorithm. :D
+        videoBitRate -= 32 * 1000
+        stream.videoSettings.bitRate = max(videoBitRate, 64 * 1000)
+    }
+
+    func connection(_ connection: RTMPConnection, publishSufficientBWOccured stream: RTMPStream) {
+        videoBitRate += 32 * 1000
+        stream.videoSettings.bitRate = min(videoBitRate, VideoCodecSettings.default.bitRate)
+    }
+
+    func connection(_ connection: RTMPConnection, updateStats stream: RTMPStream) {
+    }
+
+    func connection(_ connection: RTMPConnection, didClear stream: RTMPStream) {
+        videoBitRate = VideoCodecSettings.default.bitRate
+    }
+}
+
 extension LiveViewController: IORecorderDelegate {
     // MARK: IORecorderDelegate
     func recorder(_ recorder: IORecorder, errorOccured error: IORecorder.Error) {
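Note: the hunks above capture the 1.5.x settings migration — the old `[.width: ...]` dictionaries give way to the typed VideoCodecSettings / AudioCodecSettings structs. A minimal, self-contained sketch of the new call sites, mirroring the values used in this example (the connection/stream setup and the final bitrate values are illustrative, not part of the diff):

    import HaishinKit
    import VideoToolbox

    let connection = RTMPConnection()
    let stream = RTMPStream(connection: connection)

    // Audio bit rate is an Int, in bits per second.
    stream.audioSettings = AudioCodecSettings(bitRate: 64 * 1000)

    // Video settings are one struct; videoSize replaces the old .width/.height keys.
    stream.videoSettings = VideoCodecSettings(
        videoSize: .init(width: 854, height: 480),
        profileLevel: kVTProfileLevel_H264_Baseline_3_1 as String,
        bitRate: 640 * 1000,
        maxKeyFrameIntervalDuration: 2,
        scalingMode: .trim,
        bitRateMode: .average,
        allowFrameReordering: nil,
        isHardwareEncoderEnabled: true
    )

    // Individual fields can be mutated afterwards, as the slider handlers above do.
    stream.videoSettings.bitRate = UInt32(800 * 1000)
    stream.audioSettings.bitRate = Int(48 * 1000)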

View File

@@ -1,9 +1,9 @@
 <?xml version="1.0" encoding="UTF-8"?>
-<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="20037" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES" initialViewController="49e-Tb-3d3">
+<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="21701" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES" initialViewController="49e-Tb-3d3">
     <device id="retina6_1" orientation="portrait" appearance="light"/>
     <dependencies>
         <deployment identifier="iOS"/>
-        <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="20020"/>
+        <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="21678"/>
         <capability name="Safe area layout guides" minToolsVersion="9.0"/>
         <capability name="System colors in document resources" minToolsVersion="11.0"/>
         <capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
@@ -22,7 +22,7 @@
         <viewLayoutGuide key="safeArea" id="h8f-2Q-C5a"/>
         <color key="backgroundColor" red="0.0" green="0.0" blue="0.0" alpha="1" colorSpace="calibratedRGB"/>
     </view>
-    <slider opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" value="32" minValue="15" maxValue="120" translatesAutoresizingMaskIntoConstraints="NO" id="aKS-oc-LrT">
+    <slider opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" value="64" minValue="15" maxValue="120" translatesAutoresizingMaskIntoConstraints="NO" id="aKS-oc-LrT">
         <rect key="frame" x="14" y="775" width="340" height="31"/>
         <connections>
             <action selector="onSlider:" destination="9pv-A4-QxB" eventType="valueChanged" id="ICf-sz-Jsg"/>
@@ -30,14 +30,14 @@
         </connections>
     </slider>
     <button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="system" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="LTk-1V-jZa">
-        <rect key="frame" x="259" y="44" width="54" height="30"/>
+        <rect key="frame" x="259" y="48" width="54" height="30"/>
         <state key="normal" title="Camera"/>
         <connections>
             <action selector="rotateCamera:" destination="9pv-A4-QxB" eventType="touchDown" id="516-MC-1k2"/>
         </connections>
     </button>
     <segmentedControl opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="left" contentVerticalAlignment="top" segmentControlStyle="plain" selectedSegmentIndex="0" translatesAutoresizingMaskIntoConstraints="NO" id="2Sy-na-foy">
-        <rect key="frame" x="206" y="82" width="200" height="32"/>
+        <rect key="frame" x="206" y="86" width="200" height="32"/>
         <constraints>
             <constraint firstAttribute="width" constant="200" id="RrQ-qe-7IF"/>
         </constraints>
@@ -51,13 +51,13 @@
         </connections>
     </segmentedControl>
     <label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="FPS" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="YMl-Xb-JZb">
-        <rect key="frame" x="8" y="44" width="31" height="21"/>
+        <rect key="frame" x="8" y="48" width="31" height="21"/>
         <fontDescription key="fontDescription" type="system" pointSize="17"/>
         <color key="textColor" red="1" green="1" blue="1" alpha="1" colorSpace="calibratedRGB"/>
         <nil key="highlightedColor"/>
     </label>
     <segmentedControl opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="left" contentVerticalAlignment="top" segmentControlStyle="plain" selectedSegmentIndex="1" translatesAutoresizingMaskIntoConstraints="NO" id="fbC-rC-wNg">
-        <rect key="frame" x="206" y="121" width="200" height="32"/>
+        <rect key="frame" x="206" y="125" width="200" height="32"/>
         <constraints>
             <constraint firstAttribute="width" constant="200" id="BBl-Vi-PoJ"/>
         </constraints>
@@ -79,26 +79,26 @@
             <action selector="onSlider:" destination="9pv-A4-QxB" eventType="valueChanged" id="IS3-vj-jFX"/>
         </connections>
     </slider>
-    <label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="video 160/kbps" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="dLf-ee-K3I">
-        <rect key="frame" x="236" y="732" width="116" height="21"/>
+    <label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="video 640/kbps" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="dLf-ee-K3I">
+        <rect key="frame" x="233" y="732" width="119" height="21"/>
         <fontDescription key="fontDescription" type="system" pointSize="17"/>
         <color key="textColor" red="1" green="1" blue="1" alpha="1" colorSpace="calibratedRGB"/>
         <nil key="highlightedColor"/>
     </label>
-    <slider opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" value="160" minValue="32" maxValue="1024" translatesAutoresizingMaskIntoConstraints="NO" id="4s5-OW-qAO">
+    <slider opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" value="640" minValue="32" maxValue="2048" translatesAutoresizingMaskIntoConstraints="NO" id="4s5-OW-qAO">
         <rect key="frame" x="14" y="737" width="340" height="31"/>
         <connections>
             <action selector="onSlider:" destination="9pv-A4-QxB" eventType="valueChanged" id="Nm5-Xr-jcw"/>
         </connections>
     </slider>
-    <label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="audio 32/kbps" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="gR3-9k-qhK">
-        <rect key="frame" x="243.5" y="770" width="108.5" height="21"/>
+    <label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="audio 64/kbps" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="gR3-9k-qhK">
+        <rect key="frame" x="242.5" y="770" width="109.5" height="21"/>
         <fontDescription key="fontDescription" type="system" pointSize="17"/>
         <color key="textColor" red="1" green="1" blue="1" alpha="1" colorSpace="calibratedRGB"/>
         <nil key="highlightedColor"/>
     </label>
     <button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="system" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="oVn-9L-n2U">
-        <rect key="frame" x="329" y="44" width="39" height="30"/>
+        <rect key="frame" x="329" y="48" width="39" height="30"/>
         <state key="normal" title="Torch"/>
         <connections>
             <action selector="toggleTorch:" destination="9pv-A4-QxB" eventType="touchDown" id="gY1-x2-YlF"/>
@@ -123,7 +123,7 @@
         </connections>
     </button>
     <button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="system" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="yIo-MW-aK8">
-        <rect key="frame" x="384" y="44" width="30" height="30"/>
+        <rect key="frame" x="384" y="48" width="30" height="30"/>
         <state key="normal" title="❌"/>
         <connections>
             <action selector="onClose:" destination="9pv-A4-QxB" eventType="touchDown" id="d0Y-4e-dGf"/>
@@ -195,7 +195,7 @@
     <autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
     <subviews>
         <textField opaque="NO" clipsSubviews="YES" contentMode="scaleToFill" contentHorizontalAlignment="left" contentVerticalAlignment="center" borderStyle="roundedRect" textAlignment="natural" minimumFontSize="17" translatesAutoresizingMaskIntoConstraints="NO" id="Hiy-yh-Bwn">
-            <rect key="frame" x="8" y="52" width="394" height="34"/>
+            <rect key="frame" x="8" y="56" width="394" height="34"/>
             <fontDescription key="fontDescription" type="system" pointSize="14"/>
             <textInputTraits key="textInputTraits"/>
             <connections>
@@ -203,7 +203,7 @@
             </connections>
         </textField>
         <textField opaque="NO" clipsSubviews="YES" contentMode="scaleToFill" contentHorizontalAlignment="left" contentVerticalAlignment="center" borderStyle="roundedRect" textAlignment="natural" minimumFontSize="17" translatesAutoresizingMaskIntoConstraints="NO" id="A5Y-FA-epc">
-            <rect key="frame" x="8" y="94" width="246" height="34"/>
+            <rect key="frame" x="8" y="98" width="246" height="34"/>
             <fontDescription key="fontDescription" type="system" pointSize="14"/>
             <textInputTraits key="textInputTraits"/>
             <connections>
@@ -304,7 +304,7 @@
     <autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
     <subviews>
         <button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="system" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="p4J-1x-O1c">
-            <rect key="frame" x="369" y="768" width="30" height="30"/>
+            <rect key="frame" x="369" y="734" width="30" height="30"/>
             <color key="backgroundColor" red="0.0" green="0.0" blue="1" alpha="1" colorSpace="calibratedRGB"/>
             <constraints>
                 <constraint firstAttribute="width" constant="30" id="8DG-lB-HSj"/>
@@ -317,7 +317,7 @@
             </connections>
         </button>
         <button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="system" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="Nie-97-pLL">
-            <rect key="frame" x="369" y="723" width="30" height="30"/>
+            <rect key="frame" x="369" y="689" width="30" height="30"/>
             <color key="backgroundColor" red="0.0" green="0.0" blue="1" alpha="1" colorSpace="calibratedRGB"/>
             <constraints>
                 <constraint firstAttribute="height" constant="30" id="6VI-Zc-kOn"/>

View File

@@ -21,19 +21,14 @@ final class PlaybackViewController: UIViewController {
     override func viewWillAppear(_ animated: Bool) {
         logger.info("viewWillAppear")
         super.viewWillAppear(animated)
-        (view as? MTHKView)?.attachStream(rtmpStream)
-        (view as? PiPHKView)?.attachStream(rtmpStream)
-        NotificationCenter.default.addObserver(self, selector: #selector(didInterruptionNotification(_:)), name: AVAudioSession.interruptionNotification, object: nil)
-        NotificationCenter.default.addObserver(self, selector: #selector(didRouteChangeNotification(_:)), name: AVAudioSession.routeChangeNotification, object: nil)
-        if let layer = view.layer as? AVSampleBufferDisplayLayer, #available(iOS 15.0, *) {
+        (view as? (any NetStreamDrawable))?.attachStream(rtmpStream)
+        if #available(iOS 15.0, *), let layer = view.layer as? AVSampleBufferDisplayLayer {
             pictureInPictureController = AVPictureInPictureController(contentSource: .init(sampleBufferDisplayLayer: layer, playbackDelegate: self))
         }
     }

     override func viewWillDisappear(_ animated: Bool) {
         logger.info("viewWillDisappear")
-        // swiftlint:disable notification_center_detachment
-        NotificationCenter.default.removeObserver(self)
         super.viewWillDisappear(animated)
     }
@@ -61,7 +56,7 @@ final class PlaybackViewController: UIViewController {
     @objc
     private func rtmpStatusHandler(_ notification: Notification) {
         let e = Event.from(notification)
-        guard let data: ASObject = e.data as? ASObject, let code: String = data["code"] as? String else {
+        guard let data = e.data as? ASObject, let code = data["code"] as? String else {
             return
         }
         logger.info(code)
@@ -102,16 +97,6 @@ final class PlaybackViewController: UIViewController {
             rtmpStream.receiveVideo = true
         }
     }
-
-    @objc
-    private func didInterruptionNotification(_ notification: Notification) {
-        logger.info(notification)
-    }
-
-    @objc
-    private func didRouteChangeNotification(_ notification: Notification) {
-        logger.info(notification)
-    }
 }

 extension PlaybackViewController: AVPictureInPictureSampleBufferPlaybackDelegate {
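Aside: the single cast to (any NetStreamDrawable) above replaces the two per-view-type casts, which reads as both MTHKView and PiPHKView conforming to that protocol in 1.5.x. A minimal sketch under that assumption (the attach(_:to:) helper is hypothetical, not part of the library):

    import HaishinKit
    import UIKit

    // Hypothetical helper: any view conforming to NetStreamDrawable can render the stream,
    // so one optional cast covers MTHKView, PiPHKView, and future drawable types.
    func attach(_ stream: RTMPStream, to view: UIView) {
        (view as? (any NetStreamDrawable))?.attachStream(stream)
    }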

View File

@@ -1,6 +1,6 @@
 struct Preference {
     static var defaultInstance = Preference()

-    var uri: String? = "rtmp://192.168.1.10/live"
+    var uri: String? = "rtmp://192.168.1.6/live"
     var streamName: String? = "live"
 }

View File

@@ -43,21 +43,17 @@ open class SampleHandler: RPBroadcastSampleHandler {
         case .video:
             if let description = CMSampleBufferGetFormatDescription(sampleBuffer) {
                 let dimensions = CMVideoFormatDescriptionGetDimensions(description)
-                rtmpStream.videoSettings = [
-                    .width: dimensions.width,
-                    .height: dimensions.height,
-                    .profileLevel: kVTProfileLevel_H264_Baseline_AutoLevel
-                ]
+                rtmpStream.videoSettings.videoSize = .init(width: dimensions.width, height: dimensions.height)
             }
-            rtmpStream.appendSampleBuffer(sampleBuffer, withType: .video)
+            rtmpStream.appendSampleBuffer(sampleBuffer)
         case .audioMic:
             isMirophoneOn = true
             if CMSampleBufferDataIsReady(sampleBuffer) {
-                rtmpStream.appendSampleBuffer(sampleBuffer, withType: .audio)
+                rtmpStream.appendSampleBuffer(sampleBuffer)
             }
         case .audioApp:
             if !isMirophoneOn && CMSampleBufferDataIsReady(sampleBuffer) {
-                rtmpStream.appendSampleBuffer(sampleBuffer, withType: .audio)
+                rtmpStream.appendSampleBuffer(sampleBuffer)
             }
         @unknown default:
             break
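As this hunk shows, appendSampleBuffer drops the withType: argument in 1.5.x; judging from the diff alone, the stream now derives audio vs. video from the buffer itself. A hedged sketch of a ReplayKit-style forwarding helper under that assumption (the forward(_:to:) function is illustrative, not library API):

    import CoreMedia
    import HaishinKit

    // Sketch: forward a captured buffer without a media-type flag; the buffer's
    // own format description tells the stream whether it carries audio or video.
    func forward(_ sampleBuffer: CMSampleBuffer, to stream: RTMPStream) {
        guard CMSampleBufferDataIsReady(sampleBuffer) else { return }
        stream.appendSampleBuffer(sampleBuffer)
    }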

View File

@@ -17,8 +17,8 @@ final class ViewModel: ObservableObject {
     private var retryCount: Int = 0
     @Published var published = false
     @Published var zoomLevel: CGFloat = 1.0
-    @Published var videoRate: CGFloat = 160.0
-    @Published var audioRate: CGFloat = 32.0
+    @Published var videoRate = CGFloat(VideoCodecSettings.default.bitRate / 1000)
+    @Published var audioRate = CGFloat(AudioCodecSettings.default.bitRate / 1000)
     @Published var fps: String = "FPS"
     private var nc = NotificationCenter.default
@@ -65,10 +65,7 @@ final class ViewModel: ObservableObject {
             rtmpStream.videoOrientation = orientation
         }
         rtmpStream.sessionPreset = .hd1280x720
-        rtmpStream.videoSettings = [
-            .width: 720,
-            .height: 1280
-        ]
+        rtmpStream.videoSettings.videoSize = .init(width: 720, height: 1280)
         rtmpStream.mixer.recorder.delegate = self

         nc.publisher(for: UIDevice.orientationDidChangeNotification, object: nil)
@@ -195,11 +192,11 @@ final class ViewModel: ObservableObject {
     }

     func changeVideoRate(level: CGFloat) {
-        rtmpStream.videoSettings[.bitrate] = level * 1000
+        rtmpStream.videoSettings.bitRate = UInt32(level * 1000)
     }

     func changeAudioRate(level: CGFloat) {
-        rtmpStream.audioSettings[.bitrate] = level * 1000
+        rtmpStream.audioSettings.bitRate = Int(level * 1000)
     }

     @objc

View File

@@ -1,7 +1,9 @@
 <?xml version="1.0" encoding="UTF-8"?>
-<document type="com.apple.InterfaceBuilder3.Cocoa.Storyboard.XIB" version="3.0" toolsVersion="17701" targetRuntime="MacOSX.Cocoa" propertyAccessControl="none" useAutolayout="YES" initialViewController="B8D-0N-5wS">
+<document type="com.apple.InterfaceBuilder3.Cocoa.Storyboard.XIB" version="3.0" toolsVersion="21507" targetRuntime="MacOSX.Cocoa" propertyAccessControl="none" useAutolayout="YES" initialViewController="4Lp-xV-zxC">
     <dependencies>
-        <plugIn identifier="com.apple.InterfaceBuilder.CocoaPlugin" version="17701"/>
+        <deployment identifier="macosx"/>
+        <plugIn identifier="com.apple.InterfaceBuilder.CocoaPlugin" version="21507"/>
+        <capability name="NSView safe area layout guides" minToolsVersion="12.0"/>
         <capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
     </dependencies>
     <scenes>
@@ -34,31 +36,10 @@
         </objects>
         <point key="canvasLocation" x="75" y="0.0"/>
     </scene>
-    <!--Window Controller-->
-    <scene sceneID="R2V-B0-nI4">
-        <objects>
-            <windowController id="B8D-0N-5wS" customClass="MainWindowController" customModule="Example_macOS" customModuleProvider="target" sceneMemberID="viewController">
-                <window key="window" title="Window" allowsToolTipsWhenApplicationIsInactive="NO" autorecalculatesKeyViewLoop="NO" releasedWhenClosed="NO" visibleAtLaunch="NO" animationBehavior="default" id="IQv-IB-iLA">
-                    <windowStyleMask key="styleMask" titled="YES" closable="YES" miniaturizable="YES" resizable="YES"/>
-                    <windowPositionMask key="initialPositionMask" leftStrut="YES" rightStrut="YES" topStrut="YES" bottomStrut="YES"/>
-                    <rect key="contentRect" x="196" y="240" width="480" height="270"/>
-                    <rect key="screenRect" x="0.0" y="0.0" width="1680" height="1027"/>
-                    <connections>
-                        <outlet property="delegate" destination="B8D-0N-5wS" id="Q4g-rY-pfC"/>
-                    </connections>
-                </window>
-                <connections>
-                    <segue destination="XfG-lQ-9wD" kind="relationship" relationship="window.shadowedContentViewController" id="cq2-FE-JQM"/>
-                </connections>
-            </windowController>
-            <customObject id="Oky-zY-oP4" userLabel="First Responder" customClass="NSResponder" sceneMemberID="firstResponder"/>
-        </objects>
-        <point key="canvasLocation" x="75" y="250"/>
-    </scene>
-    <!--Main View Controller-->
+    <!--Camera Publish View Controller-->
     <scene sceneID="hIz-AP-VOD">
         <objects>
-            <viewController id="XfG-lQ-9wD" customClass="MainViewController" customModule="Example_macOS" customModuleProvider="target" sceneMemberID="viewController">
+            <viewController storyboardIdentifier="CameraPublishViewController" id="XfG-lQ-9wD" customClass="CameraPublishViewController" customModule="Example_macOS" customModuleProvider="target" sceneMemberID="viewController">
                 <view key="view" wantsLayer="YES" id="m2S-Jp-Qdl">
                     <rect key="frame" x="0.0" y="0.0" width="480" height="270"/>
                     <autoresizingMask key="autoresizingMask"/>
@@ -77,10 +58,7 @@
                 </connections>
             </button>
             <popUpButton verticalHuggingPriority="750" translatesAutoresizingMaskIntoConstraints="NO" id="Hjj-Fo-QAo">
-                <rect key="frame" x="257" y="226" width="207" height="25"/>
-                <constraints>
-                    <constraint firstAttribute="width" constant="200" id="ufS-dM-mn2"/>
-                </constraints>
+                <rect key="frame" x="425" y="226" width="39" height="25"/>
                 <popUpButtonCell key="cell" type="push" bezelStyle="rounded" alignment="left" lineBreakMode="truncatingTail" borderStyle="borderAndBezel" imageScaling="proportionallyDown" inset="2" id="lef-XS-nIm">
                     <behavior key="behavior" lightByBackground="YES" lightByGray="YES"/>
                     <font key="font" metaFont="menu"/>
@@ -91,10 +69,7 @@
                 </connections>
             </popUpButton>
             <popUpButton verticalHuggingPriority="750" translatesAutoresizingMaskIntoConstraints="NO" id="vfl-SO-iw0">
-                <rect key="frame" x="257" y="196" width="207" height="25"/>
-                <constraints>
-                    <constraint firstAttribute="width" constant="200" id="Pi0-Q3-jXO"/>
-                </constraints>
+                <rect key="frame" x="425" y="196" width="39" height="25"/>
                 <popUpButtonCell key="cell" type="push" bezelStyle="rounded" alignment="left" lineBreakMode="truncatingTail" borderStyle="borderAndBezel" imageScaling="proportionallyDown" inset="2" id="FVb-fk-AdX">
                     <behavior key="behavior" lightByBackground="YES" lightByGray="YES"/>
                     <font key="font" metaFont="menu"/>
@@ -154,22 +129,27 @@
             </subviews>
             <constraints>
                 <constraint firstAttribute="bottom" secondItem="8aC-9s-bew" secondAttribute="bottom" constant="20" id="0Cc-JK-ooG"/>
+                <constraint firstItem="8aC-9s-bew" firstAttribute="leading" relation="greaterThanOrEqual" secondItem="lB9-7R-daQ" secondAttribute="trailing" constant="8" symbolic="YES" id="2xn-8b-V4J"/>
                 <constraint firstAttribute="trailing" secondItem="vfl-SO-iw0" secondAttribute="trailing" constant="20" id="9IB-O9-pG4"/>
                 <constraint firstAttribute="trailing" secondItem="iLC-eL-Nn7" secondAttribute="trailing" constant="20" id="D3J-tH-bAk"/>
                 <constraint firstItem="9g2-aW-5KE" firstAttribute="leading" secondItem="m2S-Jp-Qdl" secondAttribute="leading" id="GEH-yz-mrh"/>
                 <constraint firstAttribute="trailing" secondItem="CIE-H2-55S" secondAttribute="trailing" constant="20" id="HAA-kt-OKg"/>
                 <constraint firstItem="CIE-H2-55S" firstAttribute="top" secondItem="iLC-eL-Nn7" secondAttribute="bottom" constant="10" id="NCq-Mi-4Nc"/>
                 <constraint firstItem="Wuc-0E-MpH" firstAttribute="leading" secondItem="m2S-Jp-Qdl" secondAttribute="leading" constant="20" id="RBg-L3-2bO"/>
+                <constraint firstItem="vfl-SO-iw0" firstAttribute="leading" relation="greaterThanOrEqual" secondItem="m2S-Jp-Qdl" secondAttribute="leading" constant="20" symbolic="YES" id="akJ-Nz-JcV"/>
                 <constraint firstItem="Hjj-Fo-QAo" firstAttribute="top" secondItem="m2S-Jp-Qdl" secondAttribute="top" constant="20" id="bUT-0e-MAJ"/>
                 <constraint firstItem="Wuc-0E-MpH" firstAttribute="top" secondItem="m2S-Jp-Qdl" secondAttribute="top" constant="20" id="cJf-Im-eBM"/>
                 <constraint firstItem="vfl-SO-iw0" firstAttribute="top" secondItem="Hjj-Fo-QAo" secondAttribute="bottom" constant="10" id="cV1-7j-UCY"/>
                 <constraint firstItem="9g2-aW-5KE" firstAttribute="top" secondItem="m2S-Jp-Qdl" secondAttribute="top" id="cyh-4S-TRS"/>
+                <constraint firstItem="Hjj-Fo-QAo" firstAttribute="leading" relation="greaterThanOrEqual" secondItem="Wuc-0E-MpH" secondAttribute="trailing" constant="8" symbolic="YES" id="f5z-js-iaQ"/>
                 <constraint firstAttribute="trailing" secondItem="8aC-9s-bew" secondAttribute="trailing" constant="20" id="i6i-9B-SkI"/>
                 <constraint firstAttribute="bottom" secondItem="lB9-7R-daQ" secondAttribute="bottom" constant="20" id="j60-Ve-mht"/>
+                <constraint firstItem="iLC-eL-Nn7" firstAttribute="leading" relation="greaterThanOrEqual" secondItem="m2S-Jp-Qdl" secondAttribute="leading" constant="20" symbolic="YES" id="nUd-Vt-Mxs"/>
                 <constraint firstItem="9g2-aW-5KE" firstAttribute="width" secondItem="m2S-Jp-Qdl" secondAttribute="width" id="nXs-El-9Z1"/>
<constraint firstItem="8aC-9s-bew" firstAttribute="top" secondItem="CIE-H2-55S" secondAttribute="bottom" constant="10" id="oNA-aB-zP0"/> <constraint firstItem="8aC-9s-bew" firstAttribute="top" secondItem="CIE-H2-55S" secondAttribute="bottom" constant="10" id="oNA-aB-zP0"/>
<constraint firstItem="9g2-aW-5KE" firstAttribute="height" secondItem="m2S-Jp-Qdl" secondAttribute="height" id="p40-XI-4o1"/> <constraint firstItem="9g2-aW-5KE" firstAttribute="height" secondItem="m2S-Jp-Qdl" secondAttribute="height" id="p40-XI-4o1"/>
<constraint firstItem="lB9-7R-daQ" firstAttribute="leading" secondItem="m2S-Jp-Qdl" secondAttribute="leading" constant="20" id="sLw-PG-Hgd"/> <constraint firstItem="lB9-7R-daQ" firstAttribute="leading" secondItem="m2S-Jp-Qdl" secondAttribute="leading" constant="20" id="sLw-PG-Hgd"/>
<constraint firstItem="CIE-H2-55S" firstAttribute="leading" relation="greaterThanOrEqual" secondItem="m2S-Jp-Qdl" secondAttribute="leading" constant="20" symbolic="YES" id="vyF-uI-1Cp"/>
<constraint firstAttribute="trailing" secondItem="Hjj-Fo-QAo" secondAttribute="trailing" constant="20" id="vzC-3x-4nE"/> <constraint firstAttribute="trailing" secondItem="Hjj-Fo-QAo" secondAttribute="trailing" constant="20" id="vzC-3x-4nE"/>
</constraints> </constraints>
</view> </view>
@ -185,5 +165,340 @@
</objects> </objects>
<point key="canvasLocation" x="75" y="655"/> <point key="canvasLocation" x="75" y="655"/>
</scene> </scene>
<!--Window Controller-->
<scene sceneID="uIN-fj-SfU">
<objects>
<windowController id="4Lp-xV-zxC" sceneMemberID="viewController">
<window key="window" title="Window" allowsToolTipsWhenApplicationIsInactive="NO" autorecalculatesKeyViewLoop="NO" releasedWhenClosed="NO" visibleAtLaunch="NO" frameAutosaveName="" animationBehavior="default" id="3N9-7c-j7V">
<windowStyleMask key="styleMask" titled="YES" closable="YES" miniaturizable="YES" resizable="YES"/>
<windowPositionMask key="initialPositionMask" leftStrut="YES" rightStrut="YES" topStrut="YES" bottomStrut="YES"/>
<rect key="contentRect" x="211" y="267" width="480" height="270"/>
<rect key="screenRect" x="0.0" y="0.0" width="1512" height="944"/>
<view key="contentView" id="w23-Ay-0Ti">
<rect key="frame" x="0.0" y="0.0" width="480" height="270"/>
<autoresizingMask key="autoresizingMask"/>
</view>
<toolbar key="toolbar" implicitIdentifier="09D11707-F4A3-4FD5-970E-AC5832E91C2B" autosavesConfiguration="NO" displayMode="iconAndLabel" sizeMode="regular" id="Uxk-Q0-ROW">
<allowedToolbarItems/>
<defaultToolbarItems/>
</toolbar>
<connections>
<outlet property="delegate" destination="4Lp-xV-zxC" id="vCA-of-aRI"/>
</connections>
</window>
<connections>
<segue destination="NF7-WS-c3B" kind="relationship" relationship="window.shadowedContentViewController" id="Xsj-HD-e4r"/>
</connections>
</windowController>
<customObject id="qnS-t2-2hl" userLabel="First Responder" customClass="NSResponder" sceneMemberID="firstResponder"/>
</objects>
<point key="canvasLocation" x="-457" y="604"/>
</scene>
<!--Main Split View Controller-->
<scene sceneID="ayw-Nc-hmj">
<objects>
<splitViewController id="NF7-WS-c3B" customClass="MainSplitViewController" customModule="Example_macOS" customModuleProvider="target" sceneMemberID="viewController">
<splitViewItems>
<splitViewItem canCollapse="YES" holdingPriority="260" behavior="sidebar" id="rWa-Cz-lZU"/>
<splitViewItem id="Uyi-Rm-rQN"/>
</splitViewItems>
<splitView key="splitView" dividerStyle="thin" vertical="YES" id="KOg-Sx-jxE">
<rect key="frame" x="0.0" y="0.0" width="450" height="300"/>
<autoresizingMask key="autoresizingMask"/>
</splitView>
<connections>
<segue destination="rX7-HP-XFe" kind="relationship" relationship="splitItems" id="L5L-nw-bp6"/>
<segue destination="GyZ-hD-VHK" kind="relationship" relationship="splitItems" id="BVl-mY-rOQ"/>
</connections>
</splitViewController>
<customObject id="YJG-VL-2Ch" userLabel="First Responder" customClass="NSResponder" sceneMemberID="firstResponder"/>
</objects>
<point key="canvasLocation" x="-974" y="329"/>
</scene>
<!--Menu View Controller-->
<scene sceneID="YOR-fR-sUU">
<objects>
<viewController id="rX7-HP-XFe" customClass="MenuViewController" customModule="Example_macOS" customModuleProvider="target" sceneMemberID="viewController">
<view key="view" misplaced="YES" id="sB5-Mo-WgH">
<rect key="frame" x="0.0" y="0.0" width="154" height="1072"/>
<autoresizingMask key="autoresizingMask"/>
<subviews>
<scrollView autohidesScrollers="YES" horizontalLineScroll="24" horizontalPageScroll="10" verticalLineScroll="24" verticalPageScroll="10" usesPredominantAxisScrolling="NO" translatesAutoresizingMaskIntoConstraints="NO" id="U7X-zB-Ct9">
<rect key="frame" x="0.0" y="0.0" width="154" height="1684"/>
<clipView key="contentView" id="TC0-20-xwt">
<rect key="frame" x="1" y="1" width="152" height="1682"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<subviews>
<tableView verticalHuggingPriority="750" allowsExpansionToolTips="YES" columnAutoresizingStyle="lastColumnOnly" multipleSelection="NO" autosaveColumns="NO" rowHeight="24" rowSizeStyle="automatic" viewBased="YES" id="Zf5-MB-jdh">
<rect key="frame" x="0.0" y="0.0" width="152" height="1682"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<size key="intercellSpacing" width="17" height="0.0"/>
<color key="backgroundColor" name="controlBackgroundColor" catalog="System" colorSpace="catalog"/>
<color key="gridColor" name="gridColor" catalog="System" colorSpace="catalog"/>
<tableColumns>
<tableColumn identifier="AutomaticTableColumnIdentifier.0" width="140" minWidth="40" maxWidth="1000" id="RvV-a9-2Bt">
<tableHeaderCell key="headerCell" lineBreakMode="truncatingTail" borderStyle="border">
<color key="textColor" name="headerTextColor" catalog="System" colorSpace="catalog"/>
<color key="backgroundColor" name="headerColor" catalog="System" colorSpace="catalog"/>
</tableHeaderCell>
<textFieldCell key="dataCell" lineBreakMode="truncatingTail" selectable="YES" editable="YES" title="Text Cell" id="gul-Xr-O4P">
<font key="font" metaFont="system"/>
<color key="textColor" name="controlTextColor" catalog="System" colorSpace="catalog"/>
<color key="backgroundColor" name="controlBackgroundColor" catalog="System" colorSpace="catalog"/>
</textFieldCell>
<tableColumnResizingMask key="resizingMask" resizeWithTable="YES" userResizable="YES"/>
<prototypeCellViews>
<tableCellView id="TMq-ik-BLg">
<rect key="frame" x="8" y="0.0" width="135" height="24"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<subviews>
<textField horizontalHuggingPriority="251" verticalHuggingPriority="750" horizontalCompressionResistancePriority="250" translatesAutoresizingMaskIntoConstraints="NO" id="nKL-O8-0aU">
<rect key="frame" x="0.0" y="4" width="135" height="16"/>
<textFieldCell key="cell" lineBreakMode="truncatingTail" sendsActionOnEndEditing="YES" title="Table View Cell" id="Lf1-hH-841">
<font key="font" usesAppearanceFont="YES"/>
<color key="textColor" name="controlTextColor" catalog="System" colorSpace="catalog"/>
<color key="backgroundColor" name="textBackgroundColor" catalog="System" colorSpace="catalog"/>
</textFieldCell>
</textField>
</subviews>
<constraints>
<constraint firstItem="nKL-O8-0aU" firstAttribute="leading" secondItem="TMq-ik-BLg" secondAttribute="leading" constant="2" id="Aum-kK-h0z"/>
<constraint firstItem="nKL-O8-0aU" firstAttribute="centerX" secondItem="TMq-ik-BLg" secondAttribute="centerX" id="NIe-lm-RIB"/>
<constraint firstItem="nKL-O8-0aU" firstAttribute="centerY" secondItem="TMq-ik-BLg" secondAttribute="centerY" id="ct0-oa-BwY"/>
</constraints>
<connections>
<outlet property="textField" destination="nKL-O8-0aU" id="4o7-5Z-5xG"/>
</connections>
</tableCellView>
</prototypeCellViews>
</tableColumn>
</tableColumns>
<connections>
<outlet property="dataSource" destination="rX7-HP-XFe" id="oT0-DO-XfB"/>
<outlet property="delegate" destination="rX7-HP-XFe" id="kak-CH-GFv"/>
</connections>
</tableView>
</subviews>
</clipView>
<scroller key="horizontalScroller" hidden="YES" wantsLayer="YES" verticalHuggingPriority="750" horizontal="YES" id="5e8-aJ-U5N">
<rect key="frame" x="1" y="1565" width="152" height="16"/>
<autoresizingMask key="autoresizingMask"/>
</scroller>
<scroller key="verticalScroller" hidden="YES" wantsLayer="YES" verticalHuggingPriority="750" horizontal="NO" id="eRz-M7-DJS">
<rect key="frame" x="224" y="17" width="15" height="102"/>
<autoresizingMask key="autoresizingMask"/>
</scroller>
</scrollView>
</subviews>
<constraints>
<constraint firstItem="U7X-zB-Ct9" firstAttribute="bottom" secondItem="9lo-11-SF8" secondAttribute="bottom" id="Efs-dl-fxX"/>
<constraint firstItem="U7X-zB-Ct9" firstAttribute="top" secondItem="sB5-Mo-WgH" secondAttribute="top" id="I4r-iI-g6v"/>
<constraint firstItem="U7X-zB-Ct9" firstAttribute="trailing" secondItem="9lo-11-SF8" secondAttribute="trailing" id="N0j-lF-qW4"/>
<constraint firstItem="U7X-zB-Ct9" firstAttribute="trailing" secondItem="9lo-11-SF8" secondAttribute="trailing" id="Vbe-Eq-sRp"/>
<constraint firstItem="U7X-zB-Ct9" firstAttribute="leading" secondItem="9lo-11-SF8" secondAttribute="leading" id="Z7z-AA-ydR"/>
<constraint firstItem="U7X-zB-Ct9" firstAttribute="leading" secondItem="9lo-11-SF8" secondAttribute="leading" id="fRl-78-kln"/>
<constraint firstItem="U7X-zB-Ct9" firstAttribute="trailing" secondItem="9lo-11-SF8" secondAttribute="trailing" id="gEO-m2-Qfk"/>
<constraint firstItem="U7X-zB-Ct9" firstAttribute="leading" secondItem="9lo-11-SF8" secondAttribute="leading" id="gvA-u2-WJB"/>
<constraint firstItem="U7X-zB-Ct9" firstAttribute="bottom" secondItem="9lo-11-SF8" secondAttribute="bottom" id="nzb-Ve-sGC"/>
</constraints>
<viewLayoutGuide key="safeArea" id="9lo-11-SF8"/>
<viewLayoutGuide key="layoutMargins" id="wZT-hI-llD"/>
</view>
<connections>
<outlet property="tableView" destination="Zf5-MB-jdh" id="KWE-xw-xM5"/>
</connections>
</viewController>
<customObject id="dST-Xk-5EF" userLabel="First Responder" customClass="NSResponder" sceneMemberID="firstResponder"/>
</objects>
<point key="canvasLocation" x="442" y="329"/>
</scene>
<!--View Controller-->
<scene sceneID="JZE-qq-5Gq">
<objects>
<viewController id="GyZ-hD-VHK" sceneMemberID="viewController">
<view key="view" id="VCU-ot-Zd4">
<rect key="frame" x="0.0" y="0.0" width="302" height="300"/>
<autoresizingMask key="autoresizingMask"/>
<viewLayoutGuide key="safeArea" id="40k-YB-9Sh"/>
<viewLayoutGuide key="layoutMargins" id="JeG-zt-ieP"/>
</view>
</viewController>
<customObject id="804-s7-rc2" userLabel="First Responder" customClass="NSResponder" sceneMemberID="firstResponder"/>
</objects>
<point key="canvasLocation" x="-42" y="-408"/>
</scene>
<!--Playback View Controller-->
<scene sceneID="CUf-T6-3jm">
<objects>
<viewController storyboardIdentifier="RTMPPlaybackViewController" id="Lqg-9j-gZP" customClass="RTMPPlaybackViewController" customModule="Example_macOS" customModuleProvider="target" sceneMemberID="viewController">
<view key="view" wantsLayer="YES" id="J9d-S9-Trt">
<rect key="frame" x="0.0" y="0.0" width="480" height="270"/>
<autoresizingMask key="autoresizingMask"/>
<subviews>
<openGLView wantsLayer="YES" useAuxiliaryDepthBufferStencil="NO" allowOffline="YES" wantsBestResolutionOpenGLSurface="YES" translatesAutoresizingMaskIntoConstraints="NO" id="Yr3-Li-WPD" customClass="MTHKView" customModule="HaishinKit">
<rect key="frame" x="0.0" y="0.0" width="480" height="270"/>
</openGLView>
<button verticalHuggingPriority="750" translatesAutoresizingMaskIntoConstraints="NO" id="6iZ-Ur-gur">
<rect key="frame" x="378" y="13" width="89" height="32"/>
<buttonCell key="cell" type="push" title="Playback" bezelStyle="rounded" alignment="center" borderStyle="border" imageScaling="proportionallyDown" inset="2" id="W1x-8U-Phb">
<behavior key="behavior" pushIn="YES" lightByBackground="YES" lightByGray="YES"/>
<font key="font" metaFont="system"/>
</buttonCell>
<connections>
<action selector="didTappedPlayback:" target="Lqg-9j-gZP" id="ngd-dz-DdL"/>
</connections>
</button>
</subviews>
<constraints>
<constraint firstItem="Yr3-Li-WPD" firstAttribute="leading" secondItem="J9d-S9-Trt" secondAttribute="leading" id="Ah4-lE-cQH"/>
<constraint firstAttribute="trailing" secondItem="6iZ-Ur-gur" secondAttribute="trailing" constant="20" id="JfI-gL-PM3"/>
<constraint firstItem="Yr3-Li-WPD" firstAttribute="width" secondItem="J9d-S9-Trt" secondAttribute="width" id="og8-2D-xo1"/>
<constraint firstItem="Yr3-Li-WPD" firstAttribute="top" secondItem="J9d-S9-Trt" secondAttribute="top" id="rLV-Q5-UcE"/>
<constraint firstItem="Yr3-Li-WPD" firstAttribute="height" secondItem="J9d-S9-Trt" secondAttribute="height" id="xmd-Zw-bSA"/>
<constraint firstAttribute="bottom" secondItem="6iZ-Ur-gur" secondAttribute="bottom" constant="20" id="zwB-ve-FoP"/>
</constraints>
</view>
<connections>
<outlet property="lfView" destination="Yr3-Li-WPD" id="Kfd-7R-psD"/>
</connections>
</viewController>
<customObject id="51v-jx-wcj" userLabel="First Responder" customClass="NSResponder" sceneMemberID="firstResponder"/>
</objects>
<point key="canvasLocation" x="75" y="655"/>
</scene>
<!--Stream Publish View Controller-->
<scene sceneID="qMT-Br-6MQ">
<objects>
<viewController storyboardIdentifier="SCStreamPublishViewController" id="dhX-nT-Doa" customClass="SCStreamPublishViewController" customModule="Example_macOS" customModuleProvider="target" sceneMemberID="viewController">
<view key="view" wantsLayer="YES" id="fUT-bB-KBi">
<rect key="frame" x="0.0" y="0.0" width="480" height="270"/>
<autoresizingMask key="autoresizingMask"/>
<subviews>
<button verticalHuggingPriority="750" translatesAutoresizingMaskIntoConstraints="NO" id="Ko8-3C-ims">
<rect key="frame" x="388" y="13" width="79" height="32"/>
<buttonCell key="cell" type="push" title="Publish" bezelStyle="rounded" alignment="center" borderStyle="border" imageScaling="proportionallyDown" inset="2" id="PyO-G6-Sfr">
<behavior key="behavior" pushIn="YES" lightByBackground="YES" lightByGray="YES"/>
<font key="font" metaFont="system"/>
</buttonCell>
<connections>
<action selector="publishOrStop:" target="dhX-nT-Doa" id="X3P-mQ-808"/>
</connections>
</button>
<popUpButton verticalHuggingPriority="750" translatesAutoresizingMaskIntoConstraints="NO" id="8gp-R8-K0u">
<rect key="frame" x="425" y="226" width="39" height="25"/>
<popUpButtonCell key="cell" type="push" bezelStyle="rounded" alignment="left" lineBreakMode="truncatingTail" borderStyle="borderAndBezel" imageScaling="proportionallyDown" inset="2" id="beY-wH-sGF">
<behavior key="behavior" lightByBackground="YES" lightByGray="YES"/>
<font key="font" metaFont="menu"/>
<menu key="menu" id="45T-aX-cgp"/>
</popUpButtonCell>
<connections>
<action selector="selectCamera:" target="dhX-nT-Doa" id="F16-Nj-MQi"/>
</connections>
</popUpButton>
<textField verticalHuggingPriority="750" translatesAutoresizingMaskIntoConstraints="NO" id="EUe-gL-Kfw">
<rect key="frame" x="20" y="20" width="300" height="21"/>
<constraints>
<constraint firstAttribute="width" constant="300" id="zUM-Oo-BRX"/>
</constraints>
<textFieldCell key="cell" scrollable="YES" lineBreakMode="clipping" selectable="YES" editable="YES" sendsActionOnEndEditing="YES" state="on" borderStyle="bezel" drawsBackground="YES" id="nJx-eh-DMW">
<font key="font" metaFont="system"/>
<color key="textColor" name="controlTextColor" catalog="System" colorSpace="catalog"/>
<color key="backgroundColor" name="textBackgroundColor" catalog="System" colorSpace="catalog"/>
</textFieldCell>
</textField>
</subviews>
<constraints>
<constraint firstAttribute="trailing" secondItem="Ko8-3C-ims" secondAttribute="trailing" constant="20" id="3qF-0K-uUh"/>
<constraint firstAttribute="trailing" secondItem="8gp-R8-K0u" secondAttribute="trailing" constant="20" id="gxy-va-ciW"/>
<constraint firstAttribute="bottom" secondItem="Ko8-3C-ims" secondAttribute="bottom" constant="20" id="jvL-RJ-ajb"/>
<constraint firstItem="Ko8-3C-ims" firstAttribute="leading" relation="greaterThanOrEqual" secondItem="EUe-gL-Kfw" secondAttribute="trailing" constant="8" symbolic="YES" id="nK8-te-3iT"/>
<constraint firstItem="EUe-gL-Kfw" firstAttribute="leading" secondItem="fUT-bB-KBi" secondAttribute="leading" constant="20" id="pPn-nM-lBg"/>
<constraint firstAttribute="bottom" secondItem="EUe-gL-Kfw" secondAttribute="bottom" constant="20" id="pW0-88-rWT"/>
<constraint firstItem="8gp-R8-K0u" firstAttribute="top" secondItem="fUT-bB-KBi" secondAttribute="top" constant="20" id="rAd-HY-7d6"/>
</constraints>
</view>
<connections>
<outlet property="cameraPopUpButton" destination="8gp-R8-K0u" id="Y2e-oa-Q5i"/>
<outlet property="urlField" destination="EUe-gL-Kfw" id="v14-Ru-i7A"/>
</connections>
</viewController>
<customObject id="ZWx-by-4If" userLabel="First Responder" customClass="NSResponder" sceneMemberID="firstResponder"/>
</objects>
<point key="canvasLocation" x="75" y="655"/>
</scene>
<!--Preference View Controller-->
<scene sceneID="R4z-ix-pWI">
<objects>
<viewController storyboardIdentifier="PreferenceViewController" id="9vk-iW-BZX" customClass="PreferenceViewController" customModule="Example_macOS" customModuleProvider="target" sceneMemberID="viewController">
<view key="view" wantsLayer="YES" id="OQz-nx-Hf5">
<rect key="frame" x="0.0" y="0.0" width="480" height="480"/>
<autoresizingMask key="autoresizingMask"/>
<subviews>
<textField verticalHuggingPriority="750" translatesAutoresizingMaskIntoConstraints="NO" id="muK-Fu-lNp">
<rect key="frame" x="16" y="419" width="300" height="21"/>
<constraints>
<constraint firstAttribute="width" constant="300" id="onn-Pa-e0H"/>
</constraints>
<textFieldCell key="cell" scrollable="YES" lineBreakMode="clipping" selectable="YES" editable="YES" sendsActionOnEndEditing="YES" state="on" borderStyle="bezel" drawsBackground="YES" id="kM2-u6-oC5">
<font key="font" metaFont="system"/>
<color key="textColor" name="controlTextColor" catalog="System" colorSpace="catalog"/>
<color key="backgroundColor" name="textBackgroundColor" catalog="System" colorSpace="catalog"/>
</textFieldCell>
<connections>
<outlet property="delegate" destination="9vk-iW-BZX" id="TTT-Ka-3O4"/>
</connections>
</textField>
<textField horizontalHuggingPriority="251" verticalHuggingPriority="750" translatesAutoresizingMaskIntoConstraints="NO" id="y43-vh-quB">
<rect key="frame" x="14" y="448" width="73" height="16"/>
<textFieldCell key="cell" lineBreakMode="clipping" title="RTMP URL:" id="a0O-iB-hpy">
<font key="font" metaFont="system"/>
<color key="textColor" name="labelColor" catalog="System" colorSpace="catalog"/>
<color key="backgroundColor" name="textBackgroundColor" catalog="System" colorSpace="catalog"/>
</textFieldCell>
</textField>
<textField horizontalHuggingPriority="251" verticalHuggingPriority="750" translatesAutoresizingMaskIntoConstraints="NO" id="WSq-ak-6dS">
<rect key="frame" x="14" y="395" width="87" height="16"/>
<textFieldCell key="cell" lineBreakMode="clipping" title="StreamName:" id="2cu-1b-UVj">
<font key="font" metaFont="system"/>
<color key="textColor" name="labelColor" catalog="System" colorSpace="catalog"/>
<color key="backgroundColor" name="textBackgroundColor" catalog="System" colorSpace="catalog"/>
</textFieldCell>
</textField>
<textField verticalHuggingPriority="750" translatesAutoresizingMaskIntoConstraints="NO" id="YII-qB-iiW">
<rect key="frame" x="16" y="366" width="300" height="21"/>
<constraints>
<constraint firstAttribute="width" constant="300" id="03D-Ul-Uui"/>
</constraints>
<textFieldCell key="cell" scrollable="YES" lineBreakMode="clipping" selectable="YES" editable="YES" sendsActionOnEndEditing="YES" state="on" borderStyle="bezel" drawsBackground="YES" id="Jsz-le-a4U">
<font key="font" metaFont="system"/>
<color key="textColor" name="controlTextColor" catalog="System" colorSpace="catalog"/>
<color key="backgroundColor" name="textBackgroundColor" catalog="System" colorSpace="catalog"/>
</textFieldCell>
<connections>
<outlet property="delegate" destination="9vk-iW-BZX" id="tw1-fJ-VcE"/>
</connections>
</textField>
</subviews>
<constraints>
<constraint firstItem="y43-vh-quB" firstAttribute="top" secondItem="OQz-nx-Hf5" secondAttribute="top" constant="16" id="14X-aF-lac"/>
<constraint firstItem="YII-qB-iiW" firstAttribute="top" secondItem="WSq-ak-6dS" secondAttribute="bottom" constant="8" id="MBC-BB-Xt7"/>
<constraint firstItem="WSq-ak-6dS" firstAttribute="leading" secondItem="OQz-nx-Hf5" secondAttribute="leading" constant="16" id="Nk6-TS-ym8"/>
<constraint firstItem="WSq-ak-6dS" firstAttribute="top" secondItem="muK-Fu-lNp" secondAttribute="bottom" constant="8" id="VIc-O8-hqJ"/>
<constraint firstItem="muK-Fu-lNp" firstAttribute="top" secondItem="y43-vh-quB" secondAttribute="bottom" constant="8" id="Vri-fn-xBI"/>
<constraint firstItem="muK-Fu-lNp" firstAttribute="leading" secondItem="OQz-nx-Hf5" secondAttribute="leading" constant="16" id="WQs-OA-h3N"/>
<constraint firstItem="YII-qB-iiW" firstAttribute="leading" secondItem="OQz-nx-Hf5" secondAttribute="leading" constant="16" id="ZX6-KR-rH8"/>
<constraint firstItem="y43-vh-quB" firstAttribute="leading" secondItem="OQz-nx-Hf5" secondAttribute="leading" constant="16" id="aNe-eB-guK"/>
<constraint firstAttribute="trailing" relation="greaterThanOrEqual" secondItem="y43-vh-quB" secondAttribute="trailing" constant="20" symbolic="YES" id="keo-Ad-GB2"/>
</constraints>
</view>
<connections>
<outlet property="streamNameField" destination="YII-qB-iiW" id="uPq-i5-fmx"/>
<outlet property="urlField" destination="muK-Fu-lNp" id="3hV-RH-2Gx"/>
</connections>
</viewController>
<customObject id="ARY-aJ-9uG" userLabel="First Responder" customClass="NSResponder" sceneMemberID="firstResponder"/>
</objects>
<point key="canvasLocation" x="-131" y="109"/>
</scene>
</scenes> </scenes>
</document> </document>


@ -12,28 +12,38 @@ extension NSPopUpButton {
} }
} }
final class MainViewController: NSViewController { final class CameraPublishViewController: NSViewController {
var rtmpConnection = RTMPConnection()
var rtmpStream: RTMPStream!
var httpService = HLSService(
domain: "local", type: HTTPService.type, name: "", port: HTTPService.defaultPort
)
var httpStream = HTTPStream()
@IBOutlet private weak var lfView: MTHKView! @IBOutlet private weak var lfView: MTHKView!
@IBOutlet private weak var audioPopUpButton: NSPopUpButton! @IBOutlet private weak var audioPopUpButton: NSPopUpButton!
@IBOutlet private weak var cameraPopUpButton: NSPopUpButton! @IBOutlet private weak var cameraPopUpButton: NSPopUpButton!
@IBOutlet private weak var urlField: NSTextField! @IBOutlet private weak var urlField: NSTextField!
@IBOutlet private weak var segmentedControl: NSSegmentedControl! @IBOutlet private weak var segmentedControl: NSSegmentedControl!
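// Swapping currentStream detaches the capture devices from the outgoing stream (willSet) before attaching them to the incoming one (didSet), so two streams never hold the same device.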
private var currentStream: NetStream? {
willSet {
currentStream?.attachCamera(nil)
currentStream?.attachMultiCamera(nil)
currentStream?.attachAudio(nil)
}
didSet {
currentStream?.attachCamera(DeviceUtil.device(withLocalizedName: cameraPopUpButton.titleOfSelectedItem!, mediaType: .video))
currentStream?.attachAudio(DeviceUtil.device(withLocalizedName: audioPopUpButton.titleOfSelectedItem!, mediaType: .audio))
}
}
private var rtmpConnection = RTMPConnection()
private lazy var rtmpStream: RTMPStream = {
let rtmpStream = RTMPStream(connection: rtmpConnection)
rtmpStream.addObserver(self, forKeyPath: "currentFPS", options: .new, context: nil)
return rtmpStream
}()
private var httpService = HLSService(
domain: "local", type: HTTPService.type, name: "", port: HTTPService.defaultPort
)
private var httpStream = HTTPStream()
override func viewDidLoad() { override func viewDidLoad() {
super.viewDidLoad() super.viewDidLoad()
rtmpStream = RTMPStream(connection: rtmpConnection)
rtmpStream.addObserver(self, forKeyPath: "currentFPS", options: .new, context: nil)
urlField.stringValue = Preference.defaultInstance.uri ?? "" urlField.stringValue = Preference.defaultInstance.uri ?? ""
audioPopUpButton?.present(mediaType: .audio) audioPopUpButton?.present(mediaType: .audio)
cameraPopUpButton?.present(mediaType: .video) cameraPopUpButton?.present(mediaType: .video)
} }
@ -42,7 +52,13 @@ final class MainViewController: NSViewController {
super.viewWillAppear() super.viewWillAppear()
rtmpStream.attachAudio(DeviceUtil.device(withLocalizedName: audioPopUpButton.titleOfSelectedItem!, mediaType: .audio)) rtmpStream.attachAudio(DeviceUtil.device(withLocalizedName: audioPopUpButton.titleOfSelectedItem!, mediaType: .audio))
rtmpStream.attachCamera(DeviceUtil.device(withLocalizedName: cameraPopUpButton.titleOfSelectedItem!, mediaType: .video)) rtmpStream.attachCamera(DeviceUtil.device(withLocalizedName: cameraPopUpButton.titleOfSelectedItem!, mediaType: .video))
// Attach a second camera for multi-camera streaming when one exists; dropFirst() skips the primary device attached above and is safe on an empty list.
let devices = AVCaptureDevice.devices(for: .video)
if let device = devices.dropFirst().first {
rtmpStream.attachMultiCamera(device)
}
lfView?.attachStream(rtmpStream) lfView?.attachStream(rtmpStream)
currentStream = rtmpStream
} }
// swiftlint:disable block_based_kvo // swiftlint:disable block_based_kvo
@ -62,14 +78,12 @@ final class MainViewController: NSViewController {
// Publish // Publish
if sender.title == "Publish" { if sender.title == "Publish" {
sender.title = "Stop" sender.title = "Stop"
// Optional. If you don't specify, the frame size will be the current H264Encoder default of 480x272 // Optional. If you don't specify, the frame size will be the current H264Encoder default of 480x272
// rtmpStream.videoSettings = [ // rtmpStream.videoSettings = [
// .profileLevel: kVTProfileLevel_H264_High_AutoLevel, // .profileLevel: kVTProfileLevel_H264_High_AutoLevel,
// .width: 1920, // .width: 1920,
// .height: 1280, // .height: 1280,
// ] // ]
segmentedControl.isEnabled = false segmentedControl.isEnabled = false
switch segmentedControl.selectedSegment { switch segmentedControl.selectedSegment {
case 0: case 0:
@ -106,51 +120,27 @@ final class MainViewController: NSViewController {
} }
@IBAction private func mirror(_ sender: AnyObject) { @IBAction private func mirror(_ sender: AnyObject) {
rtmpStream.videoCapture(for: 0)?.isVideoMirrored.toggle() currentStream?.videoCapture(for: 0)?.isVideoMirrored.toggle()
} }
@IBAction private func selectAudio(_ sender: AnyObject) { @IBAction private func selectAudio(_ sender: AnyObject) {
let device: AVCaptureDevice? = DeviceUtil.device(withLocalizedName: audioPopUpButton.titleOfSelectedItem!, mediaType: .audio) let device = DeviceUtil.device(withLocalizedName: audioPopUpButton.titleOfSelectedItem!, mediaType: .audio)
switch segmentedControl.selectedSegment { currentStream?.attachAudio(device)
case 0:
rtmpStream.attachAudio(device)
httpStream.attachAudio(nil)
case 1:
rtmpStream.attachAudio(nil)
httpStream.attachAudio(device)
default:
break
}
} }
@IBAction private func selectCamera(_ sender: AnyObject) { @IBAction private func selectCamera(_ sender: AnyObject) {
let device: AVCaptureDevice? = DeviceUtil.device(withLocalizedName: cameraPopUpButton.titleOfSelectedItem!, mediaType: .video) let device = DeviceUtil.device(withLocalizedName: cameraPopUpButton.titleOfSelectedItem!, mediaType: .video)
switch segmentedControl.selectedSegment { currentStream?.attachCamera(device)
case 0:
rtmpStream.attachCamera(device)
httpStream.attachCamera(nil)
case 1:
rtmpStream.attachCamera(nil)
httpStream.attachCamera(device)
default:
break
}
} }
@IBAction private func modeChanged(_ sender: NSSegmentedControl) { @IBAction private func modeChanged(_ sender: NSSegmentedControl) {
switch sender.selectedSegment { switch sender.selectedSegment {
case 0: case 0:
httpStream.attachAudio(nil) currentStream = rtmpStream
httpStream.attachCamera(nil)
rtmpStream.attachAudio(DeviceUtil.device(withLocalizedName: audioPopUpButton.titleOfSelectedItem!, mediaType: .audio))
rtmpStream.attachCamera(DeviceUtil.device(withLocalizedName: cameraPopUpButton.titleOfSelectedItem!, mediaType: .video))
lfView.attachStream(rtmpStream) lfView.attachStream(rtmpStream)
urlField.stringValue = Preference.defaultInstance.uri ?? "" urlField.stringValue = Preference.defaultInstance.uri ?? ""
case 1: case 1:
rtmpStream.attachAudio(nil) currentStream = httpStream
rtmpStream.attachCamera(nil)
httpStream.attachAudio(DeviceUtil.device(withLocalizedName: audioPopUpButton.titleOfSelectedItem!, mediaType: .audio))
httpStream.attachCamera(DeviceUtil.device(withLocalizedName: cameraPopUpButton.titleOfSelectedItem!, mediaType: .video))
lfView.attachStream(httpStream) lfView.attachStream(httpStream)
urlField.stringValue = "http://{ipAddress}:8080/hello/playlist.m3u8" urlField.stringValue = "http://{ipAddress}:8080/hello/playlist.m3u8"
default: default:
@ -169,7 +159,7 @@ final class MainViewController: NSViewController {
logger.info(data) logger.info(data)
switch code { switch code {
case RTMPConnection.Code.connectSuccess.rawValue: case RTMPConnection.Code.connectSuccess.rawValue:
rtmpStream?.publish(Preference.defaultInstance.streamName) rtmpStream.publish(Preference.defaultInstance.streamName)
default: default:
break break
} }


@ -0,0 +1,7 @@
import Foundation
extension NSObject {
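/// The class name rendered as a String; used by the NSViewController extension below to derive storyboard scene identifiers.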
class var className: String {
return "\(self)"
}
}


@ -0,0 +1,6 @@
import AppKit
import Foundation
extension NSStoryboard.Name {
static let main: NSStoryboard.Name = "Main"
}


@ -0,0 +1,13 @@
import AppKit
import Foundation
extension NSViewController {
class var identifier: NSStoryboard.SceneIdentifier {
return className
}
class func getUIViewController() -> NSViewController {
let storyboard = NSStoryboard(name: .main, bundle: Bundle.main)
return storyboard.instantiateController(withIdentifier: identifier) as! NSViewController
}
}
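A hypothetical call site (not part of this change; it assumes each scene's storyboardIdentifier equals the class name, as the storyboard above sets for e.g. PreferenceViewController):

let preference = PreferenceViewController.getUIViewController()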


@ -0,0 +1,8 @@
import AppKit
import Foundation
final class MainSplitViewController: NSSplitViewController {
override func viewDidLoad() {
super.viewDidLoad()
}
}


@ -0,0 +1,56 @@
import AppKit
import Foundation
final class MenuViewController: NSViewController {
@IBOutlet private weak var tableView: NSTableView!
struct Menu {
let title: String
let factory: () -> NSViewController
}
private lazy var menus: [Menu] = {
var menus: [Menu] = [
.init(title: "Publish Test", factory: { CameraPublishViewController.getUIViewController() }),
.init(title: "RTMP Playback Test", factory: { RTMPPlaybackViewController.getUIViewController() })
]
menus.append(.init(title: "SCStream Publish Test", factory: { SCStreamPublishViewController.getUIViewController() }))
menus.append(.init(title: "Preference", factory: { PreferenceViewController.getUIViewController() }))
return menus
}()
override func viewDidAppear() {
super.viewDidAppear()
tableView.selectRowIndexes(IndexSet(integer: 0), byExtendingSelection: false)
}
}
extension MenuViewController: NSTableViewDataSource {
func numberOfRows(in tableView: NSTableView) -> Int {
return menus.count
}
}
extension MenuViewController: NSTableViewDelegate {
func tableView(_ tableView: NSTableView, viewFor tableColumn: NSTableColumn?, row: Int) -> NSView? {
guard let identifier = tableColumn?.identifier, let cellView = tableView.makeView(withIdentifier: identifier, owner: self) as? NSTableCellView else {
return nil
}
cellView.textField?.stringValue = menus[row].title
return cellView
}
func tableViewSelectionDidChange(_ notification: Notification) {
guard tableView.selectedRow != -1 else {
return
}
guard let splitViewController = parent as? NSSplitViewController else {
return
}
splitViewController.splitViewItems[1] = NSSplitViewItem(viewController: menus[tableView.selectedRow].factory())
}
}


@ -0,0 +1,27 @@
import AppKit
import Foundation
final class PreferenceViewController: NSViewController {
@IBOutlet private weak var urlField: NSTextField!
@IBOutlet private weak var streamNameField: NSTextField!
override func viewDidLoad() {
super.viewDidLoad()
urlField.stringValue = Preference.defaultInstance.uri ?? ""
streamNameField.stringValue = Preference.defaultInstance.streamName ?? ""
}
}
extension PreferenceViewController: NSTextFieldDelegate {
func controlTextDidChange(_ obj: Notification) {
guard let textField = obj.object as? NSTextField else {
return
}
if textField == urlField {
Preference.defaultInstance.uri = textField.stringValue
}
if textField == streamNameField {
Preference.defaultInstance.streamName = textField.stringValue
}
}
}


@ -0,0 +1,45 @@
import AppKit
import Foundation
import HaishinKit
final class RTMPPlaybackViewController: NSViewController {
@IBOutlet private weak var lfView: MTHKView!
private var rtmpConnection = RTMPConnection()
private var rtmpStream: RTMPStream!
override func viewDidLoad() {
super.viewDidLoad()
rtmpConnection.addEventListener(.rtmpStatus, selector: #selector(rtmpStatusHandler), observer: self)
rtmpStream = RTMPStream(connection: rtmpConnection)
lfView.attachStream(rtmpStream)
}
@IBAction private func didTappedPlayback(_ button: NSButton) {
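// The button title doubles as connection state: "Playback" when idle, "Stop" while playing.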
if button.title == "Stop" {
rtmpConnection.close()
button.title = "Playback"
} else {
if let uri = Preference.defaultInstance.uri {
rtmpConnection.connect(uri)
button.title = "Stop"
}
}
}
@objc
private func rtmpStatusHandler(_ notification: Notification) {
let e = Event.from(notification)
guard
let data: ASObject = e.data as? ASObject,
let code: String = data["code"] as? String else {
return
}
logger.info(data)
switch code {
case RTMPConnection.Code.connectSuccess.rawValue:
rtmpStream?.play(Preference.defaultInstance.streamName)
default:
break
}
}
}


@ -0,0 +1,102 @@
import AppKit
import Foundation
import HaishinKit
#if canImport(ScreenCaptureKit)
import ScreenCaptureKit
#endif
class SCStreamPublishViewController: NSViewController {
@IBOutlet private weak var cameraPopUpButton: NSPopUpButton!
@IBOutlet private weak var urlField: NSTextField!
private var currentStream: NetStream?
private var rtmpConnection = RTMPConnection()
private lazy var rtmpStream: RTMPStream = {
let rtmpStream = RTMPStream(connection: rtmpConnection)
return rtmpStream
}()
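// SCStream is only available on macOS 12.3+, so the instance is stored type-erased here and surfaced through the availability-gated computed property below.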
private var _stream: Any?
@available(macOS 12.3, *)
private var stream: SCStream? {
get {
_stream as? SCStream
}
set {
_stream = newValue
Task {
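// Route captured screen frames (and, on macOS 13+, captured audio) into the RTMP stream, then start capture.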
try? newValue?.addStreamOutput(rtmpStream, type: .screen, sampleHandlerQueue: DispatchQueue.main)
if #available(macOS 13.0, *) {
try? newValue?.addStreamOutput(rtmpStream, type: .audio, sampleHandlerQueue: DispatchQueue.main)
}
try? await newValue?.startCapture()
}
}
}
override func viewDidLoad() {
super.viewDidLoad()
urlField.stringValue = Preference.defaultInstance.uri ?? ""
if #available(macOS 12.3, *) {
Task {
try await SCShareableContent.current.windows.forEach {
cameraPopUpButton.addItem(withTitle: $0.owningApplication?.applicationName ?? "")
}
}
}
}
override func viewWillAppear() {
super.viewWillAppear()
currentStream = rtmpStream
}
@IBAction private func selectCamera(_ sender: AnyObject) {
if #available(macOS 12.3, *) {
Task {
guard let window = try? await SCShareableContent.current.windows.first(where: { $0.owningApplication?.applicationName == cameraPopUpButton.title }) else {
return
}
let filter = SCContentFilter(desktopIndependentWindow: window)
let configuration = SCStreamConfiguration()
configuration.width = Int(window.frame.width)
configuration.height = Int(window.frame.height)
configuration.showsCursor = true
self.stream = SCStream(filter: filter, configuration: configuration, delegate: nil)
}
}
}
@IBAction private func publishOrStop(_ sender: NSButton) {
// Publish
if sender.title == "Publish" {
sender.title = "Stop"
rtmpConnection.addEventListener(.rtmpStatus, selector: #selector(rtmpStatusHandler), observer: self)
rtmpConnection.connect(Preference.defaultInstance.uri ?? "")
return
}
// Stop
sender.title = "Publish"
rtmpConnection.removeEventListener(.rtmpStatus, selector: #selector(rtmpStatusHandler), observer: self)
rtmpConnection.close()
return
}
@objc
private func rtmpStatusHandler(_ notification: Notification) {
let e = Event.from(notification)
guard
let data: ASObject = e.data as? ASObject,
let code: String = data["code"] as? String else {
return
}
logger.info(data)
switch code {
case RTMPConnection.Code.connectSuccess.rawValue:
rtmpStream.publish(Preference.defaultInstance.streamName)
default:
break
}
}
}
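A whole-display variant of the capture setup in selectCamera is sketched below. This is an assumption-laden sketch, not part of this change: it presumes it runs inside this view controller (so stream is the availability-gated property above) and uses SCShareableContent.displays and SCContentFilter(display:excludingWindows:) from ScreenCaptureKit; error handling is elided.

if #available(macOS 12.3, *) {
    Task {
        // Pick the first available display and mirror its pixel size into the configuration.
        guard let display = try? await SCShareableContent.current.displays.first else {
            return
        }
        let configuration = SCStreamConfiguration()
        configuration.width = display.width
        configuration.height = display.height
        stream = SCStream(filter: SCContentFilter(display: display, excludingWindows: []), configuration: configuration, delegate: nil)
    }
}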


@ -2,6 +2,5 @@ source 'https://rubygems.org'
gem 'cocoapods' gem 'cocoapods'
gem 'fastlane' gem 'fastlane'
gem 'jazzy'
gem 'synx' gem 'synx'


@ -1,15 +1,14 @@
GEM GEM
remote: https://rubygems.org/ remote: https://rubygems.org/
specs: specs:
CFPropertyList (3.0.5) CFPropertyList (3.0.6)
rexml rexml
activesupport (6.1.7) activesupport (7.0.4.3)
concurrent-ruby (~> 1.0, >= 1.0.2) concurrent-ruby (~> 1.0, >= 1.0.2)
i18n (>= 1.6, < 2) i18n (>= 1.6, < 2)
minitest (>= 5.1) minitest (>= 5.1)
tzinfo (~> 2.0) tzinfo (~> 2.0)
zeitwerk (~> 2.3) addressable (2.8.4)
addressable (2.8.1)
public_suffix (>= 2.0.2, < 6.0) public_suffix (>= 2.0.2, < 6.0)
algoliasearch (1.27.5) algoliasearch (1.27.5)
httpclient (~> 2.8, >= 2.8.3) httpclient (~> 2.8, >= 2.8.3)
@ -17,16 +16,16 @@ GEM
artifactory (3.0.15) artifactory (3.0.15)
atomos (0.1.3) atomos (0.1.3)
aws-eventstream (1.2.0) aws-eventstream (1.2.0)
aws-partitions (1.684.0) aws-partitions (1.771.0)
aws-sdk-core (3.168.4) aws-sdk-core (3.173.1)
aws-eventstream (~> 1, >= 1.0.2) aws-eventstream (~> 1, >= 1.0.2)
aws-partitions (~> 1, >= 1.651.0) aws-partitions (~> 1, >= 1.651.0)
aws-sigv4 (~> 1.5) aws-sigv4 (~> 1.5)
jmespath (~> 1, >= 1.6.1) jmespath (~> 1, >= 1.6.1)
aws-sdk-kms (1.61.0) aws-sdk-kms (1.64.0)
aws-sdk-core (~> 3, >= 3.165.0) aws-sdk-core (~> 3, >= 3.165.0)
aws-sigv4 (~> 1.1) aws-sigv4 (~> 1.1)
aws-sdk-s3 (1.117.2) aws-sdk-s3 (1.122.0)
aws-sdk-core (~> 3, >= 3.165.0) aws-sdk-core (~> 3, >= 3.165.0)
aws-sdk-kms (~> 1) aws-sdk-kms (~> 1)
aws-sigv4 (~> 1.4) aws-sigv4 (~> 1.4)
@ -35,15 +34,15 @@ GEM
babosa (1.0.4) babosa (1.0.4)
claide (1.1.0) claide (1.1.0)
clamp (0.6.5) clamp (0.6.5)
cocoapods (1.11.3) cocoapods (1.12.1)
addressable (~> 2.8) addressable (~> 2.8)
claide (>= 1.0.2, < 2.0) claide (>= 1.0.2, < 2.0)
cocoapods-core (= 1.11.3) cocoapods-core (= 1.12.1)
cocoapods-deintegrate (>= 1.0.3, < 2.0) cocoapods-deintegrate (>= 1.0.3, < 2.0)
cocoapods-downloader (>= 1.4.0, < 2.0) cocoapods-downloader (>= 1.6.0, < 2.0)
cocoapods-plugins (>= 1.0.0, < 2.0) cocoapods-plugins (>= 1.0.0, < 2.0)
cocoapods-search (>= 1.0.0, < 2.0) cocoapods-search (>= 1.0.0, < 2.0)
cocoapods-trunk (>= 1.4.0, < 2.0) cocoapods-trunk (>= 1.6.0, < 2.0)
cocoapods-try (>= 1.1.0, < 2.0) cocoapods-try (>= 1.1.0, < 2.0)
colored2 (~> 3.1) colored2 (~> 3.1)
escape (~> 0.0.4) escape (~> 0.0.4)
@ -51,10 +50,10 @@ GEM
gh_inspector (~> 1.0) gh_inspector (~> 1.0)
molinillo (~> 0.8.0) molinillo (~> 0.8.0)
nap (~> 1.0) nap (~> 1.0)
ruby-macho (>= 1.0, < 3.0) ruby-macho (>= 2.3.0, < 3.0)
xcodeproj (>= 1.21.0, < 2.0) xcodeproj (>= 1.21.0, < 2.0)
cocoapods-core (1.11.3) cocoapods-core (1.12.1)
activesupport (>= 5.0, < 7) activesupport (>= 5.0, < 8)
addressable (~> 2.8) addressable (~> 2.8)
algoliasearch (~> 1.0) algoliasearch (~> 1.0)
concurrent-ruby (~> 1.1) concurrent-ruby (~> 1.1)
@ -77,7 +76,7 @@ GEM
colorize (0.8.1) colorize (0.8.1)
commander (4.6.0) commander (4.6.0)
highline (~> 2.0.0) highline (~> 2.0.0)
concurrent-ruby (1.1.10) concurrent-ruby (1.2.2)
declarative (0.0.20) declarative (0.0.20)
digest-crc (0.6.4) digest-crc (0.6.4)
rake (>= 12.0.0, < 14.0.0) rake (>= 12.0.0, < 14.0.0)
@ -88,8 +87,8 @@ GEM
escape (0.0.4) escape (0.0.4)
ethon (0.16.0) ethon (0.16.0)
ffi (>= 1.15.0) ffi (>= 1.15.0)
excon (0.95.0) excon (0.99.0)
faraday (1.10.2) faraday (1.10.3)
faraday-em_http (~> 1.0) faraday-em_http (~> 1.0)
faraday-em_synchrony (~> 1.0) faraday-em_synchrony (~> 1.0)
faraday-excon (~> 1.1) faraday-excon (~> 1.1)
@ -117,8 +116,8 @@ GEM
faraday-retry (1.0.3) faraday-retry (1.0.3)
faraday_middleware (1.2.0) faraday_middleware (1.2.0)
faraday (~> 1.0) faraday (~> 1.0)
fastimage (2.2.6) fastimage (2.2.7)
fastlane (2.211.0) fastlane (2.213.0)
CFPropertyList (>= 2.3, < 4.0.0) CFPropertyList (>= 2.3, < 4.0.0)
addressable (>= 2.8, < 3.0.0) addressable (>= 2.8, < 3.0.0)
artifactory (~> 3.0) artifactory (~> 3.0)
@ -142,7 +141,7 @@ GEM
json (< 3.0.0) json (< 3.0.0)
jwt (>= 2.1.0, < 3) jwt (>= 2.1.0, < 3)
mini_magick (>= 4.9.4, < 5.0.0) mini_magick (>= 4.9.4, < 5.0.0)
multipart-post (~> 2.0.0) multipart-post (>= 2.0.0, < 3.0.0)
naturally (~> 2.2) naturally (~> 2.2)
optparse (~> 0.1.1) optparse (~> 0.1.1)
plist (>= 3.1.0, < 4.0.0) plist (>= 3.1.0, < 4.0.0)
@ -161,9 +160,9 @@ GEM
fourflusher (2.3.1) fourflusher (2.3.1)
fuzzy_match (2.0.4) fuzzy_match (2.0.4)
gh_inspector (1.1.3) gh_inspector (1.1.3)
google-apis-androidpublisher_v3 (0.32.0) google-apis-androidpublisher_v3 (0.42.0)
google-apis-core (>= 0.9.1, < 2.a) google-apis-core (>= 0.11.0, < 2.a)
google-apis-core (0.9.2) google-apis-core (0.11.0)
addressable (~> 2.5, >= 2.5.1) addressable (~> 2.5, >= 2.5.1)
googleauth (>= 0.16.2, < 2.a) googleauth (>= 0.16.2, < 2.a)
httpclient (>= 2.8.1, < 3.a) httpclient (>= 2.8.1, < 3.a)
@ -172,10 +171,10 @@ GEM
retriable (>= 2.0, < 4.a) retriable (>= 2.0, < 4.a)
rexml rexml
webrick webrick
google-apis-iamcredentials_v1 (0.16.0) google-apis-iamcredentials_v1 (0.17.0)
google-apis-core (>= 0.9.1, < 2.a) google-apis-core (>= 0.11.0, < 2.a)
google-apis-playcustomapp_v1 (0.12.0) google-apis-playcustomapp_v1 (0.13.0)
google-apis-core (>= 0.9.1, < 2.a) google-apis-core (>= 0.11.0, < 2.a)
google-apis-storage_v1 (0.19.0) google-apis-storage_v1 (0.19.0)
google-apis-core (>= 0.9.0, < 2.a) google-apis-core (>= 0.9.0, < 2.a)
google-cloud-core (1.6.0) google-cloud-core (1.6.0)
@ -183,7 +182,7 @@ GEM
google-cloud-errors (~> 1.0) google-cloud-errors (~> 1.0)
google-cloud-env (1.6.0) google-cloud-env (1.6.0)
faraday (>= 0.17.3, < 3.0) faraday (>= 0.17.3, < 3.0)
google-cloud-errors (1.3.0) google-cloud-errors (1.3.1)
google-cloud-storage (1.44.0) google-cloud-storage (1.44.0)
addressable (~> 2.8) addressable (~> 2.8)
digest-crc (~> 0.4) digest-crc (~> 0.4)
@ -192,7 +191,7 @@ GEM
google-cloud-core (~> 1.6) google-cloud-core (~> 1.6)
googleauth (>= 0.16.2, < 2.a) googleauth (>= 0.16.2, < 2.a)
mini_mime (~> 1.0) mini_mime (~> 1.0)
googleauth (1.3.0) googleauth (1.5.2)
faraday (>= 0.17.3, < 3.a) faraday (>= 0.17.3, < 3.a)
jwt (>= 1.4, < 3.0) jwt (>= 1.4, < 3.0)
memoist (~> 0.16) memoist (~> 0.16)
@ -203,42 +202,27 @@ GEM
http-cookie (1.0.5) http-cookie (1.0.5)
domain_name (~> 0.5) domain_name (~> 0.5)
httpclient (2.8.3) httpclient (2.8.3)
i18n (1.12.0) i18n (1.13.0)
concurrent-ruby (~> 1.0) concurrent-ruby (~> 1.0)
jazzy (0.14.3)
cocoapods (~> 1.5)
mustache (~> 1.1)
open4 (~> 1.3)
redcarpet (~> 3.4)
rexml (~> 3.2)
rouge (>= 2.0.6, < 4.0)
sassc (~> 2.1)
sqlite3 (~> 1.3)
xcinvoke (~> 0.3.0)
jmespath (1.6.2) jmespath (1.6.2)
json (2.6.3) json (2.6.3)
jwt (2.6.0) jwt (2.7.0)
liferaft (0.0.6)
memoist (0.16.2) memoist (0.16.2)
mini_magick (4.12.0) mini_magick (4.12.0)
mini_mime (1.1.2) mini_mime (1.1.2)
mini_portile2 (2.8.1) minitest (5.18.0)
minitest (5.16.3)
molinillo (0.8.0) molinillo (0.8.0)
multi_json (1.15.0) multi_json (1.15.0)
multipart-post (2.0.0) multipart-post (2.3.0)
mustache (1.1.1)
nanaimo (0.3.0) nanaimo (0.3.0)
nap (1.1.0) nap (1.1.0)
naturally (2.2.1) naturally (2.2.1)
netrc (0.11.0) netrc (0.11.0)
open4 (1.3.4)
optparse (0.1.1) optparse (0.1.1)
os (1.1.4) os (1.1.4)
plist (3.6.0) plist (3.7.0)
public_suffix (4.0.7) public_suffix (4.0.7)
rake (13.0.6) rake (13.0.6)
redcarpet (3.5.1)
representable (3.2.0) representable (3.2.0)
declarative (< 0.1.0) declarative (< 0.1.0)
trailblazer-option (>= 0.1.1, < 0.2.0) trailblazer-option (>= 0.1.1, < 0.2.0)
@ -249,19 +233,15 @@ GEM
ruby-macho (2.5.1) ruby-macho (2.5.1)
ruby2_keywords (0.0.5) ruby2_keywords (0.0.5)
rubyzip (2.3.2) rubyzip (2.3.2)
sassc (2.4.0)
ffi (~> 1.9)
security (0.1.3) security (0.1.3)
signet (0.17.0) signet (0.17.0)
addressable (~> 2.8) addressable (~> 2.8)
faraday (>= 0.17.5, < 3.a) faraday (>= 0.17.5, < 3.a)
jwt (>= 1.5, < 3.0) jwt (>= 1.5, < 3.0)
multi_json (~> 1.10) multi_json (~> 1.10)
simctl (1.6.8) simctl (1.6.10)
CFPropertyList CFPropertyList
naturally naturally
sqlite3 (1.5.4)
mini_portile2 (~> 2.8.0)
synx (0.2.1) synx (0.2.1)
clamp (~> 0.6) clamp (~> 0.6)
colorize (~> 0.7) colorize (~> 0.7)
@ -276,17 +256,15 @@ GEM
tty-cursor (~> 0.7) tty-cursor (~> 0.7)
typhoeus (1.4.0) typhoeus (1.4.0)
ethon (>= 0.9.0) ethon (>= 0.9.0)
tzinfo (2.0.5) tzinfo (2.0.6)
concurrent-ruby (~> 1.0) concurrent-ruby (~> 1.0)
uber (0.1.0) uber (0.1.0)
unf (0.1.4) unf (0.1.4)
unf_ext unf_ext
unf_ext (0.0.8.2) unf_ext (0.0.8.2)
unicode-display_width (1.8.0) unicode-display_width (1.8.0)
webrick (1.7.0) webrick (1.8.1)
word_wrap (1.0.0) word_wrap (1.0.0)
xcinvoke (0.3.0)
liferaft (~> 0.0.6)
xcodeproj (1.22.0) xcodeproj (1.22.0)
CFPropertyList (>= 2.3.3, < 4.0) CFPropertyList (>= 2.3.3, < 4.0)
atomos (~> 0.1.3) atomos (~> 0.1.3)
@ -298,7 +276,6 @@ GEM
rouge (~> 2.0.7) rouge (~> 2.0.7)
xcpretty-travis-formatter (1.0.1) xcpretty-travis-formatter (1.0.1)
xcpretty (~> 0.2, >= 0.0.7) xcpretty (~> 0.2, >= 0.0.7)
zeitwerk (2.6.6)
PLATFORMS PLATFORMS
ruby ruby
@ -306,8 +283,7 @@ PLATFORMS
DEPENDENCIES DEPENDENCIES
cocoapods cocoapods
fastlane fastlane
jazzy
synx synx
BUNDLED WITH BUNDLED WITH
2.1.4 2.2.33


@ -1,7 +1,7 @@
Pod::Spec.new do |s| Pod::Spec.new do |s|
s.name = "HaishinKit" s.name = "HaishinKit"
s.version = "1.4.0" s.version = "1.5.2"
s.summary = "Camera and Microphone streaming library via RTMP, HLS for iOS, macOS and tvOS." s.summary = "Camera and Microphone streaming library via RTMP, HLS for iOS, macOS and tvOS."
s.swift_version = "5.7" s.swift_version = "5.7"
@ -25,6 +25,6 @@ Pod::Spec.new do |s|
s.tvos.source_files = "Platforms/tvOS/*.{h,swift}" s.tvos.source_files = "Platforms/tvOS/*.{h,swift}"
s.source_files = "Sources/**/*.swift" s.source_files = "Sources/**/*.swift"
s.dependency 'Logboard', '~> 2.3.0' s.dependency 'Logboard', '~> 2.3.1'
end end

File diff suppressed because it is too large


@ -39,6 +39,7 @@
ignoresPersistentStateOnLaunch = "NO" ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES" debugDocumentVersioning = "YES"
debugServiceExtension = "internal" debugServiceExtension = "internal"
enableGPUValidationMode = "1"
allowLocationSimulation = "YES"> allowLocationSimulation = "YES">
<BuildableProductRunnable <BuildableProductRunnable
runnableDebuggingMode = "0"> runnableDebuggingMode = "0">


@ -39,6 +39,7 @@
ignoresPersistentStateOnLaunch = "NO" ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES" debugDocumentVersioning = "YES"
debugServiceExtension = "internal" debugServiceExtension = "internal"
enableGPUValidationMode = "1"
allowLocationSimulation = "YES"> allowLocationSimulation = "YES">
<BuildableProductRunnable <BuildableProductRunnable
runnableDebuggingMode = "0"> runnableDebuggingMode = "0">


@ -0,0 +1,78 @@
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "1420"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
buildImplicitDependencies = "YES">
<BuildActionEntries>
<BuildActionEntry
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "YES"
buildForArchiving = "YES"
buildForAnalyzing = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "29C932931CD76FD300283FC5"
BuildableName = "Example macOS.app"
BlueprintName = "Example macOS"
ReferencedContainer = "container:HaishinKit.xcodeproj">
</BuildableReference>
</BuildActionEntry>
</BuildActionEntries>
</BuildAction>
<TestAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
shouldUseLaunchSchemeArgsEnv = "YES">
<Testables>
</Testables>
</TestAction>
<LaunchAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES"
debugServiceExtension = "internal"
allowLocationSimulation = "YES">
<BuildableProductRunnable
runnableDebuggingMode = "0">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "29C932931CD76FD300283FC5"
BuildableName = "Example macOS.app"
BlueprintName = "Example macOS"
ReferencedContainer = "container:HaishinKit.xcodeproj">
</BuildableReference>
</BuildableProductRunnable>
</LaunchAction>
<ProfileAction
buildConfiguration = "Release"
shouldUseLaunchSchemeArgsEnv = "YES"
savedToolIdentifier = ""
useCustomWorkingDirectory = "NO"
debugDocumentVersioning = "YES">
<BuildableProductRunnable
runnableDebuggingMode = "0">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "29C932931CD76FD300283FC5"
BuildableName = "Example macOS.app"
BlueprintName = "Example macOS"
ReferencedContainer = "container:HaishinKit.xcodeproj">
</BuildableReference>
</BuildableProductRunnable>
</ProfileAction>
<AnalyzeAction
buildConfiguration = "Debug">
</AnalyzeAction>
<ArchiveAction
buildConfiguration = "Release"
revealArchiveInOrganizer = "YES">
</ArchiveAction>
</Scheme>


@ -26,14 +26,24 @@
</TestAction> </TestAction>
<LaunchAction <LaunchAction
buildConfiguration = "Debug" buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB" selectedDebuggerIdentifier = ""
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB" selectedLauncherIdentifier = "Xcode.IDEFoundation.Launcher.PosixSpawn"
launchStyle = "0" launchStyle = "0"
useCustomWorkingDirectory = "NO" useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO" ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES" debugDocumentVersioning = "YES"
debugServiceExtension = "internal" debugServiceExtension = "internal"
allowLocationSimulation = "YES"> allowLocationSimulation = "YES">
<BuildableProductRunnable
runnableDebuggingMode = "0">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "29C9327D1CD76FB800283FC5"
BuildableName = "Example iOS.app"
BlueprintName = "Example iOS"
ReferencedContainer = "container:HaishinKit.xcodeproj">
</BuildableReference>
</BuildableProductRunnable>
</LaunchAction> </LaunchAction>
<ProfileAction <ProfileAction
buildConfiguration = "Release" buildConfiguration = "Release"
@ -41,6 +51,15 @@
savedToolIdentifier = "" savedToolIdentifier = ""
useCustomWorkingDirectory = "NO" useCustomWorkingDirectory = "NO"
debugDocumentVersioning = "YES"> debugDocumentVersioning = "YES">
<MacroExpansion>
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "29C9327D1CD76FB800283FC5"
BuildableName = "Example iOS.app"
BlueprintName = "Example iOS"
ReferencedContainer = "container:HaishinKit.xcodeproj">
</BuildableReference>
</MacroExpansion>
</ProfileAction> </ProfileAction>
<AnalyzeAction <AnalyzeAction
buildConfiguration = "Debug"> buildConfiguration = "Debug">


@ -1,4 +1,4 @@
// swift-tools-version:5.1 // swift-tools-version:5.5
// The swift-tools-version declares the minimum version of Swift required to build this package. // The swift-tools-version declares the minimum version of Swift required to build this package.
import PackageDescription import PackageDescription
@ -7,32 +7,36 @@ let package = Package(
platforms: [ platforms: [
.iOS(.v11), .iOS(.v11),
.tvOS(.v11), .tvOS(.v11),
.macOS(.v10_13) .macOS(.v10_13),
.macCatalyst(.v14)
], ],
products: [ products: [
.library(name: "HaishinKit", targets: ["HaishinKit"]) .library(name: "HaishinKit", targets: ["HaishinKit"])
], ],
dependencies: [ dependencies: [
.package(url: "https://github.com/shogo4405/Logboard.git", from: "2.3.0") .package(url: "https://github.com/shogo4405/Logboard.git", from: "2.3.1")
], ],
targets: [ targets: [
.target(name: "SwiftPMSupport"), .target(name: "SwiftPMSupport"),
.target(name: "HaishinKit", .target(name: "HaishinKit",
dependencies: ["Logboard", "SwiftPMSupport"], dependencies: ["Logboard", "SwiftPMSupport"],
path: "Sources", path: "Sources",
exclude: [
"Platforms/iOS/Info.plist",
"Platforms/macOS/Info.plist",
"Platforms/tvOS/Info.plist"
],
sources: [ sources: [
"Codec", "Codec",
"Extension", "Extension",
"FLV", "FLV",
"HTTP", "HTTP",
"ISO",
"Media", "Media",
"MP4", "MPEG",
"Net", "Net",
"RTMP", "RTMP",
"Util", "Util",
"Platforms", "Platforms"
"TS"
]) ])
] ]
) )
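For consumers of the package, a minimal manifest is sketched below; this is not part of this change, the MyApp name is illustrative, and the HaishinKit product name comes from the manifest above:

// swift-tools-version:5.5
import PackageDescription

let package = Package(
    name: "MyApp",
    platforms: [.iOS(.v11), .macOS(.v10_13)],
    dependencies: [
        // Illustrative: pin to the release line published by this repository.
        .package(url: "https://github.com/shogo4405/HaishinKit.swift.git", from: "1.5.2")
    ],
    targets: [
        .executableTarget(name: "MyApp", dependencies: ["HaishinKit"])
    ]
)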


@ -102,6 +102,9 @@ extension HKView: NetStreamDrawable {
stream.lockQueue.async { stream.lockQueue.async {
stream.mixer.videoIO.drawable = self stream.mixer.videoIO.drawable = self
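// The preview layer backs on-screen rendering, so its capture session is reassigned on the main thread.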
DispatchQueue.main.async {
self.layer.session = stream.mixer.session
}
self.currentStream = stream self.currentStream = stream
stream.mixer.startRunning() stream.mixer.startRunning()
} }


@ -3,9 +3,7 @@
import AppKit import AppKit
import AVFoundation import AVFoundation
/** /// A view that displays the video content of a NetStream object using AVCaptureVideoPreviewLayer.
* A view that displays a video content of a NetStream object which uses AVCaptureVideoPreviewLayer.
*/
public class HKView: NSView { public class HKView: NSView {
/// The view's background color. /// The view's background color.
public static var defaultBackgroundColor: NSColor = .black public static var defaultBackgroundColor: NSColor = .black

README.md

@@ -1,11 +1,13 @@
# HaishinKit for iOS, macOS, tvOS, and [Android](https://github.com/shogo4405/HaishinKit.kt).
-[![Platform](https://img.shields.io/cocoapods/p/HaishinKit.svg?style=flat)](http://cocoapods.org/pods/HaishinKit)
-![Language](https://img.shields.io/badge/language-Swift%205.3-orange.svg)
-[![CocoaPods](https://img.shields.io/cocoapods/v/HaishinKit.svg?style=flat)](http://cocoapods.org/pods/HaishinKit)
+[![GitHub Stars](https://img.shields.io/github/stars/shogo4405/HaishinKit.swift?style=social)](https://github.com/shogo4405/HaishinKit.swift/stargazers)
+[![Release](https://img.shields.io/github/v/release/shogo4405/HaishinKit.swift)](https://github.com/shogo4405/HaishinKit.swift/releases/latest)
+[![Platform Compatibility](https://img.shields.io/endpoint?url=https%3A%2F%2Fswiftpackageindex.com%2Fapi%2Fpackages%2Fshogo4405%2FHaishinKit.swift%2Fbadge%3Ftype%3Dplatforms)](https://swiftpackageindex.com/shogo4405/HaishinKit.swift)
+[![Swift Compatibility](https://img.shields.io/endpoint?url=https%3A%2F%2Fswiftpackageindex.com%2Fapi%2Fpackages%2Fshogo4405%2FHaishinKit.swift%2Fbadge%3Ftype%3Dswift-versions)](https://swiftpackageindex.com/shogo4405/HaishinKit.swift)
[![GitHub license](https://img.shields.io/badge/License-BSD%203--Clause-blue.svg)](https://raw.githubusercontent.com/shogo4405/HaishinKit.swift/master/LICENSE.md)
* Camera and Microphone streaming library via RTMP, HLS for iOS, macOS, tvOS.
-* [API Documentation](https://shogo4405.github.io/HaishinKit.swift/)
+* README.md contains unreleased content, which can be tested on the main branch.
+* [API Documentation](https://shogo4405.github.io/HaishinKit.swift/documentation/haishinkit)
<p align="center">
<strong>Sponsored with 💖 by</strong><br />
@@ -26,21 +28,27 @@ Enterprise Grade APIs for Feeds & Chat. <a href="https://getstream.io/tutorials/
* If you want to support e-mail based communication without GitHub.
- Consulting fee is [$50](https://www.paypal.me/shogo4405/50USD) per incident. I can usually respond within a few days.
* [Discord chatroom](https://discord.com/invite/8nkshPnanr).
-* If you understand Japanese, please use Japanese!
+* If you understand Japanese, please communicate in Japanese!
## 💖 Sponsors
<p align="center">
<a href="https://streamlabs.com/" target="_blank"><img src="https://user-images.githubusercontent.com/810189/206836172-9c360977-ab6b-4eff-860b-82d0e7b06318.png" width="350px" alt="Streamlabs" /></a>
</p>
## 🌏 Related projects
Project name |Notes |License
----------------|------------|--------------
[SRTHaishinKit for iOS.](https://github.com/shogo4405/SRTHaishinKit.swift)|Camera and Microphone streaming library via SRT.|[BSD 3-Clause "New" or "Revised" License](https://github.com/shogo4405/SRTHaishinKit.swift/blob/master/LICENSE.md)
[HaishinKit for Android.](https://github.com/shogo4405/HaishinKit.kt)|Camera and Microphone streaming library via RTMP for Android.|[BSD 3-Clause "New" or "Revised" License](https://github.com/shogo4405/HaishinKit.kt/blob/master/LICENSE.md)
[HaishinKit for Flutter.](https://github.com/shogo4405/HaishinKit.dart)|Camera and Microphone streaming library via RTMP for Flutter.|[BSD 3-Clause "New" or "Revised" License](https://github.com/shogo4405/HaishinKit.dart/blob/master/LICENSE.md)
## 🎨 Features
### RTMP
- [x] Authentication
-- [x] Publish and Recording (H264/AAC)
+- [x] Publish and Recording
- [x] _Playback (Beta)_
- [x] Adaptive bitrate streaming
-- [x] Handling (see also [#126](/../../issues/126))
+- [x] Handling (see also [#1153](/../../issues/1153))
+- [x] Automatic drop frames
- [ ] Action Message Format
- [x] AMF0
- [ ] AMF3
@@ -50,22 +58,50 @@ Enterprise Grade APIs for Feeds & Chat. <a href="https://getstream.io/tutorials/
- [x] _Tunneled (RTMPT over SSL/TLS) (Technical Preview)_
- [x] _RTMPT (Technical Preview)_
- [x] ReplayKit Live as a Broadcast Upload Extension
- [x] Supported codec
- Audio
- [x] AAC
- Video
- [x] H264/AVC
- ex: `stream.videoSettings.profileLevel = kVTProfileLevel_H264_Baseline_3_1 as String`
- [x] H265/HEVC ([Server-side support is required.](https://github.com/veovera/enhanced-rtmp/blob/main/enhanced-rtmp-v1.pdf))
- ex: `stream.videoSettings.profileLevel = kVTProfileLevel_HEVC_Main_AutoLevel as String`
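Putting the two `ex:` lines above together, the codec appears to be selected through the stream's `videoSettings`; a rough sketch, assuming an already configured `stream`:

```swift
import VideoToolbox

// Sketch only: switching between H.264/AVC and H.265/HEVC via profileLevel.
// As noted above, HEVC needs a server that supports Enhanced RTMP.
stream.videoSettings.profileLevel = kVTProfileLevel_H264_Baseline_3_1 as String
// ...or, for HEVC:
stream.videoSettings.profileLevel = kVTProfileLevel_HEVC_Main_AutoLevel as String
```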
### HLS
- [x] HTTPService
- [x] HLS Publish
### Multi Camera
Supports two camera video sources: a picture-in-picture display that overlays the secondary camera's image on the primary camera, and a split display that arranges the two cameras horizontally or vertically.
|Picture-In-Picture|Split|
|:-:|:-:|
|<img width="1382" alt="" src="https://user-images.githubusercontent.com/810189/210043421-ceb18cb7-9b50-43fa-a0a2-8b92b78d9df1.png">|<img width="1382" alt="" src="https://user-images.githubusercontent.com/810189/210043687-a99f21b6-28b2-4170-96de-6c814debd84d.png">|
```swift
let back = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back)
stream.attachCamera(back)
if #available(iOS 13.0, *) {
    let front = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front)
    stream.attachMultiCamera(front)
}
```
### Rendering
-|-|[HKView](https://shogo4405.github.io/HaishinKit.swift/Classes/HKView.html)|[PiPHKView](https://shogo4405.github.io/HaishinKit.swift/Classes/PiPHKView.html)|[MTHKView](https://shogo4405.github.io/HaishinKit.swift/Classes/MTHKView.html)|
+|Features|[HKView](https://shogo4405.github.io/HaishinKit.swift/Classes/HKView.html)|[PiPHKView](https://shogo4405.github.io/HaishinKit.swift/Classes/PiPHKView.html)|[MTHKView](https://shogo4405.github.io/HaishinKit.swift/Classes/MTHKView.html)|
|-|:---:|:---:|:---:|
|Engine|AVCaptureVideoPreviewLayer|AVSampleBufferDisplayLayer|Metal|
-|Publish|○|◯|○|
-|Playback|×|◯|○|
-|VisualEffect|×|◯|○|
+|Publish|✔|✔|✔|
+|Playback|<br />|✔|✔|
+|VisualEffect|<br />|✔|✔|
+|PictureInPicture|<br />|✔|<br />|
+|MultiCamera|<br />|✔|✔|
### Others
- [x] [Support multitasking camera access.](https://developer.apple.com/documentation/avfoundation/capture_setup/accessing_the_camera_while_multitasking)
- [x] _Support tvOS 11.0+ (Technical Preview)_
-- tvOS can't publish Camera and Microphone. Available playback feature.
+- tvOS can't use camera and microphone devices.
- [x] Hardware acceleration for H264 video encoding, AAC audio encoding
- [x] Support "Allow app extension API only" option
- [ ] ~~Support GPUImage framework (~> 0.5.12)~~
@@ -75,9 +111,8 @@ Enterprise Grade APIs for Feeds & Chat. <a href="https://getstream.io/tutorials/
## 🌏 Requirements
|-|iOS|OSX|tvOS|Xcode|Swift|
|:----:|:----:|:----:|:----:|:----:|:----:|
+|1.5.0+|11.0+|10.13+|10.2+|14.3+|5.7+|
|1.4.0+|11.0+|10.13+|10.2+|14.0+|5.7+|
-|1.3.0+|11.0+|10.13+|10.2+|14.0+|5.7+|
-|1.2.0+|9.0+|10.11+|10.2+|13.0+|5.5+|
## 🐾 Examples
Example projects are available for iOS with UIKit, iOS with SwiftUI, macOS, and tvOS.
@@ -108,7 +143,7 @@ source 'https://github.com/CocoaPods/Specs.git'
use_frameworks!
def import_pods
-    pod 'HaishinKit', '~> 1.4.0'
+    pod 'HaishinKit', '~> 1.5.2'
end
target 'Your Target' do
@@ -118,21 +153,15 @@ end
```
### Carthage
```
-github "shogo4405/HaishinKit.swift" ~> 1.4.0
+github "shogo4405/HaishinKit.swift" ~> 1.5.2
```
### Swift Package Manager
```
https://github.com/shogo4405/HaishinKit.swift
```
## 💠 Donation
- GitHub Sponsors
- https://github.com/sponsors/shogo4405
- Paypal
- https://www.paypal.me/shogo4405
## 🔧 Prerequisites
-Make sure you set up and activate your AVAudioSession.
+Make sure you set up and activate your AVAudioSession on iOS.
```swift
import AVFoundation
let session = AVAudioSession.sharedInstance()
@@ -147,26 +176,24 @@ do {
## 📓 RTMP Usage
Real Time Messaging Protocol (RTMP).
```swift
-let rtmpConnection = RTMPConnection()
-let rtmpStream = RTMPStream(connection: rtmpConnection)
-rtmpStream.attachAudio(AVCaptureDevice.default(for: .audio)) { error in
-// print(error)
-}
-rtmpStream.attachCamera(AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back)) { error in
-// print(error)
-}
-let hkView = HKView(frame: view.bounds)
-hkView.videoGravity = AVLayerVideoGravity.resizeAspectFill
-hkView.attachStream(rtmpStream)
-// add ViewController#view
-view.addSubview(hkView)
-rtmpConnection.connect("rtmp://localhost/appName/instanceName")
-rtmpStream.publish("streamName")
-// if you want to record a stream.
-// rtmpStream.publish("streamName", type: .localRecord)
+let connection = RTMPConnection()
+let stream = RTMPStream(connection: connection)
+stream.attachAudio(AVCaptureDevice.default(for: .audio)) { error in
+// print(error)
+}
+stream.attachCamera(AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back)) { error in
+// print(error)
+}
+let hkView = MTHKView(frame: view.bounds)
+hkView.videoGravity = AVLayerVideoGravity.resizeAspectFill
+hkView.attachStream(stream)
+// add ViewController#view
+view.addSubview(hkView)
+connection.connect("rtmp://localhost/appName/instanceName")
+stream.publish("streamName")
```
### RTMP URL Format
@@ -184,89 +211,139 @@ rtmpStream.publish("streamName")
### Settings
```swift
-var rtmpStream = RTMPStream(connection: rtmpConnection)
-rtmpStream.captureSettings = [
-.fps: 30, // FPS
-.sessionPreset: AVCaptureSession.Preset.medium, // input video width/height
-// .isVideoMirrored: false,
-// .continuousAutofocus: false, // use camera autofocus mode
-// .continuousExposure: false, // use camera exposure mode
-// .preferredVideoStabilizationMode: AVCaptureVideoStabilizationMode.auto
-]
-rtmpStream.audioSettings = [
-.muted: false, // mute audio
-.bitrate: 32 * 1000,
-]
-rtmpStream.videoSettings = [
-.width: 640, // video output width
-.height: 360, // video output height
-.bitrate: 160 * 1000, // video output bitrate
-.profileLevel: kVTProfileLevel_H264_Baseline_3_1, // H264 Profile require "import VideoToolbox"
-.maxKeyFrameIntervalDuration: 2, // key frame / sec
-]
-// "0" means the same of input
-rtmpStream.recorderSettings = [
-AVMediaType.audio: [
-AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
-AVSampleRateKey: 0,
-AVNumberOfChannelsKey: 0,
-// AVEncoderBitRateKey: 128000,
-],
-AVMediaType.video: [
-AVVideoCodecKey: AVVideoCodecH264,
-AVVideoHeightKey: 0,
-AVVideoWidthKey: 0,
-/*
-AVVideoCompressionPropertiesKey: [
-AVVideoMaxKeyFrameIntervalDurationKey: 2,
-AVVideoProfileLevelKey: AVVideoProfileLevelH264Baseline30,
-AVVideoAverageBitRateKey: 512000
-]
-*/
-],
-]
-// 2nd arguemnt set false
-rtmpStream.attachAudio(AVCaptureDevice.default(for: .audio), automaticallyConfiguresApplicationAudioSession: false)
+var stream = RTMPStream(connection: rtmpConnection)
+stream.frameRate = 30
+stream.sessionPreset = AVCaptureSession.Preset.medium
+/// Specifies the video capture settings.
+stream.videoCapture(for: 0).isVideoMirrored = false
+stream.videoCapture(for: 0).preferredVideoStabilizationMode = .auto
+// rtmpStream.videoCapture(for: 1).isVideoMirrored = false
+// Specifies the audio codec settings.
+stream.audioSettings = AudioCodecSettings(
+bitRate: 64 * 1000
+)
+// Specifies the video codec settings.
+stream.videoSettings = VideoCodecSettings(
+videoSize: .init(width: 854, height: 480),
+profileLevel: kVTProfileLevel_H264_Baseline_3_1 as String,
+bitRate: 640 * 1000,
+maxKeyFrameIntervalDuration: 2,
+scalingMode: .trim,
+bitRateMode: .average,
+allowFrameReordering: nil,
+isHardwareEncoderEnabled: true
+)
+// Specifies the recording settings. "0" means the same as the input.
+stream.startRecording([
+AVMediaType.audio: [
+AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
+AVSampleRateKey: 0,
+AVNumberOfChannelsKey: 0,
+// AVEncoderBitRateKey: 128000,
+],
+AVMediaType.video: [
+AVVideoCodecKey: AVVideoCodecH264,
+AVVideoHeightKey: 0,
+AVVideoWidthKey: 0,
+/*
+AVVideoCompressionPropertiesKey: [
+AVVideoMaxKeyFrameIntervalDurationKey: 2,
+AVVideoProfileLevelKey: AVVideoProfileLevelH264Baseline30,
+AVVideoAverageBitRateKey: 512000
+]
+*/
+]
+])
+// Set the 2nd argument to false.
+stream.attachAudio(AVCaptureDevice.default(for: .audio), automaticallyConfiguresApplicationAudioSession: false)
```
```swift
// Picture-in-picture settings.
stream.multiCamCaptureSettings = MultiCamCaptureSetting(
mode: .pip,
cornerRadius: 16.0,
regionOfInterest: .init(
origin: CGPoint(x: 16, y: 16),
size: .init(width: 160, height: 160)
)
)
```
```swift
// Split settings.
stream.multiCamCaptureSettings = MultiCamCaptureSetting(
mode: .split(direction: .east),
cornerRadius: 0.0,
regionOfInterest: .init(
origin: .zero,
size: .zero
)
)
```
### Authentication
```swift
-var rtmpConnection = RTMPConnection()
-rtmpConnection.connect("rtmp://username:password@localhost/appName/instanceName")
+var connection = RTMPConnection()
+connection.connect("rtmp://username:password@localhost/appName/instanceName")
```
### Screen Capture
```swift
// iOS
let screen = IOUIScreenCaptureUnit(shared: UIApplication.shared)
-screen.delegate = rtmpStream
+screen.delegate = stream
screen.startRunning()
// macOS
-rtmpStream.attachScreen(AVCaptureScreenInput(displayID: CGMainDisplayID()))
+stream.attachScreen(AVCaptureScreenInput(displayID: CGMainDisplayID()))
```
## 📓 HTTP Usage
HTTP Live Streaming (HLS). Your iPhone or Mac becomes an IP camera. A basic snippet; you can watch the stream at http://ip.address:8080/hello/playlist.m3u8
```swift
-var httpStream = HTTPStream()
-httpStream.attachCamera(AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back))
-httpStream.attachAudio(AVCaptureDevice.default(for: .audio))
-httpStream.publish("hello")
-var hkView = HKView(frame: view.bounds)
-hkView.attachStream(httpStream)
-var httpService = HLSService(domain: "", type: "_http._tcp", name: "HaishinKit", port: 8080)
-httpService.startRunning()
-httpService.addHTTPStream(httpStream)
+var stream = HTTPStream()
+stream.attachCamera(AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back))
+stream.attachAudio(AVCaptureDevice.default(for: .audio))
+stream.publish("hello")
+var hkView = MTHKView(frame: view.bounds)
+hkView.attachStream(stream)
+var httpService = HLSService(domain: "", type: "_http._tcp", name: "HaishinKit", port: 8080)
+httpService.addHTTPStream(stream)
+httpService.startRunning()
// add ViewController#view
view.addSubview(hkView)
```
## 💠 Sponsorship
We are looking for sponsors. Sponsorship will enable us to:
- Purchase smartphones or peripheral devices for testing purposes.
- Pay for testing on specific streaming services or for testing on mobile lines.
- Potentially fund private use that sustains continued OSS development.
If you use any of our libraries for work, see if your employer would be interested in sponsorship. I have some special offers, and I would greatly appreciate it. Thank you.
- On request, I will note your name or product in our README.
- If you mention in a discussion, an issue, or a pull request that you are sponsoring us, I will prioritize helping you even more.
We are looking for sponsors. The funds will be used to:
- Purchase smartphones and peripheral devices for testing purposes.
- Pay for testing against specific streaming services and for testing over mobile network lines.
- Possibly for private use, so that I can keep developing this OSS continuously.
If you use this library at work on an ongoing basis, please check whether your employer would be interested in sponsoring; several perks are available:
- Placement of your company logo in README.md
- Priority handling of Issues and Pull Requests
[Sponsorship](https://github.com/sponsors/shogo4405)
## 📖 Reference
* Adobe's Real Time Messaging Protocol
* http://www.adobe.com/content/dam/Adobe/en/devnet/rtmp/pdf/rtmp_specification_1.0.pdf


@@ -4,10 +4,12 @@ import AVFoundation
* The interface an AudioCodec uses to inform its delegate.
*/
public protocol AudioCodecDelegate: AnyObject {
-/// Tells the receiver to set a formatDescription.
-func audioCodec(_ codec: AudioCodec, didSet formatDescription: CMFormatDescription?)
-/// Tells the receiver to output a encoded or decoded sampleBuffer.
-func audioCodec(_ codec: AudioCodec, didOutput sample: UnsafeMutableAudioBufferListPointer, presentationTimeStamp: CMTime)
+/// Tells the receiver to output an AVAudioFormat.
+func audioCodec(_ codec: AudioCodec, didOutput audioFormat: AVAudioFormat)
+/// Tells the receiver to output an encoded or decoded CMSampleBuffer.
+func audioCodec(_ codec: AudioCodec, didOutput audioBuffer: AVAudioBuffer, presentationTimeStamp: CMTime)
+/// Tells the receiver that an error occurred.
+func audioCodec(_ codec: AudioCodec, errorOccurred error: AudioCodec.Error)
}
// MARK: -
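For orientation, a minimal sketch of a conformer to the revised delegate protocol above; the `Monitor` type and its behavior are illustrative assumptions, not part of this diff:

```swift
import AVFoundation

// Hypothetical conformer; a real app would forward buffers to a muxer.
final class Monitor: AudioCodecDelegate {
    func audioCodec(_ codec: AudioCodec, didOutput audioFormat: AVAudioFormat) {
        print("negotiated output format: \(audioFormat)")
    }
    func audioCodec(_ codec: AudioCodec, didOutput audioBuffer: AVAudioBuffer, presentationTimeStamp: CMTime) {
        // Handle the encoded (AAC) or decoded (PCM) buffer here.
    }
    func audioCodec(_ codec: AudioCodec, errorOccurred error: AudioCodec.Error) {
        print("audio codec error: \(error)")
    }
}
```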
@@ -16,302 +18,149 @@ public protocol AudioCodecDelegate: AnyObject {
* - seealso: https://developer.apple.com/library/ios/technotes/tn2236/_index.html
*/
public class AudioCodec {
-enum Error: Swift.Error {
-case setPropertyError(id: AudioConverterPropertyID, status: OSStatus)
+/// The AudioCodec error domain codes.
+public enum Error: Swift.Error {
+case failedToCreate(from: AVAudioFormat, to: AVAudioFormat)
+case failedToConvert(error: NSError)
} }
/**
* The audio encoding or decoding options.
*/
public enum Option: String, KeyPathRepresentable {
/// Specifies the bitRate of audio output.
case bitrate
/// Specifies the sampleRate of audio output.
case sampleRate
/// The bitRate of audio output.
case actualBitrate
public var keyPath: AnyKeyPath {
switch self {
case .bitrate:
return \AudioCodec.bitrate
case .sampleRate:
return \AudioCodec.sampleRate
case .actualBitrate:
return \AudioCodec.actualBitrate
}
}
}
/// The default minimum bitrate for an AudioCodec, value is 8000.
public static let minimumBitrate: UInt32 = 8 * 1000
/// The default bitrate for an AudioCodec, the value is 32000.
public static let defaultBitrate: UInt32 = 32 * 1000
/// The default channels for an AudioCodec, the value is 0 means according to a input source.
public static let defaultChannels: UInt32 = 0
/// The default sampleRate for an AudioCodec, the value is 0 means according to a input source.
public static let defaultSampleRate: Double = 0
/// The default maximum buffers for an AudioCodec.
public static let defaultMaximumBuffers: Int = 1
/// Specifies the output format.
public var destination: AudioCodecFormat = .aac
/// Specifies the delegate.
-public weak var delegate: AudioCodecDelegate?
+public weak var delegate: (any AudioCodecDelegate)?
/// This instance is running to process(true) or not(false).
public private(set) var isRunning: Atomic<Bool> = .init(false)
/// Specifies the settings for audio codec.
-public var settings: Setting<AudioCodec, Option> = [:] {
+public var settings: AudioCodecSettings = .default {
didSet {
-settings.observer = self
+settings.apply(audioConverter, oldValue: oldValue)
}
}
private static let numSamples: Int = 1024
var bitrate: UInt32 = AudioCodec.defaultBitrate {
didSet {
guard bitrate != oldValue else {
return
}
lockQueue.async {
if let format = self._inDestinationFormat {
self.setBitrateUntilNoErr(self.bitrate * format.mChannelsPerFrame)
}
}
}
}
var sampleRate: Double = AudioCodec.defaultSampleRate
var actualBitrate: UInt32 = AudioCodec.defaultBitrate {
didSet {
logger.info(actualBitrate)
}
}
var channels: UInt32 = AudioCodec.defaultChannels
var formatDescription: CMFormatDescription? {
didSet {
guard !CMFormatDescriptionEqual(formatDescription, otherFormatDescription: oldValue) else {
return
}
logger.info(formatDescription.debugDescription)
delegate?.audioCodec(self, didSet: formatDescription)
}
}
var lockQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.AudioConverter.lock")
var inSourceFormat: AudioStreamBasicDescription? {
didSet {
guard let inSourceFormat = inSourceFormat, inSourceFormat != oldValue else {
return
}
_converter = nil
formatDescription = nil
_inDestinationFormat = nil
logger.info("\(String(describing: inSourceFormat))")
let nonInterleaved = inSourceFormat.mFormatFlags & kAudioFormatFlagIsNonInterleaved != 0
maximumBuffers = nonInterleaved ? Int(inSourceFormat.mChannelsPerFrame) : AudioCodec.defaultMaximumBuffers
currentAudioBuffer = AudioCodecBuffer(inSourceFormat, numSamples: AudioCodec.numSamples)
} }
} }
var effects: Set<AudioEffect> = []
-private let numSamples = AudioCodec.numSamples
-private var maximumBuffers: Int = AudioCodec.defaultMaximumBuffers
-private var currentAudioBuffer = AudioCodecBuffer(AudioStreamBasicDescription(mSampleRate: 0, mFormatID: 0, mFormatFlags: 0, mBytesPerPacket: 0, mFramesPerPacket: 0, mBytesPerFrame: 0, mChannelsPerFrame: 1, mBitsPerChannel: 0, mReserved: 0))
-private var _inDestinationFormat: AudioStreamBasicDescription?
-private var inDestinationFormat: AudioStreamBasicDescription {
+var lockQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.AudioCodec.lock")
+var inSourceFormat: AudioStreamBasicDescription? {
+didSet {
+guard var inSourceFormat, inSourceFormat != oldValue else {
+return
get {
if _inDestinationFormat == nil {
_inDestinationFormat = destination.audioStreamBasicDescription(inSourceFormat, sampleRate: sampleRate, channels: channels)
CMAudioFormatDescriptionCreate(
allocator: kCFAllocatorDefault,
asbd: &_inDestinationFormat!,
layoutSize: 0,
layout: nil,
magicCookieSize: 0,
magicCookie: nil,
extensions: nil,
formatDescriptionOut: &formatDescription
)
} }
return _inDestinationFormat! outputBuffers.removeAll()
} ringBuffer = .init(&inSourceFormat)
set { audioConverter = makeAudioConverter(&inSourceFormat)
_inDestinationFormat = newValue
} }
} }
private var ringBuffer: AudioCodecRingBuffer?
private var outputBuffers: [AVAudioBuffer] = []
private var audioConverter: AVAudioConverter?
-private var audioStreamPacketDescription = AudioStreamPacketDescription(mStartOffset: 0, mVariableFramesInPacket: 0, mDataByteSize: 0)
-private let inputDataProc: AudioConverterComplexInputDataProc = {(_: AudioConverterRef, ioNumberDataPackets: UnsafeMutablePointer<UInt32>, ioData: UnsafeMutablePointer<AudioBufferList>, outDataPacketDescription: UnsafeMutablePointer<UnsafeMutablePointer<AudioStreamPacketDescription>?>?, inUserData: UnsafeMutableRawPointer?) in
+/// Append a CMSampleBuffer.
+public func appendSampleBuffer(_ sampleBuffer: CMSampleBuffer, offset: Int = 0) {
Unmanaged<AudioCodec>.fromOpaque(inUserData!).takeUnretainedValue().onInputDataForAudioConverter(
ioNumberDataPackets,
ioData: ioData,
outDataPacketDescription: outDataPacketDescription
)
}
/// Create an AudioCodec instance.
public init() {
settings.observer = self
}
private var _converter: AudioConverterRef?
private var converter: AudioConverterRef {
var status: OSStatus = noErr
if _converter == nil {
var inClassDescriptions = destination.inClassDescriptions
status = AudioConverterNewSpecific(
&inSourceFormat!,
&inDestinationFormat,
UInt32(inClassDescriptions.count),
&inClassDescriptions,
&_converter
)
setBitrateUntilNoErr(bitrate * inDestinationFormat.mChannelsPerFrame)
}
if status != noErr {
logger.warn("\(status)")
}
return _converter!
}
/// Encodes bytes data.
public func encodeBytes(_ bytes: UnsafeMutableRawPointer?, count: Int, presentationTimeStamp: CMTime) {
guard isRunning.value else {
-currentAudioBuffer.clear()
return
}
-currentAudioBuffer.write(bytes, count: count, presentationTimeStamp: presentationTimeStamp)
-convert(numSamples * Int(destination.bytesPerFrame), presentationTimeStamp: presentationTimeStamp)
-}
+switch settings.format {
+case .aac:
+guard let audioConverter, let ringBuffer else {
return
-/// Encodes a CMSampleBuffer.
-public func encodeSampleBuffer(_ sampleBuffer: CMSampleBuffer, offset: Int = 0) {
-guard let format = sampleBuffer.formatDescription, CMSampleBufferDataIsReady(sampleBuffer) && isRunning.value else {
-currentAudioBuffer.clear()
+}
+let numSamples = ringBuffer.appendSampleBuffer(sampleBuffer, offset: offset)
+if ringBuffer.isReady {
+guard let buffer = getOutputBuffer() else {
return return
}
inSourceFormat = format.streamBasicDescription?.pointee
do {
let numSamples = try currentAudioBuffer.write(sampleBuffer, offset: offset)
-if currentAudioBuffer.isReady {
+}
for effect in effects {
effect.execute(currentAudioBuffer.input, format: inSourceFormat)
} }
-convert(currentAudioBuffer.maxLength, presentationTimeStamp: currentAudioBuffer.presentationTimeStamp)
+for effect in effects {
effect.execute(ringBuffer.current, presentationTimeStamp: ringBuffer.presentationTimeStamp)
}
var error: NSError?
audioConverter.convert(to: buffer, error: &error) { _, status in
status.pointee = .haveData
return ringBuffer.current
}
if let error {
delegate?.audioCodec(self, errorOccurred: .failedToConvert(error: error))
} else {
delegate?.audioCodec(self, didOutput: buffer, presentationTimeStamp: ringBuffer.presentationTimeStamp)
}
ringBuffer.next()
} }
if offset + numSamples < sampleBuffer.numSamples {
-encodeSampleBuffer(sampleBuffer, offset: offset + numSamples)
+appendSampleBuffer(sampleBuffer, offset: offset + numSamples)
}
-} catch {
-logger.error(error)
-}
-}
-@inline(__always)
+case .pcm:
+var offset = 0
+var presentationTimeStamp = sampleBuffer.presentationTimeStamp
+for i in 0..<sampleBuffer.numSamples {
+guard let buffer = makeInputBuffer() as? AVAudioCompressedBuffer else {
+continue
private func convert(_ dataBytesSize: Int, presentationTimeStamp: CMTime) {
var finished = false
repeat {
var ioOutputDataPacketSize: UInt32 = destination.packetSize
let maximumBuffers = destination.maximumBuffers((channels == 0) ? inSourceFormat?.mChannelsPerFrame ?? 1 : channels)
let outOutputData: UnsafeMutableAudioBufferListPointer = AudioBufferList.allocate(maximumBuffers: maximumBuffers)
for i in 0..<maximumBuffers {
outOutputData[i].mNumberChannels = inDestinationFormat.mChannelsPerFrame
outOutputData[i].mDataByteSize = UInt32(dataBytesSize)
outOutputData[i].mData = UnsafeMutableRawPointer.allocate(byteCount: dataBytesSize, alignment: 0)
}
let status = AudioConverterFillComplexBuffer(
converter,
inputDataProc,
Unmanaged.passUnretained(self).toOpaque(),
&ioOutputDataPacketSize,
outOutputData.unsafeMutablePointer,
nil
)
switch status {
// kAudioConverterErr_InvalidInputSize: perhaps mistake. but can support macOS BuiltIn Mic #61
case noErr, kAudioConverterErr_InvalidInputSize:
delegate?.audioCodec(self, didOutput: outOutputData, presentationTimeStamp: presentationTimeStamp)
case -1:
if destination == .pcm {
delegate?.audioCodec(self, didOutput: outOutputData, presentationTimeStamp: presentationTimeStamp)
} }
-finished = true
-default:
-finished = true
-}
-for i in 0..<outOutputData.count {
-if let mData = outOutputData[i].mData {
-free(mData)
+let sampleSize = CMSampleBufferGetSampleSize(sampleBuffer, at: i)
+let byteCount = sampleSize - ADTSHeader.size
+buffer.packetDescriptions?.pointee = AudioStreamPacketDescription(mStartOffset: 0, mVariableFramesInPacket: 0, mDataByteSize: UInt32(byteCount))
+buffer.packetCount = 1
+buffer.byteLength = UInt32(byteCount)
+if let blockBuffer = sampleBuffer.dataBuffer {
+CMBlockBufferCopyDataBytes(blockBuffer, atOffset: offset + ADTSHeader.size, dataLength: byteCount, destination: buffer.data)
+appendAudioBuffer(buffer, presentationTimeStamp: presentationTimeStamp)
presentationTimeStamp = CMTimeAdd(presentationTimeStamp, CMTime(value: CMTimeValue(1024), timescale: sampleBuffer.presentationTimeStamp.timescale))
offset += sampleSize
} }
} }
free(outOutputData.unsafeMutablePointer)
} while !finished
}
func invalidate() {
lockQueue.async {
self.inSourceFormat = nil
self._inDestinationFormat = nil
if let converter: AudioConverterRef = self._converter {
AudioConverterDispose(converter)
}
self._converter = nil
} }
} }
-func onInputDataForAudioConverter(
-_ ioNumberDataPackets: UnsafeMutablePointer<UInt32>,
+func appendAudioBuffer(_ audioBuffer: AVAudioBuffer, presentationTimeStamp: CMTime) {
+guard isRunning.value, let audioConverter, let buffer = getOutputBuffer() else {
ioData: UnsafeMutablePointer<AudioBufferList>,
outDataPacketDescription: UnsafeMutablePointer<UnsafeMutablePointer<AudioStreamPacketDescription>?>?) -> OSStatus {
guard currentAudioBuffer.isReady else {
ioNumberDataPackets.pointee = 0
return -1
}
memcpy(ioData, currentAudioBuffer.input.unsafePointer, currentAudioBuffer.listSize)
if destination == .pcm {
ioNumberDataPackets.pointee = 1
} else {
ioNumberDataPackets.pointee = UInt32(numSamples)
}
if destination == .pcm && outDataPacketDescription != nil {
audioStreamPacketDescription.mDataByteSize = currentAudioBuffer.input.unsafePointer.pointee.mBuffers.mDataByteSize
outDataPacketDescription?.pointee = withUnsafeMutablePointer(to: &audioStreamPacketDescription) { $0 }
}
currentAudioBuffer.clear()
return noErr
}
private func setBitrateUntilNoErr(_ bitrate: UInt32) {
do {
try setProperty(id: kAudioConverterEncodeBitRate, data: bitrate * inDestinationFormat.mChannelsPerFrame)
actualBitrate = bitrate
} catch {
if Self.minimumBitrate < bitrate {
setBitrateUntilNoErr(bitrate - Self.minimumBitrate)
} else {
actualBitrate = Self.minimumBitrate
}
}
}
private func setProperty<T>(id: AudioConverterPropertyID, data: T) throws {
guard let converter: AudioConverterRef = _converter else {
return return
} }
-let size = UInt32(MemoryLayout<T>.size)
-var buffer = data
-let status = AudioConverterSetProperty(converter, id, size, &buffer)
-guard status == 0 else {
-throw Error.setPropertyError(id: id, status: status)
-}
+var error: NSError?
+audioConverter.convert(to: buffer, error: &error) { _, status in
+status.pointee = .haveData
+return audioBuffer
+}
if let error {
delegate?.audioCodec(self, errorOccurred: .failedToConvert(error: error))
} else {
delegate?.audioCodec(self, didOutput: buffer, presentationTimeStamp: presentationTimeStamp)
}
}
func makeInputBuffer() -> AVAudioBuffer? {
guard let inputFormat = audioConverter?.inputFormat else {
return nil
}
switch inSourceFormat?.mFormatID {
case kAudioFormatLinearPCM:
return AVAudioPCMBuffer(pcmFormat: inputFormat, frameCapacity: 1024)
default:
return AVAudioCompressedBuffer(format: inputFormat, packetCapacity: 1, maximumPacketSize: 1024)
}
}
func releaseOutputBuffer(_ buffer: AVAudioBuffer) {
outputBuffers.append(buffer)
}
private func getOutputBuffer() -> AVAudioBuffer? {
guard let outputFormat = audioConverter?.outputFormat else {
return nil
}
if outputBuffers.isEmpty {
return settings.format.makeAudioBuffer(outputFormat)
}
return outputBuffers.removeFirst()
}
private func makeAudioConverter(_ inSourceFormat: inout AudioStreamBasicDescription) -> AVAudioConverter? {
guard
let inputFormat = AVAudioFormat(streamDescription: &inSourceFormat),
let outputFormat = settings.format.makeAudioFormat(inSourceFormat) else {
return nil
}
let converter = AVAudioConverter(from: inputFormat, to: outputFormat)
settings.apply(converter, oldValue: nil)
if converter == nil {
delegate?.audioCodec(self, errorOccurred: .failedToCreate(from: inputFormat, to: outputFormat))
} else {
delegate?.audioCodec(self, didOutput: outputFormat)
}
return converter
} }
}
@@ -319,20 +168,24 @@ extension AudioCodec: Running {
// MARK: Running
public func startRunning() {
lockQueue.async {
guard !self.isRunning.value else {
return
}
if let audioConverter = self.audioConverter {
self.delegate?.audioCodec(self, didOutput: audioConverter.outputFormat)
}
self.isRunning.mutate { $0 = true }
}
}
public func stopRunning() {
lockQueue.async {
-if let convert: AudioQueueRef = self._converter {
-AudioConverterDispose(convert)
-self._converter = nil
-}
-self.currentAudioBuffer.clear()
+guard self.isRunning.value else {
+return
+}
self.inSourceFormat = nil
-self.formatDescription = nil
-self._inDestinationFormat = nil
+self.audioConverter = nil
+self.ringBuffer = nil
self.isRunning.mutate { $0 = false }
}
}
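Reading the new implementation end to end, the intended call sequence appears to be: configure settings, start, append sample buffers, stop. A sketch under that assumption (the framework normally drives this internally, and `Monitor` is the illustrative conformer from earlier):

```swift
// Hypothetical driving code; access levels may restrict this to the module.
let monitor = Monitor()
let codec = AudioCodec()
codec.delegate = monitor // delegate is weak, so keep monitor alive elsewhere
codec.settings = AudioCodecSettings(bitRate: 64 * 1000, format: .aac)
codec.startRunning()
// Feed CMSampleBuffers from a capture callback:
// codec.appendSampleBuffer(sampleBuffer)
codec.stopRunning()
```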


@@ -1,126 +0,0 @@
import AVFoundation
import Foundation
final class AudioCodecBuffer {
enum Error: Swift.Error {
case isReady
case noBlockBuffer
}
static let numSamples = 1024
let input: UnsafeMutableAudioBufferListPointer
var isReady: Bool {
numSamples == index
}
var maxLength: Int {
numSamples * bytesPerFrame * numberChannels * maximumBuffers
}
let listSize: Int
private var index = 0
private var buffers: [Data]
private var numSamples: Int
private let bytesPerFrame: Int
private let maximumBuffers: Int
private let numberChannels: Int
private let bufferList: UnsafeMutableAudioBufferListPointer
private(set) var presentationTimeStamp: CMTime = .invalid
deinit {
input.unsafeMutablePointer.deallocate()
bufferList.unsafeMutablePointer.deallocate()
}
init(_ inSourceFormat: AudioStreamBasicDescription, numSamples: Int = AudioCodecBuffer.numSamples) {
self.numSamples = numSamples
let nonInterleaved = inSourceFormat.mFormatFlags & kAudioFormatFlagIsNonInterleaved != 0
bytesPerFrame = Int(inSourceFormat.mBytesPerFrame)
maximumBuffers = nonInterleaved ? Int(inSourceFormat.mChannelsPerFrame) : 1
listSize = AudioBufferList.sizeInBytes(maximumBuffers: maximumBuffers)
input = AudioBufferList.allocate(maximumBuffers: maximumBuffers)
bufferList = AudioBufferList.allocate(maximumBuffers: maximumBuffers)
numberChannels = nonInterleaved ? 1 : Int(inSourceFormat.mChannelsPerFrame)
let dataByteSize = numSamples * bytesPerFrame
buffers = .init(repeating: .init(repeating: 0, count: numSamples * bytesPerFrame), count: maximumBuffers)
input.unsafeMutablePointer.pointee.mNumberBuffers = UInt32(maximumBuffers)
for i in 0..<maximumBuffers {
input[i].mNumberChannels = UInt32(numberChannels)
buffers[i].withUnsafeMutableBytes { pointer in
input[i].mData = pointer.baseAddress
}
input[i].mDataByteSize = UInt32(dataByteSize)
}
}
func write(_ bytes: UnsafeMutableRawPointer?, count: Int, presentationTimeStamp: CMTime) {
numSamples = count
index = count
input.unsafeMutablePointer.pointee.mBuffers.mNumberChannels = 1
input.unsafeMutablePointer.pointee.mBuffers.mData = bytes
input.unsafeMutablePointer.pointee.mBuffers.mDataByteSize = UInt32(count)
}
func write(_ sampleBuffer: CMSampleBuffer, offset: Int) throws -> Int {
guard !isReady else {
throw Error.isReady
}
if presentationTimeStamp == .invalid {
let offsetTimeStamp: CMTime = offset == 0 ? .zero : CMTime(value: CMTimeValue(offset), timescale: sampleBuffer.presentationTimeStamp.timescale)
presentationTimeStamp = CMTimeAdd(sampleBuffer.presentationTimeStamp, offsetTimeStamp)
}
var blockBuffer: CMBlockBuffer?
CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
sampleBuffer,
bufferListSizeNeededOut: nil,
bufferListOut: bufferList.unsafeMutablePointer,
bufferListSize: listSize,
blockBufferAllocator: kCFAllocatorDefault,
blockBufferMemoryAllocator: kCFAllocatorDefault,
flags: 0,
blockBufferOut: &blockBuffer
)
guard blockBuffer != nil else {
throw Error.noBlockBuffer
}
let numSamples = min(self.numSamples - index, sampleBuffer.numSamples - offset)
for i in 0..<maximumBuffers {
guard let data = bufferList[i].mData else {
continue
}
buffers[i].replaceSubrange(
index * bytesPerFrame..<index * bytesPerFrame + numSamples * bytesPerFrame,
with: data.advanced(by: offset * bytesPerFrame),
count: numSamples * bytesPerFrame
)
}
index += numSamples
return numSamples
}
func muted() {
for i in 0..<maximumBuffers {
buffers[i].resetBytes(in: 0...)
}
}
func clear() {
presentationTimeStamp = .invalid
index = 0
}
}
extension AudioCodecBuffer: CustomDebugStringConvertible {
// MARK: CustomDebugStringConvertible
var debugDescription: String {
Mirror(reflecting: self).debugDescription
}
}


@@ -1,115 +0,0 @@
import AudioToolbox
/// The type of the AudioCodec supports format.
public enum AudioCodecFormat {
/// The AAC format.
case aac
/// The PCM format.
case pcm
var formatID: AudioFormatID {
switch self {
case .aac:
return kAudioFormatMPEG4AAC
case .pcm:
return kAudioFormatLinearPCM
}
}
var formatFlags: UInt32 {
switch self {
case .aac:
return UInt32(MPEG4ObjectID.AAC_LC.rawValue)
case .pcm:
return kAudioFormatFlagIsNonInterleaved | kAudioFormatFlagIsPacked | kAudioFormatFlagIsFloat
}
}
var framesPerPacket: UInt32 {
switch self {
case .aac:
return 1024
case .pcm:
return 1
}
}
var packetSize: UInt32 {
switch self {
case .aac:
return 1
case .pcm:
return 1024
}
}
var bitsPerChannel: UInt32 {
switch self {
case .aac:
return 0
case .pcm:
return 32
}
}
var bytesPerPacket: UInt32 {
switch self {
case .aac:
return 0
case .pcm:
return (bitsPerChannel / 8)
}
}
var bytesPerFrame: UInt32 {
switch self {
case .aac:
return 0
case .pcm:
return (bitsPerChannel / 8)
}
}
var inClassDescriptions: [AudioClassDescription] {
switch self {
case .aac:
#if os(iOS)
return [
AudioClassDescription(mType: kAudioEncoderComponentType, mSubType: kAudioFormatMPEG4AAC, mManufacturer: kAppleSoftwareAudioCodecManufacturer),
AudioClassDescription(mType: kAudioEncoderComponentType, mSubType: kAudioFormatMPEG4AAC, mManufacturer: kAppleHardwareAudioCodecManufacturer)
]
#else
return []
#endif
case .pcm:
return []
}
}
func maximumBuffers(_ channel: UInt32) -> Int {
switch self {
case .aac:
return 1
case .pcm:
return Int(channel)
}
}
func audioStreamBasicDescription(_ inSourceFormat: AudioStreamBasicDescription?, sampleRate: Double, channels: UInt32) -> AudioStreamBasicDescription? {
guard let inSourceFormat = inSourceFormat else {
return nil
}
let destinationChannels: UInt32 = (channels == 0) ? inSourceFormat.mChannelsPerFrame : channels
return AudioStreamBasicDescription(
mSampleRate: sampleRate == 0 ? inSourceFormat.mSampleRate : sampleRate,
mFormatID: formatID,
mFormatFlags: formatFlags,
mBytesPerPacket: bytesPerPacket,
mFramesPerPacket: framesPerPacket,
mBytesPerFrame: bytesPerFrame,
mChannelsPerFrame: destinationChannels,
mBitsPerChannel: bitsPerChannel,
mReserved: 0
)
}
}


@@ -0,0 +1,147 @@
import AVFoundation
import Foundation
final class AudioCodecRingBuffer {
enum Error: Swift.Error {
case isReady
case noBlockBuffer
}
static let numSamples: UInt32 = 1024
static let maxBuffers: Int = 6
var isReady: Bool {
numSamples == index
}
var current: AVAudioPCMBuffer {
return buffers[cursor]
}
private(set) var presentationTimeStamp: CMTime = .invalid
private var index: Int = 0
private var numSamples: Int
private var format: AVAudioFormat
private var buffers: [AVAudioPCMBuffer] = []
private var cursor: Int = 0
private var workingBuffer: AVAudioPCMBuffer
private var maxBuffers: Int = AudioCodecRingBuffer.maxBuffers
init?(_ inSourceFormat: inout AudioStreamBasicDescription, numSamples: UInt32 = AudioCodecRingBuffer.numSamples) {
guard
inSourceFormat.mFormatID == kAudioFormatLinearPCM,
let format = AVAudioFormat(streamDescription: &inSourceFormat),
let workingBuffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: numSamples) else {
return nil
}
for _ in 0..<maxBuffers {
guard let buffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: numSamples) else {
return nil
}
buffer.frameLength = numSamples
self.buffers.append(buffer)
}
self.format = format
self.workingBuffer = workingBuffer
self.numSamples = Int(numSamples)
}
func appendSampleBuffer(_ sampleBuffer: CMSampleBuffer, offset: Int) -> Int {
if isReady {
return -1
}
if presentationTimeStamp == .invalid {
let offsetTimeStamp: CMTime = offset == 0 ? .zero : CMTime(value: CMTimeValue(offset), timescale: sampleBuffer.presentationTimeStamp.timescale)
presentationTimeStamp = CMTimeAdd(sampleBuffer.presentationTimeStamp, offsetTimeStamp)
}
if offset == 0 {
if workingBuffer.frameLength < sampleBuffer.numSamples {
if let buffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: AVAudioFrameCount(sampleBuffer.numSamples)) {
self.workingBuffer = buffer
}
}
workingBuffer.frameLength = AVAudioFrameCount(sampleBuffer.numSamples)
CMSampleBufferCopyPCMDataIntoAudioBufferList(
sampleBuffer,
at: 0,
frameCount: Int32(sampleBuffer.numSamples),
into: workingBuffer.mutableAudioBufferList
)
}
let numSamples = min(self.numSamples - index, Int(sampleBuffer.numSamples) - offset)
if format.isInterleaved {
let channelCount = Int(format.channelCount)
switch format.commonFormat {
case .pcmFormatInt16:
memcpy(current.int16ChannelData?[0].advanced(by: index), workingBuffer.int16ChannelData?[0].advanced(by: offset), numSamples * 2 * channelCount)
case .pcmFormatInt32:
memcpy(current.int32ChannelData?[0].advanced(by: index), workingBuffer.int32ChannelData?[0].advanced(by: offset), numSamples * 4 * channelCount)
case .pcmFormatFloat32:
memcpy(current.floatChannelData?[0].advanced(by: index), workingBuffer.floatChannelData?[0].advanced(by: offset), numSamples * 4 * channelCount)
default:
break
}
} else {
for i in 0..<Int(format.channelCount) {
switch format.commonFormat {
case .pcmFormatInt16:
memcpy(current.int16ChannelData?[i].advanced(by: index), workingBuffer.int16ChannelData?[i].advanced(by: offset), numSamples * 2)
case .pcmFormatInt32:
memcpy(current.int32ChannelData?[i].advanced(by: index), workingBuffer.int32ChannelData?[i].advanced(by: offset), numSamples * 4)
case .pcmFormatFloat32:
memcpy(current.floatChannelData?[i].advanced(by: index), workingBuffer.floatChannelData?[i].advanced(by: offset), numSamples * 4)
default:
break
}
}
}
index += numSamples
return numSamples
}
func muted() {
if format.isInterleaved {
let channelCount = Int(format.channelCount)
switch format.commonFormat {
case .pcmFormatInt16:
current.int16ChannelData?[0].assign(repeating: 0, count: numSamples * channelCount)
case .pcmFormatInt32:
current.int32ChannelData?[0].assign(repeating: 0, count: numSamples * channelCount)
case .pcmFormatFloat32:
current.floatChannelData?[0].assign(repeating: 0, count: numSamples * channelCount)
default:
break
}
} else {
for i in 0..<Int(format.channelCount) {
switch format.commonFormat {
case .pcmFormatInt16:
current.int16ChannelData?[i].assign(repeating: 0, count: numSamples)
case .pcmFormatInt32:
current.int32ChannelData?[i].assign(repeating: 0, count: numSamples)
case .pcmFormatFloat32:
current.floatChannelData?[i].assign(repeating: 0, count: numSamples)
default:
break
}
}
}
}
func next() {
presentationTimeStamp = .invalid
index = 0
cursor += 1
if cursor == buffers.count {
cursor = 0
}
}
}
extension AudioCodecRingBuffer: CustomDebugStringConvertible {
// MARK: CustomDebugStringConvertible
var debugDescription: String {
Mirror(reflecting: self).debugDescription
}
}
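The offset-based recursion in `appendSampleBuffer` effectively slices arbitrary-length input into 1024-frame chunks before conversion. The standalone sketch below (not part of the library) illustrates just that slicing logic:

```swift
// Illustrative helper: compute the frame-sized chunks that
// AudioCodec/AudioCodecRingBuffer would consume for a given input.
func chunkRanges(totalSamples: Int, frameSize: Int = 1024) -> [Range<Int>] {
    var ranges: [Range<Int>] = []
    var offset = 0
    while offset < totalSamples {
        let length = min(frameSize, totalSamples - offset)
        ranges.append(offset..<offset + length)
        offset += length
    }
    return ranges
}

// chunkRanges(totalSamples: 2500) -> [0..<1024, 1024..<2048, 2048..<2500]
```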


@@ -0,0 +1,146 @@
import AVFAudio
import Foundation
/// The AudioCodecSettings class specifying audio compression settings.
public struct AudioCodecSettings: Codable {
/// The default value.
public static let `default` = AudioCodecSettings()
/// The type of the AudioCodec supports format.
public enum Format: Codable {
/// The AAC format.
case aac
/// The PCM format.
case pcm
var formatID: AudioFormatID {
switch self {
case .aac:
return kAudioFormatMPEG4AAC
case .pcm:
return kAudioFormatLinearPCM
}
}
var formatFlags: UInt32 {
switch self {
case .aac:
return UInt32(MPEG4ObjectID.AAC_LC.rawValue)
case .pcm:
return kAudioFormatFlagIsNonInterleaved | kAudioFormatFlagIsPacked | kAudioFormatFlagIsFloat
}
}
var framesPerPacket: UInt32 {
switch self {
case .aac:
return 1024
case .pcm:
return 1
}
}
var packetSize: UInt32 {
switch self {
case .aac:
return 1
case .pcm:
return 1024
}
}
var bitsPerChannel: UInt32 {
switch self {
case .aac:
return 0
case .pcm:
return 32
}
}
var bytesPerPacket: UInt32 {
switch self {
case .aac:
return 0
case .pcm:
return (bitsPerChannel / 8)
}
}
var bytesPerFrame: UInt32 {
switch self {
case .aac:
return 0
case .pcm:
return (bitsPerChannel / 8)
}
}
func makeAudioBuffer(_ format: AVAudioFormat) -> AVAudioBuffer? {
switch self {
case .aac:
return AVAudioCompressedBuffer(format: format, packetCapacity: 1, maximumPacketSize: 1024)
case .pcm:
return AVAudioPCMBuffer(pcmFormat: format, frameCapacity: 1024)
}
}
func makeAudioFormat(_ inSourceFormat: AudioStreamBasicDescription?) -> AVAudioFormat? {
guard let inSourceFormat else {
return nil
}
switch self {
case .aac:
var streamDescription = AudioStreamBasicDescription(
mSampleRate: inSourceFormat.mSampleRate,
mFormatID: formatID,
mFormatFlags: formatFlags,
mBytesPerPacket: bytesPerPacket,
mFramesPerPacket: framesPerPacket,
mBytesPerFrame: bytesPerFrame,
mChannelsPerFrame: inSourceFormat.mChannelsPerFrame,
mBitsPerChannel: bitsPerChannel,
mReserved: 0
)
return AVAudioFormat(streamDescription: &streamDescription)
case .pcm:
return AVAudioFormat(
commonFormat: .pcmFormatFloat32,
sampleRate: inSourceFormat.mSampleRate,
channels: inSourceFormat.mChannelsPerFrame,
interleaved: true
)
}
}
}
/// Specifies the bitRate of audio output.
public var bitRate: Int
/// Specifies the output format.
public var format: AudioCodecSettings.Format
/// Create a new AudioCodecSettings instance.
public init(
bitRate: Int = 64 * 1000,
format: AudioCodecSettings.Format = .aac
) {
self.bitRate = bitRate
self.format = format
}
func apply(_ converter: AVAudioConverter?, oldValue: AudioCodecSettings?) {
guard let converter else {
return
}
if bitRate != oldValue?.bitRate {
let minAvailableBitRate = converter.applicableEncodeBitRates?.min(by: { a, b in
return a.intValue < b.intValue
})?.intValue ?? bitRate
let maxAvailableBitRate = converter.applicableEncodeBitRates?.max(by: { a, b in
return a.intValue < b.intValue
})?.intValue ?? bitRate
converter.bitRate = min(maxAvailableBitRate, max(minAvailableBitRate, bitRate))
}
}
}
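The `apply` method above clamps a requested bitRate into the converter's `applicableEncodeBitRates` range. In isolation, the clamp reduces to the sketch below; the helper name is illustrative, not part of the library:

```swift
// Illustrative stand-in for the clamping done in AudioCodecSettings.apply.
func clampedBitRate(_ requested: Int, applicable: [Int]) -> Int {
    guard let minRate = applicable.min(), let maxRate = applicable.max() else {
        return requested // no constraint information; pass through
    }
    return min(maxRate, max(minRate, requested))
}

// clampedBitRate(1_000_000, applicable: [8_000, 320_000]) == 320_000
```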


@@ -6,8 +6,8 @@ protocol VTSessionConvertible {
func setOption(_ option: VTSessionOption) -> OSStatus
func setOptions(_ options: Set<VTSessionOption>) -> OSStatus
func copySupportedPropertyDictionary() -> [AnyHashable: Any]
-func inputBuffer(_ imageBuffer: CVImageBuffer, presentationTimeStamp: CMTime, duration: CMTime, outputHandler: @escaping VTCompressionOutputHandler)
-func inputBuffer(_ sampleBuffer: CMSampleBuffer, outputHandler: @escaping VTDecompressionOutputHandler)
+func encodeFrame(_ imageBuffer: CVImageBuffer, presentationTimeStamp: CMTime, duration: CMTime, outputHandler: @escaping VTCompressionOutputHandler) -> OSStatus
+func decodeFrame(_ sampleBuffer: CMSampleBuffer, outputHandler: @escaping VTDecompressionOutputHandler) -> OSStatus
func invalidate()
}


@@ -5,15 +5,15 @@ enum VTSessionMode {
case compression
case decompression
-func makeSession(_ videoCodec: VideoCodec) -> VTSessionConvertible? {
+func makeSession(_ videoCodec: VideoCodec) -> (any VTSessionConvertible)? {
switch self {
case .compression:
var session: VTCompressionSession?
var status = VTCompressionSessionCreate(
allocator: kCFAllocatorDefault,
-width: videoCodec.width,
-height: videoCodec.height,
-codecType: kCMVideoCodecType_H264,
+width: videoCodec.settings.videoSize.width,
+height: videoCodec.settings.videoSize.height,
+codecType: videoCodec.settings.format.codecType,
encoderSpecification: nil,
imageBufferAttributes: videoCodec.attributes as CFDictionary?,
compressedDataAllocator: nil,
@@ -25,7 +25,7 @@ enum VTSessionMode {
videoCodec.delegate?.videoCodec(videoCodec, errorOccurred: .failedToCreate(status: status))
return nil
}
-status = session.setOptions(videoCodec.options())
+status = session.setOptions(videoCodec.settings.options())
guard status == noErr else {
videoCodec.delegate?.videoCodec(videoCodec, errorOccurred: .failedToPrepare(status: status))
return nil


@@ -1,5 +1,6 @@
import Foundation
+/// A structure that represents Key-Value-Object for the VideoToolbox option.
public struct VTSessionOption {
let key: VTSessionOptionKey
let value: AnyObject


@@ -42,6 +42,8 @@ struct VTSessionOptionKey {
static let maxH264SliceBytes = VTSessionOptionKey(CFString: kVTCompressionPropertyKey_MaxH264SliceBytes)
static let maxFrameDelayCount = VTSessionOptionKey(CFString: kVTCompressionPropertyKey_MaxFrameDelayCount)
static let encoderID = VTSessionOptionKey(CFString: kVTVideoEncoderSpecification_EncoderID)
+@available(iOS 16.0, tvOS 16.0, macOS 13.0, *)
+static let constantBitRate = VTSessionOptionKey(CFString: kVTCompressionPropertyKey_ConstantBitRate)
let CFString: CFString
}


@@ -11,11 +11,13 @@ import UIKit
*/
public protocol VideoCodecDelegate: AnyObject {
/// Tells the receiver to set a formatDescription.
-func videoCodec(_ codec: VideoCodec, didSet formatDescription: CMFormatDescription?)
+func videoCodec(_ codec: VideoCodec, didOutput formatDescription: CMFormatDescription?)
/// Tells the receiver to output an encoded or decoded sampleBuffer.
func videoCodec(_ codec: VideoCodec, didOutput sampleBuffer: CMSampleBuffer)
/// Tells the receiver that an error occurred.
func videoCodec(_ codec: VideoCodec, errorOccurred error: VideoCodec.Error)
+/// Asks the receiver whether to drop a frame.
+func videoCodecWillDropFame(_ codec: VideoCodec) -> Bool
}
// MARK: -
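A sketch of how the new `videoCodecWillDropFame` hook might be used to shed load; the queue-depth heuristic and the `DropController` type are assumptions for illustration, not from this diff:

```swift
import CoreMedia

// Hypothetical delegate that drops frames when an outbound queue backs up.
final class DropController: VideoCodecDelegate {
    var pendingFrames = 0 // assumed measure of the send queue depth

    func videoCodec(_ codec: VideoCodec, didOutput formatDescription: CMFormatDescription?) {}
    func videoCodec(_ codec: VideoCodec, didOutput sampleBuffer: CMSampleBuffer) {
        pendingFrames += 1 // decremented elsewhere once the frame is sent
    }
    func videoCodec(_ codec: VideoCodec, errorOccurred error: VideoCodec.Error) {}
    func videoCodecWillDropFame(_ codec: VideoCodec) -> Bool {
        pendingFrames > 30 // drop when roughly a second of frames is queued
    }
}
```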
@@ -23,16 +25,6 @@ public protocol VideoCodecDelegate: AnyObject {
* The VideoCodec class provides methods to encode or decode video.
*/
public class VideoCodec {
static let defaultMinimumGroupOfPictures: Int = 12
#if os(OSX)
#if arch(arm64)
static let encoderName = NSString(string: "com.apple.videotoolbox.videoencoder.ave.avc")
#else
static let encoderName = NSString(string: "com.apple.videotoolbox.videoencoder.h264.gva")
#endif
#endif
/**
* The VideoCodec error domain codes.
*/
@@ -47,173 +39,37 @@ public class VideoCodec {
case failedToSetOption(status: OSStatus, option: VTSessionOption)
}
/**
* The video encoding or decoding options.
*/
public enum Option: String, KeyPathRepresentable, CaseIterable {
/// Specifies the width of video.
case width
/// Specifies the height of video.
case height
/// Specifies the bitrate.
case bitrate
/// Specifies the H264 profile level.
case profileLevel
#if os(macOS)
/// Specifies the HardwareEncoder is enabled(TRUE), or not(FALSE).
case enabledHardwareEncoder
#endif
/// Specifies the keyframeInterval.
case maxKeyFrameIntervalDuration
/// Specifies the scalingMode.
case scalingMode
case allowFrameReordering
public var keyPath: AnyKeyPath {
switch self {
case .width:
return \VideoCodec.width
case .height:
return \VideoCodec.height
case .bitrate:
return \VideoCodec.bitrate
#if os(macOS)
case .enabledHardwareEncoder:
return \VideoCodec.enabledHardwareEncoder
#endif
case .maxKeyFrameIntervalDuration:
return \VideoCodec.maxKeyFrameIntervalDuration
case .scalingMode:
return \VideoCodec.scalingMode
case .profileLevel:
return \VideoCodec.profileLevel
case .allowFrameReordering:
return \VideoCodec.allowFrameReordering
}
}
}
/// The videoCodec's width value. The default value is 480.
public static let defaultWidth: Int32 = 480
/// The videoCodec's height value. The default value is 272.
public static let defaultHeight: Int32 = 272
/// The videoCodec's bitrate value. The default value is 160,000.
public static let defaultBitrate: UInt32 = 160 * 1000
/// The videoCodec's scalingMode value. The default value is trim.
public static let defaultScalingMode: ScalingMode = .trim
/// The videoCodec's attributes value.
public static var defaultAttributes: [NSString: AnyObject]? = [
-kCVPixelBufferIOSurfacePropertiesKey: [:] as AnyObject,
+kCVPixelBufferIOSurfacePropertiesKey: NSDictionary(),
kCVPixelBufferMetalCompatibilityKey: kCFBooleanTrue
]
/// Specifies the settings for a VideoCodec.
-public var settings: Setting<VideoCodec, Option> = [:] {
+public var settings: VideoCodecSettings = .default {
didSet {
-settings.observer = self
+let invalidateSession = settings.invalidateSession(oldValue)
+if invalidateSession {
+self.invalidateSession = invalidateSession
+} else {
+settings.apply(self, rhs: oldValue)
+}
}
}
/// The running value indicating whether the VideoCodec is running.
public private(set) var isRunning: Atomic<Bool> = .init(false)
var scalingMode = VideoCodec.defaultScalingMode {
didSet {
guard scalingMode != oldValue else {
return
}
invalidateSession = true
}
}
var width = VideoCodec.defaultWidth {
didSet {
guard width != oldValue else {
return
}
invalidateSession = true
}
}
var height = VideoCodec.defaultHeight {
didSet {
guard height != oldValue else {
return
}
invalidateSession = true
}
}
#if os(macOS)
var enabledHardwareEncoder = true {
didSet {
guard enabledHardwareEncoder != oldValue else {
return
}
invalidateSession = true
}
}
#endif
var bitrate = VideoCodec.defaultBitrate {
didSet {
guard bitrate != oldValue else {
return
}
let option = VTSessionOption(key: .averageBitRate, value: NSNumber(value: bitrate))
if let status = session?.setOption(option), status != noErr {
delegate?.videoCodec(self, errorOccurred: .failedToSetOption(status: status, option: option))
}
}
}
var profileLevel = kVTProfileLevel_H264_Baseline_3_1 as String {
didSet {
guard profileLevel != oldValue else {
return
}
invalidateSession = true
}
}
var maxKeyFrameIntervalDuration = 2.0 {
didSet {
guard maxKeyFrameIntervalDuration != oldValue else {
return
}
invalidateSession = true
}
}
// swiftlint:disable discouraged_optional_boolean
var allowFrameReordering: Bool? = false {
didSet {
guard allowFrameReordering != oldValue else {
return
}
invalidateSession = true
}
}
var locked: UInt32 = 0
var lockQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.VideoCodec.lock")
var expectedFrameRate = IOMixer.defaultFrameRate {
didSet {
guard expectedFrameRate != oldValue else {
return
}
let option = VTSessionOption(key: .expectedFrameRate, value: NSNumber(value: expectedFrameRate))
if let status = session?.setOption(option), status != noErr {
delegate?.videoCodec(self, errorOccurred: .failedToSetOption(status: status, option: option))
}
}
}
var formatDescription: CMFormatDescription? {
didSet {
guard !CMFormatDescriptionEqual(formatDescription, otherFormatDescription: oldValue) else {
return
}
delegate?.videoCodec(self, didOutput: formatDescription)
}
}
var needsSync: Atomic<Bool> = .init(true)
var isBaseline = true
var attributes: [NSString: AnyObject]? {
guard VideoCodec.defaultAttributes != nil else {
return nil
@@ -222,49 +78,44 @@ public class VideoCodec {
for (key, value) in VideoCodec.defaultAttributes ?? [:] {
attributes[key] = value
}
attributes[kCVPixelBufferWidthKey] = NSNumber(value: settings.videoSize.width)
attributes[kCVPixelBufferHeightKey] = NSNumber(value: settings.videoSize.height)
return attributes
}
weak var delegate: (any VideoCodecDelegate)?
private(set) var session: (any VTSessionConvertible)? {
didSet {
oldValue?.invalidate()
invalidateSession = false
}
}
private var invalidateSession = true
private var buffers: [CMSampleBuffer] = []
private var minimumGroupOfPictures: Int = VideoCodec.defaultMinimumGroupOfPictures
func appendImageBuffer(_ imageBuffer: CVImageBuffer, presentationTimeStamp: CMTime, duration: CMTime) {
guard isRunning.value, !(delegate?.videoCodecWillDropFame(self) ?? false) else {
return
}
if invalidateSession {
session = VTSessionMode.compression.makeSession(self)
}
_ = session?.encodeFrame(
imageBuffer,
presentationTimeStamp: presentationTimeStamp,
duration: duration
) { [unowned self] status, _, sampleBuffer in
guard let sampleBuffer, status == noErr else {
delegate?.videoCodec(self, errorOccurred: .failedToFlame(status: status))
return
}
formatDescription = sampleBuffer.formatDescription
delegate?.videoCodec(self, didOutput: sampleBuffer)
}
}
func appendSampleBuffer(_ sampleBuffer: CMSampleBuffer) {
if invalidateSession {
session = VTSessionMode.decompression.makeSession(self)
needsSync.mutate { $0 = true }
@@ -272,30 +123,26 @@ public class VideoCodec {
if !sampleBuffer.isNotSync {
needsSync.mutate { $0 = false }
}
_ = session?.decodeFrame(sampleBuffer) { [unowned self] status, _, imageBuffer, presentationTimeStamp, duration in
guard let imageBuffer, status == noErr else {
self.delegate?.videoCodec(self, errorOccurred: .failedToFlame(status: status))
return
}
var timingInfo = CMSampleTimingInfo(
duration: duration,
presentationTimeStamp: presentationTimeStamp,
decodeTimeStamp: sampleBuffer.decodeTimeStamp
)
var videoFormatDescription: CMVideoFormatDescription?
var status = CMVideoFormatDescriptionCreateForImageBuffer(
allocator: kCFAllocatorDefault,
imageBuffer: imageBuffer,
formatDescriptionOut: &videoFormatDescription
)
guard status == noErr else {
delegate?.videoCodec(self, errorOccurred: .failedToFlame(status: status))
return
}
var sampleBuffer: CMSampleBuffer?
status = CMSampleBufferCreateForImageBuffer(
allocator: kCFAllocatorDefault,
@@ -307,52 +154,14 @@ public class VideoCodec {
sampleTiming: &timingInfo,
sampleBufferOut: &sampleBuffer
)
guard let buffer = sampleBuffer, status == noErr else {
delegate?.videoCodec(self, errorOccurred: .failedToFlame(status: status))
return
}
delegate?.videoCodec(self, didOutput: buffer)
}
}
func options() -> Set<VTSessionOption> {
let isBaseline = profileLevel.contains("Baseline")
var options = Set<VTSessionOption>([
.init(key: .realTime, value: kCFBooleanTrue),
.init(key: .profileLevel, value: profileLevel as NSObject),
.init(key: .averageBitRate, value: NSNumber(value: bitrate)),
.init(key: .expectedFrameRate, value: NSNumber(value: expectedFrameRate)),
.init(key: .maxKeyFrameIntervalDuration, value: NSNumber(value: maxKeyFrameIntervalDuration)),
.init(key: .allowFrameReordering, value: (allowFrameReordering ?? !isBaseline) as NSObject),
.init(key: .pixelTransferProperties, value: [
"ScalingMode": scalingMode.rawValue
] as NSObject)
])
#if os(OSX)
if enabledHardwareEncoder {
options.insert(.init(key: .encoderID, value: VideoCodec.encoderName))
options.insert(.init(key: .enableHardwareAcceleratedVideoEncoder, value: kCFBooleanTrue))
options.insert(.init(key: .requireHardwareAcceleratedVideoEncoder, value: kCFBooleanTrue))
}
#endif
if !isBaseline {
options.insert(.init(key: .H264EntropyMode, value: kVTH264EntropyMode_CABAC))
}
return options
}
#if os(iOS)
@objc
private func applicationWillEnterForeground(_ notification: Notification) {
@@ -382,7 +191,6 @@ extension VideoCodec: Running {
public func startRunning() {
lockQueue.async {
self.isRunning.mutate { $0 = true }
#if os(iOS)
NotificationCenter.default.addObserver(
self,
@@ -405,8 +213,6 @@ extension VideoCodec: Running {
self.session = nil
self.invalidateSession = true
self.needsSync.mutate { $0 = true }
self.formatDescription = nil
#if os(iOS)
NotificationCenter.default.removeObserver(self, name: AVAudioSession.interruptionNotification, object: nil)


@@ -0,0 +1,175 @@
import Foundation
import VideoToolbox
/// The VideoCodecSettings class specifying video compression settings.
public struct VideoCodecSettings: Codable {
/// The default value.
public static let `default` = VideoCodecSettings()
/// A bitRate mode that affects how the video source is encoded.
public enum BitRateMode: String, Codable {
/// The average bit rate.
case average
/// The constant bit rate.
@available(iOS 16.0, tvOS 16.0, macOS 13.0, *)
case constant
var key: VTSessionOptionKey {
if #available(iOS 16.0, tvOS 16.0, macOS 13.0, *) {
switch self {
case .average:
return .averageBitRate
case .constant:
return .constantBitRate
}
}
return .averageBitRate
}
}
/**
* The scaling mode.
* - seealso: https://developer.apple.com/documentation/videotoolbox/kvtpixeltransferpropertykey_scalingmode
* - seealso: https://developer.apple.com/documentation/videotoolbox/vtpixeltransfersession/pixel_transfer_properties/scaling_mode_constants
*/
public enum ScalingMode: String, Codable {
/// kVTScalingMode_Normal
case normal = "Normal"
/// kVTScalingMode_Letterbox
case letterbox = "Letterbox"
/// kVTScalingMode_CropSourceToCleanAperture
case cropSourceToCleanAperture = "CropSourceToCleanAperture"
/// kVTScalingMode_Trim
case trim = "Trim"
}
/// The format types that the VideoCodec supports.
enum Format: Codable {
case h264
case hevc
#if os(macOS)
var encoderID: NSString {
switch self {
case .h264:
#if arch(arm64)
return NSString(string: "com.apple.videotoolbox.videoencoder.ave.avc")
#else
return NSString(string: "com.apple.videotoolbox.videoencoder.h264.gva")
#endif
case .hevc:
return NSString(string: "com.apple.videotoolbox.videoencoder.ave.hevc")
}
}
#endif
var codecType: UInt32 {
switch self {
case .h264:
return kCMVideoCodecType_H264
case .hevc:
return kCMVideoCodecType_HEVC
}
}
}
/// Specifies the video size for encoding.
public var videoSize: VideoSize
/// Specifies the bitrate.
public var bitRate: UInt32
/// Specifies the keyframeInterval.
public var maxKeyFrameIntervalDuration: Int32
/// Specifies the scalingMode.
public var scalingMode: ScalingMode
// swiftlint:disable discouraged_optional_boolean
/// Specifies the allowFrameReordering.
public var allowFrameReordering: Bool?
/// Specifies the bitRateMode.
public var bitRateMode: BitRateMode
/// Specifies the H264 profileLevel.
public var profileLevel: String {
didSet {
if profileLevel.contains("HEVC") {
format = .hevc
} else {
format = .h264
}
}
}
/// Specifies whether the hardware encoder is enabled (true) or not (false) on macOS.
public var isHardwareEncoderEnabled = true
var format: Format = .h264
var expectedFrameRate: Float64 = IOMixer.defaultFrameRate
/// Creates a new VideoCodecSettings instance.
public init(
videoSize: VideoSize = .init(width: 854, height: 480),
profileLevel: String = kVTProfileLevel_H264_Baseline_3_1 as String,
bitRate: UInt32 = 640 * 1000,
maxKeyFrameIntervalDuration: Int32 = 2,
scalingMode: ScalingMode = .trim,
bitRateMode: BitRateMode = .average,
allowFrameReordering: Bool? = nil,
isHardwareEncoderEnabled: Bool = true
) {
self.videoSize = videoSize
self.profileLevel = profileLevel
self.bitRate = bitRate
self.maxKeyFrameIntervalDuration = maxKeyFrameIntervalDuration
self.scalingMode = scalingMode
self.bitRateMode = bitRateMode
self.allowFrameReordering = allowFrameReordering
self.isHardwareEncoderEnabled = isHardwareEncoderEnabled
if profileLevel.contains("HEVC") {
self.format = .hevc
}
}
func invalidateSession(_ rhs: VideoCodecSettings) -> Bool {
return !(videoSize == rhs.videoSize &&
maxKeyFrameIntervalDuration == rhs.maxKeyFrameIntervalDuration &&
scalingMode == rhs.scalingMode &&
allowFrameReordering == rhs.allowFrameReordering &&
bitRateMode == rhs.bitRateMode &&
profileLevel == rhs.profileLevel &&
isHardwareEncoderEnabled == rhs.isHardwareEncoderEnabled
)
}
func apply(_ codec: VideoCodec, rhs: VideoCodecSettings) {
if bitRate != rhs.bitRate {
let option = VTSessionOption(key: bitRateMode.key, value: NSNumber(value: bitRate))
if let status = codec.session?.setOption(option), status != noErr {
codec.delegate?.videoCodec(codec, errorOccurred: .failedToSetOption(status: status, option: option))
}
}
}
func options() -> Set<VTSessionOption> {
let isBaseline = profileLevel.contains("Baseline")
var options = Set<VTSessionOption>([
.init(key: .realTime, value: kCFBooleanTrue),
.init(key: .profileLevel, value: profileLevel as NSObject),
.init(key: bitRateMode.key, value: NSNumber(value: bitRate)),
// It seems that VideoToolbox supports the range 0 to 30.
.init(key: .expectedFrameRate, value: NSNumber(value: (expectedFrameRate <= 30) ? expectedFrameRate : 0)),
.init(key: .maxKeyFrameIntervalDuration, value: NSNumber(value: maxKeyFrameIntervalDuration)),
.init(key: .allowFrameReordering, value: (allowFrameReordering ?? !isBaseline) as NSObject),
.init(key: .pixelTransferProperties, value: [
"ScalingMode": scalingMode.rawValue
] as NSObject)
])
#if os(macOS)
if isHardwareEncoderEnabled {
options.insert(.init(key: .encoderID, value: format.encoderID))
options.insert(.init(key: .enableHardwareAcceleratedVideoEncoder, value: kCFBooleanTrue))
options.insert(.init(key: .requireHardwareAcceleratedVideoEncoder, value: kCFBooleanTrue))
}
#endif
if !isBaseline {
options.insert(.init(key: .H264EntropyMode, value: kVTH264EntropyMode_CABAC))
}
return options
}
}
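For orientation, a minimal usage sketch of the new strongly typed settings (assuming a stream object that exposes a videoSettings property, as this library's NetStream does; names outside this diff are assumptions):

    import HaishinKit
    import VideoToolbox

    var videoSettings = VideoCodecSettings(
        videoSize: .init(width: 1280, height: 720),
        profileLevel: kVTProfileLevel_H264_High_4_1 as String,
        bitRate: 1_200 * 1_000
    )
    stream.videoSettings = videoSettings
    // A later change that touches only bitRate fails none of the
    // invalidateSession(_:) comparisons, so apply(_:rhs:) updates the
    // running session's bit-rate option instead of rebuilding the session.
    videoSettings.bitRate = 800 * 1_000
    stream.videoSettings = videoSettings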


@@ -0,0 +1,20 @@
import Foundation
/// The VideoSize class represents video width and height.
public struct VideoSize: Equatable, Codable {
/// The video width.
public let width: Int32
/// The video height.
public let height: Int32
/// Creates a VideoSize object.
public init(width: Int32, height: Int32) {
self.width = width
self.height = height
}
/// Returns a VideoSize with the width and height swapped.
public func swap() -> VideoSize {
return VideoSize(width: height, height: width)
}
}
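A small usage note: swap() gives a portrait size from a landscape one without constructing a new value by hand.

    let landscape = VideoSize(width: 854, height: 480)
    let portrait = landscape.swap() // VideoSize(width: 480, height: 854)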


@@ -1,34 +1,22 @@
import AVFoundation
import Foundation
#if os(iOS) || os(macOS)
extension AVCaptureDevice {
func videoFormat(width: Int32, height: Int32, frameRate: Float64, isMultiCamSupported: Bool) -> AVCaptureDevice.Format? {
if isMultiCamSupported {
return formats.first {
$0.isMultiCamSupported && $0.isFrameRateSupported(frameRate) && width <= $0.formatDescription.dimensions.width && height <= $0.formatDescription.dimensions.height
} ?? formats.last {
$0.isMultiCamSupported && $0.isFrameRateSupported(frameRate) && $0.formatDescription.dimensions.width < width && $0.formatDescription.dimensions.height < height
}
} else {
return formats.first {
$0.isFrameRateSupported(frameRate) && width <= $0.formatDescription.dimensions.width && height <= $0.formatDescription.dimensions.height
} ?? formats.last {
$0.isFrameRateSupported(frameRate) && $0.formatDescription.dimensions.width < width && $0.formatDescription.dimensions.height < height
}
}
}
}
#endif
#if os(macOS)
extension AVCaptureDevice {
func videoFormat(width: Int32, height: Int32, isMultiCamSupported: Bool) -> AVCaptureDevice.Format? {
return formats.first {
width <= $0.formatDescription.dimensions.width && height <= $0.formatDescription.dimensions.height
} ?? formats.last {
$0.formatDescription.dimensions.width < width && $0.formatDescription.dimensions.height < height
}
}
}
#endif
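The isFrameRateSupported(_:) predicate used above is defined elsewhere in the library; a plausible sketch (an assumption, not the committed implementation) checks the format's advertised frame-rate ranges:

    extension AVCaptureDevice.Format {
        // Hypothetical: true when any supported range covers the requested rate.
        func isFrameRateSupported(_ frameRate: Float64) -> Bool {
            videoSupportedFrameRateRanges.contains {
                $0.minFrameRate <= frameRate && frameRate <= $0.maxFrameRate
            }
        }
    }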


@@ -0,0 +1,26 @@
import AVFoundation
import Foundation
#if targetEnvironment(macCatalyst)
extension AVCaptureSession {
var isMultitaskingCameraAccessSupported: Bool {
get {
false
}
// swiftlint:disable unused_setter_value
set {
logger.warn("isMultitaskingCameraAccessSupported is unavailabled in Mac Catalyst.")
}
}
var isMultitaskingCameraAccessEnabled: Bool {
get {
false
}
// swiftlint:disable unused_setter_value
set {
logger.warn("isMultitaskingCameraAccessEnabled is unavailabled in Mac Catalyst.")
}
}
}
#endif


@@ -16,4 +16,9 @@ extension CMBlockBuffer {
}
return Data(bytes: buffer!, count: length)
}
@discardableResult
func copyDataBytes(to buffer: UnsafeMutableRawPointer) -> OSStatus {
return CMBlockBufferCopyDataBytes(self, atOffset: 0, dataLength: dataLength, destination: buffer)
}
}
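A usage sketch for the new helper, assuming a blockBuffer: CMBlockBuffer is in scope; dataLength sizes the destination so the copy cannot overrun:

    var bytes = [UInt8](repeating: 0, count: blockBuffer.dataLength)
    blockBuffer.copyDataBytes(to: &bytes) // the OSStatus result is discardable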


@@ -2,7 +2,7 @@ import CoreMedia
import Foundation
extension CMFormatDescription {
var _mediaType: CMMediaType {
CMFormatDescriptionGetMediaType(self)
}
}


@@ -1,4 +1,5 @@
import Accelerate
import AVFoundation
import CoreMedia
extension CMSampleBuffer {


@@ -8,21 +8,4 @@ extension CMVideoFormatDescription {
var dimensions: CMVideoDimensions {
CMVideoFormatDescriptionGetDimensions(self)
}
static func create(pixelBuffer: CVPixelBuffer) -> CMVideoFormatDescription? {
var formatDescription: CMFormatDescription?
let status: OSStatus = CMVideoFormatDescriptionCreate(
allocator: kCFAllocatorDefault,
codecType: kCMVideoCodecType_422YpCbCr8,
width: Int32(pixelBuffer.width),
height: Int32(pixelBuffer.height),
extensions: nil,
formatDescriptionOut: &formatDescription
)
guard status == noErr else {
logger.warn("\(status)")
return nil
}
return formatDescription
}
}


@@ -42,7 +42,8 @@ extension CVPixelBuffer {
let yScale = Float(roi.height) / Float(inputImageBuffer.height)
let scaleFactor = (xScale < yScale) ? xScale : yScale
var scaledInputImageBuffer = inputImageBuffer.scale(scaleFactor)
var shape = ShapeFactory.shared.cornerRadius(CGSize(width: CGFloat(scaledInputImageBuffer.width), height: CGFloat(scaledInputImageBuffer.height)), cornerRadius: radius)
vImageSelectChannels_ARGB8888(&shape, &scaledInputImageBuffer, &scaledInputImageBuffer, 0x8, vImage_Flags(kvImageNoFlags))
defer {
scaledInputImageBuffer.free()
}
@@ -52,7 +53,7 @@
}
@discardableResult
func split(_ pixelBuffer: CVPixelBuffer?, direction: ImageTransform) -> Self {
guard var inputImageBuffer = try? pixelBuffer?.makevImage_Buffer(format: &Self.format) else {
return self
}


@@ -1,3 +1,4 @@
import CoreMedia
import Foundation
extension Data {
@@ -9,4 +10,37 @@ extension Data {
return [UInt8](UnsafeBufferPointer(start: pointer, count: count))
}
}
func makeBlockBuffer(advancedBy: Int = 0) -> CMBlockBuffer? {
var blockBuffer: CMBlockBuffer?
let length = count - advancedBy
return withUnsafeBytes { (buffer: UnsafeRawBufferPointer) -> CMBlockBuffer? in
guard let baseAddress = buffer.baseAddress else {
return nil
}
guard CMBlockBufferCreateWithMemoryBlock(
allocator: kCFAllocatorDefault,
memoryBlock: nil,
blockLength: length,
blockAllocator: nil,
customBlockSource: nil,
offsetToData: 0,
dataLength: length,
flags: 0,
blockBufferOut: &blockBuffer) == noErr else {
return nil
}
guard let blockBuffer else {
return nil
}
guard CMBlockBufferReplaceDataBytes(
with: baseAddress.advanced(by: advancedBy),
blockBuffer: blockBuffer,
offsetIntoDestination: 0,
dataLength: length) == noErr else {
return nil
}
return blockBuffer
}
}
}
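makeBlockBuffer(advancedBy:) pairs with the CMBlockBuffer.copyDataBytes(to:) helper added earlier; a round-trip sketch over arbitrary sample bytes:

    let payload = Data([0x00, 0x00, 0x00, 0x01, 0x65])
    if let block = payload.makeBlockBuffer() {
        var copy = [UInt8](repeating: 0, count: block.dataLength)
        block.copyDataBytes(to: &copy) // copy now holds payload's bytes
    }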


@@ -9,9 +9,11 @@ extension VTCompressionSession {
extension VTCompressionSession: VTSessionConvertible {
// MARK: VTSessionConvertible
@discardableResult
@inline(__always)
func encodeFrame(_ imageBuffer: CVImageBuffer, presentationTimeStamp: CMTime, duration: CMTime, outputHandler: @escaping VTCompressionOutputHandler) -> OSStatus {
var flags: VTEncodeInfoFlags = []
return VTCompressionSessionEncodeFrame(
self,
imageBuffer: imageBuffer,
presentationTimeStamp: presentationTimeStamp,
@@ -22,7 +24,10 @@ extension VTCompressionSession: VTSessionConvertible {
)
}
@discardableResult
@inline(__always)
func decodeFrame(_ sampleBuffer: CMSampleBuffer, outputHandler: @escaping VTDecompressionOutputHandler) -> OSStatus {
return noErr
}
func invalidate() {


@@ -7,12 +7,17 @@ extension VTDecompressionSession: VTSessionConvertible {
._EnableTemporalProcessing
]
@discardableResult
@inline(__always)
func encodeFrame(_ imageBuffer: CVImageBuffer, presentationTimeStamp: CMTime, duration: CMTime, outputHandler: @escaping VTCompressionOutputHandler) -> OSStatus {
return noErr
}
@discardableResult
@inline(__always)
func decodeFrame(_ sampleBuffer: CMSampleBuffer, outputHandler: @escaping VTDecompressionOutputHandler) -> OSStatus {
var flagsOut: VTDecodeInfoFlags = []
return VTDecompressionSessionDecodeFrame(
self,
sampleBuffer: sampleBuffer,
flags: Self.defaultDecodeFlags,


@@ -1,54 +1,9 @@
import Accelerate
import CoreMedia
import Foundation
extension vImage_Buffer {
enum TransformDirection {
case north
case south
case east
case west
var opposite: TransformDirection {
switch self {
case .north:
return .south
case .south:
return .north
case .east:
return .west
case .west:
return .east
}
}
func tx(_ width: Double) -> Double {
switch self {
case .north:
return 0.0
case .south:
return Double.leastNonzeroMagnitude
case .east:
return width / 2
case .west:
return -(width / 2)
}
}
func ty(_ height: Double) -> Double {
switch self {
case .north:
return height / 2
case .south:
return -(height / 2)
case .east:
return Double.leastNonzeroMagnitude
case .west:
return 0.0
}
}
}
init?(height: vImagePixelCount, width: vImagePixelCount, pixelBits: UInt32, flags: vImage_Flags) {
self.init()
guard vImageBuffer_Init(
@@ -65,6 +20,11 @@ extension vImage_Buffer {
mutating func copy(to cvPixelBuffer: CVPixelBuffer, format: inout vImage_CGImageFormat) -> vImage_Error {
let cvImageFormat = vImageCVImageFormat_CreateWithCVPixelBuffer(cvPixelBuffer).takeRetainedValue()
vImageCVImageFormat_SetColorSpace(cvImageFormat, CGColorSpaceCreateDeviceRGB())
defer {
if let dictionary = CVBufferGetAttachments(cvPixelBuffer, .shouldNotPropagate) {
CVBufferSetAttachments(cvPixelBuffer, dictionary, .shouldPropagate)
}
}
return vImageBuffer_CopyToCVPixelBuffer(
&self,
&format,
@@ -95,34 +55,6 @@ extension vImage_Buffer {
return imageBuffer
}
@discardableResult
mutating func cornerRadius(_ radius: CGFloat) -> Self {
guard 0 < radius else {
return self
}
let buffer = data.assumingMemoryBound(to: Pixel_8.self)
for x in 0 ..< Int(width) {
for y in 0 ..< Int(height) {
let index = y * rowBytes + x * 4
var dx = CGFloat(min(x, Int(width) - x))
var dy = CGFloat(min(y, Int(height) - y))
if dx == 0 && dy == 0 {
buffer[index] = 0
continue
}
if radius < dx || radius < dy {
continue
}
dx = radius - dx
dy = radius - dy
if radius < round(sqrt(dx * dx + dy * dy)) {
buffer[index] = 0
}
}
}
return self
}
@discardableResult
mutating func over(_ src: inout vImage_Buffer, origin: CGPoint = .zero) -> Self {
let start = Int(origin.y) * rowBytes + Int(origin.x) * 4
@@ -144,13 +76,15 @@
}
@discardableResult
mutating func split(_ buffer: inout vImage_Buffer, direction: ImageTransform) -> Self {
buffer.transform(direction.opposite)
var shape = ShapeFactory.shared.split(CGSize(width: CGFloat(width), height: CGFloat(height)), direction: direction.opposite)
vImageSelectChannels_ARGB8888(&shape, &buffer, &buffer, 0x8, vImage_Flags(kvImageNoFlags))
transform(direction)
guard vImageAlphaBlend_ARGB8888(
&self,
&buffer,
&self,
vImage_Flags(kvImageDoNotTile)
) == kvImageNoError else {
return self
@@ -158,7 +92,7 @@
return self
}
private mutating func transform(_ direction: ImageTransform) {
let backgroundColor: [Pixel_8] = [0, 255, 255, 255]
var vImageTransform = vImage_CGAffineTransform(
a: 1,


@@ -0,0 +1,22 @@
import Accelerate
import Foundation
extension vImage_CGImageFormat {
@available(iOS, obsoleted: 13.0)
@available(tvOS, obsoleted: 13.0)
@available(macOS, obsoleted: 10.15)
init?(cgImage: CGImage) {
guard
let colorSpace = cgImage.colorSpace else {
return nil
}
self = vImage_CGImageFormat(
bitsPerComponent: UInt32(cgImage.bitsPerComponent),
bitsPerPixel: UInt32(cgImage.bitsPerPixel),
colorSpace: Unmanaged.passRetained(colorSpace),
bitmapInfo: cgImage.bitmapInfo,
version: 0,
decode: nil,
renderingIntent: cgImage.renderingIntent)
}
}
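A usage sketch for pre-iOS 13 targets, where the system does not yet provide its own vImage_CGImageFormat(cgImage:) initializer (the cgImage value here is an assumed input):

    if var format = vImage_CGImageFormat(cgImage: cgImage) {
        var buffer = vImage_Buffer()
        // Wrap the CGImage's pixels in a vImage buffer using the derived format.
        _ = vImageBuffer_InitWithCGImage(&buffer, &format, nil, cgImage, vImage_Flags(kvImageNoFlags))
    }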


@@ -1,5 +1,5 @@
/// The AAC packet types that FLV supports.
enum FLVAACPacketType: UInt8 {
/// The sequence data.
case seq = 0
/// The raw data.


@@ -1,5 +1,5 @@
/// The AVC packet types that FLV supports.
enum FLVAVCPacketType: UInt8 {
/// The sequence data.
case seq = 0
/// The NAL unit data.


@@ -1,7 +1,7 @@
import AVFoundation
/// The audio codecs that FLV supports.
enum FLVAudioCodec: UInt8 {
/// The PCM codec.
case pcm = 0
/// The ADPCM codec.


@@ -1,5 +1,5 @@
/// The video frame types that FLV supports.
enum FLVFrameType: UInt8 {
/// The keyframe.
case key = 1
/// The inter frame.


@@ -1,62 +0,0 @@
import AVFoundation
/// The FLVReader is used to read the contents of a FLV file.
public final class FLVReader {
/// The header of a FLV.
public static let header = Data([0x46, 0x4C, 0x56, 1])
/// The headerSize of a FLV.
static let headerSize: Int = 11
/// The url of a FLV file.
public let url: URL
private var currentOffSet: UInt64 = 0
private var fileHandle: FileHandle?
/// Initializes and returns a newly allocated reader.
public init(url: URL) {
do {
self.url = url
fileHandle = try FileHandle(forReadingFrom: url)
fileHandle?.seek(toFileOffset: 13)
currentOffSet = 13
} catch {
logger.error(error)
}
}
/// Returns data by FLVTag.
public func getData(_ tag: FLVTag) -> Data? {
fileHandle?.seek(toFileOffset: tag.offset)
return fileHandle?.readData(ofLength: Int(UInt64(tag.dataSize)))
}
}
extension FLVReader: IteratorProtocol {
// MARK: IteratorProtocol
public func next() -> FLVTag? {
guard let fileHandle: FileHandle = fileHandle else {
return nil
}
var tag: FLVTag!
fileHandle.seek(toFileOffset: currentOffSet)
let data: Data = fileHandle.readData(ofLength: FLVReader.headerSize)
guard !data.isEmpty else {
return nil
}
switch data[0] {
case 8:
tag = FLVAudioTag(data: data)
case 9:
tag = FLVVideoTag(data: data)
case 18:
tag = FLVDataTag(data: data)
default:
return nil
}
tag.readData(fileHandle)
tag.offset = currentOffSet + UInt64(FLVReader.headerSize)
currentOffSet += UInt64(FLVReader.headerSize) + UInt64(tag.dataSize) + 4
return tag
}
}


@@ -1,5 +1,5 @@
/// The audio sample rates that FLV supports.
enum FLVSoundRate: UInt8 {
/// The 5.5 kHz sample rate.
case kHz5_5 = 0
/// The 11 kHz sample rate.
@@ -10,7 +10,7 @@ public enum FLVSoundRate: UInt8 {
case kHz44 = 3
/// The float typed value.
var floatValue: Float64 {
switch self {
case .kHz5_5:
return 5500


@@ -1,5 +1,5 @@
/// The audio sample sizes that FLV supports.
enum FLVSoundSize: UInt8 {
/// The 8bit sound.
case snd8bit = 0
/// The 16bit sound.


@@ -1,5 +1,5 @@
/// The audio channel types that FLV supports.
enum FLVSoundType: UInt8 {
/// The mono sound.
case mono = 0
/// The stereo sound.


@@ -1,7 +1,7 @@
import Foundation
/// The tag types that FLV supports.
enum FLVTagType: UInt8 {
/// The Audio tag.
case audio = 8
/// The Video tag.
@@ -29,127 +29,3 @@ public enum FLVTagType: UInt8 {
}
}
}
// MARK: -
/// The interface of FLV tag.
public protocol FLVTag: CustomDebugStringConvertible {
/// The type of this tag.
var tagType: FLVTagType { get set }
/// The length of data in the field.
var dataSize: UInt32 { get set }
/// The timestamp in milliseconds.
var timestamp: UInt32 { get set }
/// The extension of the timestamp.
var timestampExtended: UInt8 { get set }
/// The streamId, always 0.
var streamId: UInt32 { get set }
/// The data offset of a flv file.
var offset: UInt64 { get set }
/// Initialize a new object.
init()
/// Read data of fileHandler.
mutating func readData(_ fileHandler: FileHandle)
}
extension FLVTag {
var headerSize: Int {
tagType.headerSize
}
init?(data: Data) {
self.init()
let buffer = ByteArray(data: data)
do {
tagType = FLVTagType(rawValue: try buffer.readUInt8()) ?? .data
dataSize = try buffer.readUInt24()
timestamp = try buffer.readUInt24()
timestampExtended = try buffer.readUInt8()
streamId = try buffer.readUInt24()
buffer.clear()
} catch {
return nil
}
}
// MARK: CustomDebugStringConvertible
public var debugDescription: String {
Mirror(reflecting: self).debugDescription
}
}
// MARK: -
/// A structure that defines the FLVTag of Data.
public struct FLVDataTag: FLVTag {
public var tagType: FLVTagType = .data
public var dataSize: UInt32 = 0
public var timestamp: UInt32 = 0
public var timestampExtended: UInt8 = 0
public var streamId: UInt32 = 0
public var offset: UInt64 = 0
public init() {
}
public mutating func readData(_ fileHandler: FileHandle) {
}
}
// MARK: -
/// A structure that defines the FLVTag of an audio.
public struct FLVAudioTag: FLVTag {
public var tagType: FLVTagType = .audio
public var dataSize: UInt32 = 0
public var timestamp: UInt32 = 0
public var timestampExtended: UInt8 = 0
public var streamId: UInt32 = 0
public var offset: UInt64 = 0
/// Specifies the codec of audio.
public var codec: FLVAudioCodec = .unknown
/// Specifies the sound of rate.
public var soundRate: FLVSoundRate = .kHz5_5
/// Specifies the sound of size.
public var soundSize: FLVSoundSize = .snd8bit
/// Specifies the sound of type.
public var soundType: FLVSoundType = .mono
public init() {
}
public mutating func readData(_ fileHandler: FileHandle) {
let data: Data = fileHandler.readData(ofLength: headerSize)
codec = FLVAudioCodec(rawValue: data[0] >> 4) ?? .unknown
soundRate = FLVSoundRate(rawValue: (data[0] & 0b00001100) >> 2) ?? .kHz5_5
soundSize = FLVSoundSize(rawValue: (data[0] & 0b00000010) >> 1) ?? .snd8bit
soundType = FLVSoundType(rawValue: data[0] & 0b00000001) ?? .mono
}
}
// MARK: -
/// A structure that defines the FLVTag of a video.
public struct FLVVideoTag: FLVTag {
public var tagType: FLVTagType = .video
public var dataSize: UInt32 = 0
public var timestamp: UInt32 = 0
public var timestampExtended: UInt8 = 0
public var streamId: UInt32 = 0
public var offset: UInt64 = 0
/// Specifies the frame type of video.
public var frameType: FLVFrameType = .command
/// Specifies the codec of video.
public var codec: FLVVideoCodec = .unknown
/// Specifies the avc packet type.
public var avcPacketType: FLVAVCPacketType = .eos
/// Specifies the composition time.
public var compositionTime: Int32 = 0
public init() {
}
public mutating func readData(_ fileHandler: FileHandle) {
let data: Data = fileHandler.readData(ofLength: headerSize)
frameType = FLVFrameType(rawValue: data[0] >> 4) ?? .command
codec = FLVVideoCodec(rawValue: data[0] & 0b00001111) ?? .unknown
avcPacketType = FLVAVCPacketType(rawValue: data[1]) ?? .eos
}
}


@@ -1,7 +1,7 @@
import Foundation
/// The video codecs that FLV supports.
enum FLVVideoCodec: UInt8 {
/// The JPEG codec.
case jpeg = 1
/// The Sorenson H263 codec.


@@ -0,0 +1,18 @@
import Foundation
enum FLVVideoFourCC: UInt32 {
case av1 = 0x61763031 // { 'a', 'v', '0', '1' }
case vp9 = 0x76703039 // { 'v', 'p', '0', '9' }
case hevc = 0x68766331 // { 'h', 'v', 'c', '1' }
var isSupported: Bool {
switch self {
case .av1:
return false
case .vp9:
return false
case .hevc:
return true
}
}
}
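A short sketch of using this gate when parsing an Enhanced RTMP/FLV video tag; the raw value is the big-endian fourCC, so 0x68766331 spells "hvc1":

    let fourCC = FLVVideoFourCC(rawValue: 0x68766331) // "hvc1"
    if fourCC?.isSupported == true {
        // Take the HEVC path; av1 and vp9 parse but are rejected here.
    }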


@@ -0,0 +1,10 @@
import Foundation
enum FLVVideoPacketType: UInt8 {
case sequenceStart = 0
case codedFrames = 1
case sequenceEnd = 2
case codedFramesX = 3
case metadata = 4
case mpeg2TSSequenceStart = 5
}


@@ -1,36 +0,0 @@
import Foundation
protocol BaseDescriptor: Equatable, DataConvertible, CustomDebugStringConvertible {
var tag: UInt8 { get }
var size: UInt32 { get }
}
extension BaseDescriptor {
func writeSize(_ byteArray: ByteArray) {
let bytes = UInt32(byteArray.position - 5).bigEndian.data.bytes
byteArray.position = 1
for i in 0..<bytes.count - 1 {
byteArray.writeUInt8(bytes[i] | 0x80)
}
if let last = bytes.last {
byteArray.writeUInt8(last)
}
}
func readSize(_ byteArray: ByteArray) throws -> UInt32 {
var size: UInt32 = 0
var length: UInt8 = 0
repeat {
length = try byteArray.readUInt8()
size += size << 7 | UInt32(length & 0x7F)
} while ((length & 0x80) != 0)
return size
}
}
extension BaseDescriptor {
// MARK: CustomDebugStringConvertible
var debugDescription: String {
Mirror(reflecting: self).debugDescription
}
}
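The size field these descriptors carry is the MPEG-4 expandable length from ISO/IEC 14496-1: seven payload bits per byte, with the high bit set while more bytes follow. A standalone decoding sketch (using plain assignment where the removed readSize accumulated with +=):

    // Decodes an expandable size: 7 bits per byte, MSB = continuation.
    func expandableSize(_ bytes: [UInt8]) -> UInt32 {
        var size: UInt32 = 0
        for byte in bytes {
            size = size << 7 | UInt32(byte & 0x7F)
            if byte & 0x80 == 0 { break }
        }
        return size
    }
    // expandableSize([0x81, 0x10]) == 144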


@@ -1,57 +0,0 @@
import Foundation
struct DecoderConfigDescriptor: BaseDescriptor {
static let tag: UInt8 = 0x04
// MARK: BaseDescriptor
let tag: UInt8 = Self.tag
var size: UInt32 = 0
// MARK: DecoderConfigDescriptor
var objectTypeIndication: UInt8 = 0
var streamType: UInt8 = 0
var upStream = false
var bufferSizeDB: UInt32 = 0
var maxBitrate: UInt32 = 0
var avgBitrate: UInt32 = 0
var decSpecificInfo = DecoderSpecificInfo()
var profileLevelIndicationIndexDescriptor = ProfileLevelIndicationIndexDescriptor()
}
extension DecoderConfigDescriptor: DataConvertible {
// MARK: DataConvertible
var data: Data {
get {
let buffer = ByteArray()
.writeUInt8(tag)
.writeUInt32(0)
.writeUInt8(objectTypeIndication)
.writeUInt8(streamType << 2 | (upStream ? 1 : 0) << 1 | 1)
.writeUInt24(bufferSizeDB)
.writeUInt32(maxBitrate)
.writeUInt32(avgBitrate)
.writeBytes(decSpecificInfo.data)
.writeBytes(profileLevelIndicationIndexDescriptor.data)
writeSize(buffer)
return buffer.data
}
set {
do {
let buffer = ByteArray(data: newValue)
_ = try buffer.readUInt8()
size = try readSize(buffer)
objectTypeIndication = try buffer.readUInt8()
let first = try buffer.readUInt8()
streamType = (first >> 2)
upStream = (first & 2) != 0
bufferSizeDB = try buffer.readUInt24()
maxBitrate = try buffer.readUInt32()
avgBitrate = try buffer.readUInt32()
let position = buffer.position
decSpecificInfo.data = try buffer.readBytes(buffer.bytesAvailable)
buffer.position = position + Int(decSpecificInfo.size) + 5
profileLevelIndicationIndexDescriptor.data = try buffer.readBytes(buffer.bytesAvailable)
} catch {
logger.error(error)
}
}
}
}


@@ -1,33 +0,0 @@
import Foundation
struct DecoderSpecificInfo: BaseDescriptor {
static let tag: UInt8 = 0x05
// MARK: BaseDescriptor
let tag: UInt8 = Self.tag
var size: UInt32 = 0
// MARK: DecoderConfigDescriptor
private var _data = Data()
}
extension DecoderSpecificInfo: DataConvertible {
var data: Data {
get {
let buffer = ByteArray()
.writeUInt8(tag)
.writeUInt32(0)
.writeBytes(_data)
writeSize(buffer)
return buffer.data
}
set {
do {
let buffer = ByteArray(data: newValue)
_ = try buffer.readUInt8()
size = try readSize(buffer)
_data = try buffer.readBytes(Int(size))
} catch {
logger.error(error)
}
}
}
}


@@ -1,77 +0,0 @@
import Foundation
struct ESDescriptor: BaseDescriptor {
static let tag: UInt8 = 0x03
// MARK: BaseDescriptor
let tag: UInt8 = Self.tag
var size: UInt32 = 0
// MARK: ESDescriptor
var ES_ID: UInt16 = 0
var streamDependenceFlag = false
var URLFlag = false
var OCRstreamFlag = false
var streamPriority: UInt8 = 0
var dependsOn_ES_ID: UInt16 = 0
var URLLength: UInt8 = 0
var URLstring: String = ""
var OCR_ES_Id: UInt16 = 0
var decConfigDescr = DecoderConfigDescriptor()
var slConfigDescr = SLConfigDescriptor()
}
extension ESDescriptor: DataConvertible {
// MARK: DataConvertible
var data: Data {
get {
let buffer = ByteArray()
.writeUInt8(tag)
.writeUInt32(0)
.writeUInt16(ES_ID)
.writeUInt8((streamDependenceFlag ? 1 : 0) << 7 | (URLFlag ? 1 : 0) << 6 | streamPriority)
if streamDependenceFlag {
buffer.writeUInt16(dependsOn_ES_ID)
}
if URLFlag {
buffer
.writeUInt8(URLLength)
.writeUTF8Bytes(URLstring)
}
if OCRstreamFlag {
buffer.writeUInt16(OCR_ES_Id)
}
buffer.writeBytes(decConfigDescr.data)
buffer.writeBytes(slConfigDescr.data)
writeSize(buffer)
return buffer.data
}
set {
do {
let buffer = ByteArray(data: newValue)
_ = try buffer.readUInt8()
size = try readSize(buffer)
ES_ID = try buffer.readUInt16()
let first = try buffer.readUInt8()
streamDependenceFlag = (first & 0x80) != 0
URLFlag = (first & 0x40) != 0
streamPriority = (first & 0x1F)
if streamDependenceFlag {
dependsOn_ES_ID = try buffer.readUInt16()
}
if URLFlag {
URLLength = try buffer.readUInt8()
URLstring = try buffer.readUTF8Bytes(Int(URLLength))
}
if OCRstreamFlag {
OCR_ES_Id = try buffer.readUInt16()
}
var position = buffer.position
decConfigDescr.data = try buffer.readBytes(buffer.bytesAvailable)
position += 5 + Int(decConfigDescr.size)
buffer.position = position
slConfigDescr.data = try buffer.readBytes(buffer.bytesAvailable)
} catch {
logger.error(error)
}
}
}
}


@@ -1,49 +0,0 @@
import Foundation
struct ElementaryStreamSpecificData {
static let fixedHeaderSize: Int = 5
var streamType: UInt8 = 0
var elementaryPID: UInt16 = 0
var ESInfoLength: UInt16 = 0
var ESDescriptors = Data()
init() {
}
init?(_ data: Data) {
self.data = data
}
}
extension ElementaryStreamSpecificData: DataConvertible {
// MARK: DataConvertible
var data: Data {
get {
ByteArray()
.writeUInt8(streamType)
.writeUInt16(elementaryPID | 0xe000)
.writeUInt16(ESInfoLength | 0xf000)
.writeBytes(ESDescriptors)
.data
}
set {
let buffer = ByteArray(data: newValue)
do {
streamType = try buffer.readUInt8()
elementaryPID = try buffer.readUInt16() & 0x0fff
ESInfoLength = try buffer.readUInt16() & 0x01ff
ESDescriptors = try buffer.readBytes(Int(ESInfoLength))
} catch {
logger.error("\(buffer)")
}
}
}
}
extension ElementaryStreamSpecificData: CustomDebugStringConvertible {
// MARK: CustomDebugStringConvertible
var debugDescription: String {
Mirror(reflecting: self).debugDescription
}
}


@@ -1,16 +0,0 @@
import Foundation
enum ElementaryStreamType: UInt8 {
case mpeg1Video = 0x01
case mpeg2Video = 0x02
case mpeg1Audio = 0x03
case mpeg2Audio = 0x04
case mpeg2TabledData = 0x05
case mpeg2PacketizedData = 0x06
case adtsaac = 0x0F
case h263 = 0x10
case h264 = 0x1B
case h265 = 0x24
}


@@ -1,48 +0,0 @@
import AVFoundation
import VideoToolbox
enum NALType: UInt8 {
case unspec = 0
case slice = 1 // P frame
case dpa = 2
case dpb = 3
case dpc = 4
case idr = 5 // I frame
case sei = 6
case sps = 7
case pps = 8
case aud = 9
case eoseq = 10
case eostream = 11
case fill = 12
}
// MARK: -
struct NALUnit {
var refIdc: UInt8 = 0
var type: NALType = .unspec
var payload = Data()
}
extension NALUnit: DataConvertible {
// MARK: DataConvertible
var data: Data {
get {
ByteArray()
.writeUInt8(refIdc << 5 | type.rawValue)
.writeBytes(payload)
.data
}
set {
let buffer = ByteArray(data: newValue)
do {
let byte: UInt8 = try buffer.readUInt8()
refIdc = byte & 0x60 >> 5
type = NALType(rawValue: byte & 0x31) ?? .unspec
payload = try buffer.readBytes(buffer.bytesAvailable)
} catch {
logger.error("\(buffer)")
}
}
}
}


@@ -1,34 +0,0 @@
import Foundation
struct ProfileLevelIndicationIndexDescriptor: BaseDescriptor {
static let tag: UInt8 = 0x14
// MARK: BaseDescriptor
let tag: UInt8 = Self.tag
var size: UInt32 = 0
// MARK: ProfileLevelIndicationIndexDescriptor
var profileLevelIndicationIndex: UInt8 = 0
}
extension ProfileLevelIndicationIndexDescriptor: DataConvertible {
// MARK: DataConvertible
var data: Data {
get {
let buffer = ByteArray()
.writeUInt8(tag)
.writeUInt32(0)
.writeUInt8(profileLevelIndicationIndex)
writeSize(buffer)
return buffer.data
}
set {
do {
let buffer = ByteArray(data: newValue)
_ = try buffer.readUInt8()
size = try readSize(buffer)
profileLevelIndicationIndex = try buffer.readUInt8()
} catch {
logger.error(error)
}
}
}
}


@@ -1,33 +0,0 @@
import Foundation
struct SLConfigDescriptor: BaseDescriptor {
// MARK: BaseDescriptor
let tag: UInt8 = 0x06
var size: UInt32 = 0
// MARK: SLConfigDescriptor
var predefined: UInt8 = 0
}
extension SLConfigDescriptor: DataConvertible {
// MARK: DataConvertible
var data: Data {
get {
let buffer = ByteArray()
.writeUInt8(tag)
.writeUInt32(0)
.writeUInt8(predefined)
writeSize(buffer)
return buffer.data
}
set {
do {
let buffer = ByteArray(data: newValue)
_ = try buffer.readUInt8()
size = try readSize(buffer)
predefined = try buffer.readUInt8()
} catch {
logger.error(error)
}
}
}
}


@@ -1,41 +0,0 @@
import Foundation
/// ISO/IEC 14496-15 5.3.4.1.2
struct MP4AVCConfigurationBox: MP4BoxConvertible {
// MARK: MP4BoxConvertible
var size: UInt32 = 0
let type: String = "avcC"
var offset: UInt64 = 0
var children: [MP4BoxConvertible] = []
// MARK: MP4AVCConfigurationBox
var config = AVCConfigurationRecord()
}
extension MP4AVCConfigurationBox: DataConvertible {
var data: Data {
get {
let buffer = ByteArray()
.writeUInt32(size)
.writeUTF8Bytes(type)
.writeBytes(config.data)
let size = buffer.position
buffer.position = 0
buffer.writeUInt32(UInt32(size))
return buffer.data
}
set {
do {
let buffer = ByteArray(data: newValue)
size = try buffer.readUInt32()
_ = try buffer.readUTF8Bytes(4)
config = AVCConfigurationRecord(data: try buffer.readBytes(buffer.bytesAvailable))
} catch {
logger.error(error)
}
}
}
}
extension MP4Box.Names {
static let avcC = MP4Box.Name<MP4PixelAspectRatioBox>(rawValue: "avcC")
}


@@ -1,72 +0,0 @@
import Foundation
struct MP4AudioSampleEntry: MP4SampleEntry {
static let channelCount: UInt16 = 2
static let sampleSize: UInt16 = 16
// MARK: MP4SampleEntry
var size: UInt32 = 0
var type: String = ""
var offset: UInt64 = 0
var children: [MP4BoxConvertible] = []
var dataReferenceIndex: UInt16 = 0
// MARK: MP4AudioSampleEntry
var channelCount: UInt16 = Self.channelCount
var sampleSize: UInt16 = Self.sampleSize
var sampleRate: UInt32 = 0
}
extension MP4AudioSampleEntry: DataConvertible {
var data: Data {
get {
let buffer = ByteArray()
.writeUInt32(size)
.writeUTF8Bytes(type)
.writeBytes(.init(repeating: 0, count: 6)) // const unsigned int(8)[6] reserved = 0
.writeUInt16(dataReferenceIndex)
.writeUInt32(0)
.writeUInt32(0) // const unsigned int(32)[2] reserved = 0
.writeUInt16(channelCount)
.writeUInt16(sampleSize)
.writeUInt16(0) // unsigned int(16) pre_defined = 0
.writeUInt16(0) // const unsigned int(16) reserved = 0
.writeUInt32(sampleRate << 16)
for child in children {
buffer.writeBytes(child.data)
}
let size = buffer.position
buffer.position = 0
buffer.writeUInt32(UInt32(size))
return buffer.data
}
set {
do {
let buffer = ByteArray(data: newValue)
size = try buffer.readUInt32()
type = try buffer.readUTF8Bytes(4)
buffer.position += 6
dataReferenceIndex = try buffer.readUInt16()
buffer.position += 8
channelCount = try buffer.readUInt16()
sampleSize = try buffer.readUInt16()
buffer.position += 4
sampleRate = try buffer.readUInt32() >> 16
children.removeAll()
while 0 < buffer.bytesAvailable {
let size = try buffer.readUInt32()
_ = try buffer.readUTF8Bytes(4)
buffer.position -= 8
var entry = MP4Box()
entry.data = try buffer.readBytes(Int(size))
children.append(entry)
}
} catch {
logger.error(error)
}
}
}
}
extension MP4Box.Names {
static let mp4a = MP4Box.Name<MP4AudioSampleEntry>(rawValue: "mp4a")
static let mlpa = MP4Box.Name<MP4AudioSampleEntry>(rawValue: "mlpa")
}


@@ -1,84 +0,0 @@
import Foundation
struct MP4Box: MP4BoxConvertible {
static let containers: Set<String> = [
"cmov",
"ctts",
"edts",
"iods",
"junk",
"mdia",
"minf",
"moov",
"pict",
"pnot",
"rmda",
"rmra",
"skip",
"stbl",
"trak",
"uuid",
"wide",
"moof",
"traf"
]
class Names {
}
final class Name<T: MP4BoxConvertible>: Names, Hashable, RawRepresentable {
let rawValue: String
// swiftlint:disable nesting
typealias RawValue = String
init(rawValue: String) {
self.rawValue = rawValue
}
}
var size: UInt32 = 0
var type: String = ""
var offset: UInt64 = 0
var children: [MP4BoxConvertible] = []
private var _data = Data()
}
extension MP4Box: DataConvertible {
var data: Data {
get {
_data
}
set {
do {
_data = newValue
let buffer = ByteArray(data: newValue)
size = try buffer.readUInt32()
type = try buffer.readUTF8Bytes(4)
if Self.containers.contains(type) {
children.removeAll()
while 0 < buffer.bytesAvailable {
let size = try buffer.readInt32()
_ = try buffer.readBytes(4)
buffer.position -= 8
var child = MP4Box()
child.data = try buffer.readBytes(Int(size))
children.append(child)
}
}
} catch {
logger.error(error)
}
}
}
}
extension MP4Box.Names {
static let trak = MP4Box.Name<MP4Box>(rawValue: "trak")
}
extension MP4Box: CustomDebugStringConvertible {
// MARK: CustomDebugStringConvertible
var debugDescription: String {
Mirror(reflecting: self).debugDescription
}
}


@@ -1,43 +0,0 @@
import Foundation
protocol MP4BoxConvertible: DataConvertible, CustomXmlStringConvertible {
var size: UInt32 { get }
var type: String { get }
var offset: UInt64 { get set }
var children: [MP4BoxConvertible] { get }
init()
func getBoxes<T>(by name: MP4Box.Name<T>) -> [T]
}
extension MP4BoxConvertible {
var xmlString: String {
guard !children.isEmpty else {
return "<\(type) size=\"\(size)\" offset=\"\(offset)\" />"
}
var tags: [String] = []
for child in children {
tags.append(child.xmlString)
}
return "<\(type) size=\"\(size)\" offset=\"\(offset)\">\(tags.joined())</\(type)>"
}
func getBoxes<T>(by name: MP4Box.Name<T>) -> [T] {
var list: [T] = []
for child in children {
if name.rawValue == child.type {
if let box = child as? T {
list.append(box)
} else {
var box = T()
box.data = child.data
list.append(box)
}
}
if !child.children.isEmpty {
list += child.getBoxes(by: name)
}
}
return list
}
}


@@ -1,56 +0,0 @@
import Foundation
struct MP4ChunkOffsetBox: MP4FullBox {
static let version: UInt8 = 0
static let flags: UInt32 = 0
// MARK: MP4FullBox
var size: UInt32 = 0
let type: String = "stco"
var offset: UInt64 = 0
var children: [MP4BoxConvertible] = []
var version: UInt8 = Self.version
var flags: UInt32 = Self.flags
// MARK: MP4ChunkOffsetBox
var entries: [UInt32] = []
}
extension MP4ChunkOffsetBox: DataConvertible {
var data: Data {
get {
let buffer = ByteArray()
.writeUInt32(size)
.writeUTF8Bytes(type)
.writeUInt8(version)
.writeUInt24(flags)
.writeUInt32(UInt32(entries.count))
for entry in entries {
buffer
.writeUInt32(entry)
}
let size = buffer.position
buffer.position = 0
buffer.writeUInt32(UInt32(size))
return buffer.data
}
set {
do {
let buffer = ByteArray(data: newValue)
size = try buffer.readUInt32()
_ = try buffer.readUTF8Bytes(4)
version = try buffer.readUInt8()
flags = try buffer.readUInt24()
let numberOfEntries = try buffer.readUInt32()
entries.removeAll()
for _ in 0..<numberOfEntries {
entries.append(try buffer.readUInt32())
}
} catch {
logger.error(error)
}
}
}
}
extension MP4Box.Names {
static let stco = MP4Box.Name<MP4ChunkOffsetBox>(rawValue: "stco")
}


@@ -1,50 +0,0 @@
import Foundation
/// ISO/IEC 14496-12 5th 8.7.2.2
struct MP4DataEntryUrlBox: MP4FullBox {
static let version: UInt8 = 0
static let flags: UInt32 = 0
// MARK: MP4FullBox
var size: UInt32 = 0
let type: String = "url "
var offset: UInt64 = 0
var version: UInt8 = Self.version
var flags: UInt32 = Self.flags
var children: [MP4BoxConvertible] = []
// MARK: MP4DataEntryUrlBox
var location: String = ""
}
extension MP4DataEntryUrlBox: DataConvertible {
// MARK: DataConvertible
var data: Data {
get {
let buffer = ByteArray()
.writeUInt32(size)
.writeUTF8Bytes(type)
.writeUInt8(version)
.writeUInt24(flags)
.writeUTF8Bytes(location)
let size = buffer.position
buffer.position = 0
buffer.writeUInt32(UInt32(size))
return buffer.data
}
set {
do {
let buffer = ByteArray(data: newValue)
size = try buffer.readUInt32()
_ = try buffer.readUTF8Bytes(4)
version = try buffer.readUInt8()
flags = try buffer.readUInt24()
location = try buffer.readUTF8Bytes(buffer.bytesAvailable)
} catch {
logger.error(error)
}
}
}
}
extension MP4Box.Names {
static let url = MP4Box.Name<MP4DataEntryUrlBox>(rawValue: "url ")
}


@@ -1,92 +0,0 @@
import Foundation
struct MP4EditListBox: MP4FullBox {
static let flags: UInt32 = 0
struct Entry: Equatable, CustomDebugStringConvertible {
let segmentDuration: UInt64
let mediaTime: UInt64
let mediaRateInteger: Int16
let mediaRateFraction: Int16
var debugDescription: String {
Mirror(reflecting: self).debugDescription
}
}
// MARK: MP4FullBox
var size: UInt32 = 0
let type: String = "elst"
var offset: UInt64 = 0
var children: [MP4BoxConvertible] = []
var version: UInt8 = 0
var flags: UInt32 = Self.flags
// MARK: MP4EditListBox
var entries: [Entry] = []
}
extension MP4EditListBox: DataConvertible {
var data: Data {
get {
let buffer = ByteArray()
.writeUInt32(size)
.writeUTF8Bytes(type)
.writeUInt8(version)
.writeUInt24(flags)
.writeUInt32(UInt32(entries.count))
for entry in entries {
if version == 0 {
buffer
.writeUInt32(UInt32(entry.segmentDuration))
.writeUInt32(UInt32(entry.mediaTime))
.writeInt16(entry.mediaRateInteger)
.writeInt16(entry.mediaRateFraction)
} else {
buffer
.writeUInt64(entry.segmentDuration)
.writeUInt64(entry.mediaTime)
.writeInt16(entry.mediaRateInteger)
.writeInt16(entry.mediaRateFraction)
}
}
let size = buffer.position
buffer.position = 0
buffer.writeUInt32(UInt32(size))
return buffer.data
}
set {
do {
let buffer = ByteArray(data: newValue)
size = try buffer.readUInt32()
_ = try buffer.readUTF8Bytes(4)
version = try buffer.readUInt8()
flags = try buffer.readUInt24()
let numberOfEntries = try buffer.readUInt32()
entries.removeAll()
for _ in 0..<numberOfEntries {
if version == 0 {
entries.append(Entry(
segmentDuration: UInt64(try buffer.readUInt32()),
mediaTime: UInt64(try buffer.readUInt32()),
mediaRateInteger: try buffer.readInt16(),
mediaRateFraction: try buffer.readInt16()
))
} else {
entries.append(Entry(
segmentDuration: try buffer.readUInt64(),
mediaTime: try buffer.readUInt64(),
mediaRateInteger: try buffer.readInt16(),
mediaRateFraction: try buffer.readInt16()
))
}
}
} catch {
logger.error(error)
}
}
}
}
extension MP4Box.Names {
static let elst = MP4Box.Name<MP4EditListBox>(rawValue: "elst")
}


@@ -1,49 +0,0 @@
import Foundation
/**
- seealso: https://developer.apple.com/library/archive/documentation/QuickTime/QTFF/QTFFChap3/qtff3.html#//apple_ref/doc/uid/TP40000939-CH205-124774
*/
struct MP4ElementaryStreamDescriptorBox: MP4FullBox {
// MARK: MP4FullBox
var size: UInt32 = 0
let type: String = "esds"
var offset: UInt64 = 0
var children: [MP4BoxConvertible] = []
var version: UInt8 = 0
var flags: UInt32 = 0
// MARK: MP4FullBox
var descriptor = ESDescriptor()
}
extension MP4ElementaryStreamDescriptorBox: DataConvertible {
var data: Data {
get {
let buffer = ByteArray()
.writeUInt32(size)
.writeUTF8Bytes(type)
.writeUInt8(version)
.writeUInt24(flags)
.writeBytes(descriptor.data)
let size = buffer.position
buffer.position = 0
buffer.writeUInt32(UInt32(size))
return buffer.data
}
set {
do {
let buffer = ByteArray(data: newValue)
size = try buffer.readUInt32()
_ = try buffer.readUTF8Bytes(4)
version = try buffer.readUInt8()
flags = try buffer.readUInt24()
descriptor.data = try buffer.readBytes(buffer.bytesAvailable)
} catch {
logger.error(error)
}
}
}
}
extension MP4Box.Names {
static let esds = MP4Box.Name<MP4ElementaryStreamDescriptorBox>(rawValue: "esds")
}


@@ -1,29 +0,0 @@
import Foundation
enum MP4File {
struct Builder {
private var ftyp: MP4FileTypeBox?
private var moov: MP4Box?
mutating func setFileTypeBox(_ ftyp: MP4FileTypeBox?) -> Self {
self.ftyp = ftyp
return self
}
mutating func setMovieBox(_ moov: MP4Box?) -> Self {
self.moov = moov
return self
}
func build() -> MP4Box {
var box = MP4Box()
if let ftyp = ftyp {
box.children.append(ftyp)
}
if let moov = moov {
box.children.append(moov)
}
return box
}
}
}
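
Because the setters are mutating, they have to be called on a var; a usage sketch:

var builder = MP4File.Builder()
_ = builder.setFileTypeBox(MP4FileTypeBox())
_ = builder.setMovieBox(MP4Box())
let file = builder.build() // an MP4Box with ftyp and moov as children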

View File

@@ -1,53 +0,0 @@
import Foundation
final class MP4FileReader: MP4ReaderConvertible {
var fileType: MP4FileTypeBox {
root.getBoxes(by: .ftyp).first ?? MP4FileTypeBox()
}
var tracks: [MP4TrackReader] = []
private var root = MP4Box()
private let fileHandle: FileHandle
init(forReadingFrom url: URL) throws {
fileHandle = try FileHandle(forReadingFrom: url)
}
func execute() -> Self {
do {
var currentOffset = root.offset
let length = fileHandle.seekToEndOfFile()
root.children.removeAll()
repeat {
fileHandle.seek(toFileOffset: currentOffset)
let buffer = ByteArray(data: fileHandle.readData(ofLength: 8))
let size = try buffer.readUInt32()
_ = try buffer.readUTF8Bytes(4)
guard 8 <= size else {
break // size == 0 (box runs to EOF) and size == 1 (64-bit largesize) are not handled; bail out instead of looping forever
}
fileHandle.seek(toFileOffset: currentOffset)
var child = MP4Box()
child.data = fileHandle.readData(ofLength: Int(size))
root.children.append(child)
currentOffset += UInt64(size)
} while currentOffset < length
} catch {
logger.error(error)
}
return self
}
func getBoxes<T: MP4BoxConvertible>(by name: MP4Box.Name<T>) -> [T] {
return root.getBoxes(by: name)
}
}
extension MP4FileReader: CustomDebugStringConvertible {
var debugDescription: String {
return root.debugDescription
}
}
extension MP4FileReader: CustomXmlStringConvertible {
var xmlString: String {
return root.xmlString
}
}
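
A usage sketch, assuming a local sample.mp4 exists at the given path; execute() walks the top-level boxes, and the conformances below dump the parsed tree:

let url = URL(fileURLWithPath: "sample.mp4") // hypothetical local file
let reader = try MP4FileReader(forReadingFrom: url).execute()
print(reader.fileType.majorBrand)
print(reader.xmlString) // the box tree as XML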

View File

@@ -1,49 +0,0 @@
import Foundation
struct MP4FileTypeBox: MP4BoxConvertible {
// MARK: MP4BoxConvertible
var size: UInt32 = 0
var type: String = ""
var offset: UInt64 = 0
var children: [MP4BoxConvertible] = []
// MARK: MP4FileTypeBox
var majorBrand: UInt32 = 0
var minorVersion: UInt32 = 0
var compatibleBrands: [UInt32] = []
}
extension MP4FileTypeBox: DataConvertible {
var data: Data {
get {
let buffer = ByteArray()
.writeUInt32(size)
.writeUTF8Bytes(type)
.writeUInt32(majorBrand)
.writeUInt32(minorVersion)
for brand in compatibleBrands {
buffer.writeUInt32(brand)
}
// Rewrite the leading size field now that the total length is known, as the other boxes do.
let size = buffer.position
buffer.position = 0
buffer.writeUInt32(UInt32(size))
return buffer.data
}
set {
do {
let buffer = ByteArray(data: newValue)
size = try buffer.readUInt32()
type = try buffer.readUTF8Bytes(4)
majorBrand = try buffer.readUInt32()
minorVersion = try buffer.readUInt32()
compatibleBrands.removeAll()
while 0 < buffer.bytesAvailable {
compatibleBrands.append(try buffer.readUInt32())
}
} catch {
logger.error(error)
}
}
}
}
extension MP4Box.Names {
static let styp = MP4Box.Name<MP4FileTypeBox>(rawValue: "styp")
static let ftyp = MP4Box.Name<MP4FileTypeBox>(rawValue: "ftyp")
}

View File

@@ -1,31 +0,0 @@
import AVFoundation
import Foundation
protocol MP4FragmentedWriterDelegate: AnyObject {
func writer(_ writer: MP4FragmentedWriter, didSegmentChanged segment: MP4Box)
}
final class MP4FragmentedWriter: MP4WriterConvertible {
private var segment = MP4Box()
private(set) var mapping = MP4Box()
private var audio = MP4FragmentedTrafWriter()
private var video = MP4FragmentedTrafWriter()
weak var delegate: MP4FragmentedWriterDelegate?
}
extension MP4FragmentedWriter: AudioCodecDelegate {
// MARK: AudioCodecDelegate
func audioCodec(_ codec: AudioCodec, didSet formatDescription: CMFormatDescription?) {
// no-op stub
}
func audioCodec(_ codec: AudioCodec, didOutput sample: UnsafeMutableAudioBufferListPointer, presentationTimeStamp: CMTime) {
// no-op stub
}
}
final class MP4FragmentedTrafWriter {
private var tfhd = MP4TrackFragmentHeaderBox() // "tfhd": the track fragment header (tkhd is the unrelated track header box)
private var trun = MP4TrackRunBox()
private var tfdt = MP4TrackRunBox() // placeholder: no dedicated tfdt (base media decode time) box type exists in this file set
}

View File

@@ -1,6 +0,0 @@
import Foundation
protocol MP4FullBox: MP4BoxConvertible {
var version: UInt8 { get }
var flags: UInt32 { get }
}

View File

@@ -1,62 +0,0 @@
import Foundation
/// ISO/IEC 14496-12 5th 8.4.3.2
struct MP4HandlerBox: MP4FullBox {
static let version: UInt8 = 0
static let flags: UInt32 = 0
// MARK: MP4FullBox
var size: UInt32 = 0
let type: String = "hdlr"
var offset: UInt64 = 0
var version: UInt8 = Self.version
var flags: UInt32 = Self.flags
var children: [MP4BoxConvertible] = []
// MARK: MP4HandlerBox
var handlerType: UInt32 = 0
var name: String = ""
}
extension MP4HandlerBox: DataConvertible {
// MARK: DataConvertible
var data: Data {
get {
let buffer = ByteArray()
.writeUInt32(size)
.writeUTF8Bytes(type)
.writeUInt8(version)
.writeUInt24(flags)
.writeUInt32(0) // pre_defined
.writeUInt32(handlerType)
.writeUInt32(0) // reserved
.writeUInt32(0) // reserved
.writeUInt32(0) // reserved
.writeUTF8Bytes(name)
.writeUTF8Bytes("\0")
let size = buffer.position
buffer.position = 0
buffer.writeUInt32(UInt32(size))
return buffer.data
}
set {
do {
let buffer = ByteArray(data: newValue)
size = try buffer.readUInt32()
_ = try buffer.readUTF8Bytes(4)
version = try buffer.readUInt8()
flags = try buffer.readUInt24()
buffer.position += 4 // pre_defined
handlerType = try buffer.readUInt32()
buffer.position += 4 // reserved
buffer.position += 4 // reserved
buffer.position += 4 // reserved
name = try buffer.readUTF8Bytes(buffer.bytesAvailable - 1)
} catch {
logger.error(error)
}
}
}
}
extension MP4Box.Names {
static let hdlr = MP4Box.Name<MP4HandlerBox>(rawValue: "hdlr")
}

View File

@@ -1,82 +0,0 @@
import Foundation
struct MP4MediaHeaderBox: MP4FullBox {
// MARK: MP4FullBox
var size: UInt32 = 0
let type: String = "mdhd"
var offset: UInt64 = 0
var children: [MP4BoxConvertible] = []
var version: UInt8 = 0
var flags: UInt32 = 0
// MARK: MP4MediaHeaderBox
var creationTime: UInt64 = 0
var modificationTime: UInt64 = 0
var timeScale: UInt32 = 0
var duration: UInt64 = 0
var language: [UInt8] = [0, 0, 0]
}
extension MP4MediaHeaderBox: DataConvertible {
var data: Data {
get {
let buffer = ByteArray()
.writeUInt32(size)
.writeUTF8Bytes(type)
.writeUInt8(version)
.writeUInt24(flags)
if version == 0 {
buffer
.writeUInt32(UInt32(creationTime))
.writeUInt32(UInt32(modificationTime))
.writeUInt32(timeScale)
.writeUInt32(UInt32(duration))
} else {
buffer
.writeUInt64(creationTime)
.writeUInt64(modificationTime)
.writeUInt32(timeScale)
.writeUInt64(duration)
}
buffer
.writeUInt16(
UInt16(language[0]) << 10 |
UInt16(language[1]) << 5 |
UInt16(language[2])
)
.writeUInt16(0) // pre_defined = 0
// Rewrite the leading size field now that the total length is known, as the other boxes do.
let size = buffer.position
buffer.position = 0
buffer.writeUInt32(UInt32(size))
return buffer.data
}
set {
do {
let buffer = ByteArray(data: newValue)
size = try buffer.readUInt32()
_ = try buffer.readUTF8Bytes(4)
version = try buffer.readUInt8()
flags = try buffer.readUInt24()
if version == 0 {
creationTime = UInt64(try buffer.readUInt32())
modificationTime = UInt64(try buffer.readUInt32())
timeScale = try buffer.readUInt32()
duration = UInt64(try buffer.readUInt32())
} else {
creationTime = try buffer.readUInt64()
modificationTime = try buffer.readUInt64()
timeScale = try buffer.readUInt32()
duration = try buffer.readUInt64()
}
let lang = try buffer.readUInt16()
language = [
UInt8((lang & 0x7C00) >> 10),
UInt8((lang & 0x3E0) >> 5),
UInt8(lang & 0x1F)
]
} catch {
logger.error(error)
}
}
}
}
extension MP4Box.Names {
static let mdhd = MP4Box.Name<MP4MediaHeaderBox>(rawValue: "mdhd")
}

View File

@@ -1,32 +0,0 @@
import Foundation
enum MP4MovieFragmentBox {
struct Builder {
private var mfhd: MP4MovieFragmentHeaderBox?
private var traf: [MP4Box] = []
mutating func setMovieFragmentHeaderBox(_ mfhd: MP4MovieFragmentHeaderBox?) -> Self {
self.mfhd = mfhd
return self
}
mutating func addTrackFragmentBox(_ traf: MP4Box?) -> Self {
guard let traf = traf else {
return self
}
self.traf.append(traf)
return self
}
func build() -> MP4Box {
var box = MP4Box()
if let mfhd = mfhd {
box.children.append(mfhd)
}
for t in traf {
box.children.append(t)
}
return box
}
}
}

View File

@@ -1,47 +0,0 @@
import Foundation
struct MP4MovieFragmentHeaderBox: MP4FullBox {
static let version: UInt8 = 0
static let flags: UInt32 = 0
// MARK: MP4FullBox
var size: UInt32 = 0
let type: String = "mfhd"
var offset: UInt64 = 0
var children: [MP4BoxConvertible] = []
let version: UInt8 = Self.version
let flags: UInt32 = Self.flags
// MARK: MP4MovieFragmentHeaderBox
var sequenceNumber: UInt32 = 0
}
extension MP4MovieFragmentHeaderBox: DataConvertible {
var data: Data {
get {
let buffer = ByteArray()
.writeUInt32(size)
.writeUTF8Bytes(type)
.writeUInt8(version)
.writeUInt24(flags)
.writeUInt32(sequenceNumber)
let size = buffer.position
buffer.position = 0
buffer.writeUInt32(UInt32(size))
return buffer.data
}
set {
do {
let buffer = ByteArray(data: newValue)
size = try buffer.readUInt32()
_ = try buffer.readUTF8Bytes(4)
buffer.position += 4 // skip version (1 byte) + flags (3 bytes); both are constants here
sequenceNumber = try buffer.readUInt32()
} catch {
logger.error(error)
}
}
}
}
extension MP4Box.Names {
static let mfhd = MP4Box.Name<MP4MovieFragmentHeaderBox>(rawValue: "mfhd")
}

View File

@@ -1,104 +0,0 @@
import Foundation
struct MP4MovieHeaderBox: MP4FullBox {
static let rate: Int32 = 0x00010000
static let volume: Int16 = 0x0100
// MARK: MP4FullBox
var size: UInt32 = 0
let type: String = "mvhd"
var offset: UInt64 = 0
var children: [MP4BoxConvertible] = []
var version: UInt8 = 0
var flags: UInt32 = 0
// MARK: MP4MovieHeaderBox
var creationTime: UInt64 = 0
var modificationTime: UInt64 = 0
var timeScale: UInt32 = 0
var duration: UInt64 = 0
var rate: Int32 = Self.rate
var volume: Int16 = Self.volume
var matrix: [Int32] = [0x00010000, 0, 0, 0, 0x00010000, 0, 0, 0, 0x40000000] // unity matrix; the setter expects exactly nine values, so don't leave this empty
var nextTrackID: UInt32 = 0
}
extension MP4MovieHeaderBox: DataConvertible {
var data: Data {
get {
let buffer = ByteArray()
.writeUInt32(size)
.writeUTF8Bytes(type)
.writeUInt8(version)
.writeUInt24(flags)
if version == 0 {
buffer
.writeUInt32(UInt32(creationTime))
.writeUInt32(UInt32(modificationTime))
.writeUInt32(timeScale)
.writeUInt32(UInt32(duration))
} else {
buffer
.writeUInt64(creationTime)
.writeUInt64(modificationTime)
.writeUInt32(timeScale)
.writeUInt64(duration)
}
buffer
.writeInt32(rate)
.writeInt16(volume)
.writeInt16(0)
.writeUInt32(0)
.writeUInt32(0)
for m in matrix {
buffer.writeInt32(m)
}
buffer
.writeInt32(0)
.writeInt32(0)
.writeInt32(0)
.writeInt32(0)
.writeInt32(0)
.writeInt32(0)
.writeUInt32(nextTrackID)
let size = buffer.position
buffer.position = 0
buffer.writeUInt32(UInt32(size))
return buffer.data
}
set {
do {
let buffer = ByteArray(data: newValue)
size = try buffer.readUInt32()
_ = try buffer.readUTF8Bytes(4)
version = try buffer.readUInt8()
flags = try buffer.readUInt24()
if version == 0 {
creationTime = UInt64(try buffer.readUInt32())
modificationTime = UInt64(try buffer.readUInt32())
timeScale = try buffer.readUInt32()
duration = UInt64(try buffer.readUInt32())
} else {
creationTime = try buffer.readUInt64()
modificationTime = try buffer.readUInt64()
timeScale = try buffer.readUInt32()
duration = try buffer.readUInt64()
}
rate = try buffer.readInt32()
volume = try buffer.readInt16()
buffer.position += 2 // const bit(16) reserved
buffer.position += 8 // const unsigned int(32)[2] reserved
matrix.removeAll()
for _ in 0..<9 {
matrix.append(try buffer.readInt32())
}
buffer.position += 24 // bit(32)[6] pre_defined = 0
nextTrackID = try buffer.readUInt32()
} catch {
logger.error(error)
}
}
}
}
extension MP4Box.Names {
static let mvhd = MP4Box.Name<MP4MovieHeaderBox>(rawValue: "mvhd")
}

View File

@@ -1,44 +0,0 @@
import Foundation
/// ISO/IEC 14496-12 5th 12.1.4.2
struct MP4PixelAspectRatioBox: MP4BoxConvertible {
// MARK: MP4BoxConvertible
var size: UInt32 = 0
let type: String = "pasp"
var offset: UInt64 = 0
var children: [MP4BoxConvertible] = []
// MARK: MP4PixelAspectRatioBox
var hSpacing: UInt32 = 0
var vSpacing: UInt32 = 0
}
extension MP4PixelAspectRatioBox: DataConvertible {
var data: Data {
get {
let buffer = ByteArray()
.writeUInt32(size)
.writeUTF8Bytes(type)
.writeUInt32(hSpacing)
.writeUInt32(vSpacing)
let size = buffer.position
buffer.position = 0
buffer.writeUInt32(UInt32(size))
return buffer.data
}
set {
do {
let buffer = ByteArray(data: newValue)
size = try buffer.readUInt32()
_ = try buffer.readUTF8Bytes(4)
hSpacing = try buffer.readUInt32()
vSpacing = try buffer.readUInt32()
} catch {
logger.error(error)
}
}
}
}
extension MP4Box.Names {
static let pasp = MP4Box.Name<MP4PixelAspectRatioBox>(rawValue: "pasp")
}

View File

@@ -1,33 +0,0 @@
import AVFoundation
final class MP4Reader: MP4ReaderConvertible {
let fileType: MP4FileTypeBox
let tracks: [MP4TrackReader]
init(fileType: MP4FileTypeBox, tracks: [MP4TrackReader]) {
self.fileType = fileType
self.tracks = tracks
}
}
final class MP4TrackReader {
struct MP4SampleIterator: IteratorProtocol {
// swiftlint:disable nesting
typealias Element = UInt8
private var cursor: Int = 0
private let reader: MP4TrackReader
init(reader: MP4TrackReader) {
self.reader = reader
}
mutating func next() -> Element? {
// Stub: sample iteration is not implemented yet, so the sequence is always empty.
return nil
}
}
func makeIterator() -> MP4SampleIterator {
return MP4SampleIterator(reader: self)
}
}

View File

@@ -1,14 +0,0 @@
import Foundation
protocol MP4ReaderConvertible: AnyObject {
var fileType: MP4FileTypeBox { get }
var tracks: [MP4TrackReader] { get }
func execute() -> Self
}
extension MP4ReaderConvertible {
func execute() -> Self {
return self
}
}

View File

@@ -1,74 +0,0 @@
import Foundation
struct MP4SampleDescriptionBox: MP4FullBox {
static let audio: Set<String> = ["mp4a"]
static let video: Set<String> = ["mp4v", "s263", "avc1"]
static func makeEntry(by type: String) -> MP4SampleEntry? {
switch true {
case video.contains(type):
return MP4VisualSampleEntry()
case audio.contains(type):
return MP4AudioSampleEntry()
default:
return nil
}
}
static let flags: UInt32 = 0
// MARK: MP4FullBox
var size: UInt32 = 0
let type: String = "stsd"
var offset: UInt64 = 0
var version: UInt8 = 0
var flags: UInt32 = Self.flags
// MARK: MP4SampleDescriptionBox
var children: [MP4BoxConvertible] = []
}
extension MP4SampleDescriptionBox: DataConvertible {
var data: Data {
get {
let buffer = ByteArray()
.writeUInt32(size)
.writeUTF8Bytes(type)
.writeUInt8(version)
.writeUInt24(flags)
.writeUInt32(UInt32(children.count))
for child in children {
buffer.writeBytes(child.data)
}
let size = buffer.position
buffer.position = 0
buffer.writeUInt32(UInt32(size))
return buffer.data
}
set {
do {
let buffer = ByteArray(data: newValue)
size = try buffer.readUInt32()
_ = try buffer.readUTF8Bytes(4)
version = try buffer.readUInt8()
flags = try buffer.readUInt24()
let numberOfEntries = try buffer.readUInt32()
children.removeAll()
for _ in 0..<numberOfEntries {
let size = try buffer.readUInt32()
let type = try buffer.readUTF8Bytes(4)
buffer.position -= 8
var entry = Self.makeEntry(by: type)
entry?.data = try buffer.readBytes(Int(size))
if let entry = entry {
children.append(entry)
}
}
} catch {
logger.error(error)
}
}
}
}
extension MP4Box.Names {
static let stsd = MP4Box.Name<MP4SampleDescriptionBox>(rawValue: "stsd")
}

View File

@@ -1,5 +0,0 @@
import Foundation
protocol MP4SampleEntry: MP4BoxConvertible {
var dataReferenceIndex: UInt16 { get }
}

View File

@@ -1,62 +0,0 @@
import Foundation
/// ISO/IEC 14496-12 5th 8.7.3.2.1
struct MP4SampleSizeBox: MP4FullBox {
// MARK: MP4FullBox
var size: UInt32 = 0
let type: String = "stsz"
var offset: UInt64 = 0
var children: [MP4BoxConvertible] = []
var version: UInt8 = 0
var flags: UInt32 = 0
// MARK: MP4SampleSizeBox
var sampleSize: UInt32 = 0
var entries: [UInt32] = []
}
extension MP4SampleSizeBox: DataConvertible {
var data: Data {
get {
let buffer = ByteArray()
.writeUInt32(size)
.writeUTF8Bytes(type)
.writeUInt8(version)
.writeUInt24(flags)
.writeUInt32(sampleSize)
.writeUInt32(UInt32(entries.count))
for entry in entries {
buffer
.writeUInt32(entry)
}
let size = buffer.position
buffer.position = 0
buffer.writeUInt32(UInt32(size))
return buffer.data
}
set {
do {
let buffer = ByteArray(data: newValue)
size = try buffer.readUInt32()
_ = try buffer.readUTF8Bytes(4)
version = try buffer.readUInt8()
flags = try buffer.readUInt24()
sampleSize = try buffer.readUInt32()
entries.removeAll()
let numberOfEntries = try buffer.readUInt32()
if sampleSize == 0 {
for _ in 0..<numberOfEntries {
entries.append(try buffer.readUInt32())
}
} else {
entries.append(sampleSize)
}
} catch {
logger.error(error)
}
}
}
}
extension MP4Box.Names {
static let stsz = MP4Box.Name<MP4SampleSizeBox>(rawValue: "stsz")
}

View File

@@ -1,69 +0,0 @@
import Foundation
/// ISO/IEC 14496-12 5th 8.7.4.2
struct MP4SampleToChunkBox: MP4FullBox {
struct Entry: Equatable, CustomDebugStringConvertible {
let firstChunk: UInt32
let samplesPerChunk: UInt32
let sampleDescriptionIndex: UInt32
var debugDescription: String {
Mirror(reflecting: self).debugDescription
}
}
// MARK: MP4FullBox
var size: UInt32 = 0
let type: String = "stsc"
var offset: UInt64 = 0
var children: [MP4BoxConvertible] = []
var version: UInt8 = 0
var flags: UInt32 = 0
// MARK: MP4SampleToChunkBox
var entries: [Entry] = []
}
extension MP4SampleToChunkBox: DataConvertible {
var data: Data {
get {
let buffer = ByteArray()
.writeUInt32(size)
.writeUTF8Bytes(type)
.writeUInt8(version)
.writeUInt24(flags)
.writeUInt32(UInt32(entries.count))
for entry in entries {
buffer
.writeUInt32(entry.firstChunk)
.writeUInt32(entry.samplesPerChunk)
.writeUInt32(entry.sampleDescriptionIndex)
}
let size = buffer.position
buffer.position = 0
buffer.writeUInt32(UInt32(size))
return buffer.data
}
set {
do {
let buffer = ByteArray(data: newValue)
size = try buffer.readUInt32()
_ = try buffer.readUTF8Bytes(4)
version = try buffer.readUInt8()
flags = try buffer.readUInt24()
let numberOfEntries: UInt32 = try buffer.readUInt32()
entries.removeAll()
for _ in 0..<numberOfEntries {
entries.append(Entry(
firstChunk: try buffer.readUInt32(),
samplesPerChunk: try buffer.readUInt32(),
sampleDescriptionIndex: try buffer.readUInt32()
))
}
} catch {
logger.error(error)
}
}
}
}
extension MP4Box.Names {
static let stsc = MP4Box.Name<MP4SampleToChunkBox>(rawValue: "stsc")
}

Some files were not shown because too many files have changed in this diff.