add AVCaptureIOUnit

shogo4405 2022-11-16 00:28:48 +09:00
parent 63cab4f151
commit 06fdae1fef
7 changed files with 104 additions and 122 deletions

@@ -325,6 +325,9 @@
BC11023E2917C35B00D48035 /* CVPixelBufferPool+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC11023D2917C35B00D48035 /* CVPixelBufferPool+Extension.swift */; };
BC11023F2917C35B00D48035 /* CVPixelBufferPool+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC11023D2917C35B00D48035 /* CVPixelBufferPool+Extension.swift */; };
BC1102402917C35B00D48035 /* CVPixelBufferPool+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC11023D2917C35B00D48035 /* CVPixelBufferPool+Extension.swift */; };
BC11024A2925147300D48035 /* AVCaptureIOUnit.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC1102492925147300D48035 /* AVCaptureIOUnit.swift */; };
BC11024B2925147300D48035 /* AVCaptureIOUnit.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC1102492925147300D48035 /* AVCaptureIOUnit.swift */; };
BC11024C2925147300D48035 /* AVCaptureIOUnit.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC1102492925147300D48035 /* AVCaptureIOUnit.swift */; };
BC11D94625A1B01000D710BA /* Screencast.appex in Embed Foundation Extensions */ = {isa = PBXBuildFile; fileRef = 2915EC521D85BDF100621092 /* Screencast.appex */; settings = {ATTRIBUTES = (RemoveHeadersOnCopy, ); }; };
BC20DF38250377A3007BC608 /* ScreenCaptureSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = 299B131C1D35272D00A1E8F5 /* ScreenCaptureSession.swift */; };
BC34DFF025EBB53F005F975A /* Logboard.xcframework in Frameworks */ = {isa = PBXBuildFile; fileRef = BC34DFD125EBB12C005F975A /* Logboard.xcframework */; };
@@ -842,6 +845,7 @@
2EC97B7027880FF400D8BE32 /* MTHKSwiftUiView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MTHKSwiftUiView.swift; sourceTree = "<group>"; };
BC0D236C26331BAB001DDA0C /* NetSocket.CircularBuffer.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = NetSocket.CircularBuffer.swift; sourceTree = "<group>"; };
BC11023D2917C35B00D48035 /* CVPixelBufferPool+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CVPixelBufferPool+Extension.swift"; sourceTree = "<group>"; };
BC1102492925147300D48035 /* AVCaptureIOUnit.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AVCaptureIOUnit.swift; sourceTree = "<group>"; };
BC34DFD125EBB12C005F975A /* Logboard.xcframework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.xcframework; name = Logboard.xcframework; path = Carthage/Build/Logboard.xcframework; sourceTree = "<group>"; };
BC34FA0A286CB90A00EFAF27 /* PiPHkView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PiPHkView.swift; sourceTree = "<group>"; };
BC44A1A823D31E92002D4297 /* AudioCodecBuffer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioCodecBuffer.swift; sourceTree = "<group>"; wrapsLines = 1; };
@@ -1362,6 +1366,7 @@
children = (
2941746A22D069B300A2944F /* AudioEffect.swift */,
29B876891CD70AFE00FC07DA /* AVAudioIOUnit.swift */,
BC1102492925147300D48035 /* AVCaptureIOUnit.swift */,
BC570B4728E9ACC10098A12C /* AVIOUnit.swift */,
29B8768B1CD70AFE00FC07DA /* AVMixer.swift */,
2976A47D1D48C5C700B53EF2 /* AVRecorder.swift */,
@@ -2179,6 +2184,7 @@
29B876AC1CD70B2800FC07DA /* AMF3Serializer.swift in Sources */,
2916196C1E7F0768009FB344 /* CMFormatDescription+Extension.swift in Sources */,
BCB976DF26107B5600C9A649 /* TSAdaptationField.swift in Sources */,
BC11024A2925147300D48035 /* AVCaptureIOUnit.swift in Sources */,
29B876921CD70AFE00FC07DA /* AVMixer.swift in Sources */,
29DC17B321D0CC0600E26CED /* Atomic.swift in Sources */,
BCC1A6DF264470F900661156 /* MP4DataEntryUrlBox.swift in Sources */,
@@ -2269,6 +2275,7 @@
29B876F81CD70D5900FC07DA /* HTTPService.swift in Sources */,
29B876F91CD70D5900FC07DA /* HTTPStream.swift in Sources */,
296543631D62FE9000734698 /* HKView-macOS.swift in Sources */,
BC11024B2925147300D48035 /* AVCaptureIOUnit.swift in Sources */,
29B876FA1CD70D5900FC07DA /* M3U.swift in Sources */,
292D8A341D8B294900DBECE2 /* MP4Reader.swift in Sources */,
29B876FD1CD70D5A00FC07DA /* AudioSpecificConfig.swift in Sources */,
@@ -2565,6 +2572,7 @@
29EB3DF61ED0577C001CAE8B /* CMSampleBuffer+Extension.swift in Sources */,
BCC1A729264FA1C100661156 /* ProfileLevelIndicationIndexDescriptor.swift in Sources */,
29EB3E281ED05A0C001CAE8B /* RTMPTSocket.swift in Sources */,
BC11024C2925147300D48035 /* AVCaptureIOUnit.swift in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};

@@ -5,14 +5,14 @@ import CoreImage
extension AVVideoIOUnit {
var zoomFactor: CGFloat {
guard let device: AVCaptureDevice = (input as? AVCaptureDeviceInput)?.device else {
guard let device = capture?.device else {
return 0
}
return device.videoZoomFactor
}
func setZoomFactor(_ zoomFactor: CGFloat, ramping: Bool, withRate: Float) {
guard let device: AVCaptureDevice = (input as? AVCaptureDeviceInput)?.device,
guard let device = capture?.device,
1 <= zoomFactor && zoomFactor < device.activeFormat.videoMaxZoomFactor
else { return }
do {
@@ -34,8 +34,7 @@ extension AVVideoIOUnit {
self.screen = nil
return
}
input = nil
output = nil
capture = nil
if useScreenSize {
codec.width = screen.attributes["Width"] as! Int32
codec.height = screen.attributes["Height"] as! Int32
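
(Not part of the commit.) The setZoomFactor hunk above is cut off before the device configuration; as a hedged sketch, the usual AVFoundation sequence behind such a setter looks like the following — applyZoom is an illustrative name, and the exact HaishinKit body may differ:

import AVFoundation

// Illustrative only: typical lock/ramp/unlock handling for a zoom setter (iOS camera API).
func applyZoom(_ zoomFactor: CGFloat, to device: AVCaptureDevice, ramping: Bool, withRate rate: Float) {
    do {
        try device.lockForConfiguration()
        if ramping {
            device.ramp(toVideoZoomFactor: zoomFactor, withRate: rate)  // animate toward the target factor
        } else {
            device.videoZoomFactor = zoomFactor                         // jump immediately
        }
        device.unlockForConfiguration()
    } catch {
        print("failed to lock device for configuration: \(error)")
    }
}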

@@ -5,18 +5,20 @@ import AVFoundation
extension AVVideoIOUnit {
func attachScreen(_ screen: AVCaptureScreenInput?) {
mixer?.session.beginConfiguration()
output = nil
guard screen != nil else {
input = nil
defer {
mixer?.session.commitConfiguration()
}
guard let screen else {
capture = nil
return
}
input = screen
mixer?.session.addOutput(output)
output.setSampleBufferDelegate(self, queue: lockQueue)
mixer?.session.commitConfiguration()
if mixer?.session.isRunning ?? false {
mixer?.session.startRunning()
capture = AVCaptureIOUnit(screen) {
let output = AVCaptureVideoDataOutput()
output.alwaysDiscardsLateVideoFrames = true
output.videoSettings = videoSettings as? [String: Any]
return output
}
capture?.output.setSampleBufferDelegate(self, queue: lockQueue)
}
}
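
(Not part of the commit.) For orientation, a hedged macOS call-site sketch for the rewritten attachScreen; attachMainDisplay is an illustrative helper, not a library API, and in practice this unit is reached through the stream-level wrapper:

#if os(macOS)
import AVFoundation
import CoreGraphics

// Illustrative only: attach the main display; passing nil later runs the guard above,
// sets capture = nil, and the deferred commitConfiguration() applies the teardown.
func attachMainDisplay(to videoIO: AVVideoIOUnit) {
    let screen = AVCaptureScreenInput(displayID: CGMainDisplayID())
    screen?.minFrameDuration = CMTime(value: 1, timescale: 30)  // capture at roughly 30 fps
    videoIO.attachScreen(screen)
}
#endif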

@@ -22,37 +22,10 @@ final class AVAudioIOUnit: NSObject, AVIOUnit {
var muted = false
#if os(iOS) || os(macOS)
var input: AVCaptureDeviceInput? {
var capture: AVCaptureIOUnit<AVCaptureAudioDataOutput>? {
didSet {
guard let mixer: AVMixer = mixer, oldValue != input else {
return
}
if let oldValue: AVCaptureDeviceInput = oldValue {
mixer.session.removeInput(oldValue)
}
if let input: AVCaptureDeviceInput = input, mixer.session.canAddInput(input) {
mixer.session.addInput(input)
}
}
}
private var _output: AVCaptureAudioDataOutput?
var output: AVCaptureAudioDataOutput! {
get {
if _output == nil {
_output = AVCaptureAudioDataOutput()
}
return _output
}
set {
if _output == newValue {
return
}
if let output: AVCaptureAudioDataOutput = _output {
output.setSampleBufferDelegate(nil, queue: nil)
mixer?.session.removeOutput(output)
}
_output = newValue
oldValue?.output.setSampleBufferDelegate(nil, queue: nil)
oldValue?.detach(mixer?.session)
}
}
#endif
@@ -61,36 +34,31 @@ final class AVAudioIOUnit: NSObject, AVIOUnit {
#if os(iOS) || os(macOS)
deinit {
input = nil
output = nil
capture = nil
}
#endif
#if os(iOS) || os(macOS)
func attachAudio(_ audio: AVCaptureDevice?, automaticallyConfiguresApplicationAudioSession: Bool) throws {
guard let mixer: AVMixer = mixer else {
guard let mixer = mixer else {
return
}
mixer.session.beginConfiguration()
defer {
mixer.session.commitConfiguration()
}
output = nil
codec.invalidate()
guard let audio: AVCaptureDevice = audio else {
input = nil
capture = nil
return
}
input = try AVCaptureDeviceInput(device: audio)
capture = AVCaptureIOUnit(try AVCaptureDeviceInput(device: audio)) {
AVCaptureAudioDataOutput()
}
#if os(iOS)
mixer.session.automaticallyConfiguresApplicationAudioSession = automaticallyConfiguresApplicationAudioSession
#endif
mixer.session.addOutput(output)
output.setSampleBufferDelegate(self, queue: lockQueue)
capture?.output.setSampleBufferDelegate(self, queue: lockQueue)
}
#endif
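
(Not part of the commit.) A hedged call-site sketch for the refactored attachAudio; attachDefaultMicrophone is an illustrative helper:

import AVFoundation

// Illustrative only: attach the default microphone. Passing nil later sets
// capture = nil, and the didSet above detaches the old input/output from the session.
func attachDefaultMicrophone(to audioIO: AVAudioIOUnit) {
    do {
        try audioIO.attachAudio(
            AVCaptureDevice.default(for: .audio),
            automaticallyConfiguresApplicationAudioSession: false
        )
    } catch {
        print("could not attach microphone: \(error)")
    }
}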

@@ -0,0 +1,37 @@
#if os(iOS) || os(macOS)
import AVFoundation
import Foundation
struct AVCaptureIOUnit<T: AVCaptureOutput> {
let input: AVCaptureInput
let output: T
var device: AVCaptureDevice? {
(input as? AVCaptureDeviceInput)?.device
}
init(_ input: AVCaptureInput, factory: () -> T) {
self.input = input
self.output = factory()
}
func attach(_ session: AVCaptureSession?) {
guard let session else {
return
}
if session.canAddInput(input) {
session.addInput(input)
}
if session.canAddOutput(output) {
session.addOutput(output)
}
}
func detach(_ session: AVCaptureSession?) {
guard let session else {
return
}
session.removeInput(input)
session.removeOutput(output)
}
}
#endif
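
(Not part of the commit.) A minimal usage sketch of the new struct, mirroring how the audio and video IO units adopt it elsewhere in this commit; attachDefaultCamera and its session parameter are illustrative:

import AVFoundation

// Illustrative only: build a unit from an input plus an output factory,
// attach it inside a configuration block, and detach it on teardown.
func attachDefaultCamera(to session: AVCaptureSession) throws {
    guard let camera = AVCaptureDevice.default(for: .video) else { return }
    session.beginConfiguration()
    defer { session.commitConfiguration() }
    let unit = AVCaptureIOUnit(try AVCaptureDeviceInput(device: camera)) {
        let output = AVCaptureVideoDataOutput()
        output.alwaysDiscardsLateVideoFrames = true
        return output
    }
    unit.attach(session)            // adds input/output only if the session can accept them
    _ = unit.device?.localizedName  // the device accessor unwraps AVCaptureDeviceInput
    // Later: unit.detach(session) removes both again.
}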

@@ -76,12 +76,9 @@ final class AVVideoIOUnit: NSObject, AVIOUnit {
#if os(iOS) || os(macOS)
var fps: Float64 = AVMixer.defaultFPS {
didSet {
guard
let device: AVCaptureDevice = (input as? AVCaptureDeviceInput)?.device,
let data = device.actualFPS(fps) else {
guard let device = capture?.device, let data = device.actualFPS(fps) else {
return
}
fps = data.fps
codec.expectedFrameRate = data.fps
logger.info("\(data)")
@@ -101,7 +98,7 @@ final class AVVideoIOUnit: NSObject, AVIOUnit {
var videoSettings: [NSObject: AnyObject] = AVMixer.defaultVideoSettings {
didSet {
output.videoSettings = videoSettings as? [String: Any]
capture?.output.videoSettings = videoSettings as? [String: Any]
}
}
@@ -110,8 +107,10 @@ final class AVVideoIOUnit: NSObject, AVIOUnit {
guard isVideoMirrored != oldValue else {
return
}
for connection in output.connections where connection.isVideoMirroringSupported {
connection.isVideoMirrored = isVideoMirrored
capture?.output.connections.forEach { connection in
if connection.isVideoMirroringSupported {
connection.isVideoMirrored = isVideoMirrored
}
}
}
}
@@ -122,7 +121,7 @@ final class AVVideoIOUnit: NSObject, AVIOUnit {
guard orientation != oldValue else {
return
}
for connection in output.connections where connection.isVideoOrientationSupported {
capture?.output.connections.filter({ $0.isVideoOrientationSupported }).forEach { connection in
connection.videoOrientation = orientation
if torch {
setTorchMode(.on)
@@ -149,8 +148,7 @@ final class AVVideoIOUnit: NSObject, AVIOUnit {
return
}
let focusMode: AVCaptureDevice.FocusMode = continuousAutofocus ? .continuousAutoFocus : .autoFocus
guard let device: AVCaptureDevice = (input as? AVCaptureDeviceInput)?.device,
device.isFocusModeSupported(focusMode) else {
guard let device = capture?.device, device.isFocusModeSupported(focusMode) else {
logger.warn("focusMode(\(focusMode.rawValue)) is not supported")
return
}
@@ -167,14 +165,14 @@ final class AVVideoIOUnit: NSObject, AVIOUnit {
var focusPointOfInterest: CGPoint? {
didSet {
guard
let device: AVCaptureDevice = (input as? AVCaptureDeviceInput)?.device,
let point: CGPoint = focusPointOfInterest,
let device = capture?.device,
let focusPointOfInterest,
device.isFocusPointOfInterestSupported else {
return
}
do {
try device.lockForConfiguration()
device.focusPointOfInterest = point
device.focusPointOfInterest = focusPointOfInterest
device.focusMode = continuousAutofocus ? .continuousAutoFocus : .autoFocus
device.unlockForConfiguration()
} catch let error as NSError {
@@ -186,14 +184,14 @@ final class AVVideoIOUnit: NSObject, AVIOUnit {
var exposurePointOfInterest: CGPoint? {
didSet {
guard
let device: AVCaptureDevice = (input as? AVCaptureDeviceInput)?.device,
let point: CGPoint = exposurePointOfInterest,
let device = capture?.device,
let exposurePointOfInterest,
device.isExposurePointOfInterestSupported else {
return
}
do {
try device.lockForConfiguration()
device.exposurePointOfInterest = point
device.exposurePointOfInterest = exposurePointOfInterest
device.exposureMode = continuousExposure ? .continuousAutoExposure : .autoExpose
device.unlockForConfiguration()
} catch let error as NSError {
@@ -208,8 +206,7 @@ final class AVVideoIOUnit: NSObject, AVIOUnit {
return
}
let exposureMode: AVCaptureDevice.ExposureMode = continuousExposure ? .continuousAutoExposure : .autoExpose
guard let device: AVCaptureDevice = (input as? AVCaptureDeviceInput)?.device,
device.isExposureModeSupported(exposureMode) else {
guard let device = capture?.device, device.isExposureModeSupported(exposureMode) else {
logger.warn("exposureMode(\(exposureMode.rawValue)) is not supported")
return
}
@@ -229,46 +226,16 @@ final class AVVideoIOUnit: NSObject, AVIOUnit {
guard preferredVideoStabilizationMode != oldValue else {
return
}
for connection in output.connections {
capture?.output.connections.forEach { connection in
connection.preferredVideoStabilizationMode = preferredVideoStabilizationMode
}
}
}
#endif
private var _output: AVCaptureVideoDataOutput?
var output: AVCaptureVideoDataOutput! {
get {
if _output == nil {
_output = AVCaptureVideoDataOutput()
_output?.alwaysDiscardsLateVideoFrames = true
_output?.videoSettings = videoSettings as? [String: Any]
}
return _output!
}
set {
if _output == newValue {
return
}
if let output: AVCaptureVideoDataOutput = _output {
output.setSampleBufferDelegate(nil, queue: nil)
mixer?.session.removeOutput(output)
}
_output = newValue
}
}
var input: AVCaptureInput? {
var capture: AVCaptureIOUnit<AVCaptureVideoDataOutput>? {
didSet {
guard let mixer: AVMixer = mixer, oldValue != input else {
return
}
if let oldValue: AVCaptureInput = oldValue {
mixer.session.removeInput(oldValue)
}
if let input: AVCaptureInput = input, mixer.session.canAddInput(input) {
mixer.session.addInput(input)
}
oldValue?.output.setSampleBufferDelegate(nil, queue: nil)
oldValue?.detach(mixer?.session)
}
}
#endif
@@ -297,14 +264,13 @@ final class AVVideoIOUnit: NSObject, AVIOUnit {
}
}
#if os(iOS) || os(macOS)
input = nil
output = nil
capture = nil
#endif
}
#if os(iOS) || os(macOS)
func attachCamera(_ camera: AVCaptureDevice?) throws {
guard let mixer: AVMixer = mixer else {
guard let mixer else {
return
}
@@ -316,10 +282,9 @@ final class AVVideoIOUnit: NSObject, AVIOUnit {
}
}
output = nil
guard let camera: AVCaptureDevice = camera else {
guard let camera else {
mixer.mediaSync = .passthrough
input = nil
capture = nil
return
}
@@ -328,10 +293,14 @@ final class AVVideoIOUnit: NSObject, AVIOUnit {
screen = nil
#endif
input = try AVCaptureDeviceInput(device: camera)
mixer.session.addOutput(output)
for connection in output.connections {
capture = AVCaptureIOUnit(try AVCaptureDeviceInput(device: camera)) {
let output = AVCaptureVideoDataOutput()
output.alwaysDiscardsLateVideoFrames = true
output.videoSettings = videoSettings as? [String: Any]
return output
}
capture?.attach(mixer.session)
capture?.output.connections.forEach { connection in
if connection.isVideoOrientationSupported {
connection.videoOrientation = orientation
}
@@ -342,8 +311,7 @@ final class AVVideoIOUnit: NSObject, AVIOUnit {
connection.preferredVideoStabilizationMode = preferredVideoStabilizationMode
#endif
}
output.setSampleBufferDelegate(self, queue: lockQueue)
capture?.output.setSampleBufferDelegate(self, queue: lockQueue)
fps *= 1
position = camera.position
@@ -351,7 +319,7 @@ final class AVVideoIOUnit: NSObject, AVIOUnit {
}
func setTorchMode(_ torchMode: AVCaptureDevice.TorchMode) {
guard let device: AVCaptureDevice = (input as? AVCaptureDeviceInput)?.device, device.isTorchModeSupported(torchMode) else {
guard let device = capture?.device, device.isTorchModeSupported(torchMode) else {
logger.warn("torchMode(\(torchMode)) is not supported")
return
}
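
(Not part of the commit.) Because the new capture property detaches its previous value in didSet, switching cameras is just another attach call; a hedged sketch, with switchToFrontCamera as an illustrative helper:

import AVFoundation

// Illustrative only: the old AVCaptureIOUnit is removed from the session by
// didSet before the replacement is attached.
func switchToFrontCamera(on videoIO: AVVideoIOUnit) throws {
    let front = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front)
    try videoIO.attachCamera(front)
}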

@@ -430,14 +430,14 @@ open class RTMPStream: NetStream {
open func createMetaData() -> ASObject {
metadata.removeAll()
#if os(iOS) || os(macOS)
if let _: AVCaptureInput = mixer.videoIO.input {
if mixer.videoIO.capture != nil {
metadata["width"] = mixer.videoIO.codec.width
metadata["height"] = mixer.videoIO.codec.height
metadata["framerate"] = mixer.videoIO.fps
metadata["videocodecid"] = FLVVideoCodec.avc.rawValue
metadata["videodatarate"] = mixer.videoIO.codec.bitrate / 1000
}
if let _: AVCaptureInput = mixer.audioIO.input {
if mixer.audioIO.capture != nil {
metadata["audiocodecid"] = FLVAudioCodec.aac.rawValue
metadata["audiodatarate"] = mixer.audioIO.codec.bitrate / 1000
}
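
(Not part of the commit.) The effect of the change above is that the onMetaData keys now depend on whether the capture units are non-nil rather than on the old input properties; a hedged sketch of inspecting the result, with logMetaData as an illustrative helper:

// Illustrative only: width/height/framerate appear once videoIO.capture is set,
// the audio keys once audioIO.capture is set.
func logMetaData(of stream: RTMPStream) {
    let metadata = stream.createMetaData()
    if metadata["width"] != nil {
        print("video metadata:", metadata)
    }
    if metadata["audiocodecid"] != nil {
        print("audio metadata:", metadata)
    }
}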