// HaishinKit.swift/Sources/Net/NetStream.swift

import AVFoundation
import CoreImage
/// The `NetStream` class is the foundation of RTMPStream and HTTPStream.
open class NetStream: NSObject {
/// The serial queue that synchronizes access to the stream.
public let lockQueue: DispatchQueue = {
let queue = DispatchQueue(label: "com.haishinkit.HaishinKit.NetStream.lock")
queue.setSpecific(key: queueKey, value: queueValue)
return queue
}()
private static let queueKey = DispatchSpecificKey<UnsafeMutableRawPointer>()
private static let queueValue = UnsafeMutableRawPointer.allocate(byteCount: 1, alignment: 1)
/// The mixer object.
public private(set) var mixer = AVMixer()
/// Specifies the metadata for the stream.
public var metadata: [String: Any?] = [:]
/// Specifies the `CIContext` object used when rendering video.
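///
/// A minimal sketch of supplying a Metal-backed context; the `stream` instance and the Metal device lookup are assumptions, not part of this file:
/// ```
/// if let device = MTLCreateSystemDefaultDevice() {
///     stream.context = CIContext(mtlDevice: device)
/// }
/// ```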
public var context: CIContext? {
get {
mixer.videoIO.context
}
set {
mixer.videoIO.context = newValue
}
}
#if os(iOS) || os(macOS)
/// Specifies whether the device torch is turned on (`true`) or off (`false`).
public var torch: Bool {
get {
var torch: Bool = false
lockQueue.sync {
torch = self.mixer.videoIO.torch
}
return torch
}
set {
lockQueue.async {
self.mixer.videoIO.torch = newValue
}
}
}
/// Specifies the video orientation for the stream.
public var videoOrientation: AVCaptureVideoOrientation {
get {
mixer.videoIO.orientation
}
set {
mixer.videoIO.orientation = newValue
}
}
#endif
/// Specifies the audio compression properties.
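///
/// A minimal sketch; the `stream` instance and the option key shown are assumptions about `AudioCodec.Option`, which is defined elsewhere:
/// ```
/// stream.audioSettings = [
///     .bitrate: 64 * 1000
/// ]
/// ```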
public var audioSettings: Setting<AudioCodec, AudioCodec.Option> {
get {
mixer.audioIO.codec.settings
}
set {
mixer.audioIO.codec.settings = newValue
}
}
/// Specifies the video compression properties.
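///
/// A minimal sketch; the `stream` instance and the option keys shown are assumptions about `VideoCodec.Option`, which is defined elsewhere:
/// ```
/// stream.videoSettings = [
///     .width: 720,
///     .height: 1280,
///     .bitrate: 160 * 1000
/// ]
/// ```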
public var videoSettings: Setting<VideoCodec, VideoCodec.Option> {
get {
mixer.videoIO.encoder.settings
}
set {
mixer.videoIO.encoder.settings = newValue
}
}
/// Specifies the AVCaptureSession properties.
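///
/// A minimal sketch; the `stream` instance and the option keys shown are assumptions about `AVMixer.Option`, which is defined elsewhere:
/// ```
/// stream.captureSettings = [
///     .fps: 30,
///     .sessionPreset: AVCaptureSession.Preset.hd1280x720
/// ]
/// ```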
open var captureSettings: Setting<AVMixer, AVMixer.Option> {
get {
mixer.settings
}
set {
mixer.settings = newValue
}
}
/// Specifies the recorder properties.
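///
/// A minimal sketch; the keys follow AVFoundation's asset-writer output settings, the `stream` instance and concrete values are illustrative only:
/// ```
/// stream.recorderSettings = [
///     .audio: [
///         AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
///         AVNumberOfChannelsKey: 2
///     ],
///     .video: [
///         AVVideoCodecKey: AVVideoCodecType.h264,
///         AVVideoWidthKey: 1280,
///         AVVideoHeightKey: 720
///     ]
/// ]
/// ```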
public var recorderSettings: [AVMediaType: [String: Any]] {
get {
mixer.recorder.outputSettings
}
set {
mixer.recorder.outputSettings = newValue
}
}
deinit {
metadata.removeAll()
}
#if os(iOS) || os(macOS)
/// Attaches the camera object.
/// - Warning: This method can't be used together with the appendSampleBuffer method.
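///
/// A minimal usage sketch; the `RTMPStream` subclass and the device lookup are assumptions, not part of this file:
/// ```
/// let stream = RTMPStream(connection: RTMPConnection())
/// let camera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back)
/// stream.attachCamera(camera) { error in
///     print(error)
/// }
/// ```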
open func attachCamera(_ camera: AVCaptureDevice?, onError: ((_ error: NSError) -> Void)? = nil) {
lockQueue.async {
do {
try self.mixer.videoIO.attachCamera(camera)
} catch let error as NSError {
onError?(error)
}
}
}
/// Attaches the microphone object.
/// - Warning: This method can't be used together with the appendSampleBuffer method.
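///
/// A minimal usage sketch; the `stream` instance is an assumption:
/// ```
/// stream.attachAudio(AVCaptureDevice.default(for: .audio)) { error in
///     print(error)
/// }
/// ```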
open func attachAudio(_ audio: AVCaptureDevice?, automaticallyConfiguresApplicationAudioSession: Bool = false, onError: ((_ error: NSError) -> Void)? = nil) {
lockQueue.async {
do {
try self.mixer.audioIO.attachAudio(audio, automaticallyConfiguresApplicationAudioSession: automaticallyConfiguresApplicationAudioSession)
} catch let error as NSError {
onError?(error)
}
}
}
/// Sets the focus and exposure points of interest.
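///
/// A minimal sketch; the `stream` instance is an assumption, and both points are typically given in AVCaptureDevice's normalized (0.0-1.0) coordinate space:
/// ```
/// stream.setPointOfInterest(CGPoint(x: 0.5, y: 0.5), exposure: CGPoint(x: 0.5, y: 0.5))
/// ```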
public func setPointOfInterest(_ focus: CGPoint, exposure: CGPoint) {
mixer.videoIO.focusPointOfInterest = focus
mixer.videoIO.exposurePointOfInterest = exposure
}
#endif
/// Appends a CMSampleBuffer.
/// - Warning: This method can't be used together with the attachCamera or attachAudio methods.
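///
/// A minimal sketch of feeding buffers from a ReplayKit broadcast extension; the `stream` instance and the call site are assumptions:
/// ```
/// // Inside RPBroadcastSampleHandler.processSampleBuffer(_:with:):
/// stream.appendSampleBuffer(sampleBuffer, withType: .video)
/// ```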
open func appendSampleBuffer(_ sampleBuffer: CMSampleBuffer, withType: AVMediaType, options: [NSObject: AnyObject]? = nil) {
switch withType {
case .audio:
mixer.audioIO.lockQueue.async {
self.mixer.audioIO.appendSampleBuffer(sampleBuffer)
}
case .video:
mixer.videoIO.lockQueue.async {
self.mixer.videoIO.encodeSampleBuffer(sampleBuffer)
}
default:
break
}
}
/// Registers a video effect.
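///
/// A minimal sketch; the `stream` instance and the `execute(_:info:)` override are assumptions about the `VideoEffect` API, which is defined elsewhere:
/// ```
/// final class MonochromeEffect: VideoEffect {
///     let filter = CIFilter(name: "CIPhotoEffectMono")
///     override func execute(_ image: CIImage, info: CMSampleBuffer?) -> CIImage {
///         filter?.setValue(image, forKey: kCIInputImageKey)
///         return filter?.outputImage ?? image
///     }
/// }
/// _ = stream.registerVideoEffect(MonochromeEffect())
/// ```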
public func registerVideoEffect(_ effect: VideoEffect) -> Bool {
mixer.videoIO.lockQueue.sync {
self.mixer.videoIO.registerEffect(effect)
}
}
/// Unregisters a video effect.
public func unregisterVideoEffect(_ effect: VideoEffect) -> Bool {
mixer.videoIO.lockQueue.sync {
self.mixer.videoIO.unregisterEffect(effect)
}
}
/// Registers an audio effect.
public func registerAudioEffect(_ effect: AudioEffect) -> Bool {
mixer.audioIO.lockQueue.sync {
self.mixer.audioIO.registerEffect(effect)
}
}
/// Unregisters an audio effect.
public func unregisterAudioEffect(_ effect: AudioEffect) -> Bool {
mixer.audioIO.lockQueue.sync {
self.mixer.audioIO.unregisterEffect(effect)
}
}
}