refs #58, Update README

parent 04ede35edc
commit 45b64b58e6
@@ -13,16 +13,19 @@ final class AppDelegate: UIResponder, UIApplicationDelegate {
         // Logboard.with(HaishinKitIdentifier).level = .trace
         let session = AVAudioSession.sharedInstance()
         do {
-            try session.setPreferredSampleRate(44_100)
             // https://stackoverflow.com/questions/51010390/avaudiosession-setcategory-swift-4-2-ios-12-play-sound-on-silent
             if #available(iOS 10.0, *) {
                 try session.setCategory(.playAndRecord, mode: .default, options: [.defaultToSpeaker, .allowBluetooth])
             } else {
-                session.perform(NSSelectorFromString("setCategory:withOptions:error:"), with: AVAudioSession.Category.playAndRecord, with: [AVAudioSession.CategoryOptions.allowBluetooth])
-                try? session.setMode(.default)
+                session.perform(NSSelectorFromString("setCategory:withOptions:error:"), with: AVAudioSession.Category.playAndRecord, with: [
+                    AVAudioSession.CategoryOptions.allowBluetooth,
+                    AVAudioSession.CategoryOptions.defaultToSpeaker]
+                )
+                try session.setMode(.default)
             }
             try session.setActive(true)
         } catch {
+            logger.error(error)
         }
         return true
     }
@@ -4,8 +4,6 @@ import Photos
 import UIKit
 import VideoToolbox
 
-let sampleRate: Double = 44_100
-
 final class ExampleRecorderDelegate: DefaultAVRecorderDelegate {
     static let `default` = ExampleRecorderDelegate()
 
@@ -62,9 +60,6 @@ final class LiveViewController: UIViewController {
             .width: 720,
             .height: 1280
         ]
-        rtmpStream.audioSettings = [
-            .sampleRate: sampleRate
-        ]
         rtmpStream.mixer.recorder.delegate = ExampleRecorderDelegate.shared
 
         videoBitrateSlider?.value = Float(RTMPStream.defaultVideoBitrate) / 1024
@@ -1,6 +1,6 @@
 struct Preference {
     static var defaultInstance = Preference()
 
-    var uri: String? = "rtmp://192.168.1.4/live"
+    var uri: String? = "rtmp://192.168.1.8/live"
     var streamName: String? = "live"
 }
@@ -15,15 +15,8 @@ open class SampleHandler: RPBroadcastSampleHandler {
         logger.level = .debug
         logger.appender = socket
         */
-        print("broadcastStarted")
-        super.broadcastStarted(withSetupInfo: setupInfo)
-        guard
-            let endpointURL: String = setupInfo?["endpointURL"] as? String,
-            let streamName: String = setupInfo?["streamName"] as? String else {
-            return
-        }
-        broadcaster.streamName = streamName
-        broadcaster.connect(endpointURL, arguments: nil)
+        broadcaster.streamName = Preference.defaultInstance.streamName
+        broadcaster.connect(Preference.defaultInstance.uri, arguments: nil)
     }
 
     override open func processSampleBuffer(_ sampleBuffer: CMSampleBuffer, with sampleBufferType: RPSampleBufferType) {
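With this hunk the broadcast extension stops parsing `endpointURL`/`streamName` out of `setupInfo` and picks up the shared `Preference` values instead. Assembled from the context and `+` lines above, the method reads as follows; the enclosing signature is assumed to be RPBroadcastSampleHandler's standard `broadcastStarted(withSetupInfo:)` override, which the removed `super` call strongly suggests:

```swift
override open func broadcastStarted(withSetupInfo setupInfo: [String: NSObject]?) {
    /*
    logger.level = .debug
    logger.appender = socket
    */
    broadcaster.streamName = Preference.defaultInstance.streamName
    broadcaster.connect(Preference.defaultInstance.uri, arguments: nil)
}
```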
@@ -40,8 +40,8 @@ final class MainViewController: NSViewController {
 
     override func viewWillAppear() {
         super.viewWillAppear()
-        //rtmpStream.attachAudio(DeviceUtil.device(withLocalizedName: audioPopUpButton.titleOfSelectedItem!, mediaType: .audio))
-        //rtmpStream.attachCamera(DeviceUtil.device(withLocalizedName: cameraPopUpButton.titleOfSelectedItem!, mediaType: .video))
+        rtmpStream.attachAudio(DeviceUtil.device(withLocalizedName: audioPopUpButton.titleOfSelectedItem!, mediaType: .audio))
+        rtmpStream.attachCamera(DeviceUtil.device(withLocalizedName: cameraPopUpButton.titleOfSelectedItem!, mediaType: .video))
         lfView?.attachStream(rtmpStream)
     }
 
@@ -155,7 +155,7 @@ final class MainViewController: NSViewController {
         }
         switch code {
         case RTMPConnection.Code.connectSuccess.rawValue:
-            rtmpStream!.play(Preference.defaultInstance.streamName)
+            rtmpStream!.publish(Preference.defaultInstance.streamName)
         default:
             break
         }
@@ -354,6 +354,10 @@
 		29FD1B5422FF1C2D0095A0BE /* VTCompressionSessionPropertyKey.swift in Sources */ = {isa = PBXBuildFile; fileRef = 29FD1B5322FF1C2D0095A0BE /* VTCompressionSessionPropertyKey.swift */; };
 		29FD1B5522FF1C2D0095A0BE /* VTCompressionSessionPropertyKey.swift in Sources */ = {isa = PBXBuildFile; fileRef = 29FD1B5322FF1C2D0095A0BE /* VTCompressionSessionPropertyKey.swift */; };
 		29FD1B5622FF1C2D0095A0BE /* VTCompressionSessionPropertyKey.swift in Sources */ = {isa = PBXBuildFile; fileRef = 29FD1B5322FF1C2D0095A0BE /* VTCompressionSessionPropertyKey.swift */; };
+		BC44A1A923D31E92002D4297 /* AudioBuffer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC44A1A823D31E92002D4297 /* AudioBuffer.swift */; };
+		BC44A1AA23D31E92002D4297 /* AudioBuffer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC44A1A823D31E92002D4297 /* AudioBuffer.swift */; };
+		BC44A1AB23D31E92002D4297 /* AudioBuffer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC44A1A823D31E92002D4297 /* AudioBuffer.swift */; };
+		BC4C9EAC23F00F3A004A14F2 /* Preference.swift in Sources */ = {isa = PBXBuildFile; fileRef = 291468161E581C7D00E619BA /* Preference.swift */; };
 		BC9CFA9323BDE8B700917EEF /* NetStreamRenderer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC9CFA9223BDE8B700917EEF /* NetStreamRenderer.swift */; };
 		BC9CFA9423BDE8B700917EEF /* NetStreamRenderer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC9CFA9223BDE8B700917EEF /* NetStreamRenderer.swift */; };
 		BC9CFA9523BDE8B700917EEF /* NetStreamRenderer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC9CFA9223BDE8B700917EEF /* NetStreamRenderer.swift */; };
@@ -645,6 +649,7 @@
 		29F97F232336A4FA00A4C317 /* SettingTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SettingTests.swift; sourceTree = "<group>"; };
 		29FD1B4F22FF13190095A0BE /* VTSessionPropertyKey.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VTSessionPropertyKey.swift; sourceTree = "<group>"; };
 		29FD1B5322FF1C2D0095A0BE /* VTCompressionSessionPropertyKey.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VTCompressionSessionPropertyKey.swift; sourceTree = "<group>"; };
+		BC44A1A823D31E92002D4297 /* AudioBuffer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioBuffer.swift; sourceTree = "<group>"; };
 		BC9CFA9223BDE8B700917EEF /* NetStreamRenderer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NetStreamRenderer.swift; sourceTree = "<group>"; };
 /* End PBXFileReference section */
 
@@ -747,6 +752,7 @@
 			children = (
 				298BCF321DD4C44A007FF86A /* AnyUtil.swift */,
 				29DC17B221D0CC0600E26CED /* Atomic.swift */,
+				BC44A1A823D31E92002D4297 /* AudioBuffer.swift */,
 				29B876B81CD70B3900FC07DA /* ByteArray.swift */,
 				294B2D3123785E3800CE7BDC /* CircularBuffer.swift */,
 				29B876631CD70AB300FC07DA /* Constants.swift */,
@@ -1593,6 +1599,7 @@
 			buildActionMask = 2147483647;
 			files = (
 				2930D0411E12D35400DA2DC5 /* SampleHandler.swift in Sources */,
+				BC4C9EAC23F00F3A004A14F2 /* Preference.swift in Sources */,
 				2957473E1E34F3DB00EF056E /* RTMPBroadcaster.swift in Sources */,
 			);
 			runOnlyForDeploymentPostprocessing = 0;
@@ -1626,6 +1633,7 @@
 				294B2D3223785E3800CE7BDC /* CircularBuffer.swift in Sources */,
 				2958910E1EEB8D3C00CE51E1 /* FLVVideoCodec.swift in Sources */,
 				299B13271D3B751400A1E8F5 /* HKView.swift in Sources */,
+				BC44A1A923D31E92002D4297 /* AudioBuffer.swift in Sources */,
 				2926A9EC1DE6B71E0074E3D2 /* MachUtil.swift in Sources */,
 				29B876AF1CD70B2800FC07DA /* RTMPChunk.swift in Sources */,
 				29D3D4CF1ED04C4C00DD4AA6 /* VideoIOComponent+Extension.swift in Sources */,
@@ -1788,6 +1796,7 @@
 				2992D1541ED04A2C008D9DC1 /* VideoIOComponent+Extension-macOS.swift in Sources */,
 				2926A9EF1DE6B83F0074E3D2 /* MachUtil.swift in Sources */,
 				2976A47F1D48FD6900B53EF2 /* AVRecorder.swift in Sources */,
+				BC44A1AA23D31E92002D4297 /* AudioBuffer.swift in Sources */,
 				29B877071CD70D5A00FC07DA /* SoundTransform.swift in Sources */,
 				29B877081CD70D5A00FC07DA /* VideoIOComponent.swift in Sources */,
 				294CC9B422D9BEC000F9DD5C /* DisplayLink-macOS.swift in Sources */,
@@ -1881,6 +1890,7 @@
 				29EB3DF01ED05768001CAE8B /* H264Encoder.swift in Sources */,
 				29EB3E351ED05A33001CAE8B /* DeviceUtil.swift in Sources */,
 				29DC17B521D0CC0600E26CED /* Atomic.swift in Sources */,
+				BC44A1AB23D31E92002D4297 /* AudioBuffer.swift in Sources */,
 				29EB3E261ED05A07001CAE8B /* RTMPStream.swift in Sources */,
 				29DF20642312A3DD004057C3 /* RTMPNWSocket.swift in Sources */,
 				29EB3E111ED05881001CAE8B /* IOComponent.swift in Sources */,
README.md (28 changed lines)
@@ -104,16 +104,19 @@ Make sure you setup and activate your AVAudioSession.
 import AVFoundation
 let session = AVAudioSession.sharedInstance()
 do {
-    try session.setPreferredSampleRate(44_100)
     // https://stackoverflow.com/questions/51010390/avaudiosession-setcategory-swift-4-2-ios-12-play-sound-on-silent
     if #available(iOS 10.0, *) {
         try session.setCategory(.playAndRecord, mode: .default, options: [.defaultToSpeaker, .allowBluetooth])
     } else {
-        session.perform(NSSelectorFromString("setCategory:withOptions:error:"), with: AVAudioSession.Category.playAndRecord, with: [AVAudioSession.CategoryOptions.allowBluetooth])
+        session.perform(NSSelectorFromString("setCategory:withOptions:error:"), with: AVAudioSession.Category.playAndRecord, with: [
+            AVAudioSession.CategoryOptions.allowBluetooth,
+            AVAudioSession.CategoryOptions.defaultToSpeaker]
+        )
+        try session.setMode(.default)
     }
-    try session.setMode(AVAudioSessionModeDefault)
     try session.setActive(true)
 } catch {
+    print(error)
 }
 ```
 ## RTMP Usage
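Assembled from the context and `+` lines above, the README's session setup after this change reads:

```swift
import AVFoundation
let session = AVAudioSession.sharedInstance()
do {
    // https://stackoverflow.com/questions/51010390/avaudiosession-setcategory-swift-4-2-ios-12-play-sound-on-silent
    if #available(iOS 10.0, *) {
        try session.setCategory(.playAndRecord, mode: .default, options: [.defaultToSpeaker, .allowBluetooth])
    } else {
        session.perform(NSSelectorFromString("setCategory:withOptions:error:"), with: AVAudioSession.Category.playAndRecord, with: [
            AVAudioSession.CategoryOptions.allowBluetooth,
            AVAudioSession.CategoryOptions.defaultToSpeaker]
        )
        try session.setMode(.default)
    }
    try session.setActive(true)
} catch {
    print(error)
}
```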
@@ -143,24 +146,6 @@ rtmpStream.publish("streamName")
 
 ### Settings
 ```swift
-let sampleRate:Double = 44_100
-
-// see: #58
-#if(iOS)
-let session = AVAudioSession.sharedInstance()
-do {
-    try session.setPreferredSampleRate(44_100)
-    // https://stackoverflow.com/questions/51010390/avaudiosession-setcategory-swift-4-2-ios-12-play-sound-on-silent
-    if #available(iOS 10.0, *) {
-        try session.setCategory(.playAndRecord, mode: .default, options: [.allowBluetooth])
-    } else {
-        session.perform(NSSelectorFromString("setCategory:withOptions:error:"), with: AVAudioSession.Category.playAndRecord, with: [AVAudioSession.CategoryOptions.allowBluetooth])
-    }
-    try session.setActive(true)
-} catch {
-}
-#endif
-
 var rtmpStream = RTMPStream(connection: rtmpConnection)
 
 rtmpStream.captureSettings = [
@@ -173,7 +158,6 @@ rtmpStream.captureSettings = [
 rtmpStream.audioSettings = [
     .muted: false, // mute audio
     .bitrate: 32 * 1000,
-    .sampleRate: sampleRate,
]
 rtmpStream.videoSettings = [
     .width: 640, // video output width
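With the pinned `.sampleRate` gone, the README's audio settings example is simply:

```swift
rtmpStream.audioSettings = [
    .muted: false, // mute audio
    .bitrate: 32 * 1000,
]
```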
@@ -42,7 +42,6 @@ public class AudioConverter {
     /// 0 means according to a input source
     public static let defaultSampleRate: Double = 0
     public static let defaultMaximumBuffers: Int = 1
-    public static let defaultBufferListSize: Int = AudioBufferList.sizeInBytes(maximumBuffers: 1)
 
     public var destination: Destination = .aac
     public weak var delegate: AudioConverterDelegate?
@@ -52,6 +51,7 @@ public class AudioConverter {
             settings.observer = self
         }
     }
+    private static let numSamples: Int = 1024
 
     var muted: Bool = false
     var bitrate: UInt32 = AudioConverter.defaultBitrate {
@@ -83,30 +83,19 @@ public class AudioConverter {
     var lockQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.AudioConverter.lock")
     var inSourceFormat: AudioStreamBasicDescription? {
         didSet {
-            logger.info("\(String(describing: self.inSourceFormat))")
-            guard let inSourceFormat: AudioStreamBasicDescription = self.inSourceFormat else {
+            guard let inSourceFormat = inSourceFormat else {
                 return
             }
-            let nonInterleaved: Bool = inSourceFormat.mFormatFlags & kAudioFormatFlagIsNonInterleaved != 0
+            logger.info("\(String(describing: self.inSourceFormat))")
+            let nonInterleaved = inSourceFormat.mFormatFlags & kAudioFormatFlagIsNonInterleaved != 0
             maximumBuffers = nonInterleaved ? Int(inSourceFormat.mChannelsPerFrame) : AudioConverter.defaultMaximumBuffers
-            bufferListSize = nonInterleaved ? AudioBufferList.sizeInBytes(maximumBuffers: maximumBuffers) : AudioConverter.defaultBufferListSize
+            currentAudioBuffer = AudioBuffer(inSourceFormat, numSamples: AudioConverter.numSamples)
         }
     }
     var effects: Set<AudioEffect> = []
-    private var maximumBuffers: Int = AudioConverter.defaultMaximumBuffers {
-        didSet {
-            guard oldValue != maximumBuffers else {
-                return
-            }
-            currentBufferList.unsafeMutablePointer.deallocate()
-            currentBufferList = AudioBufferList.allocate(maximumBuffers: maximumBuffers)
-        }
-    }
-    private var filled = false
-    private var bufferListSize: Int = AudioConverter.defaultBufferListSize
-    private lazy var currentBufferList: UnsafeMutableAudioBufferListPointer = {
-        AudioBufferList.allocate(maximumBuffers: maximumBuffers)
-    }()
+    private let numSamples = AudioConverter.numSamples
+    private var maximumBuffers: Int = AudioConverter.defaultMaximumBuffers
+    private var currentAudioBuffer = AudioBuffer(AudioStreamBasicDescription(mSampleRate: 0, mFormatID: 0, mFormatFlags: 0, mBytesPerPacket: 0, mFramesPerPacket: 0, mBytesPerFrame: 0, mChannelsPerFrame: 1, mBitsPerChannel: 0, mReserved: 0))
     private var _inDestinationFormat: AudioStreamBasicDescription?
     private var inDestinationFormat: AudioStreamBasicDescription {
         get {
@@ -154,10 +143,6 @@ public class AudioConverter {
             settings.observer = self
         }
     }
-
-    deinit {
-        currentBufferList.unsafeMutablePointer.deallocate()
-    }
 
     private var _converter: AudioConverterRef?
     private var converter: AudioConverterRef {
         var status: OSStatus = noErr
@@ -179,14 +164,19 @@ public class AudioConverter {
     }
 
     public func encodeBytes(_ bytes: UnsafeMutableRawPointer?, count: Int, presentationTimeStamp: CMTime) {
-        currentBufferList.unsafeMutablePointer.pointee.mBuffers.mNumberChannels = 1
-        currentBufferList.unsafeMutablePointer.pointee.mBuffers.mData = bytes
-        currentBufferList.unsafeMutablePointer.pointee.mBuffers.mDataByteSize = UInt32(count)
-        convert(Int(1024 * destination.bytesPerFrame), presentationTimeStamp: presentationTimeStamp)
+        guard isRunning.value else {
+            currentAudioBuffer.clear()
+            return
+        }
+        currentAudioBuffer.input.unsafeMutablePointer.pointee.mBuffers.mNumberChannels = 1
+        currentAudioBuffer.input.unsafeMutablePointer.pointee.mBuffers.mData = bytes
+        currentAudioBuffer.input.unsafeMutablePointer.pointee.mBuffers.mDataByteSize = UInt32(count)
+        convert(numSamples * Int(destination.bytesPerFrame), presentationTimeStamp: presentationTimeStamp)
     }
 
-    public func encodeSampleBuffer(_ sampleBuffer: CMSampleBuffer) {
-        guard let format: CMAudioFormatDescription = sampleBuffer.formatDescription, isRunning.value else {
+    public func encodeSampleBuffer(_ sampleBuffer: CMSampleBuffer, offset: Int = 0) {
+        guard let format = sampleBuffer.formatDescription, sampleBuffer.isValid && isRunning.value else {
+            currentAudioBuffer.clear()
             return
         }
@@ -194,42 +184,28 @@ public class AudioConverter {
             inSourceFormat = format.streamBasicDescription?.pointee
         }
 
-        var blockBuffer: CMBlockBuffer?
-        CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
-            sampleBuffer,
-            bufferListSizeNeededOut: nil,
-            bufferListOut: currentBufferList.unsafeMutablePointer,
-            bufferListSize: bufferListSize,
-            blockBufferAllocator: kCFAllocatorDefault,
-            blockBufferMemoryAllocator: kCFAllocatorDefault,
-            flags: 0,
-            blockBufferOut: &blockBuffer
-        )
-
-        if blockBuffer == nil {
-            logger.warn("IllegalState for blockBuffer")
-            return
-        }
-
-        if !effects.isEmpty {
-            for effect in effects {
-                effect.execute(currentBufferList, format: inSourceFormat)
-            }
-        }
-
-        if muted {
-            for i in 0..<currentBufferList.count {
-                memset(currentBufferList[i].mData, 0, Int(currentBufferList[i].mDataByteSize))
-            }
-        }
-
-        convert(blockBuffer!.dataLength, presentationTimeStamp: sampleBuffer.presentationTimeStamp)
+        do {
+            let numSamples = try currentAudioBuffer.write(sampleBuffer, offset: offset)
+            if currentAudioBuffer.isReady {
+                for effect in effects {
+                    effect.execute(currentAudioBuffer.input, format: inSourceFormat)
+                }
+                if muted {
+                    currentAudioBuffer.muted()
+                }
+                convert(currentAudioBuffer.maxLength, presentationTimeStamp: currentAudioBuffer.presentationTimeStamp)
+            }
+            if numSamples < sampleBuffer.numSamples {
+                encodeSampleBuffer(sampleBuffer, offset: numSamples)
+            }
+        } catch {
+            logger.error(error)
+        }
     }
 
     @inline(__always)
     private func convert(_ dataBytesSize: Int, presentationTimeStamp: CMTime) {
-        filled = false
-        var finished: Bool = false
+        var finished = false
         repeat {
             var ioOutputDataPacketSize: UInt32 = destination.packetSize
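The hunk above replaces the old copy-the-whole-CMBlockBuffer approach with a fixed accumulator: `write(_:offset:)` fills `currentAudioBuffer` until it holds 1024 samples, conversion fires on `isReady`, and leftover samples are re-fed through the new `offset` parameter. A self-contained sketch of that policy, with illustrative numbers only (not part of the commit):

```swift
// Accumulate-then-convert: conversion only fires on full 1024-sample buffers.
let samplesPerConversion = 1024   // mirrors AudioConverter.numSamples
let bytesPerFrame = 2             // e.g. 16-bit mono PCM, for illustration
var buffered = 0                  // mirrors AudioBuffer's internal index

func append(_ incomingSamples: Int) {
    var remaining = incomingSamples
    while remaining > 0 {
        // Take as many samples as still fit into the accumulator.
        let take = min(samplesPerConversion - buffered, remaining)
        buffered += take
        remaining -= take
        if buffered == samplesPerConversion {               // AudioBuffer.isReady
            print("convert \(samplesPerConversion * bytesPerFrame) bytes") // maxLength
            buffered = 0                                    // AudioBuffer.clear()
        }
    }
}

append(1152) // converts once and carries 128 samples into the next buffer
```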
@@ -241,7 +217,7 @@ public class AudioConverter {
             outOutputData[i].mData = UnsafeMutableRawPointer.allocate(byteCount: dataBytesSize, alignment: 0)
         }
 
-        let status: OSStatus = AudioConverterFillComplexBuffer(
+        let status = AudioConverterFillComplexBuffer(
             converter,
             inputDataProc,
             Unmanaged.passUnretained(self).toOpaque(),
@@ -252,7 +228,8 @@ public class AudioConverter {
 
         switch status {
         // kAudioConverterErr_InvalidInputSize: perhaps mistake. but can support macOS BuiltIn Mic #61
-        case noErr, kAudioConverterErr_InvalidInputSize:
+        case noErr,
+             kAudioConverterErr_InvalidInputSize:
             delegate?.sampleOutput(
                 audio: outOutputData,
                 presentationTimeStamp: presentationTimeStamp
@@ -292,19 +269,20 @@ public class AudioConverter {
         _ ioNumberDataPackets: UnsafeMutablePointer<UInt32>,
         ioData: UnsafeMutablePointer<AudioBufferList>,
         outDataPacketDescription: UnsafeMutablePointer<UnsafeMutablePointer<AudioStreamPacketDescription>?>?) -> OSStatus {
-        guard !filled else {
+        guard currentAudioBuffer.isReady else {
             ioNumberDataPackets.pointee = 0
             return -1
         }
 
-        memcpy(ioData, currentBufferList.unsafePointer, bufferListSize)
+        memcpy(ioData, currentAudioBuffer.input.unsafePointer, currentAudioBuffer.listSize)
         ioNumberDataPackets.pointee = 1
 
         if destination == .pcm && outDataPacketDescription != nil {
-            audioStreamPacketDescription.mDataByteSize = currentBufferList.unsafePointer.pointee.mBuffers.mDataByteSize
+            audioStreamPacketDescription.mDataByteSize = currentAudioBuffer.input.unsafePointer.pointee.mBuffers.mDataByteSize
             outDataPacketDescription?.pointee = audioStreamPacketDescriptionPointer
         }
-        filled = true
+
+        currentAudioBuffer.clear()
 
         return noErr
     }
@@ -10,6 +10,13 @@ extension CMSampleBuffer {
         }
     }
 
+    @available(iOS, obsoleted: 13.0)
+    @available(tvOS, obsoleted: 13.0)
+    @available(macOS, obsoleted: 10.15)
+    var isValid: Bool {
+        CMSampleBufferIsValid(self)
+    }
+
     @available(iOS, obsoleted: 13.0)
     @available(tvOS, obsoleted: 13.0)
     @available(macOS, obsoleted: 10.15)
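The added `isValid` wraps `CMSampleBufferIsValid` for SDKs that predate the native `CMSampleBuffer.isValid` accessor (iOS 13 / tvOS 13 / macOS 10.15, hence the `obsoleted` annotations). The `AudioConverter` hunk earlier in this commit consumes it in its entry guard:

```swift
// From AudioConverter.encodeSampleBuffer above:
guard let format = sampleBuffer.formatDescription, sampleBuffer.isValid && isRunning.value else {
    currentAudioBuffer.clear()
    return
}
```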
AudioBuffer.swift (new file)

@@ -0,0 +1,85 @@
+import AVFoundation
+import Foundation
+
+final class AudioBuffer {
+    enum AudioBufferError: Error {
+        case notReady
+    }
+
+    static let numSamples = 1024
+
+    let input: UnsafeMutableAudioBufferListPointer
+
+    var isReady: Bool {
+        numSamples == index
+    }
+
+    var maxLength: Int {
+        numSamples * bytesPerFrame
+    }
+
+    let listSize: Int
+
+    private var index = 0
+    private var buffers: [Data]
+    private let numSamples: Int
+    private let bytesPerFrame: Int
+    private let maximumBuffers: Int
+    private(set) var presentationTimeStamp: CMTime = .invalid
+
+    deinit {
+        input.unsafeMutablePointer.deallocate()
+    }
+
+    init(_ inSourceFormat: AudioStreamBasicDescription, numSamples: Int = AudioBuffer.numSamples) {
+        let nonInterleaved = inSourceFormat.mFormatFlags & kAudioFormatFlagIsNonInterleaved != 0
+        self.numSamples = nonInterleaved ? numSamples / 2 : numSamples
+        bytesPerFrame = Int(inSourceFormat.mBytesPerFrame)
+        maximumBuffers = nonInterleaved ? Int(inSourceFormat.mChannelsPerFrame) : 1
+        listSize = AudioBufferList.sizeInBytes(maximumBuffers: maximumBuffers)
+        buffers = .init(repeating: .init(repeating: 0, count: self.numSamples * bytesPerFrame), count: maximumBuffers)
+        input = AudioBufferList.allocate(maximumBuffers: maximumBuffers)
+        input.unsafeMutablePointer.pointee.mNumberBuffers = UInt32(maximumBuffers)
+        for i in 0..<maximumBuffers {
+            input[i].mNumberChannels = nonInterleaved ? 1 : inSourceFormat.mChannelsPerFrame
+            input[i].mDataByteSize = UInt32(buffers[i].count)
+            buffers[i].withUnsafeMutableBytes { pointer in
+                input[i].mData = pointer.baseAddress
+            }
+        }
+    }
+
+    func write(_ sampleBuffer: CMSampleBuffer, offset: Int) throws -> Int {
+        guard let data = sampleBuffer.dataBuffer?.data, !isReady else {
+            throw AudioBufferError.notReady
+        }
+        if presentationTimeStamp == .invalid {
+            let offsetTimeStamp: CMTime = offset == 0 ? .zero : CMTime(value: CMTimeValue(offset), timescale: sampleBuffer.presentationTimeStamp.timescale)
+            presentationTimeStamp = CMTimeAdd(sampleBuffer.presentationTimeStamp, offsetTimeStamp)
+        }
+        let numSamples = min(self.numSamples - index, sampleBuffer.numSamples - offset)
+        for i in 0..<maximumBuffers {
+            buffers[i].replaceSubrange(index * bytesPerFrame..<index * bytesPerFrame + numSamples * bytesPerFrame, with: data.advanced(by: offset * bytesPerFrame + numSamples * bytesPerFrame * i))
+        }
+        index += numSamples
+        return numSamples
+    }
+
+    func muted() {
+        for i in 0..<maximumBuffers {
+            buffers[i].resetBytes(in: 0...)
+        }
+    }
+
+    func clear() {
+        presentationTimeStamp = .invalid
+        index = 0
+    }
+}
+
+extension AudioBuffer: CustomDebugStringConvertible {
+    // MARK: CustomDebugStringConvertible
+    var debugDescription: String {
+        Mirror(reflecting: self).debugDescription
+    }
+}
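A brief usage sketch of the new class, mirroring how `AudioConverter` drives it above. The class is internal to HaishinKit, so this only compiles inside the module; the stream description (44.1 kHz, 16-bit interleaved mono PCM) is illustrative and not from the commit:

```swift
import AVFoundation

// Illustrative input format: 44.1 kHz, 16-bit, packed interleaved mono PCM.
var asbd = AudioStreamBasicDescription(
    mSampleRate: 44_100,
    mFormatID: kAudioFormatLinearPCM,
    mFormatFlags: kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked,
    mBytesPerPacket: 2,
    mFramesPerPacket: 1,
    mBytesPerFrame: 2,
    mChannelsPerFrame: 1,
    mBitsPerChannel: 16,
    mReserved: 0)

let buffer = AudioBuffer(asbd)           // accumulates 1024 samples by default
assert(buffer.maxLength == 1024 * 2)     // numSamples * bytesPerFrame

// Inside AudioConverter.encodeSampleBuffer the cycle is:
//   let written = try buffer.write(sampleBuffer, offset: 0)
//   if buffer.isReady { convert(buffer.maxLength, ...) }  // drained by inputDataProc
//   buffer.clear()                                        // resets index and PTS
```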