Update TestCase.
commit 06acf94c75 (parent 45b64b58e6)
@@ -19,8 +19,8 @@ final class AppDelegate: UIResponder, UIApplicationDelegate {
         } else {
             session.perform(NSSelectorFromString("setCategory:withOptions:error:"), with: AVAudioSession.Category.playAndRecord, with: [
                 AVAudioSession.CategoryOptions.allowBluetooth,
-                AVAudioSession.CategoryOptions.defaultToSpeaker]
-            )
+                AVAudioSession.CategoryOptions.defaultToSpeaker
+            ])
             try session.setMode(.default)
         }
         try session.setActive(true)
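Note: the selector-based call above is a workaround, not a typo. Recent SDKs stopped exposing setCategory(_:withOptions:) to Swift directly, so this branch (presumably the non-#available(iOS 10, *) fallback) invokes the Objective-C method through a selector. A hedged sketch of the direct equivalent on iOS 10 and later, not part of this commit:

    // Direct Swift API, available when the deployment target allows it.
    try session.setCategory(.playAndRecord, mode: .default, options: [.allowBluetooth, .defaultToSpeaker])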
@@ -16,7 +16,7 @@ open class SampleHandler: RPBroadcastSampleHandler {
         logger.appender = socket
         */
         broadcaster.streamName = Preference.defaultInstance.streamName
-        broadcaster.connect(Preference.defaultInstance.uri, arguments: nil)
+        broadcaster.connect(Preference.defaultInstance.uri!, arguments: nil)
     }

     override open func processSampleBuffer(_ sampleBuffer: CMSampleBuffer, with sampleBufferType: RPSampleBufferType) {
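Note: the added force unwrap assumes Preference.defaultInstance.uri is non-nil when the broadcast starts. A non-crashing alternative (a sketch, assuming uri is an optional String):

    guard let uri = Preference.defaultInstance.uri else { return }
    broadcaster.connect(uri, arguments: nil)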
@@ -354,10 +354,13 @@
 		29FD1B5422FF1C2D0095A0BE /* VTCompressionSessionPropertyKey.swift in Sources */ = {isa = PBXBuildFile; fileRef = 29FD1B5322FF1C2D0095A0BE /* VTCompressionSessionPropertyKey.swift */; };
 		29FD1B5522FF1C2D0095A0BE /* VTCompressionSessionPropertyKey.swift in Sources */ = {isa = PBXBuildFile; fileRef = 29FD1B5322FF1C2D0095A0BE /* VTCompressionSessionPropertyKey.swift */; };
 		29FD1B5622FF1C2D0095A0BE /* VTCompressionSessionPropertyKey.swift in Sources */ = {isa = PBXBuildFile; fileRef = 29FD1B5322FF1C2D0095A0BE /* VTCompressionSessionPropertyKey.swift */; };
-		BC44A1A923D31E92002D4297 /* AudioBuffer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC44A1A823D31E92002D4297 /* AudioBuffer.swift */; };
-		BC44A1AA23D31E92002D4297 /* AudioBuffer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC44A1A823D31E92002D4297 /* AudioBuffer.swift */; };
-		BC44A1AB23D31E92002D4297 /* AudioBuffer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC44A1A823D31E92002D4297 /* AudioBuffer.swift */; };
+		BC44A1A923D31E92002D4297 /* AudioConverter.AudioBuffer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC44A1A823D31E92002D4297 /* AudioConverter.AudioBuffer.swift */; };
+		BC44A1AA23D31E92002D4297 /* AudioConverter.AudioBuffer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC44A1A823D31E92002D4297 /* AudioConverter.AudioBuffer.swift */; };
+		BC44A1AB23D31E92002D4297 /* AudioConverter.AudioBuffer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC44A1A823D31E92002D4297 /* AudioConverter.AudioBuffer.swift */; };
 		BC4C9EAC23F00F3A004A14F2 /* Preference.swift in Sources */ = {isa = PBXBuildFile; fileRef = 291468161E581C7D00E619BA /* Preference.swift */; };
+		BC4C9EAF23F2E736004A14F2 /* AudioStreamBasicDescription+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC4C9EAE23F2E736004A14F2 /* AudioStreamBasicDescription+Extension.swift */; };
+		BC4C9EB023F2E736004A14F2 /* AudioStreamBasicDescription+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC4C9EAE23F2E736004A14F2 /* AudioStreamBasicDescription+Extension.swift */; };
+		BC4C9EB123F2E736004A14F2 /* AudioStreamBasicDescription+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC4C9EAE23F2E736004A14F2 /* AudioStreamBasicDescription+Extension.swift */; };
 		BC9CFA9323BDE8B700917EEF /* NetStreamRenderer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC9CFA9223BDE8B700917EEF /* NetStreamRenderer.swift */; };
 		BC9CFA9423BDE8B700917EEF /* NetStreamRenderer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC9CFA9223BDE8B700917EEF /* NetStreamRenderer.swift */; };
 		BC9CFA9523BDE8B700917EEF /* NetStreamRenderer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC9CFA9223BDE8B700917EEF /* NetStreamRenderer.swift */; };
@@ -649,7 +652,8 @@
 		29F97F232336A4FA00A4C317 /* SettingTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SettingTests.swift; sourceTree = "<group>"; };
 		29FD1B4F22FF13190095A0BE /* VTSessionPropertyKey.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VTSessionPropertyKey.swift; sourceTree = "<group>"; };
 		29FD1B5322FF1C2D0095A0BE /* VTCompressionSessionPropertyKey.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VTCompressionSessionPropertyKey.swift; sourceTree = "<group>"; };
-		BC44A1A823D31E92002D4297 /* AudioBuffer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioBuffer.swift; sourceTree = "<group>"; };
+		BC44A1A823D31E92002D4297 /* AudioConverter.AudioBuffer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioConverter.AudioBuffer.swift; sourceTree = "<group>"; wrapsLines = 1; };
+		BC4C9EAE23F2E736004A14F2 /* AudioStreamBasicDescription+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AudioStreamBasicDescription+Extension.swift"; sourceTree = "<group>"; };
 		BC9CFA9223BDE8B700917EEF /* NetStreamRenderer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NetStreamRenderer.swift; sourceTree = "<group>"; };
 /* End PBXFileReference section */

@@ -737,6 +741,7 @@
 		290907CD1C3961AD00F2E80C /* Codec */ = {
 			isa = PBXGroup;
 			children = (
+				BC44A1A823D31E92002D4297 /* AudioConverter.AudioBuffer.swift */,
 				297E69112324E38800D418AB /* AudioConverter.Destination.swift */,
 				29B876571CD70A7900FC07DA /* AudioConverter.swift */,
 				29B876581CD70A7900FC07DA /* H264Decoder.swift */,
@@ -752,7 +757,6 @@
 			children = (
 				298BCF321DD4C44A007FF86A /* AnyUtil.swift */,
 				29DC17B221D0CC0600E26CED /* Atomic.swift */,
-				BC44A1A823D31E92002D4297 /* AudioBuffer.swift */,
 				29B876B81CD70B3900FC07DA /* ByteArray.swift */,
 				294B2D3123785E3800CE7BDC /* CircularBuffer.swift */,
 				29B876631CD70AB300FC07DA /* Constants.swift */,
@@ -1140,6 +1144,7 @@
 		29EA87D11E799EFF0043A5F8 /* Extension */ = {
 			isa = PBXGroup;
 			children = (
+				BC4C9EAE23F2E736004A14F2 /* AudioStreamBasicDescription+Extension.swift */,
 				29EA87E51E79A2780043A5F8 /* CMAudioFormatDescription+Extension.swift */,
 				29EA87E91E79A3B70043A5F8 /* CMBlockBuffer+Extension.swift */,
 				2916196B1E7F0768009FB344 /* CMFormatDescription+Extension.swift */,
@@ -1633,7 +1638,7 @@
 				294B2D3223785E3800CE7BDC /* CircularBuffer.swift in Sources */,
 				2958910E1EEB8D3C00CE51E1 /* FLVVideoCodec.swift in Sources */,
 				299B13271D3B751400A1E8F5 /* HKView.swift in Sources */,
-				BC44A1A923D31E92002D4297 /* AudioBuffer.swift in Sources */,
+				BC44A1A923D31E92002D4297 /* AudioConverter.AudioBuffer.swift in Sources */,
 				2926A9EC1DE6B71E0074E3D2 /* MachUtil.swift in Sources */,
 				29B876AF1CD70B2800FC07DA /* RTMPChunk.swift in Sources */,
 				29D3D4CF1ED04C4C00DD4AA6 /* VideoIOComponent+Extension.swift in Sources */,
@@ -1643,6 +1648,7 @@
 				29B8769B1CD70B1100FC07DA /* MIME.swift in Sources */,
 				29B8769C1CD70B1100FC07DA /* NetClient.swift in Sources */,
 				29B876871CD70AE800FC07DA /* ProgramSpecific.swift in Sources */,
+				BC4C9EAF23F2E736004A14F2 /* AudioStreamBasicDescription+Extension.swift in Sources */,
 				298BCF331DD4C44A007FF86A /* AnyUtil.swift in Sources */,
 				29B876B01CD70B2800FC07DA /* RTMPConnection.swift in Sources */,
 				295891221EEB8EC500CE51E1 /* FLVAVCPacketType.swift in Sources */,
@@ -1761,6 +1767,7 @@
 				29FD1B5122FF13190095A0BE /* VTSessionPropertyKey.swift in Sources */,
 				29B876F61CD70D5900FC07DA /* HTTPRequest.swift in Sources */,
 				29B876F71CD70D5900FC07DA /* HTTPResponse.swift in Sources */,
+				BC4C9EB023F2E736004A14F2 /* AudioStreamBasicDescription+Extension.swift in Sources */,
 				2942A4F921A9418A004E1BEE /* Running.swift in Sources */,
 				29D3D4D31ED04D9600DD4AA6 /* NetStream+Extension-macOS.swift in Sources */,
 				29EA87D61E799F6A0043A5F8 /* Mirror+Extension.swift in Sources */,
@@ -1796,7 +1803,7 @@
 				2992D1541ED04A2C008D9DC1 /* VideoIOComponent+Extension-macOS.swift in Sources */,
 				2926A9EF1DE6B83F0074E3D2 /* MachUtil.swift in Sources */,
 				2976A47F1D48FD6900B53EF2 /* AVRecorder.swift in Sources */,
-				BC44A1AA23D31E92002D4297 /* AudioBuffer.swift in Sources */,
+				BC44A1AA23D31E92002D4297 /* AudioConverter.AudioBuffer.swift in Sources */,
 				29B877071CD70D5A00FC07DA /* SoundTransform.swift in Sources */,
 				29B877081CD70D5A00FC07DA /* VideoIOComponent.swift in Sources */,
 				294CC9B422D9BEC000F9DD5C /* DisplayLink-macOS.swift in Sources */,
@@ -1890,7 +1897,7 @@
 				29EB3DF01ED05768001CAE8B /* H264Encoder.swift in Sources */,
 				29EB3E351ED05A33001CAE8B /* DeviceUtil.swift in Sources */,
 				29DC17B521D0CC0600E26CED /* Atomic.swift in Sources */,
-				BC44A1AB23D31E92002D4297 /* AudioBuffer.swift in Sources */,
+				BC44A1AB23D31E92002D4297 /* AudioConverter.AudioBuffer.swift in Sources */,
 				29EB3E261ED05A07001CAE8B /* RTMPStream.swift in Sources */,
 				29DF20642312A3DD004057C3 /* RTMPNWSocket.swift in Sources */,
 				29EB3E111ED05881001CAE8B /* IOComponent.swift in Sources */,
@@ -1941,6 +1948,7 @@
 				292F6DB11EEBB2040097EDBE /* AVFoundation-tvOS.swift in Sources */,
 				29EB3E011ED05856001CAE8B /* HTTPStream.swift in Sources */,
 				29EB3E171ED05893001CAE8B /* NetClient.swift in Sources */,
+				BC4C9EB123F2E736004A14F2 /* AudioStreamBasicDescription+Extension.swift in Sources */,
 				2958912C1EEB8F1D00CE51E1 /* FLVSoundSize.swift in Sources */,
 				295891241EEB8EC500CE51E1 /* FLVAVCPacketType.swift in Sources */,
 				299D6A532051A9920090E10A /* MTHKView-tvOS.swift in Sources */,
@@ -0,0 +1,121 @@
+import AVFoundation
+import Foundation
+
+extension AudioConverter {
+    final class AudioBuffer {
+        // swiftlint:disable nesting
+        enum Error: Swift.Error {
+            case isReady
+            case noBlockBuffer
+        }
+
+        static let numSamples = 1024
+
+        let input: UnsafeMutableAudioBufferListPointer
+
+        var isReady: Bool {
+            numSamples == index
+        }
+
+        var maxLength: Int {
+            numSamples * bytesPerFrame * numberChannels * maximumBuffers
+        }
+
+        let listSize: Int
+
+        private var index = 0
+        private var buffers: [Data]
+        private let numSamples: Int
+        private let bytesPerFrame: Int
+        private let maximumBuffers: Int
+        private let numberChannels: Int
+        private let bufferList: UnsafeMutableAudioBufferListPointer
+        private(set) var presentationTimeStamp: CMTime = .invalid
+
+        deinit {
+            input.unsafeMutablePointer.deallocate()
+            bufferList.unsafeMutablePointer.deallocate()
+        }
+
+        init(_ inSourceFormat: AudioStreamBasicDescription, numSamples: Int = AudioBuffer.numSamples) {
+            self.numSamples = numSamples
+            let nonInterleaved = inSourceFormat.mFormatFlags & kAudioFormatFlagIsNonInterleaved != 0
+            bytesPerFrame = Int(inSourceFormat.mBytesPerFrame)
+            maximumBuffers = nonInterleaved ? Int(inSourceFormat.mChannelsPerFrame) : 1
+            listSize = AudioBufferList.sizeInBytes(maximumBuffers: maximumBuffers)
+            input = AudioBufferList.allocate(maximumBuffers: maximumBuffers)
+            bufferList = AudioBufferList.allocate(maximumBuffers: maximumBuffers)
+            numberChannels = nonInterleaved ? 1 : Int(inSourceFormat.mChannelsPerFrame)
+            let dataByteSize = numSamples * bytesPerFrame
+            buffers = .init(repeating: .init(repeating: 0, count: numSamples * bytesPerFrame), count: maximumBuffers)
+            input.unsafeMutablePointer.pointee.mNumberBuffers = UInt32(maximumBuffers)
+            for i in 0..<maximumBuffers {
+                input[i].mNumberChannels = UInt32(numberChannels)
+                buffers[i].withUnsafeMutableBytes { pointer in
+                    input[i].mData = pointer.baseAddress
+                }
+                input[i].mDataByteSize = UInt32(dataByteSize)
+            }
+        }
+
+        func write(_ sampleBuffer: CMSampleBuffer, offset: Int) throws -> Int {
+            guard !isReady else {
+                throw Error.isReady
+            }
+
+            if presentationTimeStamp == .invalid {
+                let offsetTimeStamp: CMTime = offset == 0 ? .zero : CMTime(value: CMTimeValue(offset), timescale: sampleBuffer.presentationTimeStamp.timescale)
+                presentationTimeStamp = CMTimeAdd(sampleBuffer.presentationTimeStamp, offsetTimeStamp)
+            }
+
+            var blockBuffer: CMBlockBuffer?
+            CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
+                sampleBuffer,
+                bufferListSizeNeededOut: nil,
+                bufferListOut: bufferList.unsafeMutablePointer,
+                bufferListSize: listSize,
+                blockBufferAllocator: kCFAllocatorDefault,
+                blockBufferMemoryAllocator: kCFAllocatorDefault,
+                flags: 0,
+                blockBufferOut: &blockBuffer
+            )
+
+            guard blockBuffer != nil else {
+                throw Error.noBlockBuffer
+            }
+
+            let numSamples = min(self.numSamples - index, sampleBuffer.numSamples - offset)
+            for i in 0..<maximumBuffers {
+                guard let data = bufferList[i].mData else {
+                    continue
+                }
+                buffers[i].replaceSubrange(
+                    index * bytesPerFrame..<index * bytesPerFrame + numSamples * bytesPerFrame,
+                    with: data.advanced(by: offset * bytesPerFrame),
+                    count: numSamples * bytesPerFrame
+                )
+            }
+            index += numSamples
+
+            return numSamples
+        }
+
+        func muted() {
+            for i in 0..<maximumBuffers {
+                buffers[i].resetBytes(in: 0...)
+            }
+        }
+
+        func clear() {
+            presentationTimeStamp = .invalid
+            index = 0
+        }
+    }
+}
+
+extension AudioConverter.AudioBuffer: CustomDebugStringConvertible {
+    // MARK: CustomDebugStringConvertible
+    var debugDescription: String {
+        Mirror(reflecting: self).debugDescription
+    }
+}
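Note: a minimal sketch of how this buffer is meant to be driven. The real call site is AudioConverter.encodeSampleBuffer (next hunk); asbd and convert here are hypothetical stand-ins:

    let buffer = AudioConverter.AudioBuffer(asbd)   // asbd: the source AudioStreamBasicDescription
    var offset = 0
    while offset < sampleBuffer.numSamples {
        offset += try buffer.write(sampleBuffer, offset: offset)  // copies up to 1024 samples per call
        if buffer.isReady {
            convert(buffer)   // hand the filled 1024-sample block to the AAC converter
            buffer.clear()    // reset index and presentationTimeStamp for the next block
        }
    }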
@@ -75,18 +75,23 @@ public class AudioConverter {
     var channels: UInt32 = AudioConverter.defaultChannels
     var formatDescription: CMFormatDescription? {
         didSet {
-            if !CMFormatDescriptionEqual(formatDescription, otherFormatDescription: oldValue) {
-                delegate?.didSetFormatDescription(audio: formatDescription)
-            }
+            guard !CMFormatDescriptionEqual(formatDescription, otherFormatDescription: oldValue) else {
+                return
+            }
+            logger.info(formatDescription.debugDescription)
+            delegate?.didSetFormatDescription(audio: formatDescription)
         }
     }
     var lockQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.AudioConverter.lock")
     var inSourceFormat: AudioStreamBasicDescription? {
         didSet {
-            guard let inSourceFormat = inSourceFormat else {
+            guard let inSourceFormat = inSourceFormat, inSourceFormat != oldValue else {
                 return
             }
-            logger.info("\(String(describing: self.inSourceFormat))")
             _converter = nil
             formatDescription = nil
             _inDestinationFormat = nil
+            logger.info("\(String(describing: inSourceFormat))")
             let nonInterleaved = inSourceFormat.mFormatFlags & kAudioFormatFlagIsNonInterleaved != 0
             maximumBuffers = nonInterleaved ? Int(inSourceFormat.mChannelsPerFrame) : AudioConverter.defaultMaximumBuffers
             currentAudioBuffer = AudioBuffer(inSourceFormat, numSamples: AudioConverter.numSamples)
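Note: both didSet observers now share the same early-exit shape: return when the incoming value equals the old one, otherwise log and reconfigure. The inSourceFormat != oldValue comparison is what makes the AudioStreamBasicDescription: Equatable conformance added later in this commit necessary.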
@@ -175,14 +180,12 @@ public class AudioConverter {
     }

     public func encodeSampleBuffer(_ sampleBuffer: CMSampleBuffer, offset: Int = 0) {
-        guard let format = sampleBuffer.formatDescription, sampleBuffer.isValid && isRunning.value else {
+        guard let format = sampleBuffer.formatDescription, CMSampleBufferDataIsReady(sampleBuffer) && isRunning.value else {
             currentAudioBuffer.clear()
             return
         }

-        if inSourceFormat == nil {
-            inSourceFormat = format.streamBasicDescription?.pointee
-        }
+        inSourceFormat = format.streamBasicDescription?.pointee

         do {
             let numSamples = try currentAudioBuffer.write(sampleBuffer, offset: offset)
@@ -195,8 +198,8 @@
                 }
                 convert(currentAudioBuffer.maxLength, presentationTimeStamp: currentAudioBuffer.presentationTimeStamp)
             }
-            if numSamples < sampleBuffer.numSamples {
-                encodeSampleBuffer(sampleBuffer, offset: numSamples)
+            if offset + numSamples < sampleBuffer.numSamples {
+                encodeSampleBuffer(sampleBuffer, offset: offset + numSamples)
             }
         } catch {
             logger.error(error)
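Note: this hunk is the core bug fix. The old recursion passed only the count written by the current call as the next offset, so any input spanning more than two 1024-sample blocks re-encoded the same region instead of advancing. Trace of the new logic for a 2237-sample input (one of the cases in testEncoderCMSampleBuffer8000_numSamples below):

    // call 1: offset 0    → writes 1024 → recurses with offset 0 + 1024 = 1024
    // call 2: offset 1024 → writes 1024 → recurses with offset 1024 + 1024 = 2048
    // call 3: offset 2048 → writes 189  → 2048 + 189 = 2237, recursion stops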
@@ -227,9 +230,8 @@
         )

         switch status {
-        // kAudioConverterErr_InvalidInputSize: perhaps mistake. but can support macOS BuiltIn Mic #61
-        case noErr,
-             kAudioConverterErr_InvalidInputSize:
+        // kAudioConverterErr_InvalidInputSize: perhaps mistake. but can support macOS BuiltIn Mic #61
+        case noErr, kAudioConverterErr_InvalidInputSize:
             delegate?.sampleOutput(
                 audio: outOutputData,
                 presentationTimeStamp: presentationTimeStamp
@@ -327,6 +329,7 @@ extension AudioConverter: Running {
                 AudioConverterDispose(convert)
                 self._converter = nil
             }
+            self.currentAudioBuffer.clear()
             self.inSourceFormat = nil
             self.formatDescription = nil
             self._inDestinationFormat = nil
@@ -0,0 +1,16 @@
+import CoreAudio
+import Foundation
+
+extension AudioStreamBasicDescription: Equatable {
+    public static func == (lhs: AudioStreamBasicDescription, rhs: AudioStreamBasicDescription) -> Bool {
+        lhs.mBitsPerChannel == rhs.mBitsPerChannel &&
+            lhs.mBytesPerFrame == rhs.mBytesPerFrame &&
+            lhs.mBytesPerPacket == rhs.mBytesPerPacket &&
+            lhs.mChannelsPerFrame == rhs.mChannelsPerFrame &&
+            lhs.mFormatFlags == rhs.mFormatFlags &&
+            lhs.mFormatID == rhs.mFormatID &&
+            lhs.mFramesPerPacket == rhs.mFramesPerPacket &&
+            lhs.mReserved == rhs.mReserved &&
+            lhs.mSampleRate == rhs.mSampleRate
+    }
+}
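Note: AudioStreamBasicDescription is an imported C struct, so Swift does not synthesize Equatable for it; this extension compares every field by hand. A quick sketch of what it enables (field values are illustrative only):

    var a = AudioStreamBasicDescription(mSampleRate: 44100, mFormatID: kAudioFormatLinearPCM, mFormatFlags: 0, mBytesPerPacket: 4, mFramesPerPacket: 1, mBytesPerFrame: 4, mChannelsPerFrame: 2, mBitsPerChannel: 16, mReserved: 0)
    let b = a
    assert(a == b)
    a.mSampleRate = 48000
    assert(a != b)   // lets AudioConverter's inSourceFormat didSet skip redundant converter rebuilds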
@@ -12,20 +12,20 @@ final class RTMPMuxer {

     weak var delegate: RTMPMuxerDelegate?
     private var configs: [Int: Data] = [:]
-    private var audioTimestamp = CMTime.zero
-    private var videoTimestamp = CMTime.zero
+    private var audioTimeStamp = CMTime.zero
+    private var videoTimeStamp = CMTime.zero

     func dispose() {
         configs.removeAll()
-        audioTimestamp = CMTime.zero
-        videoTimestamp = CMTime.zero
+        audioTimeStamp = CMTime.zero
+        videoTimeStamp = CMTime.zero
     }
 }

 extension RTMPMuxer: AudioConverterDelegate {
     // MARK: AudioConverterDelegate
     func didSetFormatDescription(audio formatDescription: CMFormatDescription?) {
-        guard let formatDescription: CMFormatDescription = formatDescription else {
+        guard let formatDescription = formatDescription else {
             return
         }
         var buffer = Data([RTMPMuxer.aac, FLVAACPacketType.seq.rawValue])
@@ -34,14 +34,14 @@ extension RTMPMuxer: AudioConverterDelegate {
     }

     func sampleOutput(audio data: UnsafeMutableAudioBufferListPointer, presentationTimeStamp: CMTime) {
-        let delta: Double = (audioTimestamp == CMTime.zero ? 0 : presentationTimeStamp.seconds - audioTimestamp.seconds) * 1000
+        let delta: Double = (audioTimeStamp == CMTime.zero ? 0 : presentationTimeStamp.seconds - audioTimeStamp.seconds) * 1000
         guard let bytes = data[0].mData, 0 < data[0].mDataByteSize && 0 <= delta else {
             return
         }
         var buffer = Data([RTMPMuxer.aac, FLVAACPacketType.raw.rawValue])
         buffer.append(bytes.assumingMemoryBound(to: UInt8.self), count: Int(data[0].mDataByteSize))
         delegate?.sampleOutput(audio: buffer, withTimestamp: delta, muxer: self)
-        audioTimestamp = presentationTimeStamp
+        audioTimeStamp = presentationTimeStamp
     }
 }

@@ -49,8 +49,8 @@ extension RTMPMuxer: VideoEncoderDelegate {
     // MARK: VideoEncoderDelegate
     func didSetFormatDescription(video formatDescription: CMFormatDescription?) {
         guard
-            let formatDescription: CMFormatDescription = formatDescription,
-            let avcC: Data = AVCConfigurationRecord.getData(formatDescription) else {
+            let formatDescription = formatDescription,
+            let avcC = AVCConfigurationRecord.getData(formatDescription) else {
             return
         }
         var buffer = Data([FLVFrameType.key.rawValue << 4 | FLVVideoCodec.avc.rawValue, FLVAVCPacketType.seq.rawValue, 0, 0, 0])
@@ -68,7 +68,7 @@ extension RTMPMuxer: VideoEncoderDelegate {
         } else {
             compositionTime = Int32((presentationTimeStamp.seconds - decodeTimeStamp.seconds) * 1000)
         }
-        let delta: Double = (videoTimestamp == CMTime.zero ? 0 : decodeTimeStamp.seconds - videoTimestamp.seconds) * 1000
+        let delta: Double = (videoTimeStamp == CMTime.zero ? 0 : decodeTimeStamp.seconds - videoTimeStamp.seconds) * 1000
         guard let data = sampleBuffer.dataBuffer?.data, 0 <= delta else {
             return
         }
@@ -76,7 +76,7 @@ extension RTMPMuxer: VideoEncoderDelegate {
         buffer.append(contentsOf: compositionTime.bigEndian.data[1..<4])
         buffer.append(data)
         delegate?.sampleOutput(video: buffer, withTimestamp: delta, muxer: self)
-        videoTimestamp = decodeTimeStamp
+        videoTimeStamp = decodeTimeStamp
     }
 }

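Note: the renames in this file are cosmetic (audioTimestamp → audioTimeStamp, videoTimestamp → videoTimeStamp, matching the presentationTimeStamp spelling used elsewhere); behavior is unchanged. The delta handed to the delegate is the millisecond gap between consecutive tags of the same kind, for example:

    // audio PTS 0.000 s, 0.023 s, 0.046 s → deltas 0, 23, 23 (ms); 23 ms ≈ 1024 samples at 44.1 kHz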
@@ -1,85 +0,0 @@
-import AVFoundation
-import Foundation
-
-final class AudioBuffer {
-    enum AudioBufferError: Error {
-        case notReady
-    }
-
-    static let numSamples = 1024
-
-    let input: UnsafeMutableAudioBufferListPointer
-
-    var isReady: Bool {
-        numSamples == index
-    }
-
-    var maxLength: Int {
-        numSamples * bytesPerFrame
-    }
-
-    let listSize: Int
-
-    private var index = 0
-    private var buffers: [Data]
-    private let numSamples: Int
-    private let bytesPerFrame: Int
-    private let maximumBuffers: Int
-    private(set) var presentationTimeStamp: CMTime = .invalid
-
-    deinit {
-        input.unsafeMutablePointer.deallocate()
-    }
-
-    init(_ inSourceFormat: AudioStreamBasicDescription, numSamples: Int = AudioBuffer.numSamples) {
-        let nonInterleaved = inSourceFormat.mFormatFlags & kAudioFormatFlagIsNonInterleaved != 0
-        self.numSamples = nonInterleaved ? numSamples / 2 : numSamples
-        bytesPerFrame = Int(inSourceFormat.mBytesPerFrame)
-        maximumBuffers = nonInterleaved ? Int(inSourceFormat.mChannelsPerFrame) : 1
-        listSize = AudioBufferList.sizeInBytes(maximumBuffers: maximumBuffers)
-        buffers = .init(repeating: .init(repeating: 0, count: self.numSamples * bytesPerFrame), count: maximumBuffers)
-        input = AudioBufferList.allocate(maximumBuffers: maximumBuffers)
-        input.unsafeMutablePointer.pointee.mNumberBuffers = UInt32(maximumBuffers)
-        for i in 0..<maximumBuffers {
-            input[i].mNumberChannels = nonInterleaved ? 1 : inSourceFormat.mChannelsPerFrame
-            input[i].mDataByteSize = UInt32(buffers[i].count)
-            buffers[i].withUnsafeMutableBytes { pointer in
-                input[i].mData = pointer.baseAddress
-            }
-        }
-    }
-
-    func write(_ sampleBuffer: CMSampleBuffer, offset: Int) throws -> Int {
-        guard let data = sampleBuffer.dataBuffer?.data, !isReady else {
-            throw AudioBufferError.notReady
-        }
-        if presentationTimeStamp == .invalid {
-            let offsetTimeStamp: CMTime = offset == 0 ? .zero : CMTime(value: CMTimeValue(offset), timescale: sampleBuffer.presentationTimeStamp.timescale)
-            presentationTimeStamp = CMTimeAdd(sampleBuffer.presentationTimeStamp, offsetTimeStamp)
-        }
-        let numSamples = min(self.numSamples - index, sampleBuffer.numSamples - offset)
-        for i in 0..<maximumBuffers {
-            buffers[i].replaceSubrange(index * bytesPerFrame..<index * bytesPerFrame + numSamples * bytesPerFrame, with: data.advanced(by: offset * bytesPerFrame + numSamples * bytesPerFrame * i))
-        }
-        index += numSamples
-        return numSamples
-    }
-
-    func muted() {
-        for i in 0..<maximumBuffers {
-            buffers[i].resetBytes(in: 0...)
-        }
-    }
-
-    func clear() {
-        presentationTimeStamp = .invalid
-        index = 0
-    }
-}
-
-extension AudioBuffer: CustomDebugStringConvertible {
-    // MARK: CustomDebugStringConvertible
-    var debugDescription: String {
-        Mirror(reflecting: self).debugDescription
-    }
-}
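Note: this deleted file is the predecessor of the AudioConverter.AudioBuffer.swift added above. The old version read bytes straight from sampleBuffer.dataBuffer and halved numSamples for non-interleaved input; the rewrite instead obtains a proper per-channel AudioBufferList via CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer rather than computing channel offsets by hand.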
@@ -55,6 +55,40 @@ final class AudioConverterTests: XCTestCase {
             }
         }
     }
+
+    func testEncoderCMSampleBuffer8000_960() {
+        let encoder: AudioConverter = AudioConverter()
+        encoder.delegate = self
+        encoder.startRunning()
+        for _ in 0..<10 {
+            if let sampleBuffer: CMSampleBuffer = SinWaveUtil.createCMSampleBuffer(8000.0, numSamples: 960) {
+                encoder.encodeSampleBuffer(sampleBuffer)
+            }
+        }
+    }
+
+    func testEncoderCMSampleBuffer8000_1224() {
+        let encoder: AudioConverter = AudioConverter()
+        encoder.delegate = self
+        encoder.startRunning()
+        for _ in 0..<10 {
+            if let sampleBuffer: CMSampleBuffer = SinWaveUtil.createCMSampleBuffer(44100.0, numSamples: 1224) {
+                encoder.encodeSampleBuffer(sampleBuffer)
+            }
+        }
+    }
+
+    func testEncoderCMSampleBuffer8000_numSamples() {
+        let numSamples: [Int] = [1024, 1024, 1028, 1024, 1028, 1028, 962, 962, 960, 2237, 2236]
+        let encoder: AudioConverter = AudioConverter()
+        encoder.delegate = self
+        encoder.startRunning()
+        for numSample in numSamples {
+            if let sampleBuffer: CMSampleBuffer = SinWaveUtil.createCMSampleBuffer(44100.0, numSamples: numSample) {
+                encoder.encodeSampleBuffer(sampleBuffer)
+            }
+        }
+    }
 }

 extension AudioConverterTests: AudioConverterDelegate {
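Note: the AudioConverterDelegate conformance is cut off by the hunk boundary. Based on the delegate methods used elsewhere in this commit, a minimal stub would look like the following (a sketch, not the actual test code):

    func didSetFormatDescription(audio formatDescription: CMFormatDescription?) {
        // no-op: these tests only exercise the encode path
    }

    func sampleOutput(audio data: UnsafeMutableAudioBufferListPointer, presentationTimeStamp: CMTime) {
        // no-op
    }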