Refactoring and Code style. #95

shogo4405 2016-09-04 22:48:58 +09:00
parent 4b32d74f75
commit db7a9d2195
79 changed files with 1424 additions and 1214 deletions

View File

@ -1,2 +1,2 @@
github "DaveWoodCom/XCGLogger" ~> 3.5
github "DaveWoodCom/XCGLogger" ~> 4.0.0

View File

@ -18,7 +18,7 @@ class AppDelegate: UIResponder, UIApplicationDelegate {
return controller
}()
private func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [NSObject: Any]?) -> Bool {
func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [NSObject: Any]?) -> Bool {
XCGLogger.defaultInstance().outputLogLevel = .info
XCGLogger.defaultInstance().xcodeColorsEnabled = true

View File

@ -1,5 +1,15 @@
{
"images" : [
{
"idiom" : "iphone",
"size" : "20x20",
"scale" : "2x"
},
{
"idiom" : "iphone",
"size" : "20x20",
"scale" : "3x"
},
{
"size" : "29x29",
"idiom" : "iphone",
@ -35,6 +45,16 @@
"filename" : "Icon-60@3x.png",
"scale" : "3x"
},
{
"idiom" : "ipad",
"size" : "20x20",
"scale" : "1x"
},
{
"idiom" : "ipad",
"size" : "20x20",
"scale" : "2x"
},
{
"size" : "29x29",
"idiom" : "ipad",

View File

@ -2,6 +2,11 @@
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>NSAppTransportSecurity</key>
<dict>
<key>NSAllowsArbitraryLoads</key>
<true/>
</dict>
<key>CFBundleDevelopmentRegion</key>
<string>en</string>
<key>CFBundleExecutable</key>

View File

@ -93,24 +93,6 @@ final class LiveViewController: UIViewController {
fpsControl.addTarget(self, action: #selector(LiveViewController.onFPSValueChanged(_:)), for: .valueChanged)
effectSegmentControl.addTarget(self, action: #selector(LiveViewController.onEffectValueChanged(_:)), for: .valueChanged)
/*
navigationItem.leftBarButtonItem =
UIBarButtonItem(title: "Preference", style: .Plain, target: self, action: "showPreference:")
sharedObject = RTMPSharedObject.getRemote("test", remotePath: Preference.defaultInstance.uri!, persistence: false)
*/
/*
httpStream = HTTPStream()
//httpStream.attachScreen(ScreenCaptureSession())
httpStream.syncOrientation = true
httpStream.attachCamera(AVMixer.deviceWithPosition(.Back))
httpStream.publish("hello")
httpService = HTTPService(domain: "", type: "_http._tcp", name: "lf", port: 8080)
httpService.startRunning()
httpService.addHTTPStream(httpStream)
*/
navigationItem.rightBarButtonItems = [
UIBarButtonItem(title: "Torch", style: .plain, target: self, action: #selector(LiveViewController.toggleTorch(_:))),
UIBarButtonItem(title: "Camera", style: .plain, target: self, action: #selector(LiveViewController.rotateCamera(_:)))
@ -119,10 +101,9 @@ final class LiveViewController: UIViewController {
rtmpStream = RTMPStream(rtmpConnection: rtmpConnection)
rtmpStream.syncOrientation = true
rtmpStream.attachAudio(AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio))
rtmpStream.attachCamera(DeviceUtil.deviceWithPosition(.back))
rtmpStream.attach(audio: AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio))
rtmpStream.attach(camera: DeviceUtil.device(withPosition: .back))
rtmpStream.addObserver(self, forKeyPath: "currentFPS", options: NSKeyValueObservingOptions.new, context: nil)
//rtmpStream.attachScreen(ScreenCaptureSession())
rtmpStream.captureSettings = [
"sessionPreset": AVCaptureSessionPreset1280x720,
@ -145,7 +126,7 @@ final class LiveViewController: UIViewController {
videoBitrateSlider.value = Float(RTMPStream.defaultVideoBitrate) / 1024
audioBitrateSlider.value = Float(RTMPStream.defaultAudioBitrate) / 1024
lfView.attachStream(rtmpStream)
lfView.attach(stream: rtmpStream)
view.addSubview(lfView)
view.addSubview(touchView)
@ -179,7 +160,7 @@ final class LiveViewController: UIViewController {
func rotateCamera(_ sender:UIBarButtonItem) {
let position:AVCaptureDevicePosition = currentPosition == .back ? .front : .back
rtmpStream.attachCamera(DeviceUtil.deviceWithPosition(position))
rtmpStream.attach(camera: DeviceUtil.device(withPosition: position))
currentPosition = position
}
@ -206,7 +187,7 @@ final class LiveViewController: UIViewController {
rtmpStream.videoSettings["bitrate"] = slider.value * 1024
}
if (slider == zoomSlider) {
rtmpStream.rampToVideoZoomFactor(CGFloat(slider.value), withRate: 5.0)
rtmpStream.ramp(toVideoZoomFactor: CGFloat(slider.value), withRate: 5.0)
}
}
@ -214,12 +195,12 @@ final class LiveViewController: UIViewController {
if (sender.isSelected) {
UIApplication.shared.isIdleTimerDisabled = false
rtmpConnection.close()
rtmpConnection.removeEventListener(Event.RTMP_STATUS, selector:#selector(LiveViewController.rtmpStatusHandler(_:)), observer: self)
rtmpConnection.removeEventListener(type: Event.RTMP_STATUS, selector:#selector(LiveViewController.rtmpStatusHandler(_:)), observer: self)
sender.setTitle("", for: UIControlState())
} else {
UIApplication.shared.isIdleTimerDisabled = true
rtmpConnection.addEventListener(Event.RTMP_STATUS, selector:#selector(LiveViewController.rtmpStatusHandler(_:)), observer: self)
rtmpConnection.connect(Preference.defaultInstance.uri!)
rtmpConnection.addEventListener(type: Event.RTMP_STATUS, selector:#selector(LiveViewController.rtmpStatusHandler(_:)), observer: self)
rtmpConnection.connect(withCommand: Preference.defaultInstance.uri!)
sender.setTitle("", for: UIControlState())
}
sender.isSelected = !sender.isSelected
@ -229,8 +210,8 @@ final class LiveViewController: UIViewController {
let e:Event = Event.from(notification)
if let data:ASObject = e.data as? ASObject , let code:String = data["code"] as? String {
switch code {
case RTMPConnection.Code.ConnectSuccess.rawValue:
rtmpStream!.publish(Preference.defaultInstance.streamName!)
case RTMPConnection.Code.connectSuccess.rawValue:
rtmpStream!.publish(withName: Preference.defaultInstance.streamName!)
// sharedObject!.connect(rtmpConnection)
default:
break
@ -263,25 +244,23 @@ final class LiveViewController: UIViewController {
func onEffectValueChanged(_ segment:UISegmentedControl) {
if let currentEffect:VisualEffect = currentEffect {
rtmpStream.unregisterEffect(video: currentEffect)
let _:Bool = rtmpStream.unregisterEffect(video: currentEffect)
}
switch segment.selectedSegmentIndex {
case 1:
currentEffect = MonochromeEffect()
rtmpStream.registerEffect(video: currentEffect!)
let _:Bool = rtmpStream.registerEffect(video: currentEffect!)
case 2:
currentEffect = PronamaEffect()
rtmpStream.registerEffect(video: currentEffect!)
let _:Bool = rtmpStream.registerEffect(video: currentEffect!)
default:
break
}
}
/*
override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [String : Any]?, context: UnsafeMutableRawPointer?) {
override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?) {
if (Thread.isMainThread) {
currentFPSLabel.text = "\(rtmpStream.currentFPS)"
}
}
*/
}
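For orientation, a minimal sketch of the publishing flow using the labels introduced by this refactor; the module import, URL, and stream name are placeholders, and the sample above drives the same calls from its view controller.

```swift
import UIKit
import AVFoundation
import lf  // module name assumed

// Renamed API: attach(audio:), attach(camera:), attach(stream:),
// connect(withCommand:), publish(withName:).
let connection = RTMPConnection()
let stream = RTMPStream(rtmpConnection: connection)
stream.attach(audio: AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio))
stream.attach(camera: DeviceUtil.device(withPosition: .back))

let preview = LFView(frame: UIScreen.main.bounds)
preview.attach(stream: stream)                // formerly attachStream(_:)

connection.connect(withCommand: "rtmp://example.com/live")
// The sample defers this call to its RTMP_STATUS handler and publishes
// once RTMPConnection.Code.connectSuccess arrives.
stream.publish(withName: "stream")
```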

View File

@ -11,7 +11,7 @@ class AppDelegate: NSObject, NSApplicationDelegate {
XCGLogger.defaultInstance().xcodeColorsEnabled = true
XCGLogger.defaultInstance().setup(
.info,
.verbose,
showThreadName: true, showLogLevel: true, showFileNames: true, showLineNumbers: true, writeToFile: nil, fileLogLevel: nil)
let viewController:LiveViewController = LiveViewController()

View File

@ -2,6 +2,11 @@
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>NSAppTransportSecurity</key>
<dict>
<key>NSAllowsArbitraryLoads</key>
<true/>
</dict>
<key>CFBundleDevelopmentRegion</key>
<string>en</string>
<key>CFBundleExecutable</key>
@ -16,14 +21,14 @@
<string>$(PRODUCT_NAME)</string>
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleVersion</key>
<string>1</string>
<key>LSMinimumSystemVersion</key>
<string>$(MACOSX_DEPLOYMENT_TARGET)</string>
<key>LSApplicationCategoryType</key>
<string></string>
<key>NSHumanReadableCopyright</key>
<string>Copyright © 2016年 Shogo Endo. All rights reserved.</string>
<key>NSMainNibFile</key>

View File

@ -3,7 +3,7 @@ import Cocoa
import AVFoundation
final class LiveViewController: NSViewController {
static let defaultURL:String = "rtmp://test:test@localhost:1935/live"
static let defaultURL:String = "rtmp://test:test@192.168.179.4:1935/live"
var enabledSharedObject:Bool = false
var rtmpConnection:RTMPConnection = RTMPConnection()
@ -85,16 +85,16 @@ final class LiveViewController: NSViewController {
audioPopUpButton.target = self
cameraPopUpButton.target = self
rtmpStream = RTMPStream(rtmpConnection: rtmpConnection)
rtmpStream.attachAudio(
DeviceUtil.deviceWithLocalizedName(audioPopUpButton.itemTitles[audioPopUpButton.indexOfSelectedItem], mediaType: AVMediaTypeAudio)
rtmpStream.attach(
audio: DeviceUtil.device(withLocalizedName: audioPopUpButton.itemTitles[audioPopUpButton.indexOfSelectedItem], mediaType: AVMediaTypeAudio)
)
rtmpStream.attachCamera(
DeviceUtil.deviceWithLocalizedName(cameraPopUpButton.itemTitles[cameraPopUpButton.indexOfSelectedItem], mediaType: AVMediaTypeVideo)
rtmpStream.attach(
camera: DeviceUtil.device(withLocalizedName: cameraPopUpButton.itemTitles[cameraPopUpButton.indexOfSelectedItem], mediaType: AVMediaTypeVideo)
)
rtmpStream.addObserver(self, forKeyPath: "currentFPS", options: .new, context: nil)
publishButton.target = self
lfView.attachStream(stream: rtmpStream)
lfView.attach(stream: rtmpStream)
view.addSubview(lfView)
view.addSubview(fpsPopUpButton)
@ -123,11 +123,11 @@ final class LiveViewController: NSViewController {
segmentedControl.isEnabled = false
switch segmentedControl.selectedSegment {
case 0:
rtmpConnection.addEventListener(Event.RTMP_STATUS, selector:#selector(LiveViewController.rtmpStatusHandler(_:)), observer: self)
rtmpConnection.connect(urlField.stringValue)
rtmpConnection.addEventListener(type: Event.RTMP_STATUS, selector:#selector(LiveViewController.rtmpStatusHandler(_:)), observer: self)
rtmpConnection.connect(withCommand: urlField.stringValue)
case 1:
httpStream.publish("hello")
httpService.addHTTPStream(httpStream)
httpStream.publish(withName: "hello")
httpService.add(stream: httpStream)
httpService.startRunning()
default:
break
@ -139,12 +139,12 @@ final class LiveViewController: NSViewController {
segmentedControl.isEnabled = true
switch segmentedControl.selectedSegment {
case 0:
rtmpConnection.removeEventListener(Event.RTMP_STATUS, selector:#selector(LiveViewController.rtmpStatusHandler(_:)), observer: self)
rtmpConnection.removeEventListener(type: Event.RTMP_STATUS, selector:#selector(LiveViewController.rtmpStatusHandler(_:)), observer: self)
rtmpConnection.close()
case 1:
httpService.removeHTTPStream(httpStream)
httpService.remove(stream: httpStream)
httpService.stopRunning()
httpStream.publish(nil)
httpStream.publish(withName: nil)
default:
break
}
@ -154,16 +154,16 @@ final class LiveViewController: NSViewController {
func modeChanged(_ sender:NSSegmentedControl) {
switch sender.selectedSegment {
case 0:
httpStream.attachAudio(nil)
httpStream.attachCamera(nil)
rtmpStream.attachAudio(DeviceUtil.deviceWithLocalizedName(audioPopUpButton.itemTitles[audioPopUpButton.indexOfSelectedItem], mediaType: AVMediaTypeAudio))
rtmpStream.attachCamera(DeviceUtil.deviceWithLocalizedName(cameraPopUpButton.itemTitles[cameraPopUpButton.indexOfSelectedItem], mediaType: AVMediaTypeVideo))
httpStream.attach(audio: nil)
httpStream.attach(camera: nil)
rtmpStream.attach(audio: DeviceUtil.device(withLocalizedName: audioPopUpButton.itemTitles[audioPopUpButton.indexOfSelectedItem], mediaType: AVMediaTypeAudio))
rtmpStream.attach(camera: DeviceUtil.device(withLocalizedName: cameraPopUpButton.itemTitles[cameraPopUpButton.indexOfSelectedItem], mediaType: AVMediaTypeVideo))
urlField.stringValue = LiveViewController.defaultURL
case 1:
rtmpStream.attachAudio(nil)
rtmpStream.attachCamera(nil)
httpStream.attachAudio(DeviceUtil.deviceWithLocalizedName(audioPopUpButton.itemTitles[audioPopUpButton.indexOfSelectedItem], mediaType: AVMediaTypeAudio))
httpStream.attachCamera(DeviceUtil.deviceWithLocalizedName(cameraPopUpButton.itemTitles[cameraPopUpButton.indexOfSelectedItem], mediaType: AVMediaTypeVideo))
rtmpStream.attach(audio: nil)
rtmpStream.attach(camera: nil)
httpStream.attach(audio: DeviceUtil.device(withLocalizedName: audioPopUpButton.itemTitles[audioPopUpButton.indexOfSelectedItem], mediaType: AVMediaTypeAudio))
httpStream.attach(camera: DeviceUtil.device(withLocalizedName: cameraPopUpButton.itemTitles[cameraPopUpButton.indexOfSelectedItem], mediaType: AVMediaTypeVideo))
urlField.stringValue = "http://{ipAddress}:8080/hello/playlist.m3u8"
default:
break
@ -171,39 +171,38 @@ final class LiveViewController: NSViewController {
}
func selectAudio(_ sender:AnyObject) {
let device:AVCaptureDevice? = DeviceUtil.deviceWithLocalizedName(
let device:AVCaptureDevice? = DeviceUtil.device(withLocalizedName:
audioPopUpButton.itemTitles[audioPopUpButton.indexOfSelectedItem], mediaType: AVMediaTypeAudio
)
switch segmentedControl.selectedSegment {
case 0:
rtmpStream.attachAudio(device)
httpStream.attachAudio(nil)
rtmpStream.attach(audio: device)
httpStream.attach(audio: nil)
case 1:
rtmpStream.attachAudio(nil)
httpStream.attachAudio(device)
rtmpStream.attach(audio: nil)
httpStream.attach(audio: device)
default:
break
}
}
func selectCamera(_ sender:AnyObject) {
let device:AVCaptureDevice? = DeviceUtil.deviceWithLocalizedName(
let device:AVCaptureDevice? = DeviceUtil.device(withLocalizedName:
cameraPopUpButton.itemTitles[cameraPopUpButton.indexOfSelectedItem], mediaType: AVMediaTypeVideo
)
switch segmentedControl.selectedSegment {
case 0:
rtmpStream.attachCamera(device)
httpStream.attachCamera(nil)
rtmpStream.attach(camera: device)
httpStream.attach(camera: nil)
case 1:
rtmpStream.attachCamera(nil)
httpStream.attachCamera(device)
rtmpStream.attach(camera: nil)
httpStream.attach(camera: device)
default:
break
}
}
/*
override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [String : Any]?, context: UnsafeMutableRawPointer?) {
override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?) {
guard let keyPath:String = keyPath , Thread.isMainThread else {
return
}
@ -214,7 +213,6 @@ final class LiveViewController: NSViewController {
break
}
}
*/
func selectFPS(_ sender:AnyObject) {
let value:String = fpsPopUpButton.itemTitles[fpsPopUpButton.indexOfSelectedItem]
@ -226,10 +224,10 @@ final class LiveViewController: NSViewController {
let e:Event = Event.from(notification)
if let data:ASObject = e.data as? ASObject , let code:String = data["code"] as? String {
switch code {
case RTMPConnection.Code.ConnectSuccess.rawValue:
rtmpStream!.publish("live")
case RTMPConnection.Code.connectSuccess.rawValue:
rtmpStream!.publish(withName: "live")
if (enabledSharedObject) {
sharedObject = RTMPSharedObject.getRemote("test", remotePath: urlField.stringValue, persistence: false)
sharedObject = RTMPSharedObject.getRemote(withName: "test", remotePath: urlField.stringValue, persistence: false)
sharedObject.connect(rtmpConnection)
sharedObject.setProperty("Hello", "World!!")
}
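A condensed sketch of the macOS-side renames, DeviceUtil.device(withLocalizedName:mediaType:) and RTMPSharedObject.getRemote(withName:remotePath:persistence:); the device names and URL are placeholders.

```swift
import AVFoundation
import lf  // module name assumed

let rtmpConnection = RTMPConnection()
let rtmpStream = RTMPStream(rtmpConnection: rtmpConnection)

// formerly deviceWithLocalizedName(_:mediaType:)
rtmpStream.attach(audio: DeviceUtil.device(withLocalizedName: "Built-in Microphone", mediaType: AVMediaTypeAudio))
rtmpStream.attach(camera: DeviceUtil.device(withLocalizedName: "FaceTime HD Camera", mediaType: AVMediaTypeVideo))

// formerly getRemote(_:remotePath:persistence:)
let sharedObject = RTMPSharedObject.getRemote(withName: "test", remotePath: "rtmp://localhost:1935/live", persistence: false)
sharedObject.connect(rtmpConnection)
sharedObject.setProperty("Hello", "World!!")
```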

View File

@ -3,7 +3,7 @@ import Foundation
import AVFoundation
final class AudioUtil {
fileprivate init() {
private init() {
}
static func getInputGain() -> Float32 {

View File

@ -10,8 +10,8 @@ open class GLLFView: GLKView {
open var videoGravity:String = AVLayerVideoGravityResizeAspect
var orientation:AVCaptureVideoOrientation = .portrait
var position:AVCaptureDevicePosition = .front {
internal var orientation:AVCaptureVideoOrientation = .portrait
internal var position:AVCaptureDevicePosition = .front {
didSet {
switch position {
case .front:
@ -26,9 +26,9 @@ open class GLLFView: GLKView {
fileprivate var ciContext:CIContext!
fileprivate var displayImage:CIImage?
fileprivate weak var currentStream:Stream? {
fileprivate weak var currentStream:NetStream? {
didSet {
guard let oldValue:Stream = oldValue else {
guard let oldValue:NetStream = oldValue else {
return
}
oldValue.mixer.videoIO.drawable = nil
@ -62,8 +62,8 @@ open class GLLFView: GLKView {
ciContext.draw(displayImage, in: inRect, from: fromRect)
}
open func attachStream(_ stream:Stream?) {
if let stream:Stream = stream {
open func attach(stream:NetStream?) {
if let stream:NetStream = stream {
stream.mixer.videoIO.drawable = self
}
currentStream = stream
@ -71,11 +71,11 @@ open class GLLFView: GLKView {
}
// MARK: - StreamDrawable
extension GLLFView: StreamDrawable {
func render(_ image: CIImage, toCVPixelBuffer: CVPixelBuffer) {
extension GLLFView: NetStreamDrawable {
internal func render(image: CIImage, to toCVPixelBuffer: CVPixelBuffer) {
ciContext.render(image, to: toCVPixelBuffer)
}
func drawImage(_ image:CIImage) {
internal func draw(image:CIImage) {
displayImage = image
DispatchQueue.main.async {
self.setNeedsDisplay()

View File

@ -8,13 +8,13 @@ open class LFView: UIView {
return AVCaptureVideoPreviewLayer.self
}
open var videoGravity:String = AVLayerVideoGravityResizeAspect {
public var videoGravity:String = AVLayerVideoGravityResizeAspect {
didSet {
layer.setValue(videoGravity, forKey: "videoGravity")
}
}
var orientation:AVCaptureVideoOrientation = .portrait {
internal var orientation:AVCaptureVideoOrientation = .portrait {
didSet {
guard let connection:AVCaptureConnection = layer.value(forKey: "connection") as? AVCaptureConnection else {
return
@ -24,7 +24,16 @@ open class LFView: UIView {
}
}
}
var position:AVCaptureDevicePosition = .front
internal var position:AVCaptureDevicePosition = .front
private weak var currentStream:NetStream? {
didSet {
guard let oldValue:NetStream = oldValue else {
return
}
oldValue.mixer.videoIO.drawable = nil
}
}
public override init(frame:CGRect) {
super.init(frame:frame)
@ -41,26 +50,16 @@ open class LFView: UIView {
layer.backgroundColor = LFView.defaultBackgroundColor.cgColor
}
fileprivate weak var currentStream:Stream? {
didSet {
guard let oldValue:Stream = oldValue else {
return
}
oldValue.mixer.videoIO.drawable = nil
}
}
open func attachStream(_ stream:Stream?) {
open func attach(stream:NetStream?) {
layer.setValue(stream?.mixer.session, forKey: "session")
stream?.mixer.videoIO.drawable = self
currentStream = stream
}
}
// MARK: - StreamDrawable
extension LFView: StreamDrawable {
func render(_ image: CIImage, toCVPixelBuffer: CVPixelBuffer) {
extension LFView: NetStreamDrawable {
internal func render(image: CIImage, to toCVPixelBuffer: CVPixelBuffer) {
}
func drawImage(_ image:CIImage) {
internal func draw(image:CIImage) {
}
}

View File

@ -5,8 +5,8 @@ import AVFoundation
// MARK: ScreenCaptureOutputPixelBufferDelegate
public protocol ScreenCaptureOutputPixelBufferDelegate: class {
func didSetSize(_ size:CGSize)
func pixelBufferOutput(_ pixelBuffer:CVPixelBuffer, timestamp:CMTime)
func didSet(size:CGSize)
func output(pixelBuffer:CVPixelBuffer, withTimestamp:CMTime)
}
// MARK: -
@ -31,22 +31,11 @@ public final class ScreenCaptureSession: NSObject {
public weak var delegate:ScreenCaptureOutputPixelBufferDelegate?
internal(set) var running:Bool = false
fileprivate var context:CIContext = {
if let context:CIContext = CIContext(options: [kCIContextUseSoftwareRenderer: NSNumber(value: false)]) {
logger.info("cicontext use hardware renderer")
return context
}
logger.info("cicontext use software renderer")
return CIContext()
}()
fileprivate var context:CIContext = CIContext(options: [kCIContextUseSoftwareRenderer: NSNumber(value: false)])
fileprivate let semaphore:DispatchSemaphore = DispatchSemaphore(value: 1)
fileprivate let lockQueue:DispatchQueue = {
var queue:DispatchQueue = DispatchQueue(
label: "com.github.shogo4405.lf.ScreenCaptureSession.lock", attributes: []
)
queue.setTarget(queue: DispatchQueue.global(priority: DispatchQueue.GlobalQueuePriority.high))
return queue
}()
fileprivate let lockQueue:DispatchQueue = DispatchQueue(
label: "com.github.shogo4405.lf.ScreenCaptureSession.lock", qos: DispatchQoS.userInteractive, attributes: []
)
fileprivate var colorSpace:CGColorSpace!
fileprivate var displayLink:CADisplayLink!
@ -55,7 +44,7 @@ public final class ScreenCaptureSession: NSObject {
guard size != oldValue else {
return
}
delegate?.didSetSize(CGSize(width: size.width * scale, height: size.height * scale))
delegate?.didSet(size: CGSize(width: size.width * scale, height: size.height * scale))
pixelBufferPool = nil
}
}
@ -115,7 +104,7 @@ public final class ScreenCaptureSession: NSObject {
let image:UIImage = UIGraphicsGetImageFromCurrentImageContext()!
UIGraphicsEndImageContext()
context.render(CIImage(cgImage: image.cgImage!), to: pixelBuffer!)
delegate?.pixelBufferOutput(pixelBuffer!, timestamp: CMTimeMakeWithSeconds(displayLink.timestamp, 1000))
delegate?.output(pixelBuffer: pixelBuffer!, withTimestamp: CMTimeMakeWithSeconds(displayLink.timestamp, 1000))
CVPixelBufferUnlockBaseAddress(pixelBuffer!, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
}
}
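The delegate callbacks pick up argument labels as well (didSetSize(_:) becomes didSet(size:), pixelBufferOutput(_:timestamp:) becomes output(pixelBuffer:withTimestamp:)); a conformance sketch with an illustrative class name:

```swift
import CoreGraphics
import CoreMedia
import CoreVideo

final class ScreenCaptureLogger: ScreenCaptureOutputPixelBufferDelegate {
    func didSet(size: CGSize) {
        // fired when the captured screen size (points x scale) changes
        print("capture size: \(size)")
    }
    func output(pixelBuffer: CVPixelBuffer, withTimestamp: CMTime) {
        // one rendered CVPixelBuffer per display-link tick
        print("frame at \(withTimestamp.seconds)s")
    }
}
```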

View File

@ -3,7 +3,7 @@ import OpenGL.GL3
import Foundation
import AVFoundation
public class GLLFView: NSOpenGLView {
open class GLLFView: NSOpenGLView {
static let pixelFormatAttributes: [NSOpenGLPixelFormatAttribute] = [
UInt32(NSOpenGLPFAAccelerated),
UInt32(NSOpenGLPFANoRecovery),
@ -12,7 +12,7 @@ public class GLLFView: NSOpenGLView {
UInt32(0)
]
override public class func defaultPixelFormat() -> NSOpenGLPixelFormat {
override open class func defaultPixelFormat() -> NSOpenGLPixelFormat {
guard let pixelFormat:NSOpenGLPixelFormat = NSOpenGLPixelFormat(
attributes: GLLFView.pixelFormatAttributes) else {
return NSOpenGLPixelFormat()
@ -21,15 +21,15 @@ public class GLLFView: NSOpenGLView {
}
public var videoGravity:String! = AVLayerVideoGravityResizeAspect
var orientation:AVCaptureVideoOrientation = .portrait
var position:AVCaptureDevicePosition = .front
internal var orientation:AVCaptureVideoOrientation = .portrait
internal var position:AVCaptureDevicePosition = .front
fileprivate var displayImage:CIImage!
fileprivate var ciContext:CIContext!
fileprivate var originalFrame:CGRect = CGRect.zero
fileprivate var scale:CGRect = CGRect.zero
fileprivate weak var currentStream:Stream?
fileprivate weak var currentStream:NetStream?
public override func prepareOpenGL() {
open override func prepareOpenGL() {
if let openGLContext:NSOpenGLContext = openGLContext {
ciContext = CIContext(
cglContext: openGLContext.cglContextObj!,
@ -56,7 +56,7 @@ public class GLLFView: NSOpenGLView {
originalFrame = frame
}
public override func draw(_ dirtyRect: NSRect) {
open override func draw(_ dirtyRect: NSRect) {
guard
let image:CIImage = displayImage,
let glContext:NSOpenGLContext = openGLContext else {
@ -79,7 +79,7 @@ public class GLLFView: NSOpenGLView {
glFlush()
}
override public func reshape() {
override open func reshape() {
let rect:CGRect = frame
scale = CGRect(x: 0, y: 0, width: originalFrame.size.width / rect.size.width, height: originalFrame.size.height / rect.size.height)
glViewport(0, 0, Int32(rect.width), Int32(rect.height))
@ -90,24 +90,24 @@ public class GLLFView: NSOpenGLView {
glLoadIdentity()
}
public func attachStream(stream: Stream?) {
if let currentStream:Stream = currentStream {
public func attach(stream: NetStream?) {
if let currentStream:NetStream = currentStream {
currentStream.mixer.videoIO.drawable = nil
}
if let stream:Stream = stream {
if let stream:NetStream = stream {
stream.mixer.videoIO.drawable = self
}
currentStream = stream
}
}
extension GLLFView: StreamDrawable {
// MARK: - StreamDrawable
func render(_ image: CIImage, toCVPixelBuffer: CVPixelBuffer) {
extension GLLFView: NetStreamDrawable {
// MARK: NetStreamDrawable
internal func render(image: CIImage, to toCVPixelBuffer: CVPixelBuffer) {
ciContext.render(image, to: toCVPixelBuffer)
}
func drawImage(_ image:CIImage) {
internal func draw(image:CIImage) {
displayImage = image
DispatchQueue.main.async {
self.needsDisplay = true

View File

@ -1,10 +1,10 @@
import Foundation
import AVFoundation
public class LFView: NSView {
open class LFView: NSView {
public static var defaultBackgroundColor:NSColor = NSColor.black
var position:AVCaptureDevicePosition = .front {
internal var position:AVCaptureDevicePosition = .front {
didSet {
/*
let when:dispatch_time_t = DispatchTime.now(dispatch_time_t(DISPATCH_TIME_NOW), Int64(0.1 * Double(NSEC_PER_SEC)))
@ -14,7 +14,7 @@ public class LFView: NSView {
*/
}
}
var orientation:AVCaptureVideoOrientation = .portrait
internal var orientation:AVCaptureVideoOrientation = .portrait
public var videoGravity:String = AVLayerVideoGravityResizeAspect {
didSet {
@ -22,9 +22,9 @@ public class LFView: NSView {
}
}
private weak var currentStream:Stream? {
private weak var currentStream:NetStream? {
didSet {
guard let oldValue:Stream = oldValue else {
guard let oldValue:NetStream = oldValue else {
return
}
oldValue.mixer.videoIO.drawable = nil
@ -40,24 +40,24 @@ public class LFView: NSView {
super.init(coder: coder)
}
public override func awakeFromNib() {
open override func awakeFromNib() {
wantsLayer = true
layer = AVCaptureVideoPreviewLayer()
layer?.backgroundColor = LFView.defaultBackgroundColor.cgColor
layer?.setValue(videoGravity, forKey: "videoGravity")
}
public func attachStream(stream:Stream?) {
public func attach(stream:NetStream?) {
layer?.setValue(stream?.mixer.session, forKey: "session")
stream?.mixer.videoIO.drawable = self
currentStream = stream
}
}
// MARK: - StreamDrawable
extension LFView: StreamDrawable {
func render(_ image: CIImage, toCVPixelBuffer: CVPixelBuffer) {
extension LFView: NetStreamDrawable {
// MARK: NetStreamDrawable
internal func render(image:CIImage, to toCVPixelBuffer:CVPixelBuffer) {
}
func drawImage(_ image:CIImage) {
internal func draw(image:CIImage) {
}
}

View File

@ -2,7 +2,7 @@ source 'https://github.com/CocoaPods/Specs.git'
use_frameworks!
def import_pods
pod 'XCGLogger', '~> 3.3'
pod 'XCGLogger', '~> 4.0.0'
end
target 'lf iOS' do

View File

@ -1,12 +1,12 @@
PODS:
- XCGLogger (3.5)
- XCGLogger (3.5.1)
DEPENDENCIES:
- XCGLogger (~> 3.3)
- XCGLogger (~> 3.5.0)
SPEC CHECKSUMS:
XCGLogger: 3a510f8235655d016ca24c52629e1a276fb8103a
XCGLogger: 94829865eea6fcc1f9a49e23bdc9f120a6848f30
PODFILE CHECKSUM: f1758dea822e4d5049e634e2e645bef2ddc5872e
PODFILE CHECKSUM: 1e1fc8cb6db07b0a49b430f0297fe05501abf785
COCOAPODS: 1.0.1

View File

@ -1,12 +1,12 @@
import Foundation
import AVFoundation
// MARK: AudioEncoderDelegate
protocol AudioEncoderDelegate: class {
func didSetFormatDescription(audio formatDescription:CMFormatDescription?)
func sampleOutput(audio sampleBuffer: CMSampleBuffer)
}
// MARK: -
/**
- seealse:
- https://developer.apple.com/library/ios/technotes/tn2236/_index.html
@ -248,8 +248,8 @@ final class AACEncoder: NSObject {
}
}
// MARK: Encoder
extension AACEncoder: Runnable {
// MARK: Runnable
func startRunning() {
lockQueue.async {
self.running = true
@ -270,8 +270,8 @@ extension AACEncoder: Runnable {
}
}
// MARK: AVCaptureAudioDataOutputSampleBufferDelegate
extension AACEncoder: AVCaptureAudioDataOutputSampleBufferDelegate {
// MARK: AVCaptureAudioDataOutputSampleBufferDelegate
func captureOutput(_ captureOutput:AVCaptureOutput!, didOutputSampleBuffer sampleBuffer:CMSampleBuffer!, from connection:AVCaptureConnection!) {
encodeSampleBuffer(sampleBuffer)
}
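AudioEncoderDelegate keeps the two requirements shown above; a minimal in-module conformance sketch (the class name is hypothetical, mirroring what TSWriter does later in this commit):

```swift
import CoreMedia

final class AACSampleLogger: AudioEncoderDelegate {
    func didSetFormatDescription(audio formatDescription: CMFormatDescription?) {
        // called once the encoder resolves its output AAC format
    }
    func sampleOutput(audio sampleBuffer: CMSampleBuffer) {
        // one encoded AAC sample buffer per callback
    }
}
```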

View File

@ -4,7 +4,6 @@ import AVFoundation
import VideoToolbox
import CoreFoundation
// MARK: VideoDecoderDelegate
protocol VideoDecoderDelegate: class {
func sampleOutput(video sampleBuffer: CMSampleBuffer)
}

View File

@ -3,7 +3,6 @@ import AVFoundation
import VideoToolbox
import CoreFoundation
// MARK: VideoEncoderDelegate
protocol VideoEncoderDelegate: class {
func didSetFormatDescription(video formatDescription:CMFormatDescription?)
func sampleOutput(video sampleBuffer: CMSampleBuffer)
@ -285,8 +284,8 @@ final class AVCEncoder: NSObject {
#endif
}
// MARK: Encoder
extension AVCEncoder: Runnable {
// MARK: Runnable
func startRunning() {
lockQueue.async {
self.running = true

View File

@ -3,7 +3,12 @@ import CoreImage
extension CMSampleBuffer {
var dependsOnOthers:Bool {
return false
guard
let attachments:CFArray = CMSampleBufferGetSampleAttachmentsArray(self, false) else {
return false
}
let attachment:[NSObject: AnyObject] = unsafeBitCast(CFArrayGetValueAtIndex(attachments, 0), to: CFDictionary.self) as [NSObject : AnyObject]
return attachment["DependsOnOthers" as NSObject] as! Bool
}
var dataBuffer:CMBlockBuffer? {
get {
@ -37,8 +42,8 @@ extension CMSampleBuffer {
}
}
// MARK: BytesConvertible
extension CMSampleBuffer: BytesConvertible {
// MARK: BytesConvertible
var bytes:[UInt8] {
get {
guard let buffer:CMBlockBuffer = dataBuffer else {

View File

@ -1,5 +1,13 @@
import Foundation
extension Data {
var bytes:[UInt8] {
return self.withUnsafeBytes {
[UInt8](UnsafeBufferPointer(start: $0, count: self.count))
}
}
}
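A quick check of the new Data.bytes helper (values are arbitrary; Swift 3 era Data API assumed):

```swift
import Foundation

let payload = Data(bytes: [0x48, 0x4c, 0x53])
let raw: [UInt8] = payload.bytes   // [0x48, 0x4c, 0x53]
```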
extension URL {
var absoluteWithoutAuthenticationString:String {
var target:String = ""

View File

@ -24,7 +24,7 @@ extension Mirror {
// MARK: -
extension ExpressibleByIntegerLiteral {
var bytes:[UInt8] {
var data = [UInt8](repeating: 0, count: MemoryLayout<`Self`>.size)
var data:[UInt8] = [UInt8](repeating: 0, count: MemoryLayout<`Self`>.size)
data.withUnsafeMutableBufferPointer {
UnsafeMutableRawPointer($0.baseAddress!).storeBytes(of: self, as: Self.self)
}

View File

@ -1,6 +1,5 @@
import Foundation
// MARK: HTTPRequestConvertible
protocol HTTPRequestConvertible: BytesConvertible {
var uri:String { get set }
var method:String { get set }
@ -8,30 +7,12 @@ protocol HTTPRequestConvertible: BytesConvertible {
var headerFields:[String: String] { get set }
}
// MARK: -
struct HTTPRequest: HTTPRequestConvertible {
static let separator:UInt8 = 0x0a
var uri:String = "/"
var method:String = ""
var version:String = HTTPVersion.version11.description
var headerFields:[String: String] = [:]
init() {
}
init?(bytes:[UInt8]) {
self.bytes = bytes
}
}
// MARK: -
extension HTTPRequestConvertible {
var description:String {
return Mirror(reflecting: self).description
}
var bytes:[UInt8] {
get {
var lines:[String] = ["\(method) \(uri) \(version)"]
@ -70,3 +51,19 @@ extension HTTPRequestConvertible {
}
}
// MARK: -
struct HTTPRequest: HTTPRequestConvertible {
static let separator:UInt8 = 0x0a
var uri:String = "/"
var method:String = ""
var version:String = HTTPVersion.version11.description
var headerFields:[String: String] = [:]
init() {
}
init?(bytes:[UInt8]) {
self.bytes = bytes
}
}

View File

@ -1,6 +1,5 @@
import Foundation
// MARK: HTTPResponseConvertible
protocol HTTPResponseConvertible: BytesConvertible, CustomStringConvertible {
var version:String { get set }
var statusCode:String { get set }
@ -8,24 +7,13 @@ protocol HTTPResponseConvertible: BytesConvertible, CustomStringConvertible {
var body:[UInt8] { get set }
}
// MARK: -
struct HTTPResponse: HTTPResponseConvertible {
static let separator:[UInt8] = [0x0d, 0x0a, 0x0d, 0x0a]
var version:String = HTTPVersion.version11.rawValue
var statusCode:String = ""
var headerFields:[String: String] = [:]
var body:[UInt8] = []
}
// MARK: -
extension HTTPResponseConvertible {
var description:String {
internal var description:String {
return Mirror(reflecting: self).description
}
var bytes:[UInt8] {
internal var bytes:[UInt8] {
get {
var lines:[String] = []
lines.append("\(version) \(statusCode)")
@ -37,31 +25,40 @@ extension HTTPResponseConvertible {
set {
var count:Int = 0
var lines:[String] = []
let bytes:[ArraySlice<UInt8>] = newValue.split(separator: HTTPRequest.separator)
for i in 0..<bytes.count {
count += bytes[i].count + 1
guard let line:String = String(bytes: Array(bytes[i]), encoding: String.Encoding.utf8)
, line != "\r" else {
break
break
}
lines.append(line.trimmingCharacters(in: CharacterSet.newlines))
}
guard let first:[String] = lines.first?.components(separatedBy: " ") else {
return
}
version = first[0]
statusCode = first[1]
for i in 1..<lines.count {
let pairs:[String] = lines[i].components(separatedBy: ": ")
headerFields[pairs[0]] = pairs[1]
}
body = Array(newValue[count..<newValue.count])
}
}
}
// MARK: -
struct HTTPResponse: HTTPResponseConvertible {
static let separator:[UInt8] = [0x0d, 0x0a, 0x0d, 0x0a]
internal var version:String = HTTPVersion.version11.rawValue
internal var statusCode:String = ""
internal var headerFields:[String: String] = [:]
internal var body:[UInt8] = []
}

View File

@ -1,19 +1,18 @@
import Foundation
// MARK: HTTPVersion
enum HTTPVersion: String {
case version10 = "HTTP/1.0"
case version11 = "HTTP/1.1"
}
// MARK: CustomStringConvertible
extension HTTPVersion: CustomStringConvertible {
var description:String {
// MARK: CustomStringConvertible
internal var description:String {
return rawValue
}
}
// MARK: - HTTPMethod
// MARK: -
enum HTTPMethod: String {
case get = "GET"
case post = "POST"
@ -25,9 +24,9 @@ enum HTTPMethod: String {
case connect = "CONNECT"
}
// MARK: HTTPStatusCode
// MARK: -
enum HTTPStatusCode: Int {
case `continue` = 100
case `continue` = 100
case switchingProtocols = 101
case ok = 200
case created = 201
@ -68,7 +67,7 @@ enum HTTPStatusCode: Int {
case gatewayTimeOut = 504
case httpVersionNotSupported = 505
var message:String {
internal var message:String {
switch self {
case .continue:
return "Continue"
@ -154,9 +153,9 @@ enum HTTPStatusCode: Int {
}
}
// MARK: CustomStringConvertible
extension HTTPStatusCode: CustomStringConvertible {
var description:String {
// MARK: CustomStringConvertible
internal var description:String {
return "\(rawValue) \(message)"
}
}
@ -167,10 +166,10 @@ open class HTTPService: NetService {
static open let defaultPort:Int32 = 8080
static open let defaultDocument:String = "<!DOCTYPE html><html><head><meta charset=\"UTF-8\" /><title>lf</title></head><body>lf</body></html>"
var document:String = HTTPService.defaultDocument
fileprivate(set) var streams:[HTTPStream] = []
internal var document:String = HTTPService.defaultDocument
internal fileprivate(set) var streams:[HTTPStream] = []
open func addHTTPStream(_ stream:HTTPStream) {
open func add(stream:HTTPStream) {
for i in 0..<streams.count {
if (stream.name == streams[i].name) {
return
@ -179,7 +178,7 @@ open class HTTPService: NetService {
streams.append(stream)
}
open func removeHTTPStream(_ stream:HTTPStream) {
open func remove(stream:HTTPStream) {
for i in 0..<streams.count {
if (stream.name == streams[i].name) {
streams.remove(at: i)
@ -188,7 +187,7 @@ open class HTTPService: NetService {
}
}
func get(_ request:HTTPRequest, client:NetClient) {
internal func get(_ request:HTTPRequest, client:NetClient) {
logger.verbose("\(request)")
var response:HTTPResponse = HTTPResponse()
response.headerFields["Connection"] = "close"
@ -229,7 +228,7 @@ open class HTTPService: NetService {
}
}
func client(inputBuffer client:NetClient) {
internal func client(inputBuffer client:NetClient) {
guard let request:HTTPRequest = HTTPRequest(bytes: client.inputBuffer) else {
disconnect(client)
return
@ -243,4 +242,3 @@ open class HTTPService: NetService {
}
}
}
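Putting the renamed HTTP side together, with the service parameters and playback URL shape used elsewhere in this commit:

```swift
import lf  // module name assumed

let httpStream = HTTPStream()
httpStream.publish(withName: "hello")              // formerly publish(_:)

let httpService = HTTPService(domain: "", type: "_http._tcp", name: "lf", port: 8080)
httpService.add(stream: httpStream)                // formerly addHTTPStream(_:)
httpService.startRunning()
// playback: http://{ipAddress}:8080/hello/playlist.m3u8

// tear down
httpService.remove(stream: httpStream)             // formerly removeHTTPStream(_:)
httpService.stopRunning()
httpStream.publish(withName: nil)
```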

View File

@ -1,14 +1,14 @@
import Foundation
import AVFoundation
open class HTTPStream: Stream {
fileprivate(set) var name:String?
open class HTTPStream: NetStream {
internal fileprivate(set) var name:String?
fileprivate var tsWriter:TSWriter = TSWriter()
open func publish(_ name:String?) {
open func publish(withName:String?) {
lockQueue.async {
if (name == nil) {
self.name = name
if (withName == nil) {
self.name = withName
#if os(iOS)
self.mixer.videoIO.screen?.stopRunning()
#endif
@ -19,7 +19,7 @@ open class HTTPStream: Stream {
self.tsWriter.stopRunning()
return
}
self.name = name
self.name = withName
#if os(iOS)
self.mixer.videoIO.screen?.startRunning()
#endif
@ -31,7 +31,7 @@ open class HTTPStream: Stream {
}
}
func getResource(_ resourceName:String) -> (MIME, String)? {
internal func getResource(_ resourceName:String) -> (MIME, String)? {
let url:URL = URL(fileURLWithPath: resourceName)
guard let name:String = name, 2 <= url.pathComponents.count && url.pathComponents[1] == name else {
return nil

View File

@ -7,15 +7,15 @@ struct M3U {
static let header:String = "#EXTM3U"
static let defaultVersion:Int = 3
var version:Int = M3U.defaultVersion
var mediaList:[M3UMediaInfo] = []
var mediaSequence:Int = 0
var targetDuration:Double = 5
internal var version:Int = M3U.defaultVersion
internal var mediaList:[M3UMediaInfo] = []
internal var mediaSequence:Int = 0
internal var targetDuration:Double = 5
}
// MARK: CustomStringConvertible
extension M3U: CustomStringConvertible {
var description:String {
// MARK: CustomStringConvertible
internal var description:String {
var lines:[String] = [
"#EXTM3U",
"#EXT-X-VERSION:\(version)",
@ -32,6 +32,6 @@ extension M3U: CustomStringConvertible {
// MARK: -
struct M3UMediaInfo {
var url:URL
var duration:Double
internal var url:URL
internal var duration:Double
}

View File

@ -1,19 +1,14 @@
import Foundation
// MARK: TSReaderDelegate
protocol TSReaderDelegate: class {
func didReadPacketizedElementaryStream(_ data:ElementaryStreamSpecificData, PES:PacketizedElementaryStream)
}
// MARK: -
class TSReader {
weak var delegate:TSReaderDelegate?
internal weak var delegate:TSReaderDelegate?
fileprivate var eof:UInt64 = 0
fileprivate var cursor:Int = 0
fileprivate var fileHandle:FileHandle?
fileprivate(set) var PAT:ProgramAssociationSpecific? {
internal fileprivate(set) var PAT:ProgramAssociationSpecific? {
didSet {
guard let PAT:ProgramAssociationSpecific = PAT else {
return
@ -23,7 +18,7 @@ class TSReader {
}
}
}
fileprivate(set) var PMT:[UInt16: ProgramMapSpecific] = [:] {
internal fileprivate(set) var PMT:[UInt16: ProgramMapSpecific] = [:] {
didSet {
for (_, pmt) in PMT {
for data in pmt.elementaryStreamSpecificData {
@ -32,18 +27,21 @@ class TSReader {
}
}
}
fileprivate(set) var numberOfPackets:Int = 0
internal fileprivate(set) var numberOfPackets:Int = 0
fileprivate var eof:UInt64 = 0
fileprivate var cursor:Int = 0
fileprivate var fileHandle:FileHandle?
fileprivate var dictionaryForPrograms:[UInt16:UInt16] = [:]
fileprivate var dictionaryForESSpecData:[UInt16:ElementaryStreamSpecificData] = [:]
fileprivate var packetizedElementaryStreams:[UInt16:PacketizedElementaryStream] = [:]
init(url:URL) throws {
internal init(url:URL) throws {
fileHandle = try FileHandle(forReadingFrom: url)
eof = fileHandle!.seekToEndOfFile()
}
func read() {
internal func read() {
while (hasNext()) {
guard let packet:TSPacket = next() else {
continue
@ -63,7 +61,7 @@ class TSReader {
}
}
func readPacketizedElementaryStream(_ data:ElementaryStreamSpecificData, packet: TSPacket) {
internal func readPacketizedElementaryStream(_ data:ElementaryStreamSpecificData, packet: TSPacket) {
if (packet.payloadUnitStartIndicator) {
if let PES:PacketizedElementaryStream = packetizedElementaryStreams[packet.PID] {
delegate?.didReadPacketizedElementaryStream(data, PES: PES)
@ -74,16 +72,16 @@ class TSReader {
packetizedElementaryStreams[packet.PID]?.append(packet.payload)
}
func close() {
internal func close() {
fileHandle?.closeFile()
}
}
// MARK: Iterator
extension TSReader: Iterator {
// MARK: Iterator
typealias T = TSPacket
func next() -> TSPacket? {
internal func next() -> TSPacket? {
guard let fileHandle = fileHandle else {
return nil
}
@ -94,14 +92,14 @@ extension TSReader: Iterator {
return TSPacket(data: fileHandle.readData(ofLength: TSPacket.size))
}
func hasNext() -> Bool {
internal func hasNext() -> Bool {
return UInt64(cursor * TSPacket.size) < eof
}
}
// MARK: CustomStringConvertible
extension TSReader: CustomStringConvertible {
var description:String {
// MARK: CustomStringConvertible
internal var description:String {
return Mirror(reflecting: self).description
}
}

View File

@ -2,15 +2,15 @@ import CoreMedia
import Foundation
class TSWriter {
static let defaultPATPID:UInt16 = 0
static let defaultPMTPID:UInt16 = 4096
static let defaultVideoPID:UInt16 = 256
static let defaultAudioPID:UInt16 = 257
static let defaultSegmentCount:Int = 3
static let defaultSegmentMaxCount:Int = 12
static let defaultSegmentDuration:Double = 2
static internal let defaultPATPID:UInt16 = 0
static internal let defaultPMTPID:UInt16 = 4096
static internal let defaultVideoPID:UInt16 = 256
static internal let defaultAudioPID:UInt16 = 257
static internal let defaultSegmentCount:Int = 3
static internal let defaultSegmentMaxCount:Int = 12
static internal let defaultSegmentDuration:Double = 2
var playlist:String {
internal var playlist:String {
var m3u8:M3U = M3U()
m3u8.targetDuration = segmentDuration
if (sequence <= TSWriter.defaultSegmentMaxCount) {
@ -22,20 +22,20 @@ class TSWriter {
m3u8.mediaList = Array(files[files.count - TSWriter.defaultSegmentCount..<files.count])
return m3u8.description
}
var lockQueue:DispatchQueue = DispatchQueue(
internal var lockQueue:DispatchQueue = DispatchQueue(
label: "com.github.shogo4405.lf.TSWriter.lock", attributes: []
)
var segmentMaxCount:Int = TSWriter.defaultSegmentMaxCount
var segmentDuration:Double = TSWriter.defaultSegmentDuration
internal var segmentMaxCount:Int = TSWriter.defaultSegmentMaxCount
internal var segmentDuration:Double = TSWriter.defaultSegmentDuration
fileprivate(set) var PAT:ProgramAssociationSpecific = {
internal fileprivate(set) var PAT:ProgramAssociationSpecific = {
let PAT:ProgramAssociationSpecific = ProgramAssociationSpecific()
PAT.programs = [1: TSWriter.defaultPMTPID]
return PAT
}()
fileprivate(set) var PMT:ProgramMapSpecific = ProgramMapSpecific()
fileprivate(set) var files:[M3UMediaInfo] = []
fileprivate(set) var running:Bool = false
internal fileprivate(set) var PMT:ProgramMapSpecific = ProgramMapSpecific()
internal fileprivate(set) var files:[M3UMediaInfo] = []
internal fileprivate(set) var running:Bool = false
fileprivate var PCRPID:UInt16 = TSWriter.defaultVideoPID
fileprivate var sequence:Int = 0
@ -48,7 +48,7 @@ class TSWriter {
fileprivate var currentFileHandle:FileHandle?
fileprivate var continuityCounters:[UInt16:UInt8] = [:]
func getFilePath(_ fileName:String) -> String? {
internal func getFilePath(_ fileName:String) -> String? {
for info in files {
if (info.url.absoluteString.contains(fileName)) {
return info.url.path
@ -57,7 +57,7 @@ class TSWriter {
return nil
}
func writeSampleBuffer(_ PID:UInt16, streamID:UInt8, sampleBuffer:CMSampleBuffer) {
internal func writeSampleBuffer(_ PID:UInt16, streamID:UInt8, sampleBuffer:CMSampleBuffer) {
if (timestamps[PID] == nil) {
timestamps[PID] = sampleBuffer.presentationTimeStamp
if (PCRPID == PID) {
@ -92,7 +92,7 @@ class TSWriter {
}
}
func split(_ PID:UInt16, PES:PacketizedElementaryStream, timestamp:CMTime) -> [TSPacket] {
internal func split(_ PID:UInt16, PES:PacketizedElementaryStream, timestamp:CMTime) -> [TSPacket] {
var PCR:UInt64?
let duration:Double = timestamp.seconds - PCRTimestamp.seconds
if (PCRPID == PID && 0.1 <= duration) {
@ -106,7 +106,7 @@ class TSWriter {
return packets
}
func rorateFileHandle(_ timestamp:CMTime, next:CMTime) -> Bool {
internal func rorateFileHandle(_ timestamp:CMTime, next:CMTime) -> Bool {
let duration:Double = timestamp.seconds - rotatedTimestamp.seconds
if (duration <= segmentDuration) {
return false
@ -168,7 +168,7 @@ class TSWriter {
return true
}
func removeFiles() {
internal func removeFiles() {
let fileManager:FileManager = FileManager.default
for info in files {
do { try fileManager.removeItem(at: info.url as URL) }
@ -180,7 +180,7 @@ class TSWriter {
extension TSWriter: Runnable {
// MARK: Runnable
func startRunning() {
internal func startRunning() {
lockQueue.async {
if (!self.running) {
return
@ -188,7 +188,7 @@ extension TSWriter: Runnable {
self.running = true
}
}
func stopRunning() {
internal func stopRunning() {
lockQueue.async {
if (self.running) {
return
@ -203,7 +203,7 @@ extension TSWriter: Runnable {
extension TSWriter: AudioEncoderDelegate {
// MARK: AudioEncoderDelegate
func didSetFormatDescription(audio formatDescription: CMFormatDescription?) {
internal func didSetFormatDescription(audio formatDescription: CMFormatDescription?) {
guard let formatDescription:CMAudioFormatDescription = formatDescription else {
return
}
@ -215,14 +215,14 @@ extension TSWriter: AudioEncoderDelegate {
continuityCounters[TSWriter.defaultAudioPID] = 0
}
func sampleOutput(audio sampleBuffer: CMSampleBuffer) {
internal func sampleOutput(audio sampleBuffer: CMSampleBuffer) {
writeSampleBuffer(TSWriter.defaultAudioPID, streamID:192, sampleBuffer:sampleBuffer)
}
}
extension TSWriter: VideoEncoderDelegate {
// MARK: VideoEncoderDelegate
func didSetFormatDescription(video formatDescription: CMFormatDescription?) {
internal func didSetFormatDescription(video formatDescription: CMFormatDescription?) {
guard let
formatDescription:CMFormatDescription = formatDescription,
let avcC:Data = AVCConfigurationRecord.getData(formatDescription) else {
@ -236,7 +236,7 @@ extension TSWriter: VideoEncoderDelegate {
continuityCounters[TSWriter.defaultVideoPID] = 0
}
func sampleOutput(video sampleBuffer: CMSampleBuffer) {
internal func sampleOutput(video sampleBuffer: CMSampleBuffer) {
writeSampleBuffer(TSWriter.defaultVideoPID, streamID:224, sampleBuffer:sampleBuffer)
}
}

View File

@ -9,21 +9,21 @@ import AVFoundation
- http://wiki.multimedia.cx/?title=Understanding_AAC
*/
struct AudioSpecificConfig {
static let ADTSHeaderSize:Int = 7
static internal let ADTSHeaderSize:Int = 7
var type:AudioObjectType
var frequency:SamplingFrequency
var channel:ChannelConfiguration
var frameLengthFlag:Bool = false
internal var type:AudioObjectType
internal var frequency:SamplingFrequency
internal var channel:ChannelConfiguration
internal var frameLengthFlag:Bool = false
var bytes:[UInt8] {
internal var bytes:[UInt8] {
var bytes:[UInt8] = [UInt8](repeating: 0, count: 2)
bytes[0] = type.rawValue << 3 | (frequency.rawValue >> 1 & 0x3)
bytes[1] = (frequency.rawValue & 0x1) << 7 | (channel.rawValue & 0xF) << 3
return bytes
}
init?(bytes:[UInt8]) {
internal init?(bytes:[UInt8]) {
guard let
type:AudioObjectType = AudioObjectType(rawValue: bytes[0] >> 3),
let frequency:SamplingFrequency = SamplingFrequency(rawValue: (bytes[0] & 0b00000111) << 1 | (bytes[1] >> 7)),
@ -35,20 +35,20 @@ struct AudioSpecificConfig {
self.channel = channel
}
init(type:AudioObjectType, frequency:SamplingFrequency, channel:ChannelConfiguration) {
internal init(type:AudioObjectType, frequency:SamplingFrequency, channel:ChannelConfiguration) {
self.type = type
self.frequency = frequency
self.channel = channel
}
init(formatDescription: CMFormatDescription) {
internal init(formatDescription: CMFormatDescription) {
let asbd:AudioStreamBasicDescription = CMAudioFormatDescriptionGetStreamBasicDescription(formatDescription)!.pointee
type = AudioObjectType(objectID: MPEG4ObjectID(rawValue: Int(asbd.mFormatFlags))!)
frequency = SamplingFrequency(sampleRate: asbd.mSampleRate)
channel = ChannelConfiguration(rawValue: UInt8(asbd.mChannelsPerFrame))!
}
func adts(_ length:Int) -> [UInt8] {
internal func adts(_ length:Int) -> [UInt8] {
let size:Int = 7
let fullSize:Int = size + length
var adts:[UInt8] = [UInt8](repeating: 0x00, count: size)
@ -62,7 +62,7 @@ struct AudioSpecificConfig {
return adts
}
func createAudioStreamBasicDescription() -> AudioStreamBasicDescription {
internal func createAudioStreamBasicDescription() -> AudioStreamBasicDescription {
var asbd:AudioStreamBasicDescription = AudioStreamBasicDescription()
asbd.mSampleRate = frequency.sampleRate
asbd.mFormatID = kAudioFormatMPEG4AAC
@ -77,14 +77,14 @@ struct AudioSpecificConfig {
}
}
// MARK: CustomStringConvertible
extension AudioSpecificConfig: CustomStringConvertible {
var description:String {
// MARK: CustomStringConvertible
internal var description:String {
return Mirror(reflecting: self).description
}
}
// MARK: - AudioObjectType
// MARK: -
enum AudioObjectType: UInt8 {
case unknown = 0
case aacMain = 1
@ -97,7 +97,7 @@ enum AudioObjectType: UInt8 {
case celp = 8
case hxvc = 9
init (objectID: MPEG4ObjectID) {
internal init(objectID: MPEG4ObjectID) {
switch objectID {
case .aac_Main:
self = .aacMain
@ -121,7 +121,7 @@ enum AudioObjectType: UInt8 {
}
}
// MARK: - SamplingFrequency
// MARK: -
enum SamplingFrequency: UInt8 {
case hz96000 = 0
case hz88200 = 1
@ -137,7 +137,7 @@ enum SamplingFrequency: UInt8 {
case hz8000 = 11
case hz7350 = 12
var sampleRate:Float64 {
internal var sampleRate:Float64 {
switch self {
case .hz96000:
return 96000
@ -168,7 +168,7 @@ enum SamplingFrequency: UInt8 {
}
}
init(sampleRate:Float64) {
internal init(sampleRate:Float64) {
switch Int(sampleRate) {
case 96000:
self = .hz96000
@ -202,7 +202,7 @@ enum SamplingFrequency: UInt8 {
}
}
// MARK: - ChannelConfiguration
// MARK: -
enum ChannelConfiguration: UInt8 {
case definedInAOTSpecificConfig = 0
case frontCenter = 1
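Since AudioSpecificConfig.bytes packs the object type, sampling-frequency index, and channel configuration into two bytes, a worked example helps sanity-check the shifts; the raw values below assume the standard MPEG-4 indices (AAC-LC = 2, 44.1 kHz = 4, stereo = 2), and the initializer is internal, so this only compiles inside the module.

```swift
let config = AudioSpecificConfig(
    type: AudioObjectType(rawValue: 2)!,          // AAC-LC
    frequency: SamplingFrequency(rawValue: 4)!,   // 44.1 kHz
    channel: ChannelConfiguration(rawValue: 2)!   // 2 channels
)
// bytes[0] = 2 << 3 | (4 >> 1 & 0x3)         -> 0x12
// bytes[1] = (4 & 0x1) << 7 | (2 & 0xF) << 3 -> 0x10
let asc: [UInt8] = config.bytes                   // [0x12, 0x10]
```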

View File

@ -3,13 +3,13 @@ import AVFoundation
import VideoToolbox
struct AVCFormatStream {
var bytes:[UInt8] = []
internal var bytes:[UInt8] = []
init(bytes:[UInt8]) {
internal init(bytes:[UInt8]) {
self.bytes = bytes
}
func toByteStream() -> [UInt8] {
internal func toByteStream() -> [UInt8] {
let buffer:ByteArray = ByteArray(bytes: bytes)
var result:[UInt8] = []
while (0 < buffer.bytesAvailable) {
@ -32,7 +32,7 @@ struct AVCFormatStream {
*/
struct AVCConfigurationRecord {
static func getData(_ formatDescription:CMFormatDescription?) -> Data? {
static internal func getData(_ formatDescription:CMFormatDescription?) -> Data? {
guard let formatDescription:CMFormatDescription = formatDescription else {
return nil
}
@ -42,40 +42,40 @@ struct AVCConfigurationRecord {
return nil
}
static let reserveLengthSizeMinusOne:UInt8 = 0x3F
static let reserveNumOfSequenceParameterSets:UInt8 = 0xE0
static let reserveChromaFormat:UInt8 = 0xFC
static let reserveBitDepthLumaMinus8:UInt8 = 0xF8
static let reserveBitDepthChromaMinus8 = 0xF8
static internal let reserveLengthSizeMinusOne:UInt8 = 0x3F
static internal let reserveNumOfSequenceParameterSets:UInt8 = 0xE0
static internal let reserveChromaFormat:UInt8 = 0xFC
static internal let reserveBitDepthLumaMinus8:UInt8 = 0xF8
static internal let reserveBitDepthChromaMinus8 = 0xF8
var configurationVersion:UInt8 = 1
var AVCProfileIndication:UInt8 = 0
var profileCompatibility:UInt8 = 0
var AVCLevelIndication:UInt8 = 0
var lengthSizeMinusOneWithReserved:UInt8 = 0
var numOfSequenceParameterSetsWithReserved:UInt8 = 0
var sequenceParameterSets:[[UInt8]] = []
var pictureParameterSets:[[UInt8]] = []
internal var configurationVersion:UInt8 = 1
internal var AVCProfileIndication:UInt8 = 0
internal var profileCompatibility:UInt8 = 0
internal var AVCLevelIndication:UInt8 = 0
internal var lengthSizeMinusOneWithReserved:UInt8 = 0
internal var numOfSequenceParameterSetsWithReserved:UInt8 = 0
internal var sequenceParameterSets:[[UInt8]] = []
internal var pictureParameterSets:[[UInt8]] = []
var chromaFormatWithReserve:UInt8 = 0
var bitDepthLumaMinus8WithReserve:UInt8 = 0
var bitDepthChromaMinus8WithReserve:UInt8 = 0
var sequenceParameterSetExt:[[UInt8]] = []
internal var chromaFormatWithReserve:UInt8 = 0
internal var bitDepthLumaMinus8WithReserve:UInt8 = 0
internal var bitDepthChromaMinus8WithReserve:UInt8 = 0
internal var sequenceParameterSetExt:[[UInt8]] = []
var naluLength:Int32 {
internal var naluLength:Int32 {
return Int32((lengthSizeMinusOneWithReserved >> 6) + 1)
}
init() {
internal init() {
}
init(data: Data) {
internal init(data: Data) {
var bytes:[UInt8] = [UInt8](repeating: 0x00, count: data.count)
(data as NSData).getBytes(&bytes, length: bytes.count)
self.bytes = bytes
}
func createFormatDescription(_ formatDescriptionOut: UnsafeMutablePointer<CMFormatDescription?>) -> OSStatus {
internal func createFormatDescription(_ formatDescriptionOut: UnsafeMutablePointer<CMFormatDescription?>) -> OSStatus {
var parameterSetPointers:[UnsafePointer<UInt8>] = [
UnsafePointer<UInt8>(sequenceParameterSets[0]),
UnsafePointer<UInt8>(pictureParameterSets[0])
@ -95,9 +95,9 @@ struct AVCConfigurationRecord {
}
}
// MARK: BytesConvertible
extension AVCConfigurationRecord: BytesConvertible {
var bytes:[UInt8] {
// MARK: BytesConvertible
internal var bytes:[UInt8] {
get {
let buffer:ByteArray = ByteArray()
.writeUInt8(configurationVersion)
@ -145,9 +145,9 @@ extension AVCConfigurationRecord: BytesConvertible {
}
}
// MARK: CustomStringConvertible
extension AVCConfigurationRecord: CustomStringConvertible {
var description:String {
// MARK: CustomStringConvertible
internal var description:String {
return Mirror(reflecting: self).description
}
}

View File

@ -2,7 +2,6 @@ import Foundation
import AVFoundation
import VideoToolbox
// MARK: NALType
enum NALType: UInt8 {
case unspec = 0
case slice = 1 // P frame
@ -21,14 +20,14 @@ enum NALType: UInt8 {
// MARK: -
struct NALUnit {
var refIdc:UInt8 = 0
var type:NALType = .unspec
var payload:[UInt8] = []
internal var refIdc:UInt8 = 0
internal var type:NALType = .unspec
internal var payload:[UInt8] = []
}
// MARK: BytesConvertible
extension NALUnit: BytesConvertible {
var bytes:[UInt8] {
// MARK: BytesConvertible
internal var bytes:[UInt8] {
get {
return ByteArray()
.writeUInt8(refIdc << 5 | type.rawValue)

View File

@ -1,7 +1,6 @@
import Foundation
import AVFoundation
// MARK: PESPacketHeader
/**
- seealso: https://en.wikipedia.org/wiki/Packetized_elementary_stream
*/
@ -13,7 +12,7 @@ protocol PESPacketHeader {
var data:[UInt8] { get set }
}
// MARK: - PESPTSDTSIndicator
// MARK: -
enum PESPTSDTSIndicator:UInt8 {
case none = 0
case onlyPTS = 1
@ -23,34 +22,34 @@ enum PESPTSDTSIndicator:UInt8 {
// MARK: -
struct PESOptionalHeader {
static let fixedSectionSize:Int = 3
static let defaultMarkerBits:UInt8 = 2
static internal let fixedSectionSize:Int = 3
static internal let defaultMarkerBits:UInt8 = 2
var markerBits:UInt8 = PESOptionalHeader.defaultMarkerBits
var scramblingControl:UInt8 = 0
var priority:Bool = false
var dataAlignmentIndicator:Bool = false
var copyright:Bool = false
var originalOrCopy:Bool = false
var PTSDTSIndicator:UInt8 = PESPTSDTSIndicator.none.rawValue
var ESCRFlag:Bool = false
var ESRateFlag:Bool = false
var DSMTrickModeFlag:Bool = false
var additionalCopyInfoFlag:Bool = false
var CRCFlag:Bool = false
var extentionFlag:Bool = false
var PESHeaderLength:UInt8 = 0
var optionalFields:[UInt8] = []
var stuffingBytes:[UInt8] = []
internal var markerBits:UInt8 = PESOptionalHeader.defaultMarkerBits
internal var scramblingControl:UInt8 = 0
internal var priority:Bool = false
internal var dataAlignmentIndicator:Bool = false
internal var copyright:Bool = false
internal var originalOrCopy:Bool = false
internal var PTSDTSIndicator:UInt8 = PESPTSDTSIndicator.none.rawValue
internal var ESCRFlag:Bool = false
internal var ESRateFlag:Bool = false
internal var DSMTrickModeFlag:Bool = false
internal var additionalCopyInfoFlag:Bool = false
internal var CRCFlag:Bool = false
internal var extentionFlag:Bool = false
internal var PESHeaderLength:UInt8 = 0
internal var optionalFields:[UInt8] = []
internal var stuffingBytes:[UInt8] = []
init() {
internal init() {
}
init?(bytes:[UInt8]) {
internal init?(bytes:[UInt8]) {
self.bytes = bytes
}
mutating func setTimestamp(_ timestamp:CMTime, presentationTimeStamp:CMTime, decodeTimeStamp:CMTime) {
mutating internal func setTimestamp(_ timestamp:CMTime, presentationTimeStamp:CMTime, decodeTimeStamp:CMTime) {
let base:Double = Double(timestamp.seconds)
if (presentationTimeStamp != kCMTimeInvalid) {
PTSDTSIndicator |= 0x02
@ -70,9 +69,9 @@ struct PESOptionalHeader {
}
}
// MARK: BytesConvertible
extension PESOptionalHeader: BytesConvertible {
var bytes:[UInt8] {
// MARK: BytesConvertible
internal var bytes:[UInt8] {
get {
var bytes:[UInt8] = [0x00, 0x00]
bytes[0] |= markerBits << 6
@ -120,19 +119,19 @@ extension PESOptionalHeader: BytesConvertible {
}
}
// MARK: CustomStringConvertible
extension PESOptionalHeader: CustomStringConvertible {
var description:String {
// MARK: CustomStringConvertible
internal var description:String {
return Mirror(reflecting: self).description
}
}
// MARK: -
struct PacketizedElementaryStream: PESPacketHeader {
static let untilPacketLengthSize:Int = 6
static let startCode:[UInt8] = [0x00, 0x00, 0x01]
static internal let untilPacketLengthSize:Int = 6
static internal let startCode:[UInt8] = [0x00, 0x00, 0x01]
static func create(_ sampleBuffer:CMSampleBuffer, timestamp:CMTime, config:Any?) -> PacketizedElementaryStream? {
static internal func create(_ sampleBuffer:CMSampleBuffer, timestamp:CMTime, config:Any?) -> PacketizedElementaryStream? {
if let config:AudioSpecificConfig = config as? AudioSpecificConfig {
return PacketizedElementaryStream(sampleBuffer: sampleBuffer, timestamp: timestamp, config: config)
}
@ -142,20 +141,20 @@ struct PacketizedElementaryStream: PESPacketHeader {
return nil
}
var startCode:[UInt8] = PacketizedElementaryStream.startCode
var streamID:UInt8 = 0
var packetLength:UInt16 = 0
var optionalPESHeader:PESOptionalHeader?
var data:[UInt8] = []
internal var startCode:[UInt8] = PacketizedElementaryStream.startCode
internal var streamID:UInt8 = 0
internal var packetLength:UInt16 = 0
internal var optionalPESHeader:PESOptionalHeader?
internal var data:[UInt8] = []
init?(bytes:[UInt8]) {
internal init?(bytes:[UInt8]) {
self.bytes = bytes
if (startCode != PacketizedElementaryStream.startCode) {
return nil
}
}
init?(sampleBuffer:CMSampleBuffer, timestamp:CMTime, config:AudioSpecificConfig?) {
internal init?(sampleBuffer:CMSampleBuffer, timestamp:CMTime, config:AudioSpecificConfig?) {
let payload:[UInt8] = sampleBuffer.bytes
data += config!.adts(payload.count)
data += payload
@ -168,7 +167,7 @@ struct PacketizedElementaryStream: PESPacketHeader {
packetLength = UInt16(data.count + optionalPESHeader!.bytes.count)
}
init?(sampleBuffer:CMSampleBuffer, timestamp:CMTime, config:AVCConfigurationRecord?) {
internal init?(sampleBuffer:CMSampleBuffer, timestamp:CMTime, config:AVCConfigurationRecord?) {
data += [0x00, 0x00, 0x00, 0x01, 0x09, 0xf0]
if let config:AVCConfigurationRecord = config {
data += [0x00, 0x00, 0x00, 0x01] + config.sequenceParameterSets[0]
@ -184,7 +183,7 @@ struct PacketizedElementaryStream: PESPacketHeader {
packetLength = UInt16(data.count + optionalPESHeader!.bytes.count)
}
func arrayOfPackets(_ PID:UInt16, PCR:UInt64?) -> [TSPacket] {
internal func arrayOfPackets(_ PID:UInt16, PCR:UInt64?) -> [TSPacket] {
let payload:[UInt8] = bytes
var packets:[TSPacket] = []
@ -245,15 +244,15 @@ struct PacketizedElementaryStream: PESPacketHeader {
return packets
}
mutating func append(_ bytes:[UInt8]) -> Int {
mutating internal func append(_ bytes:[UInt8]) -> Int {
data += bytes
return bytes.count
}
}
// MARK: BytesConvertible
extension PacketizedElementaryStream: BytesConvertible {
var bytes:[UInt8] {
// MARK: BytesConvertible
internal var bytes:[UInt8] {
get {
return ByteArray()
.writeBytes(startCode)
@ -283,9 +282,9 @@ extension PacketizedElementaryStream: BytesConvertible {
}
}
// MARK: CustomStringConvertible
extension PacketizedElementaryStream: CustomStringConvertible {
var description:String {
// MARK: CustomStringConvertible
internal var description:String {
return Mirror(reflecting: self).description
}
}
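
A minimal usage sketch for the types above (not part of this diff). It assumes module-internal access, an AudioSpecificConfig coming from the AAC encoder, and an arbitrary PID of 0x0100; the helper name is hypothetical.

import Foundation
import CoreMedia

// Hypothetical helper inside the framework target: mux one encoded AAC sample
// into fixed-size MPEG-TS packets and append them to a file.
func appendAudioSample(_ sampleBuffer:CMSampleBuffer, timestamp:CMTime,
                       config:AudioSpecificConfig, to fileHandle:FileHandle) {
    guard let pes:PacketizedElementaryStream = PacketizedElementaryStream.create(
        sampleBuffer, timestamp: timestamp, config: config) else {
        return
    }
    // arrayOfPackets(_:PCR:) slices the PES into 188-byte (TSPacket.size) packets;
    // the PCR is omitted for an audio-only elementary stream.
    for packet in pes.arrayOfPackets(0x0100, PCR: nil) {
        fileHandle.write(Data(bytes: packet.bytes))
    }
}
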

View File

@ -1,6 +1,5 @@
import Foundation
// MARK: PSIPointer
/**
- seealso: https://en.wikipedia.org/wiki/Program-specific_information
*/
@ -9,7 +8,7 @@ protocol PSIPointer {
var pointerFillerBytes:[UInt8] { get set }
}
// MARK: - PSITableHeader
// MARK: -
protocol PSITableHeader {
var tableID:UInt8 { get set }
var sectionSyntaxIndicator:Bool { get set }
@ -17,7 +16,7 @@ protocol PSITableHeader {
var sectionLength:UInt16 { get set }
}
// MARK: - PSITableSyntax
// MARK: -
protocol PSITableSyntax {
var tableIDExtension:UInt16 { get set }
var versionNumber:UInt8 { get set }
@ -77,15 +76,15 @@ class ProgramSpecific: PSIPointer, PSITableHeader, PSITableSyntax {
}
}
// MARK: CustomStringConvertible
extension ProgramSpecific: CustomStringConvertible {
// MARK: CustomStringConvertible
var description:String {
return Mirror(reflecting: self).description
}
}
// MARK: BytesConvertible
extension ProgramSpecific: BytesConvertible {
// MARK: BytesConvertible
var bytes:[UInt8] {
get {
let data:[UInt8] = self.data
@ -217,7 +216,7 @@ final class ProgramMapSpecific: ProgramSpecific {
}
}
// MARK: ElementaryStreamType
// MARK: -
enum ElementaryStreamType: UInt8 {
case mpeg1Video = 0x01
case mpeg2Video = 0x02
@ -233,6 +232,7 @@ enum ElementaryStreamType: UInt8 {
case h265 = 0x24
}
// MARK: -
struct ElementaryStreamSpecificData {
static let fixedHeaderSize:Int = 5
@ -249,9 +249,9 @@ struct ElementaryStreamSpecificData {
}
}
// MARK: BytesConvertible
extension ElementaryStreamSpecificData: BytesConvertible {
var bytes:[UInt8] {
// MARK: BytesConvertible
internal var bytes:[UInt8] {
get {
return ByteArray()
.writeUInt8(streamType)
@ -274,8 +274,8 @@ extension ElementaryStreamSpecificData: BytesConvertible {
}
}
// MARK: CustomStringConvertible
extension ElementaryStreamSpecificData: CustomStringConvertible {
// MARK: CustomStringConvertible
var description:String {
return Mirror(reflecting: self).description
}

View File

@ -5,21 +5,21 @@ import AVFoundation
- seealso: https://en.wikipedia.org/wiki/MPEG_transport_stream#Packet
*/
struct TSPacket {
static let size:Int = 188
static let headerSize:Int = 4
static let defaultSyncByte:UInt8 = 0x47
static internal let size:Int = 188
static internal let headerSize:Int = 4
static internal let defaultSyncByte:UInt8 = 0x47
var syncByte:UInt8 = TSPacket.defaultSyncByte
var transportErrorIndicator:Bool = false
var payloadUnitStartIndicator:Bool = false
var transportPriority:Bool = false
var PID:UInt16 = 0
var scramblingControl:UInt8 = 0
var adaptationFieldFlag:Bool = false
var payloadFlag:Bool = false
var continuityCounter:UInt8 = 0
var adaptationField:TSAdaptationField?
var payload:[UInt8] = []
internal var syncByte:UInt8 = TSPacket.defaultSyncByte
internal var transportErrorIndicator:Bool = false
internal var payloadUnitStartIndicator:Bool = false
internal var transportPriority:Bool = false
internal var PID:UInt16 = 0
internal var scramblingControl:UInt8 = 0
internal var adaptationFieldFlag:Bool = false
internal var payloadFlag:Bool = false
internal var continuityCounter:UInt8 = 0
internal var adaptationField:TSAdaptationField?
internal var payload:[UInt8] = []
fileprivate var remain:Int {
var adaptationFieldSize:Int = 0
@ -30,10 +30,10 @@ struct TSPacket {
return TSPacket.size - TSPacket.headerSize - adaptationFieldSize - payload.count
}
init() {
internal init() {
}
init?(bytes:[UInt8]) {
internal init?(bytes:[UInt8]) {
guard TSPacket.size == bytes.count else {
return nil
}
@ -43,7 +43,7 @@ struct TSPacket {
}
}
init?(data:Data) {
internal init?(data:Data) {
guard TSPacket.size == data.count else {
return nil
}
@ -53,7 +53,7 @@ struct TSPacket {
}
}
mutating func fill(_ data:[UInt8]?, useAdaptationField:Bool) -> Int {
mutating internal func fill(_ data:[UInt8]?, useAdaptationField:Bool) -> Int {
guard let data:[UInt8] = data else {
payload += [UInt8](repeating: 0xff, count: remain)
return 0
@ -78,9 +78,9 @@ struct TSPacket {
}
}
// MARK: BytesConvertible
extension TSPacket: BytesConvertible {
var bytes:[UInt8] {
// MARK: BytesConvertible
internal var bytes:[UInt8] {
get {
var bytes:[UInt8] = [syncByte, 0x00, 0x00, 0x00]
bytes[1] |= transportErrorIndicator ? 0x80 : 0
@ -128,11 +128,11 @@ extension TSPacket: BytesConvertible {
// MARK: -
struct TSTimestamp {
static let resolution:Double = 90 * 1000 // 90kHz
static let PTSMask:UInt8 = 0x10
static let PTSDTSMask:UInt8 = 0x30
static internal let resolution:Double = 90 * 1000 // 90kHz
static internal let PTSMask:UInt8 = 0x10
static internal let PTSDTSMask:UInt8 = 0x30
static func decode(_ bytes:[UInt8]) -> UInt64 {
static internal func decode(_ bytes:[UInt8]) -> UInt64 {
var result:UInt64 = 0
result |= UInt64(bytes[0] & 0x0e) << 29
result |= UInt64(bytes[1]) << 22 | UInt64(bytes[2] & 0xfe) << 14
@ -140,7 +140,7 @@ struct TSTimestamp {
return result
}
static func encode(_ b:UInt64, _ m:UInt8) -> [UInt8] {
static internal func encode(_ b:UInt64, _ m:UInt8) -> [UInt8] {
var bytes:[UInt8] = [UInt8](repeating: 0x00, count: 5)
bytes[0] = UInt8(truncatingBitPattern: b >> 29) | 0x01 | m
bytes[1] = UInt8(truncatingBitPattern: b >> 22)
@ -153,10 +153,10 @@ struct TSTimestamp {
// MARK: -
struct TSProgramClockReference {
static let resolutionForBase:Int32 = 90 * 1000 // 90kHz
static let resolutionForExtension:Int32 = 27 * 1000 * 1000 // 27MHz
static internal let resolutionForBase:Int32 = 90 * 1000 // 90kHz
static internal let resolutionForExtension:Int32 = 27 * 1000 * 1000 // 27MHz
static func decode(_ bytes:[UInt8]) -> (UInt64, UInt16) {
static internal func decode(_ bytes:[UInt8]) -> (UInt64, UInt16) {
var b:UInt64 = 0
var e:UInt16 = 0
b |= UInt64(bytes[0]) << 25
@ -169,7 +169,7 @@ struct TSProgramClockReference {
return (b, e)
}
static func encode(_ b:UInt64, _ e:UInt16) -> [UInt8] {
static internal func encode(_ b:UInt64, _ e:UInt16) -> [UInt8] {
var bytes:[UInt8] = [UInt8](repeating: 0, count: 6)
bytes[0] = UInt8(truncatingBitPattern: b >> 25)
bytes[1] = UInt8(truncatingBitPattern: b >> 17)
@ -191,43 +191,43 @@ struct TSProgramClockReference {
}
}
// MARK: CustomStringConvertible
extension TSPacket: CustomStringConvertible {
var description:String {
// MARK: CustomStringConvertible
internal var description:String {
return Mirror(reflecting: self).description
}
}
// MARK: -
struct TSAdaptationField {
static let PCRSize:Int = 6
static let fixedSectionSize:Int = 2
static internal let PCRSize:Int = 6
static internal let fixedSectionSize:Int = 2
var length:UInt8 = 0
var discontinuityIndicator:Bool = false
var randomAccessIndicator:Bool = false
var elementaryStreamPriorityIndicator = false
var PCRFlag:Bool = false
var OPCRFlag:Bool = false
var splicingPointFlag:Bool = false
var transportPrivateDataFlag:Bool = false
var adaptationFieldExtensionFlag:Bool = false
var PCR:[UInt8] = []
var OPCR:[UInt8] = []
var spliceCountdown:UInt8 = 0
var transportPrivateDataLength:UInt8 = 0
var transportPrivateData:[UInt8] = []
var adaptationExtension:TSAdaptationExtensionField?
var stuffingBytes:[UInt8] = []
internal var length:UInt8 = 0
internal var discontinuityIndicator:Bool = false
internal var randomAccessIndicator:Bool = false
internal var elementaryStreamPriorityIndicator = false
internal var PCRFlag:Bool = false
internal var OPCRFlag:Bool = false
internal var splicingPointFlag:Bool = false
internal var transportPrivateDataFlag:Bool = false
internal var adaptationFieldExtensionFlag:Bool = false
internal var PCR:[UInt8] = []
internal var OPCR:[UInt8] = []
internal var spliceCountdown:UInt8 = 0
internal var transportPrivateDataLength:UInt8 = 0
internal var transportPrivateData:[UInt8] = []
internal var adaptationExtension:TSAdaptationExtensionField?
internal var stuffingBytes:[UInt8] = []
init() {
internal init() {
}
init?(bytes:[UInt8]) {
internal init?(bytes:[UInt8]) {
self.bytes = bytes
}
mutating func compute() {
mutating internal func compute() {
length = UInt8(truncatingBitPattern: TSAdaptationField.fixedSectionSize)
length += UInt8(truncatingBitPattern: PCR.count)
length += UInt8(truncatingBitPattern: OPCR.count)
@ -239,15 +239,15 @@ struct TSAdaptationField {
length -= 1
}
mutating func stuffing(_ size:Int) {
mutating internal func stuffing(_ size:Int) {
stuffingBytes = [UInt8](repeating: 0xff, count: size)
length += UInt8(size)
}
}
// MARK: BytesConvertible
extension TSAdaptationField: BytesConvertible {
var bytes:[UInt8] {
// MARK: BytesConvertible
internal var bytes:[UInt8] {
get {
var byte:UInt8 = 0
byte |= discontinuityIndicator ? 0x80 : 0
@ -317,33 +317,33 @@ extension TSAdaptationField: BytesConvertible {
}
}
// MARK: CustomStringConvertible
extension TSAdaptationField: CustomStringConvertible {
// MARK: CustomStringConvertible
var description:String {
return Mirror(reflecting: self).description
}
}
// MARK: - TSAdaptationExtensionField
// MARK: -
struct TSAdaptationExtensionField {
var length:UInt8 = 0
var legalTimeWindowFlag:Bool = false
var piecewiseRateFlag:Bool = false
var seamlessSpiceFlag:Bool = false
var legalTimeWindowValidFlag:Bool = false
var legalTimeWindowOffset:UInt16 = 0
var piecewiseRate:UInt32 = 0
var spliceType:UInt8 = 0
var DTSNextAccessUnit:[UInt8] = [UInt8](repeating: 0x00, count: 5)
internal var length:UInt8 = 0
internal var legalTimeWindowFlag:Bool = false
internal var piecewiseRateFlag:Bool = false
internal var seamlessSpiceFlag:Bool = false
internal var legalTimeWindowValidFlag:Bool = false
internal var legalTimeWindowOffset:UInt16 = 0
internal var piecewiseRate:UInt32 = 0
internal var spliceType:UInt8 = 0
internal var DTSNextAccessUnit:[UInt8] = [UInt8](repeating: 0x00, count: 5)
init?(bytes:[UInt8]) {
internal init?(bytes:[UInt8]) {
self.bytes = bytes
}
}
// MARK: BytesConvertible
extension TSAdaptationExtensionField: BytesConvertible {
var bytes:[UInt8] {
// MARK: BytesConvertible
internal var bytes:[UInt8] {
get {
let buffer:ByteArray = ByteArray()
.writeUInt8(length)
@ -394,9 +394,9 @@ extension TSAdaptationExtensionField: BytesConvertible {
}
}
// MARK: CustomStringConvertible
extension TSAdaptationExtensionField: CustomStringConvertible {
var description:String {
// MARK: CustomStringConvertible
internal var description:String {
return Mirror(reflecting: self).description
}
}
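
A small round-trip sketch for the TSTimestamp helpers above (not part of the diff). decode(_:) masks out the 4-bit prefix and the marker bits, so a 33-bit PTS survives encode/decode; 0x20 is the standard "PTS only" prefix nibble already shifted into place.

let pts:UInt64 = UInt64(2.5 * TSTimestamp.resolution)  // 2.5 s at 90 kHz = 225_000 ticks
let fiveBytes:[UInt8] = TSTimestamp.encode(pts, 0x20)  // five-byte PES timestamp field
let decoded:UInt64 = TSTimestamp.decode(fiveBytes)     // == 225_000 again
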

View File

@ -4,9 +4,9 @@ import UIKit
import Foundation
import AVFoundation
class AVMixer: NSObject {
final internal class AVMixer: NSObject {
static let supportedSettingsKeys:[String] = [
static internal let supportedSettingsKeys:[String] = [
"fps",
"sessionPreset",
"orientation",
@ -14,34 +14,34 @@ class AVMixer: NSObject {
"continuousExposure",
]
static let defaultFPS:Float64 = 30
static let defaultSessionPreset:String = AVCaptureSessionPresetMedium
static let defaultVideoSettings:[NSObject: AnyObject] = [
static internal let defaultFPS:Float64 = 30
static internal let defaultSessionPreset:String = AVCaptureSessionPresetMedium
static internal let defaultVideoSettings:[NSObject: AnyObject] = [
kCVPixelBufferPixelFormatTypeKey: Int(kCVPixelFormatType_32BGRA) as AnyObject
]
var fps:Float64 {
internal var fps:Float64 {
get { return videoIO.fps }
set { videoIO.fps = newValue }
}
var orientation:AVCaptureVideoOrientation {
internal var orientation:AVCaptureVideoOrientation {
get { return videoIO.orientation }
set { videoIO.orientation = newValue }
}
var continuousExposure:Bool {
internal var continuousExposure:Bool {
get { return videoIO.continuousExposure }
set { videoIO.continuousExposure = newValue }
}
var continuousAutofocus:Bool {
internal var continuousAutofocus:Bool {
get { return videoIO.continuousAutofocus }
set { videoIO.continuousAutofocus = newValue }
}
#if os(iOS)
var syncOrientation:Bool = false {
internal var syncOrientation:Bool = false {
didSet {
guard syncOrientation != oldValue else {
return
@ -56,7 +56,7 @@ class AVMixer: NSObject {
}
#endif
var sessionPreset:String = AVMixer.defaultSessionPreset {
internal var sessionPreset:String = AVMixer.defaultSessionPreset {
didSet {
guard sessionPreset != oldValue else {
return
@ -68,7 +68,7 @@ class AVMixer: NSObject {
}
fileprivate var _session:AVCaptureSession? = nil
var session:AVCaptureSession! {
internal var session:AVCaptureSession! {
if (_session == nil) {
_session = AVCaptureSession()
_session!.sessionPreset = AVMixer.defaultSessionPreset
@ -76,11 +76,11 @@ class AVMixer: NSObject {
return _session!
}
fileprivate(set) var audioIO:AudioIOComponent!
fileprivate(set) var videoIO:VideoIOComponent!
fileprivate(set) lazy var recorder:AVMixerRecorder = AVMixerRecorder()
internal fileprivate(set) var audioIO:AudioIOComponent!
internal fileprivate(set) var videoIO:VideoIOComponent!
internal fileprivate(set) lazy var recorder:AVMixerRecorder = AVMixerRecorder()
override init() {
override internal init() {
super.init()
audioIO = AudioIOComponent(mixer: self)
videoIO = VideoIOComponent(mixer: self)
@ -93,7 +93,7 @@ class AVMixer: NSObject {
}
#if os(iOS)
func onOrientationChanged(_ notification:Notification) {
internal func onOrientationChanged(_ notification:Notification) {
var deviceOrientation:UIDeviceOrientation = .unknown
if let device:UIDevice = notification.object as? UIDevice {
deviceOrientation = device.orientation
@ -105,13 +105,13 @@ class AVMixer: NSObject {
#endif
}
// MARK: Runnable
extension AVMixer: Runnable {
var running:Bool {
// MARK: Runnable
internal var running:Bool {
return session.isRunning
}
func startRunning() {
internal func startRunning() {
session.startRunning()
#if os(iOS)
if let orientation:AVCaptureVideoOrientation = DeviceUtil.getAVCaptureVideoOrientation(UIDevice.current.orientation) , syncOrientation {
@ -120,7 +120,7 @@ extension AVMixer: Runnable {
#endif
}
func stopRunning() {
internal func stopRunning() {
session.stopRunning()
}
}
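
A rough configuration sketch (module-internal, not part of the diff); the preset and flag values are illustrative, and most of the keys in supportedSettingsKeys simply mirror these properties.

import AVFoundation

let mixer:AVMixer = AVMixer()
mixer.fps = AVMixer.defaultFPS                        // 30
mixer.sessionPreset = AVCaptureSessionPreset1280x720  // applied to the underlying AVCaptureSession
mixer.continuousAutofocus = true
mixer.continuousExposure = true
mixer.startRunning()                                  // Runnable: wraps session.startRunning()
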

View File

@ -4,7 +4,6 @@ import AVFoundation
import AssetsLibrary
#endif
// MARK: AVMixerRecorderDelegate
public protocol AVMixerRecorderDelegate: class {
var moviesDirectory:URL { get }
func rotateFile(_ recorder:AVMixerRecorder, sampleBuffer:CMSampleBuffer, mediaType:String)
@ -38,14 +37,14 @@ open class AVMixerRecorder: NSObject {
open let lockQueue:DispatchQueue = DispatchQueue(
label: "com.github.shogo4405.lf.AVMixerRecorder.lock", attributes: []
)
fileprivate(set) var running:Bool = false
internal fileprivate(set) var running:Bool = false
public override init() {
super.init()
delegate = DefaultAVMixerRecorderDelegate()
}
final func appendSampleBuffer(_ sampleBuffer:CMSampleBuffer, mediaType:String) {
final internal func appendSampleBuffer(_ sampleBuffer:CMSampleBuffer, mediaType:String) {
lockQueue.async {
guard let delegate:AVMixerRecorderDelegate = self.delegate , self.running else {
@ -73,7 +72,7 @@ open class AVMixerRecorder: NSObject {
}
}
func finishWriting() {
internal func finishWriting() {
for (_, input) in writerInputs {
input.markAsFinished()
}
@ -85,9 +84,9 @@ open class AVMixerRecorder: NSObject {
}
}
// MARK: Runnable
extension AVMixerRecorder: Runnable {
final func startRunning() {
// MARK: Runnable
final internal func startRunning() {
lockQueue.async {
guard !self.running else {
return
@ -97,7 +96,7 @@ extension AVMixerRecorder: Runnable {
}
}
final func stopRunning() {
final internal func stopRunning() {
lockQueue.async {
guard self.running else {
return
@ -127,9 +126,8 @@ open class DefaultAVMixerRecorderDelegate: NSObject {
#endif
}
// MARK: AVMixerRecorderDelegate
extension DefaultAVMixerRecorderDelegate: AVMixerRecorderDelegate {
// MARK: AVMixerRecorderDelegate
public func rotateFile(_ recorder:AVMixerRecorder, sampleBuffer:CMSampleBuffer, mediaType:String) {
let presentationTimeStamp:CMTime = sampleBuffer.presentationTimeStamp
guard clockReference == mediaType && rotateTime.value < presentationTimeStamp.value else {

View File

@ -2,12 +2,12 @@ import Foundation
import AVFoundation
final class AudioIOComponent: IOComponent {
var encoder:AACEncoder = AACEncoder()
let lockQueue:DispatchQueue = DispatchQueue(
internal var encoder:AACEncoder = AACEncoder()
internal let lockQueue:DispatchQueue = DispatchQueue(
label: "com.github.shogo4405.lf.AudioIOComponent.lock", attributes: []
)
var input:AVCaptureDeviceInput? = nil {
internal var input:AVCaptureDeviceInput? = nil {
didSet {
guard oldValue != input else {
return
@ -22,7 +22,7 @@ final class AudioIOComponent: IOComponent {
}
fileprivate var _output:AVCaptureAudioDataOutput? = nil
var output:AVCaptureAudioDataOutput! {
internal var output:AVCaptureAudioDataOutput! {
get {
if (_output == nil) {
_output = AVCaptureAudioDataOutput()
@ -46,7 +46,7 @@ final class AudioIOComponent: IOComponent {
encoder.lockQueue = lockQueue
}
func attachAudio(_ audio:AVCaptureDevice?, automaticallyConfiguresApplicationAudioSession:Bool) {
internal func attach(audio:AVCaptureDevice?, automaticallyConfiguresApplicationAudioSession:Bool) {
output = nil
encoder.invalidate()
guard let audio:AVCaptureDevice = audio else {
@ -66,9 +66,9 @@ final class AudioIOComponent: IOComponent {
}
}
// MARK: AVCaptureAudioDataOutputSampleBufferDelegate
extension AudioIOComponent: AVCaptureAudioDataOutputSampleBufferDelegate {
func captureOutput(_ captureOutput:AVCaptureOutput!, didOutputSampleBuffer sampleBuffer:CMSampleBuffer!, from connection:AVCaptureConnection!) {
// MARK: AVCaptureAudioDataOutputSampleBufferDelegate
internal func captureOutput(_ captureOutput:AVCaptureOutput!, didOutputSampleBuffer sampleBuffer:CMSampleBuffer!, from connection:AVCaptureConnection!) {
encoder.captureOutput(captureOutput, didOutputSampleBuffer: sampleBuffer, from: connection)
}
}

View File

@ -266,8 +266,8 @@ class AudioStreamPlayback {
}
}
// MARK: Runnable
extension AudioStreamPlayback: Runnable {
// MARK: Runnable
func startRunning() {
lockQueue.async {
guard !self.running else {

View File

@ -3,7 +3,7 @@ import Foundation
class IOComponent: NSObject {
fileprivate(set) var mixer:AVMixer
init(mixer: AVMixer) {
internal init(mixer: AVMixer) {
self.mixer = mixer
}
}

View File

@ -15,7 +15,7 @@ public struct SoundTransform {
public var volumeRampTime:Float32 = SoundTransform.defaultVolumeRampTime
public var pan:Float32 = SoundTransform.defaultPan
func setParameter(_ inAQ: AudioQueueRef) {
internal func setParameter(_ inAQ: AudioQueueRef) {
AudioQueueSetParameter(inAQ, kAudioQueueParam_Volume, volume)
AudioQueueSetParameter(inAQ, kAudioQueueParam_PlayRate, playRate)
AudioQueueSetParameter(inAQ, kAudioQueueParam_Pitch, pitch)
@ -24,8 +24,8 @@ public struct SoundTransform {
}
}
// MARK: CustomStringConvertible
extension SoundTransform: CustomStringConvertible {
// MARK: CustomStringConvertible
public var description:String {
return Mirror(reflecting: self).description
}

View File

@ -8,7 +8,7 @@ final class VideoIOComponent: IOComponent {
)
var encoder:AVCEncoder = AVCEncoder()
var decoder:AVCDecoder = AVCDecoder()
var drawable:StreamDrawable?
var drawable:NetStreamDrawable?
var formatDescription:CMVideoFormatDescription? {
didSet {
decoder.formatDescription = formatDescription
@ -229,7 +229,7 @@ final class VideoIOComponent: IOComponent {
decoder.delegate = self
}
func attachCamera(_ camera:AVCaptureDevice?) {
func attach(camera:AVCaptureDevice?) {
output = nil
guard let camera:AVCaptureDevice = camera else {
input = nil
@ -270,7 +270,7 @@ final class VideoIOComponent: IOComponent {
}
#if os(OSX)
func attachScreen(screen:AVCaptureScreenInput?) {
func attach(screen:AVCaptureScreenInput?) {
output = nil
guard let _:AVCaptureScreenInput = screen else {
input = nil
@ -282,7 +282,7 @@ final class VideoIOComponent: IOComponent {
mixer.session.startRunning()
}
#else
func attachScreen(_ screen:ScreenCaptureSession?, useScreenSize:Bool = true) {
func attach(screen:ScreenCaptureSession?, useScreenSize:Bool = true) {
guard let screen:ScreenCaptureSession = screen else {
self.screen?.stopRunning()
self.screen = nil
@ -333,24 +333,24 @@ final class VideoIOComponent: IOComponent {
}
#if os(iOS)
func rampToVideoZoomFactor(_ factor:CGFloat, withRate:Float) {
guard let device:AVCaptureDevice = (input as? AVCaptureDeviceInput)?.device
, 1 <= factor && factor < device.activeFormat.videoMaxZoomFactor else {
func ramp(toVideoZoomFactor:CGFloat, withRate:Float) {
guard let device:AVCaptureDevice = (input as? AVCaptureDeviceInput)?.device,
1 <= toVideoZoomFactor && toVideoZoomFactor < device.activeFormat.videoMaxZoomFactor else {
return
}
do {
try device.lockForConfiguration()
device.ramp(toVideoZoomFactor: factor, withRate: withRate)
device.ramp(toVideoZoomFactor: toVideoZoomFactor, withRate: withRate)
device.unlockForConfiguration()
} catch let error as NSError {
logger.error("while locking device for rampToVideoZoomFactor: \(error)")
logger.error("while locking device for ramp: \(error)")
}
}
#endif
}
// MARK: AVCaptureVideoDataOutputSampleBufferDelegate
extension VideoIOComponent: AVCaptureVideoDataOutputSampleBufferDelegate {
// MARK: AVCaptureVideoDataOutputSampleBufferDelegate
func captureOutput(_ captureOutput:AVCaptureOutput!, didOutputSampleBuffer sampleBuffer:CMSampleBuffer!, from connection:AVCaptureConnection!) {
mixer.recorder.appendSampleBuffer(sampleBuffer, mediaType: AVMediaTypeVideo)
guard var buffer:CVImageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
@ -362,48 +362,48 @@ extension VideoIOComponent: AVCaptureVideoDataOutputSampleBufferDelegate {
// green edge hack for OSX
buffer = CVPixelBuffer.create(image)!
#endif
drawable?.render(image, toCVPixelBuffer: buffer)
drawable?.render(image: image, to: buffer)
}
encoder.encodeImageBuffer(
buffer,
presentationTimeStamp: sampleBuffer.presentationTimeStamp,
duration: sampleBuffer.duration
)
drawable?.drawImage(image)
drawable?.draw(image: image)
}
}
// MARK: VideoDecoderDelegate
extension VideoIOComponent: VideoDecoderDelegate {
// MARK: VideoDecoderDelegate
func sampleOutput(video sampleBuffer:CMSampleBuffer) {
queue.enqueue(sampleBuffer)
}
}
// MARK: ClockedQueueDelegate
extension VideoIOComponent: ClockedQueueDelegate {
// MARK: ClockedQueueDelegate
func queue(_ buffer: CMSampleBuffer) {
drawable?.drawImage(CIImage(cvPixelBuffer: buffer.imageBuffer!))
drawable?.draw(image: CIImage(cvPixelBuffer: buffer.imageBuffer!))
}
}
#if os(iOS)
// MARK: ScreenCaptureOutputPixelBufferDelegate
extension VideoIOComponent: ScreenCaptureOutputPixelBufferDelegate {
func didSetSize(_ size: CGSize) {
// MARK: ScreenCaptureOutputPixelBufferDelegate
func didSet(size: CGSize) {
lockQueue.async {
self.encoder.width = Int32(size.width)
self.encoder.height = Int32(size.height)
}
}
func pixelBufferOutput(_ pixelBuffer:CVPixelBuffer, timestamp:CMTime) {
func output(pixelBuffer:CVPixelBuffer, withTimestamp:CMTime) {
if (!effects.isEmpty) {
drawable?.render(effect(pixelBuffer), toCVPixelBuffer: pixelBuffer)
drawable?.render(image: effect(pixelBuffer), to: pixelBuffer)
}
encoder.encodeImageBuffer(
pixelBuffer,
presentationTimeStamp: timestamp,
duration: timestamp
presentationTimeStamp: withTimestamp,
duration: withTimestamp
)
}
}

View File

@ -7,26 +7,26 @@ import Foundation
// MARK: -
final class NetClient: NetSocket {
static let defaultBufferSize:Int = 8192
static internal let defaultBufferSize:Int = 8192
weak var delegate:NetClientDelegate?
fileprivate(set) var service:Foundation.NetService?
internal weak var delegate:NetClientDelegate?
internal fileprivate(set) var service:Foundation.NetService?
init(service:Foundation.NetService, inputStream:InputStream, outputStream:OutputStream) {
internal init(service:Foundation.NetService, inputStream:InputStream, outputStream:OutputStream) {
super.init()
self.service = service
self.inputStream = inputStream
self.outputStream = outputStream
}
func acceptConnection() {
internal func acceptConnection() {
networkQueue.async {
self.initConnection()
self.delegate?.client?(didAccepetConnection: self)
}
}
override func listen() {
override internal func listen() {
delegate?.client?(inputBuffer: self)
}
}

View File

@ -2,24 +2,24 @@ import Foundation
open class NetService: NSObject {
var recordData:Data? {
internal var recordData:Data? {
return nil
}
let lockQueue:DispatchQueue = DispatchQueue(
internal let lockQueue:DispatchQueue = DispatchQueue(
label: "com.github.shogo4405.lf.NetService.lock", attributes: []
)
var networkQueue:DispatchQueue = DispatchQueue(
internal var networkQueue:DispatchQueue = DispatchQueue(
label: "com.github.shogo4405.lf.NetService.network", attributes: []
)
fileprivate(set) var domain:String
fileprivate(set) var name:String
fileprivate(set) var port:Int32
fileprivate(set) var type:String
fileprivate(set) var running:Bool = false
fileprivate(set) var clients:[NetClient] = []
fileprivate(set) var service:Foundation.NetService!
internal fileprivate(set) var domain:String
internal fileprivate(set) var name:String
internal fileprivate(set) var port:Int32
internal fileprivate(set) var type:String
internal fileprivate(set) var running:Bool = false
internal fileprivate(set) var clients:[NetClient] = []
internal fileprivate(set) var service:Foundation.NetService!
fileprivate var runloop:RunLoop!
public init(domain:String, type:String, name:String, port:Int32) {
@ -29,22 +29,22 @@ open class NetService: NSObject {
self.type = type
}
func disconnect(_ client:NetClient) {
internal func disconnect(_ client:NetClient) {
guard let index:Int = clients.index(of: client) else {
return
}
clients.remove(at: index)
client.delegate = nil
client.close(true)
client.close(isDisconnected: true)
}
func willStartRunning() {
internal func willStartRunning() {
networkQueue.async {
self.initService()
}
}
func willStopRunning() {
internal func willStopRunning() {
if let runloop:RunLoop = runloop {
service.remove(from: runloop, forMode: RunLoopMode.defaultRunLoopMode)
CFRunLoopStop(runloop.getCFRunLoop())
@ -70,8 +70,8 @@ open class NetService: NSObject {
}
}
// MARK: NSNetServiceDelegate
extension NetService: NetServiceDelegate {
// MARK: NSNetServiceDelegate
public func netService(_ sender: Foundation.NetService, didAcceptConnectionWith inputStream: InputStream, outputStream: OutputStream) {
let client:NetClient = NetClient(service: sender, inputStream: inputStream, outputStream: outputStream)
clients.append(client)
@ -80,12 +80,12 @@ extension NetService: NetServiceDelegate {
}
}
// MARK: NetClientDelegate
extension NetService: NetClientDelegate {
// MARK: NetClientDelegate
}
// MARK: Runnable
extension NetService: Runnable {
// MARK: Runnable
final public func startRunning() {
lockQueue.async {
if (self.running) {

View File

@ -4,27 +4,27 @@ class NetSocket: NSObject {
static let defaultTimeout:Int64 = 15 // sec
static let defaultWindowSizeC:Int = 1024 * 1
var timeout:Int64 = NetSocket.defaultTimeout
var connected:Bool = false
var inputBuffer:[UInt8] = []
var inputStream:InputStream?
var windowSizeC:Int = NetSocket.defaultWindowSizeC
var outputStream:OutputStream?
var networkQueue:DispatchQueue = DispatchQueue(
internal var timeout:Int64 = NetSocket.defaultTimeout
internal var connected:Bool = false
internal var inputBuffer:[UInt8] = []
internal var inputStream:InputStream?
internal var windowSizeC:Int = NetSocket.defaultWindowSizeC
internal var outputStream:OutputStream?
internal var networkQueue:DispatchQueue = DispatchQueue(
label: "com.github.shogo4405.lf.NetSocket.network", attributes: []
)
var securityLevel:StreamSocketSecurityLevel = .none
fileprivate(set) var totalBytesIn:Int64 = 0
fileprivate(set) var totalBytesOut:Int64 = 0
internal var securityLevel:StreamSocketSecurityLevel = .none
private(set) var totalBytesIn:Int64 = 0
private(set) var totalBytesOut:Int64 = 0
fileprivate var runloop:RunLoop?
fileprivate let lockQueue:DispatchQueue = DispatchQueue(
private var runloop:RunLoop?
private let lockQueue:DispatchQueue = DispatchQueue(
label: "com.github.shogo4405.lf.NetSocket.lock", attributes: []
)
fileprivate var timeoutHandler:(() -> Void)?
@discardableResult
final func doOutput(data:Data) -> Int {
final internal func doOutput(data:Data) -> Int {
lockQueue.async {
self.doOutputProcess((data as NSData).bytes.bindMemory(to: UInt8.self, capacity: data.count), maxLength: data.count)
}
@ -32,14 +32,14 @@ class NetSocket: NSObject {
}
@discardableResult
final func doOutput(bytes:[UInt8]) -> Int {
final internal func doOutput(bytes:[UInt8]) -> Int {
lockQueue.async {
self.doOutputProcess(UnsafePointer<UInt8>(bytes), maxLength: bytes.count)
}
return bytes.count
}
final func doOutputFromURL(_ url:URL, length:Int) {
final internal func doOutputFromURL(_ url:URL, length:Int) {
lockQueue.async {
do {
let fileHandle:FileHandle = try FileHandle(forReadingFrom: url)
@ -61,11 +61,11 @@ class NetSocket: NSObject {
}
}
final func doOutputProcess(_ data:Data) {
final internal func doOutputProcess(_ data:Data) {
doOutputProcess((data as NSData).bytes.bindMemory(to: UInt8.self, capacity: data.count), maxLength: data.count)
}
final func doOutputProcess(_ buffer:UnsafePointer<UInt8>, maxLength:Int) {
final internal func doOutputProcess(_ buffer:UnsafePointer<UInt8>, maxLength:Int) {
guard let outputStream:OutputStream = outputStream else {
return
}
@ -80,23 +80,22 @@ class NetSocket: NSObject {
}
}
func close(_ disconnect:Bool) {
internal func close(isDisconnected:Bool) {
lockQueue.async {
guard let runloop = self.runloop else {
return
}
self.deinitConnection(disconnect)
self.deinitConnection(isDisconnected: isDisconnected)
self.runloop = nil
CFRunLoopStop(runloop.getCFRunLoop())
logger.verbose("disconnect:\(disconnect)")
logger.verbose("isDisconnected:\(isDisconnected)")
}
}
func listen() {
internal func listen() {
}
func initConnection() {
/*
internal func initConnection() {
totalBytesIn = 0
totalBytesOut = 0
timeoutHandler = didTimeout
@ -106,15 +105,15 @@ class NetSocket: NSObject {
return
}
runloop = RunLoop.current
runloop = .current
inputStream.delegate = self
inputStream.schedule(in: runloop!, forMode: RunLoopMode.defaultRunLoopMode)
inputStream.setProperty(securityLevel, forKey: Foundation.Stream.PropertyKey.socketSecurityLevelKey)
inputStream.schedule(in: runloop!, forMode: .defaultRunLoopMode)
inputStream.setProperty(securityLevel.rawValue, forKey: Stream.PropertyKey.socketSecurityLevelKey)
outputStream.delegate = self
outputStream.schedule(in: runloop!, forMode: RunLoopMode.defaultRunLoopMode)
outputStream.setProperty(securityLevel, forKey: Foundation.Stream.PropertyKey.socketSecurityLevelKey)
outputStream.schedule(in: runloop!, forMode: .defaultRunLoopMode)
outputStream.setProperty(securityLevel.rawValue, forKey: Stream.PropertyKey.socketSecurityLevelKey)
inputStream.open()
outputStream.open()
@ -130,21 +129,20 @@ class NetSocket: NSObject {
runloop?.run()
connected = false
*/
}
func deinitConnection(_ disconnect:Bool) {
internal func deinitConnection(isDisconnected:Bool) {
inputStream?.close()
inputStream?.remove(from: runloop!, forMode: RunLoopMode.defaultRunLoopMode)
inputStream?.remove(from: runloop!, forMode: .defaultRunLoopMode)
inputStream?.delegate = nil
inputStream = nil
outputStream?.close()
outputStream?.remove(from: runloop!, forMode: RunLoopMode.defaultRunLoopMode)
outputStream?.remove(from: runloop!, forMode: .defaultRunLoopMode)
outputStream?.delegate = nil
outputStream = nil
}
func didTimeout() {
internal func didTimeout() {
}
fileprivate func doInput() {
@ -161,15 +159,15 @@ class NetSocket: NSObject {
}
}
// MARK: NSStreamDelegate
extension NetSocket: StreamDelegate {
func stream(_ aStream: Foundation.Stream, handle eventCode: Foundation.Stream.Event) {
// MARK: StreamDelegate
internal func stream(_ aStream: Stream, handle eventCode: Stream.Event) {
if (logger.isEnabledForLogLevel(.debug)) {
logger.debug("eventCode: \(eventCode)")
}
switch eventCode {
// 1 = 1 << 0
case Foundation.Stream.Event.openCompleted:
case Stream.Event.openCompleted:
guard let inputStream = inputStream, let outputStream = outputStream,
inputStream.streamStatus == .open && outputStream.streamStatus == .open else {
break
@ -179,19 +177,19 @@ extension NetSocket: StreamDelegate {
connected = true
}
// 2 = 1 << 1
case Foundation.Stream.Event.hasBytesAvailable:
case Stream.Event.hasBytesAvailable:
if (aStream == inputStream) {
doInput()
}
// 4 = 1 << 2
case Foundation.Stream.Event.hasSpaceAvailable:
case Stream.Event.hasSpaceAvailable:
break
// 8 = 1 << 3
case Foundation.Stream.Event.errorOccurred:
close(true)
case Stream.Event.errorOccurred:
close(isDisconnected: true)
// 16 = 1 << 4
case Foundation.Stream.Event.endEncountered:
close(true)
case Stream.Event.endEncountered:
close(isDisconnected: true)
default:
break
}
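
A minimal subclass sketch (not part of the diff) showing the renamed lifecycle hooks; EchoSocket is a hypothetical class.

import Foundation

final class EchoSocket: NetSocket {
    override func listen() {
        // listen() runs after new bytes have landed in inputBuffer
        guard !inputBuffer.isEmpty else {
            return
        }
        doOutput(bytes: inputBuffer)  // echo everything back to the peer
        inputBuffer.removeAll()
    }
    override func didTimeout() {
        close(isDisconnected: true)   // was close(true)
    }
}
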

View File

@ -1,18 +1,18 @@
import Foundation
import AVFoundation
// MARK: StreamDrawable
protocol StreamDrawable: class {
protocol NetStreamDrawable: class {
var orientation:AVCaptureVideoOrientation { get set }
var position:AVCaptureDevicePosition { get set }
func drawImage(_ image:CIImage)
func render(_ image:CIImage, toCVPixelBuffer:CVPixelBuffer)
func draw(image:CIImage)
func render(image: CIImage, to toCVPixelBuffer: CVPixelBuffer)
}
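
A minimal conformer sketch for the renamed protocol (not part of the diff); NullDrawable is hypothetical and simply discards frames.

import AVFoundation
import CoreImage

final class NullDrawable: NetStreamDrawable {
    var orientation:AVCaptureVideoOrientation = .portrait
    var position:AVCaptureDevicePosition = .back
    func draw(image:CIImage) {
        // present the frame (no-op here)
    }
    func render(image:CIImage, to pixelBuffer:CVPixelBuffer) {
        // write the filtered image back into the capture buffer (no-op here)
    }
}
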
// MARK: -
open class Stream: NSObject {
var mixer:AVMixer = AVMixer()
let lockQueue:DispatchQueue = DispatchQueue(
open class NetStream: NSObject {
internal var mixer:AVMixer = AVMixer()
internal let lockQueue:DispatchQueue = DispatchQueue(
label: "com.github.shogo4405.lf.Stream.lock", attributes: []
)
@ -123,35 +123,35 @@ open class Stream: NSObject {
}
}
open func attachCamera(_ camera:AVCaptureDevice?) {
open func attach(camera:AVCaptureDevice?) {
lockQueue.async {
self.mixer.videoIO.attachCamera(camera)
self.mixer.videoIO.attach(camera: camera)
self.mixer.startRunning()
}
}
open func attachAudio(_ audio:AVCaptureDevice?, _ automaticallyConfiguresApplicationAudioSession:Bool = true) {
open func attach(audio:AVCaptureDevice?, automaticallyConfiguresApplicationAudioSession:Bool = true) {
lockQueue.async {
self.mixer.audioIO.attachAudio(audio, automaticallyConfiguresApplicationAudioSession: automaticallyConfiguresApplicationAudioSession
self.mixer.audioIO.attach(audio: audio, automaticallyConfiguresApplicationAudioSession: automaticallyConfiguresApplicationAudioSession
)
}
}
#if os(OSX)
public func attachScreen(screen:AVCaptureScreenInput?) {
public func attach(screen:AVCaptureScreenInput?) {
lockQueue.async {
self.mixer.videoIO.attachScreen(screen: screen)
self.mixer.videoIO.attach(screen: screen)
}
}
#else
open func attachScreen(_ screen:ScreenCaptureSession?, useScreenSize:Bool = true) {
open func attach(screen:ScreenCaptureSession?, useScreenSize:Bool = true) {
lockQueue.async {
self.mixer.videoIO.attachScreen(screen, useScreenSize: useScreenSize)
self.mixer.videoIO.attach(screen: screen, useScreenSize: useScreenSize)
}
}
open func rampToVideoZoomFactor(_ factor:CGFloat, withRate:Float) {
open func ramp(toVideoZoomFactor:CGFloat, withRate:Float) {
lockQueue.async {
self.mixer.videoIO.rampToVideoZoomFactor(factor, withRate: withRate)
self.mixer.videoIO.ramp(toVideoZoomFactor: toVideoZoomFactor, withRate: withRate)
}
}
#endif
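
A call-site sketch for the renamed methods on the iOS branch (not part of the diff); it only demonstrates the new spellings, assumes ScreenCaptureSession still offers its parameterless initializer, and uses illustrative zoom values.

func migratedCallSites(on stream:NetStream) {
    stream.attach(screen: ScreenCaptureSession(), useScreenSize: true)  // was attachScreen(_:useScreenSize:)
    stream.ramp(toVideoZoomFactor: 2.0, withRate: 4.0)                  // was rampToVideoZoomFactor(_:withRate:)
}
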

View File

@ -17,13 +17,12 @@ class AMFSerializerUtil {
}
}
// MARK: - AMFSerializerError
enum AMFSerializerError: Error {
case deserialize
case outOfIndex
}
// MARK: - AMFSerializer
// MARK: -
protocol AMFSerializer: ByteArrayConvertible {
var reference:AMFReference { get set }
@ -99,9 +98,8 @@ class AMF0Serializer: ByteArray {
var reference:AMFReference = AMFReference()
}
// MARK: AMFSerializer
extension AMF0Serializer: AMFSerializer {
// MARK: AMFSerializer
@discardableResult
func serialize(_ value:Any?) -> Self {
if value == nil {
@ -162,7 +160,7 @@ extension AMF0Serializer: AMFSerializer {
return nil
case .undefined:
position += 1
return Type.undefined
return kASUndefined
case .reference:
assertionFailure("TODO")
return nil

View File

@ -88,9 +88,8 @@ class AMF3Serializer: ByteArray {
var reference:AMFReference = AMFReference()
}
// MARK: AMFSerializer
extension AMF3Serializer: AMFSerializer {
// MARK: AMFSerializer
@discardableResult
func serialize(_ value:Any?) -> Self {

View File

@ -14,8 +14,8 @@ public final class ASUndefined: NSObject {
// MARK: -
public struct ASArray {
fileprivate(set) var data:[Any?]
fileprivate(set) var dict:[String: Any?] = [:]
internal fileprivate(set) var data:[Any?]
internal fileprivate(set) var dict:[String: Any?] = [:]
public var length:Int {
return data.count
@ -30,8 +30,8 @@ public struct ASArray {
}
}
// MARK: ArrayLiteralConvertible
extension ASArray: ExpressibleByArrayLiteral {
// MARK: ExpressibleByArrayLiteral
public init (arrayLiteral elements: Any?...) {
self = ASArray(data: elements)
}
@ -70,15 +70,15 @@ extension ASArray: ExpressibleByArrayLiteral {
}
}
// MARK: CustomStringConvertible
extension ASArray: CustomStringConvertible {
// MARK: CustomStringConvertible
public var description:String {
return data.description
}
}
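
A tiny sketch (not part of the diff): array-literal construction keeps working after the rename from ArrayLiteralConvertible to ExpressibleByArrayLiteral.

let list:ASArray = [1, "two", nil]
print(list.length)       // 3
print(list.description)  // delegates to the underlying data array
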
// MARK: Equatable
extension ASArray: Equatable {
// MARK: Equatable
}
public func ==(lhs: ASArray, rhs: ASArray) -> Bool {

View File

@ -1,7 +1,6 @@
import Foundation
import AVFoundation
// MARK: FLVVideoCodec
enum FLVVideoCodec: UInt8 {
case sorensonH263 = 2
case screen1 = 3
@ -31,7 +30,6 @@ enum FLVVideoCodec: UInt8 {
}
}
// MARK: - FLVFrameType
enum FLVFrameType: UInt8 {
case key = 1
case inter = 2
@ -40,20 +38,17 @@ enum FLVFrameType: UInt8 {
case command = 5
}
// MARK: - FLVAVCPacketType
enum FLVAVCPacketType:UInt8 {
case seq = 0
case nal = 1
case eos = 2
}
// MARK: - FLVAACPacketType
enum FLVAACPacketType:UInt8 {
case seq = 0
case raw = 1
}
// MARK: - FLVSoundRate
enum FLVSoundRate:UInt8 {
case kHz5_5 = 0
case kHz11 = 1
@ -74,19 +69,16 @@ enum FLVSoundRate:UInt8 {
}
}
// MARK: - FLVSoundSize
enum FLVSoundSize:UInt8 {
case snd8bit = 0
case snd16bit = 1
}
// MARK: - FLVSoundType
enum FLVSoundType:UInt8 {
case mono = 0
case stereo = 1
}
// MARK: - FLVAudioCodec
enum FLVAudioCodec:UInt8 {
case pcm = 0
case adpcm = 1
@ -171,9 +163,9 @@ struct FLVTag {
var streamId:UInt16 {
switch self {
case .audio:
return RTMPChunk.audio
return RTMPChunk.StreamID.audio.rawValue
case .video:
return RTMPChunk.video
return RTMPChunk.StreamID.video.rawValue
case .data:
return 0
}
@ -202,17 +194,17 @@ struct FLVTag {
}
}
static let headerSize = 11
static internal let headerSize = 11
var tagType:TagType = .data
var dataSize:UInt32 = 0
var timestamp:UInt32 = 0
var timestampExtended:UInt8 = 0
var streamId:UInt32 = 0
internal var tagType:TagType = .data
internal var dataSize:UInt32 = 0
internal var timestamp:UInt32 = 0
internal var timestampExtended:UInt8 = 0
internal var streamId:UInt32 = 0
}
// MARK: CustomStringConvertible
extension FLVTag: CustomStringConvertible {
// MARK: CustomStringConvertible
var description:String {
return Mirror(reflecting: self).description
}

View File

@ -4,7 +4,7 @@ import AudioToolbox
final class RTMPAudioPlayback: AudioStreamPlayback {
fileprivate var config:AudioSpecificConfig?
func onMessage(_ message:RTMPAudioMessage) {
internal func on(message:RTMPAudioMessage) {
guard message.codec.isSupported else {
return
}

View File

@ -1,25 +1,6 @@
import Foundation
final class RTMPChunk {
static let control:UInt16 = 0x02
static let command:UInt16 = 0x03
static let audio:UInt16 = 0x04
static let video:UInt16 = 0x05
static let maxTimestamp:UInt32 = 0xFFFFFF
static let defaultSize:Int = 128
static func getStreamIdSize(_ byte:UInt8) -> Int {
switch (byte & 0b00111111) {
case 0:
return 2
case 1:
return 3
default:
return 1
}
}
enum `Type`: UInt8 {
case zero = 0
case one = 1
@ -54,18 +35,39 @@ final class RTMPChunk {
}
}
var size:Int = 0
var type:Type = .zero
var streamId:UInt16 = RTMPChunk.command
enum StreamID: UInt16 {
case control = 0x02
case command = 0x03
case audio = 0x04
case video = 0x05
}
var ready:Bool {
static let defaultSize:Int = 128
static let maxTimestamp:UInt32 = 0xFFFFFF
static func getStreamIdSize(_ byte:UInt8) -> Int {
switch (byte & 0b00111111) {
case 0:
return 2
case 1:
return 3
default:
return 1
}
}
internal var size:Int = 0
internal var type:Type = .zero
internal var streamId:UInt16 = RTMPChunk.StreamID.command.rawValue
internal var ready:Bool {
guard let message:RTMPMessage = message else {
return false
}
return message.length == message.payload.count
}
var headerSize:Int {
internal var headerSize:Int {
if (streamId <= 63) {
return 1 + type.headerSize
}
@ -75,7 +77,7 @@ final class RTMPChunk {
return 3 + type.headerSize
}
var basicHeaderSize:Int {
internal var basicHeaderSize:Int {
if (streamId <= 63) {
return 1
}
@ -85,21 +87,21 @@ final class RTMPChunk {
return 3
}
fileprivate(set) var message:RTMPMessage?
fileprivate(set) var fragmented:Bool = false
internal fileprivate(set) var message:RTMPMessage?
internal fileprivate(set) var fragmented:Bool = false
fileprivate var _bytes:[UInt8] = []
init(type:Type, streamId:UInt16, message:RTMPMessage) {
internal init(type:Type, streamId:UInt16, message:RTMPMessage) {
self.type = type
self.streamId = streamId
self.message = message
}
init(message:RTMPMessage) {
internal init(message:RTMPMessage) {
self.message = message
}
init?(bytes:[UInt8], size:Int) {
internal init?(bytes:[UInt8], size:Int) {
if (bytes.isEmpty) {
return nil
}
@ -111,7 +113,7 @@ final class RTMPChunk {
self.bytes = bytes
}
func append(_ bytes:[UInt8], size:Int) -> Int {
internal func append(_ bytes:[UInt8], size:Int) -> Int {
fragmented = false
guard let message:RTMPMessage = message else {
@ -138,7 +140,7 @@ final class RTMPChunk {
return length
}
func append(_ bytes:[UInt8], message: RTMPMessage?) -> Int {
internal func append(_ bytes:[UInt8], message: RTMPMessage?) -> Int {
guard let message:RTMPMessage = message else {
return 0
}
@ -159,42 +161,35 @@ final class RTMPChunk {
return headerSize + message.length
}
func split(_ size:Int) -> [[UInt8]] {
internal func split(_ size:Int) -> [[UInt8]] {
let bytes:[UInt8] = self.bytes
message?.length = bytes.count
guard let message:RTMPMessage = message , size < message.payload.count else {
guard let message:RTMPMessage = message, size < message.payload.count else {
return [bytes]
}
/*
let header:[UInt8] = Type.three.toBasicHeader(streamId)
let startIndex:Int = size + headerSize
var result:[[UInt8]] = []
let startIndex:Int = size + headerSize
let header:[UInt8] = Type.three.toBasicHeader(streamId)
result.append(Array(bytes[0..<startIndex]))
for index in stride(from: startIndex, to: bytes.count, by: size) {
var headerCombine:[UInt8] = header
/// TODO: headerCombine.append(bytes[index..<index.advanced(by: size)])
result.append(headerCombine)
var data:[UInt8] = header
data.append(contentsOf: bytes[index..<index.advanced(by: index + size < bytes.count ? size : bytes.count - index)])
result.append(data)
}
*/
return [bytes]
return result
}
}
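
A rough sketch of the reworked split(_:) (module-internal, not part of the diff): serialize a command message and cut it at the default chunk size; every element after the first is prefixed with a Type-3 basic header for the same chunk stream.

let message:RTMPCommandMessage = RTMPCommandMessage(objectEncoding: 0x00)
message.commandName = "connect"
message.transactionId = 1
let chunk:RTMPChunk = RTMPChunk(
    type: .zero,
    streamId: RTMPChunk.StreamID.command.rawValue,
    message: message
)
// If the serialized payload fits into 128 bytes, split simply returns the single chunk.
let pieces:[[UInt8]] = chunk.split(RTMPChunk.defaultSize)
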
// MARK: CustomStringConvertible
extension RTMPChunk: CustomStringConvertible {
var description:String {
// MARK: CustomStringConvertible
internal var description:String {
return Mirror(reflecting: self).description
}
}
// MARK: BytesConvertible
extension RTMPChunk: BytesConvertible {
var bytes:[UInt8] {
// MARK: BytesConvertible
internal var bytes:[UInt8] {
get {
guard let message:RTMPMessage = message else {
return _bytes

View File

@ -5,25 +5,21 @@ import Foundation
*/
open class Responder: NSObject {
fileprivate var result:(_ data:[Any?]) -> Void
fileprivate var status:((_ data:[Any?]) -> Void)?
private var result:(_ data:[Any?]) -> Void
private var status:((_ data:[Any?]) -> Void)?
public init(result:@escaping (_ data:[Any?]) -> Void, status:((_ data:[Any?]) -> Void)?) {
public init(result:@escaping (_ data:[Any?]) -> Void, status:((_ data:[Any?]) -> Void)? = nil) {
self.result = result
self.status = status
}
convenience public init (result:@escaping (_ data:[Any?]) -> Void) {
self.init(result: result, status: nil)
final internal func on(result:[Any?]) {
self.result(result)
}
func onResult(_ data:[Any?]) {
result(data)
}
func onStatus(_ data:[Any?]) {
status?(data)
status = nil
final internal func on(status:[Any?]) {
self.status?(status)
self.status = nil
}
}
@ -32,47 +28,52 @@ open class Responder: NSObject {
flash.net.NetConnection for Swift
*/
open class RTMPConnection: EventDispatcher {
static open let supportedProtocols:[String] = ["rtmp", "rtmps"]
static public let supportedProtocols:[String] = ["rtmp", "rtmps"]
static public let defaultPort:Int = 1935
static public let defaultFlashVer:String = "FMLE/3.0 (compatible; FMSc/1.0)"
static public let defaultChunkSizeS:Int = 1024 * 8
static public let defaultCapabilities:Int = 239
static public let defaultObjectEncoding:UInt8 = 0x00
/**
NetStatusEvent#info.code for NetConnection
*/
public enum Code: String {
case CallBadVersion = "NetConnection.Call.BadVersion"
case CallFailed = "NetConnection.Call.Failed"
case CallProhibited = "NetConnection.Call.Prohibited"
case ConnectAppshutdown = "NetConnection.Connect.AppShutdown"
case ConnectClosed = "NetConnection.Connect.Closed"
case ConnectFailed = "NetConnection.Connect.Failed"
case ConnectIdleTimeOut = "NetConnection.Connect.IdleTimeOut"
case ConenctInvalidApp = "NetConnection.Connect.InvalidApp"
case ConnectNetworkChange = "NetConnection.Connect.NetworkChange"
case ConnectRejected = "NetConnection.Connect.Rejected"
case ConnectSuccess = "NetConnection.Connect.Success"
case callBadVersion = "NetConnection.Call.BadVersion"
case callFailed = "NetConnection.Call.Failed"
case callProhibited = "NetConnection.Call.Prohibited"
case connectAppshutdown = "NetConnection.Connect.AppShutdown"
case connectClosed = "NetConnection.Connect.Closed"
case connectFailed = "NetConnection.Connect.Failed"
case connectIdleTimeOut = "NetConnection.Connect.IdleTimeOut"
case conenctInvalidApp = "NetConnection.Connect.InvalidApp"
case connectNetworkChange = "NetConnection.Connect.NetworkChange"
case connectRejected = "NetConnection.Connect.Rejected"
case connectSuccess = "NetConnection.Connect.Success"
public var level:String {
switch self {
case .CallBadVersion:
case .callBadVersion:
return "error"
case .CallFailed:
case .callFailed:
return "error"
case .CallProhibited:
case .callProhibited:
return "error"
case .ConnectAppshutdown:
case .connectAppshutdown:
return "status"
case .ConnectClosed:
case .connectClosed:
return "status"
case .ConnectFailed:
case .connectFailed:
return "error"
case .ConnectIdleTimeOut:
case .connectIdleTimeOut:
return "status"
case .ConenctInvalidApp:
case .conenctInvalidApp:
return "error"
case .ConnectNetworkChange:
case .connectNetworkChange:
return "status"
case .ConnectRejected:
case .connectRejected:
return "status"
case .ConnectSuccess:
case .connectSuccess:
return "status"
}
}
@ -86,7 +87,7 @@ open class RTMPConnection: EventDispatcher {
}
}
enum SupportVideo: UInt16 {
internal enum SupportVideo: UInt16 {
case unused = 0x0001
case jpeg = 0x0002
case sorenson = 0x0004
@ -98,7 +99,7 @@ open class RTMPConnection: EventDispatcher {
case all = 0x00FF
}
enum SupportSound: UInt16 {
internal enum SupportSound: UInt16 {
case none = 0x0001
case adpcm = 0x0002
case mp3 = 0x0004
@ -114,7 +115,7 @@ open class RTMPConnection: EventDispatcher {
case all = 0x0FFF
}
enum VideoFunction: UInt8 {
internal enum VideoFunction: UInt8 {
case clientSeek = 1
}
@ -143,12 +144,6 @@ open class RTMPConnection: EventDispatcher {
return command
}
static let defaultPort:Int = 1935
static let defaultFlashVer:String = "FMLE/3.0 (compatible; FMSc/1.0)"
static let defaultChunkSizeS:Int = 1024 * 8
static let defaultCapabilities:Int = 239
static let defaultObjectEncoding:UInt8 = 0x00
/// The URL of .swf.
open var swfUrl:String? = nil
/// The URL of an HTTP referer.
@ -185,7 +180,7 @@ open class RTMPConnection: EventDispatcher {
/// The statistics of outgoing bytes per second.
dynamic open fileprivate(set) var currentBytesOutPerSecond:Int32 = 0
var socket:RTMPSocket = RTMPSocket()
var socket:RTMPSocketCompatible = RTMPSocket()
var streams:[UInt32: RTMPStream] = [:]
var bandWidth:UInt32 = 0
var streamsmap:[UInt16: UInt32] = [:]
@ -212,12 +207,12 @@ open class RTMPConnection: EventDispatcher {
override public init() {
super.init()
socket.delegate = self
addEventListener(Event.RTMP_STATUS, selector: #selector(RTMPConnection.rtmpStatusHandler(_:)))
addEventListener(type: Event.RTMP_STATUS, selector: #selector(RTMPConnection.rtmpStatusHandler(_:)))
}
deinit {
timer = nil
removeEventListener(Event.RTMP_STATUS, selector: #selector(RTMPConnection.rtmpStatusHandler(_:)))
removeEventListener(type: Event.RTMP_STATUS, selector: #selector(RTMPConnection.rtmpStatusHandler(_:)))
}
open func call(_ commandName:String, responder:Responder?, arguments:Any?...) {
@ -241,52 +236,52 @@ open class RTMPConnection: EventDispatcher {
@available(*, unavailable)
open func connect(_ command:String) {
connect(command, arguments: nil)
connect(withCommand: command, arguments: nil)
}
open func connect(_ command: String, arguments: Any?...) {
guard let uri:URL = URL(string: command) , !connected && RTMPConnection.supportedProtocols.contains(uri.scheme!) else {
open func connect(withCommand: String, arguments: Any?...) {
guard let uri:URL = URL(string: withCommand) , !connected && RTMPConnection.supportedProtocols.contains(uri.scheme!) else {
return
}
self.uri = uri
self.arguments = arguments
timer = Timer(timeInterval: 1.0, target: self, selector: #selector(RTMPConnection.didTimerInterval(_:)), userInfo: nil, repeats: true)
timer = Timer(timeInterval: 1.0, target: self, selector: #selector(RTMPConnection.on(timer:)), userInfo: nil, repeats: true)
socket.securityLevel = uri.scheme == "rtmps" ? .negotiatedSSL : .none
socket.connect(uri.host!, port: (uri as NSURL).port == nil ? RTMPConnection.defaultPort : (uri as NSURL).port!.intValue)
socket.connect(withName: uri.host!, port: (uri as NSURL).port == nil ? RTMPConnection.defaultPort : (uri as NSURL).port!.intValue)
}
open func close() {
close(false)
close(isDisconnected: false)
}
func close(_ disconnect:Bool) {
guard connected || disconnect else {
internal func close(isDisconnected:Bool) {
guard connected || isDisconnected else {
return
}
if (!disconnect) {
if (!isDisconnected) {
uri = nil
}
for (id, stream) in streams {
stream.close()
streams.removeValue(forKey: id)
}
socket.close(false)
socket.close(isDisconnected: false)
timer = nil
}
func createStream(_ stream: RTMPStream) {
let responder:Responder = Responder { (data) -> Void in
internal func create(stream: RTMPStream) {
let responder:Responder = Responder(result: { (data) -> Void in
let id:Any? = data[0]
if let id:Double = id as? Double {
stream.id = UInt32(id)
self.streams[stream.id] = stream
stream.readyState = .open
}
}
})
call("createStream", responder: responder)
}
func rtmpStatusHandler(_ notification: Notification) {
internal func rtmpStatusHandler(_ notification: Notification) {
let e:Event = Event.from(notification)
guard let data:ASObject = e.data as? ASObject, let code:String = data["code"] as? String else {
@ -294,19 +289,19 @@ open class RTMPConnection: EventDispatcher {
}
switch code {
case Code.ConnectSuccess.rawValue:
case Code.connectSuccess.rawValue:
connected = true
socket.chunkSizeS = chunkSize
socket.doOutput(chunk: RTMPChunk(
type: .one,
streamId: RTMPChunk.control,
streamId: RTMPChunk.StreamID.control.rawValue,
message: RTMPSetChunkSizeMessage(size: UInt32(socket.chunkSizeS))
))
case Code.ConnectRejected.rawValue:
case Code.connectRejected.rawValue:
guard let uri:URL = uri, let user:String = uri.user, let password:String = uri.password else {
break
}
socket.deinitConnection(false)
socket.deinitConnection(isDisconnected: false)
let description:String = data["description"] as! String
switch true {
case description.contains("reason=nosuchuser"):
@ -315,26 +310,26 @@ open class RTMPConnection: EventDispatcher {
break
case description.contains("reason=needauth"):
let command:String = RTMPConnection.createSanJoseAuthCommand(uri, description: description)
connect(command, arguments: arguments)
connect(withCommand: command, arguments: arguments)
case description.contains("authmod=adobe"):
if (user == "" || password == "") {
close(true)
close(isDisconnected: true)
break
}
let query:String = uri.query ?? ""
let command:String = uri.absoluteString + (query == "" ? "?" : "&") + "authmod=adobe&user=\(user)"
connect(command, arguments: arguments)
connect(withCommand: command, arguments: arguments)
default:
break
}
case Code.ConnectClosed.rawValue:
close(true)
case Code.connectClosed.rawValue:
close(isDisconnected: true)
default:
break
}
}
func didTimerInterval(_ timer:Timer) {
func on(timer:Timer) {
let totalBytesIn:Int64 = self.totalBytesIn
let totalBytesOut:Int64 = self.totalBytesOut
currentBytesInPerSecond = Int32(totalBytesIn - previousTotalBytesIn)
@ -342,7 +337,7 @@ open class RTMPConnection: EventDispatcher {
previousTotalBytesIn = totalBytesIn
previousTotalBytesOut = totalBytesOut
for (_, stream) in streams {
stream.didTimerInterval(timer)
stream.on(timer: timer)
}
}
@ -384,11 +379,10 @@ open class RTMPConnection: EventDispatcher {
}
}
// MARK: RTMPSocketDelegate
extension RTMPConnection: RTMPSocketDelegate {
func didSetReadyState(_ socket: RTMPSocket, readyState: RTMPSocket.ReadyState) {
switch socket.readyState {
// MARK: RTMPSocketDelegate
internal func didSet(readyState: RTMPSocket.ReadyState) {
switch readyState {
case .handshakeDone:
guard let chunk:RTMPChunk = createConnectionChunk() else {
close()
@ -407,7 +401,7 @@ extension RTMPConnection: RTMPSocketDelegate {
}
}
func listen(_ socket:RTMPSocket, bytes:[UInt8]) {
internal func listen(bytes:[UInt8]) {
guard let chunk:RTMPChunk = currentChunk ?? RTMPChunk(bytes: bytes, size: socket.chunkSizeC) else {
socket.inputBuffer.append(contentsOf: bytes)
return
@ -440,7 +434,7 @@ extension RTMPConnection: RTMPSocketDelegate {
message.execute(self)
currentChunk = nil
messages[chunk.streamId] = message
listen(socket, bytes: Array(bytes[position..<bytes.count]))
listen(bytes: Array(bytes[position..<bytes.count]))
return
}
@ -453,7 +447,7 @@ extension RTMPConnection: RTMPSocketDelegate {
}
if (position < bytes.count) {
listen(socket, bytes: Array(bytes[position..<bytes.count]))
listen(bytes: Array(bytes[position..<bytes.count]))
}
}
}
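
A short sketch of the renamed public API (not part of the diff); the URL is a placeholder, error handling is elided, and the module name lf is assumed from the bundle identifiers used in the queue labels above.

import lf

let connection:RTMPConnection = RTMPConnection()
connection.connect(withCommand: "rtmp://localhost/appName/instanceName")
// Typically issued from the NetConnection.Connect.Success handler:
connection.call("createStream", responder: Responder(result: { data in
    print("createStream ->", data)  // the new stream id arrives as data[0]
}))
// ... later ...
connection.close()
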

View File

@ -63,23 +63,23 @@ class RTMPMessage {
}
}
let type:Type
var length:Int = 0
var streamId:UInt32 = 0
var timestamp:UInt32 = 0
var payload:[UInt8] = []
internal let type:Type
internal var length:Int = 0
internal var streamId:UInt32 = 0
internal var timestamp:UInt32 = 0
internal var payload:[UInt8] = []
init(type:Type) {
internal init(type:Type) {
self.type = type
}
func execute(_ connection:RTMPConnection) {
internal func execute(_ connection:RTMPConnection) {
}
}
// MARK: CustomStringConvertible
extension RTMPMessage: CustomStringConvertible {
var description:String {
// MARK: CustomStringConvertible
internal var description:String {
return Mirror(reflecting: self).description
}
}
@ -89,7 +89,7 @@ extension RTMPMessage: CustomStringConvertible {
5.4.1. Set Chunk Size (1)
*/
final class RTMPSetChunkSizeMessage: RTMPMessage {
var size:UInt32 = 0
internal var size:UInt32 = 0
override var payload:[UInt8] {
get {
@ -108,16 +108,16 @@ final class RTMPSetChunkSizeMessage: RTMPMessage {
}
}
init() {
internal init() {
super.init(type: .chunkSize)
}
init(size:UInt32) {
internal init(size:UInt32) {
super.init(type: .chunkSize)
self.size = size
}
override func execute(_ connection:RTMPConnection) {
override internal func execute(_ connection:RTMPConnection) {
connection.socket.chunkSizeC = Int(size)
}
}
@ -127,9 +127,9 @@ final class RTMPSetChunkSizeMessage: RTMPMessage {
5.4.2. Abort Message (2)
*/
final class RTMPAbortMessge: RTMPMessage {
var chunkStreamId:UInt32 = 0
internal var chunkStreamId:UInt32 = 0
override var payload:[UInt8] {
override internal var payload:[UInt8] {
get {
guard super.payload.isEmpty else {
return super.payload
@ -146,7 +146,7 @@ final class RTMPAbortMessge: RTMPMessage {
}
}
init() {
internal init() {
super.init(type: .abort)
}
}
@ -156,9 +156,9 @@ final class RTMPAbortMessge: RTMPMessage {
5.4.3. Acknowledgement (3)
*/
final class RTMPAcknowledgementMessage: RTMPMessage {
var sequence:UInt32 = 0
internal var sequence:UInt32 = 0
override var payload:[UInt8] {
override internal var payload:[UInt8] {
get {
guard super.payload.isEmpty else {
return super.payload
@ -175,7 +175,7 @@ final class RTMPAcknowledgementMessage: RTMPMessage {
}
}
init() {
internal init() {
super.init(type: .ack)
}
}
@ -185,22 +185,18 @@ final class RTMPAcknowledgementMessage: RTMPMessage {
5.4.4. Window Acknowledgement Size (5)
*/
final class RTMPWindowAcknowledgementSizeMessage: RTMPMessage {
var size:UInt32 = 0 {
didSet {
super.payload.removeAll()
}
}
internal var size:UInt32 = 0
init() {
internal init() {
super.init(type: .windowAck)
}
init(size:UInt32) {
internal init(size:UInt32) {
super.init(type: .windowAck)
self.size = size
}
override var payload:[UInt8] {
override internal var payload:[UInt8] {
get {
guard super.payload.isEmpty else {
return super.payload
@ -217,10 +213,10 @@ final class RTMPWindowAcknowledgementSizeMessage: RTMPMessage {
}
}
override func execute(_ connection: RTMPConnection) {
override internal func execute(_ connection: RTMPConnection) {
connection.socket.doOutput(chunk: RTMPChunk(
type: .zero,
streamId: RTMPChunk.control,
streamId: RTMPChunk.StreamID.control.rawValue,
message: RTMPWindowAcknowledgementSizeMessage(size: size)
))
}
@ -239,14 +235,14 @@ final class RTMPSetPeerBandwidthMessage: RTMPMessage {
case unknown = 0xFF
}
var size:UInt32 = 0
var limit:Limit = .hard
internal var size:UInt32 = 0
internal var limit:Limit = .hard
init() {
internal init() {
super.init(type: .bandwidth)
}
override var payload:[UInt8] {
override internal var payload:[UInt8] {
get {
guard super.payload.isEmpty else {
return super.payload
@ -265,7 +261,7 @@ final class RTMPSetPeerBandwidthMessage: RTMPMessage {
}
}
override func execute(_ connection: RTMPConnection) {
override internal func execute(_ connection: RTMPConnection) {
connection.bandWidth = size
}
}
@ -276,13 +272,13 @@ final class RTMPSetPeerBandwidthMessage: RTMPMessage {
*/
final class RTMPCommandMessage: RTMPMessage {
let objectEncoding:UInt8
var commandName:String = ""
var transactionId:Int = 0
var commandObject:ASObject? = nil
var arguments:[Any?] = []
internal let objectEncoding:UInt8
internal var commandName:String = ""
internal var transactionId:Int = 0
internal var commandObject:ASObject? = nil
internal var arguments:[Any?] = []
override var payload:[UInt8] {
override internal var payload:[UInt8] {
get {
guard super.payload.isEmpty else {
return super.payload
@ -327,12 +323,12 @@ final class RTMPCommandMessage: RTMPMessage {
fileprivate var serializer:AMFSerializer = AMF0Serializer()
init(objectEncoding:UInt8) {
internal init(objectEncoding:UInt8) {
self.objectEncoding = objectEncoding
super.init(type: objectEncoding == 0x00 ? .amf0Command : .amf3Command)
}
init(streamId:UInt32, transactionId:Int, objectEncoding:UInt8, commandName:String, commandObject: ASObject?, arguments:[Any?]) {
internal init(streamId:UInt32, transactionId:Int, objectEncoding:UInt8, commandName:String, commandObject: ASObject?, arguments:[Any?]) {
self.transactionId = transactionId
self.objectEncoding = objectEncoding
self.commandName = commandName
@ -342,23 +338,23 @@ final class RTMPCommandMessage: RTMPMessage {
self.streamId = streamId
}
override func execute(_ connection: RTMPConnection) {
override internal func execute(_ connection: RTMPConnection) {
guard let responder:Responder = connection.operations.removeValue(forKey: transactionId) else {
switch commandName {
case "close":
connection.close()
default:
connection.dispatchEventWith(Event.RTMP_STATUS, bubbles: false, data: arguments.isEmpty ? nil : arguments[0])
connection.dispatch(type: Event.RTMP_STATUS, bubbles: false, data: arguments.isEmpty ? nil : arguments[0])
}
return
}
switch commandName {
case "_result":
responder.onResult(arguments)
responder.on(result: arguments)
case "_error":
responder.onStatus(arguments)
responder.on(status: arguments)
default:
break
}
@ -371,13 +367,13 @@ final class RTMPCommandMessage: RTMPMessage {
*/
final class RTMPDataMessage: RTMPMessage {
let objectEncoding:UInt8
var handlerName:String = ""
var arguments:[Any?] = []
internal let objectEncoding:UInt8
internal var handlerName:String = ""
internal var arguments:[Any?] = []
fileprivate var serializer:AMFSerializer = AMF0Serializer()
override var payload:[UInt8] {
override internal var payload:[UInt8] {
get {
guard super.payload.isEmpty else {
return super.payload
@ -421,12 +417,12 @@ final class RTMPDataMessage: RTMPMessage {
}
}
init(objectEncoding:UInt8) {
internal init(objectEncoding:UInt8) {
self.objectEncoding = objectEncoding
super.init(type: objectEncoding == 0x00 ? .amf0Data : .amf3Data)
}
init(streamId:UInt32, objectEncoding:UInt8, handlerName:String, arguments:[Any?]) {
internal init(streamId:UInt32, objectEncoding:UInt8, handlerName:String, arguments:[Any?]) {
self.objectEncoding = objectEncoding
self.handlerName = handlerName
self.arguments = arguments
@ -434,7 +430,7 @@ final class RTMPDataMessage: RTMPMessage {
self.streamId = streamId
}
convenience init(streamId:UInt32, objectEncoding:UInt8, handlerName:String) {
convenience internal init(streamId:UInt32, objectEncoding:UInt8, handlerName:String) {
self.init(streamId: streamId, objectEncoding: objectEncoding, handlerName: handlerName, arguments: [])
}
@ -452,13 +448,13 @@ final class RTMPDataMessage: RTMPMessage {
*/
final class RTMPSharedObjectMessage: RTMPMessage {
let objectEncoding:UInt8
var sharedObjectName:String = ""
var currentVersion:UInt32 = 0
var flags:[UInt8] = [UInt8](repeating: 0x00, count: 8)
var events:[RTMPSharedObjectEvent] = []
internal let objectEncoding:UInt8
internal var sharedObjectName:String = ""
internal var currentVersion:UInt32 = 0
internal var flags:[UInt8] = [UInt8](repeating: 0x00, count: 8)
internal var events:[RTMPSharedObjectEvent] = []
override var payload:[UInt8] {
override internal var payload:[UInt8] {
get {
guard super.payload.isEmpty else {
return super.payload
@ -513,12 +509,12 @@ final class RTMPSharedObjectMessage: RTMPMessage {
fileprivate var serializer:AMFSerializer = AMF0Serializer()
init(objectEncoding:UInt8) {
internal init(objectEncoding:UInt8) {
self.objectEncoding = objectEncoding
super.init(type: objectEncoding == 0x00 ? .amf0Shared : .amf3Shared)
}
init(timestamp:UInt32, objectEncoding:UInt8, sharedObjectName:String, currentVersion:UInt32, flags:[UInt8], events:[RTMPSharedObjectEvent]) {
internal init(timestamp:UInt32, objectEncoding:UInt8, sharedObjectName:String, currentVersion:UInt32, flags:[UInt8], events:[RTMPSharedObjectEvent]) {
self.objectEncoding = objectEncoding
self.sharedObjectName = sharedObjectName
self.currentVersion = currentVersion
@ -528,9 +524,9 @@ final class RTMPSharedObjectMessage: RTMPMessage {
self.timestamp = timestamp
}
override func execute(_ connection:RTMPConnection) {
override internal func execute(_ connection:RTMPConnection) {
let persistence:Bool = flags[0] == 0x01
RTMPSharedObject.getRemote(sharedObjectName, remotePath: connection.uri!.absoluteWithoutQueryString, persistence: persistence).onMessage(self)
RTMPSharedObject.getRemote(withName: sharedObjectName, remotePath: connection.uri!.absoluteWithoutQueryString, persistence: persistence).on(message: self)
}
}
@ -539,14 +535,14 @@ final class RTMPSharedObjectMessage: RTMPMessage {
7.1.5. Audio Message (9)
*/
final class RTMPAudioMessage: RTMPMessage {
var config:AudioSpecificConfig?
internal var config:AudioSpecificConfig?
fileprivate(set) var codec:FLVAudioCodec = .unknown
fileprivate(set) var soundRate:FLVSoundRate = .kHz44
fileprivate(set) var soundSize:FLVSoundSize = .snd8bit
fileprivate(set) var soundType:FLVSoundType = .stereo
internal fileprivate(set) var codec:FLVAudioCodec = .unknown
internal fileprivate(set) var soundRate:FLVSoundRate = .kHz44
internal fileprivate(set) var soundSize:FLVSoundSize = .snd8bit
internal fileprivate(set) var soundType:FLVSoundType = .stereo
var soundData:[UInt8] {
internal var soundData:[UInt8] {
let data:[UInt8] = payload.isEmpty ? [] : Array(payload[codec.headerSize..<payload.count])
guard let config:AudioSpecificConfig = config else {
return data
@ -555,7 +551,7 @@ final class RTMPAudioMessage: RTMPMessage {
return adts + data
}
override var payload:[UInt8] {
override internal var payload:[UInt8] {
get {
return super.payload
}
@ -599,7 +595,7 @@ final class RTMPAudioMessage: RTMPMessage {
return
}
OSAtomicAdd64(Int64(payload.count), &stream.info.byteCount)
stream.audioPlayback.onMessage(self)
stream.audioPlayback.on(message: self)
}
func createAudioSpecificConfig() -> AudioSpecificConfig? {
@ -626,14 +622,14 @@ final class RTMPAudioMessage: RTMPMessage {
7.1.5. Video Message (9)
*/
final class RTMPVideoMessage: RTMPMessage {
fileprivate(set) var codec:FLVVideoCodec = .unknown
fileprivate(set) var status:OSStatus = noErr
internal fileprivate(set) var codec:FLVVideoCodec = .unknown
internal fileprivate(set) var status:OSStatus = noErr
init() {
internal init() {
super.init(type: .video)
}
init(streamId: UInt32, timestamp: UInt32, buffer:Data) {
internal init(streamId: UInt32, timestamp: UInt32, buffer:Data) {
super.init(type: .video)
self.streamId = streamId
self.timestamp = timestamp
@ -641,7 +637,7 @@ final class RTMPVideoMessage: RTMPMessage {
(buffer as NSData).getBytes(&payload, length: payload.count)
}
override func execute(_ connection:RTMPConnection) {
internal override func execute(_ connection:RTMPConnection) {
guard let stream:RTMPStream = connection.streams[streamId] else {
return
}
@ -659,7 +655,7 @@ final class RTMPVideoMessage: RTMPMessage {
}
}
func enqueueSampleBuffer(_ stream: RTMPStream) {
internal func enqueueSampleBuffer(_ stream: RTMPStream) {
stream.videoTimestamp += Double(timestamp)
let compositionTimeoffset:Int32 = Int32(bytes: [0] + payload[2..<5]).bigEndian
@ -686,7 +682,7 @@ final class RTMPVideoMessage: RTMPMessage {
status = stream.mixer.videoIO.decoder.decodeSampleBuffer(sampleBuffer!)
}
func createFormatDescription(_ stream: RTMPStream) -> OSStatus {
internal func createFormatDescription(_ stream: RTMPStream) -> OSStatus {
var config:AVCConfigurationRecord = AVCConfigurationRecord()
config.bytes = Array(payload[FLVTag.TagType.video.headerSize..<payload.count])
return config.createFormatDescription(&stream.mixer.videoIO.formatDescription)
@ -698,7 +694,7 @@ final class RTMPVideoMessage: RTMPMessage {
7.1.6. Aggregate Message (22)
*/
final class RTMPAggregateMessage: RTMPMessage {
init() {
internal init() {
super.init(type: .aggregate)
}
}
@ -709,7 +705,7 @@ final class RTMPAggregateMessage: RTMPMessage {
*/
final class RTMPUserControlMessage: RTMPMessage {
enum Event: UInt8 {
internal enum Event: UInt8 {
case streamBegin = 0x00
case streamEof = 0x01
case streamDry = 0x02
@ -721,15 +717,15 @@ final class RTMPUserControlMessage: RTMPMessage {
case bufferFull = 0x20
case unknown = 0xFF
var bytes:[UInt8] {
internal var bytes:[UInt8] {
return [0x00, rawValue]
}
}
var event:Event = .unknown
var value:Int32 = 0
internal var event:Event = .unknown
internal var value:Int32 = 0
override var payload:[UInt8] {
override internal var payload:[UInt8] {
get {
guard super.payload.isEmpty else {
return super.payload
@ -753,26 +749,26 @@ final class RTMPUserControlMessage: RTMPMessage {
}
}
init() {
internal init() {
super.init(type: .user)
}
init(event:Event, value:Int32) {
internal init(event:Event, value:Int32) {
super.init(type: .user)
self.event = event
self.value = value
}
override func execute(_ connection: RTMPConnection) {
override internal func execute(_ connection: RTMPConnection) {
switch event {
case .ping:
connection.socket.doOutput(chunk: RTMPChunk(
type: .zero,
streamId: RTMPChunk.control,
streamId: RTMPChunk.StreamID.control.rawValue,
message: RTMPUserControlMessage(event: .pong, value: value)
))
case .bufferEmpty, .bufferFull:
connection.streams[UInt32(value)]?.dispatchEventWith("rtmpStatus", bubbles: false, data: [
connection.streams[UInt32(value)]?.dispatch(type: "rtmpStatus", bubbles: false, data: [
"level": "status",
"code": description,
"description": ""

View File

@ -1,20 +1,19 @@
import Foundation
import AVFoundation
// MARK: - RTMPMuxerDelegate
protocol RTMPMuxerDelegate: class {
func sampleOutput(_ muxer:RTMPMuxer, audio buffer:Data, timestamp:Double)
func sampleOutput(_ muxer:RTMPMuxer, video buffer:Data, timestamp:Double)
}
// MARK: - RTMPMuxer
// MARK: -
final class RTMPMuxer {
weak var delegate:RTMPMuxerDelegate? = nil
internal weak var delegate:RTMPMuxerDelegate? = nil
fileprivate var audioTimestamp:CMTime = kCMTimeZero
fileprivate var videoTimestamp:CMTime = kCMTimeZero
func dispose() {
internal func dispose() {
audioTimestamp = kCMTimeZero
videoTimestamp = kCMTimeZero
}
@ -22,7 +21,7 @@ final class RTMPMuxer {
extension RTMPMuxer: AudioEncoderDelegate {
// MARK: AudioEncoderDelegate
func didSetFormatDescription(audio formatDescription: CMFormatDescription?) {
internal func didSetFormatDescription(audio formatDescription: CMFormatDescription?) {
guard let formatDescription:CMFormatDescription = formatDescription else {
return
}
@ -35,7 +34,7 @@ extension RTMPMuxer: AudioEncoderDelegate {
delegate?.sampleOutput(self, audio: buffer as Data, timestamp: 0)
}
func sampleOutput(audio sampleBuffer: CMSampleBuffer) {
internal func sampleOutput(audio sampleBuffer: CMSampleBuffer) {
var blockBuffer:CMBlockBuffer?
var audioBufferList:AudioBufferList = AudioBufferList()
CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
@ -56,10 +55,9 @@ extension RTMPMuxer: AudioEncoderDelegate {
}
}
// MARK: VideoEncoderDelegate
extension RTMPMuxer: VideoEncoderDelegate {
func didSetFormatDescription(video formatDescription: CMFormatDescription?) {
// MARK: VideoEncoderDelegate
internal func didSetFormatDescription(video formatDescription: CMFormatDescription?) {
guard let
formatDescription:CMFormatDescription = formatDescription,
let avcC:Data = AVCConfigurationRecord.getData(formatDescription) else {
@ -74,7 +72,7 @@ extension RTMPMuxer: VideoEncoderDelegate {
delegate?.sampleOutput(self, video: buffer as Data, timestamp: 0)
}
func sampleOutput(video sampleBuffer: CMSampleBuffer) {
internal func sampleOutput(video sampleBuffer: CMSampleBuffer) {
guard let block:CMBlockBuffer = CMSampleBufferGetDataBuffer(sampleBuffer) else {
return
}
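RTMPMuxerDelegate keeps its two sampleOutput requirements; only the muxer's conforming methods gain an explicit internal. A minimal, framework-internal conformer sketched purely for illustration:

// Logs each encoded audio/video buffer instead of forwarding it to a stream.
final class LoggingMuxerDelegate: RTMPMuxerDelegate {
    func sampleOutput(_ muxer: RTMPMuxer, audio buffer: Data, timestamp: Double) {
        print("audio buffer:", buffer.count, "bytes, timestamp:", timestamp)
    }
    func sampleOutput(_ muxer: RTMPMuxer, video buffer: Data, timestamp: Double) {
        print("video buffer:", buffer.count, "bytes, timestamp:", timestamp)
    }
}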

View File

@ -16,21 +16,21 @@ struct RTMPSharedObjectEvent {
case unknown = 255
}
var type:Type = .unknown
var name:String? = nil
var data:Any? = nil
internal var type:Type = .unknown
internal var name:String? = nil
internal var data:Any? = nil
init(type:Type) {
internal init(type:Type) {
self.type = type
}
init(type:Type, name:String, data:Any?) {
internal init(type:Type, name:String, data:Any?) {
self.type = type
self.name = name
self.data = data
}
init?(serializer:inout AMFSerializer) throws {
internal init?(serializer:inout AMFSerializer) throws {
guard let byte:UInt8 = try? serializer.readUInt8(), let type:Type = Type(rawValue: byte) else {
return nil
}
@ -50,7 +50,7 @@ struct RTMPSharedObjectEvent {
}
}
func serialize(_ serializer:inout AMFSerializer) {
internal func serialize(_ serializer:inout AMFSerializer) {
serializer.writeUInt8(type.rawValue)
guard let name:String = name else {
serializer.writeUInt32(0)
@ -69,9 +69,9 @@ struct RTMPSharedObjectEvent {
}
}
// MARK: CustomStringConvertible
extension RTMPSharedObjectEvent: CustomStringConvertible {
var description:String {
// MARK: CustomStringConvertible
internal var description:String {
return Mirror(reflecting: self).description
}
}
@ -83,21 +83,21 @@ extension RTMPSharedObjectEvent: CustomStringConvertible {
open class RTMPSharedObject: EventDispatcher {
static fileprivate var remoteSharedObjects:[String: RTMPSharedObject] = [:]
static open func getRemote(_ name: String, remotePath: String, persistence: Bool) -> RTMPSharedObject {
let key:String = remotePath + "/" + name + "?persistence=" + persistence.description
static open func getRemote(withName: String, remotePath: String, persistence: Bool) -> RTMPSharedObject {
let key:String = remotePath + "/" + withName + "?persistence=" + persistence.description
objc_sync_enter(remoteSharedObjects)
if (remoteSharedObjects[key] == nil) {
remoteSharedObjects[key] = RTMPSharedObject(name: name, path: remotePath, persistence: persistence)
remoteSharedObjects[key] = RTMPSharedObject(name: withName, path: remotePath, persistence: persistence)
}
objc_sync_exit(remoteSharedObjects)
return remoteSharedObjects[key]!
}
var name:String
var path:String
var timestamp:TimeInterval = 0
var persistence:Bool
var currentVersion:UInt32 = 0
internal var name:String
internal var path:String
internal var timestamp:TimeInterval = 0
internal var persistence:Bool
internal var currentVersion:UInt32 = 0
open fileprivate(set) var objectEncoding:UInt8 = RTMPConnection.defaultObjectEncoding
open fileprivate(set) var data:[String: Any?] = [:]
@ -119,7 +119,7 @@ open class RTMPSharedObject: EventDispatcher {
fileprivate var rtmpConnection:RTMPConnection? = nil
init(name:String, path:String, persistence:Bool) {
internal init(name:String, path:String, persistence:Bool) {
self.name = name
self.path = path
self.persistence = persistence
@ -141,7 +141,7 @@ open class RTMPSharedObject: EventDispatcher {
close()
}
self.rtmpConnection = rtmpConnection
rtmpConnection.addEventListener(Event.RTMP_STATUS, selector: #selector(RTMPSharedObject.rtmpStatusHandler(_:)), observer: self)
rtmpConnection.addEventListener(type: Event.RTMP_STATUS, selector: #selector(RTMPSharedObject.rtmpStatusHandler(_:)), observer: self)
if (rtmpConnection.connected) {
timestamp = rtmpConnection.socket.timestamp
rtmpConnection.socket.doOutput(chunk: createChunk([RTMPSharedObjectEvent(type: .use)]))
@ -155,12 +155,12 @@ open class RTMPSharedObject: EventDispatcher {
open func close() {
data.removeAll(keepingCapacity: false)
rtmpConnection?.removeEventListener(Event.RTMP_STATUS, selector: #selector(RTMPSharedObject.rtmpStatusHandler(_:)), observer: self)
rtmpConnection?.removeEventListener(type: Event.RTMP_STATUS, selector: #selector(RTMPSharedObject.rtmpStatusHandler(_:)), observer: self)
rtmpConnection?.socket.doOutput(chunk: createChunk([RTMPSharedObjectEvent(type: .release)]))
rtmpConnection = nil
}
final func onMessage(_ message:RTMPSharedObjectMessage) {
final internal func on(message:RTMPSharedObjectMessage) {
currentVersion = message.currentVersion
var changeList:[[String: Any?]] = []
for event in message.events {
@ -192,10 +192,10 @@ open class RTMPSharedObject: EventDispatcher {
}
changeList.append(change)
}
dispatchEventWith(Event.SYNC, bubbles: false, data: changeList)
dispatch(type: Event.SYNC, bubbles: false, data: changeList)
}
func createChunk(_ events:[RTMPSharedObjectEvent]) -> RTMPChunk {
internal func createChunk(_ events:[RTMPSharedObjectEvent]) -> RTMPChunk {
let now:Date = Date()
let timestamp:TimeInterval = now.timeIntervalSince1970 - self.timestamp
self.timestamp = now.timeIntervalSince1970
@ -204,7 +204,7 @@ open class RTMPSharedObject: EventDispatcher {
}
return RTMPChunk(
type: succeeded ? .one : .zero,
streamId: RTMPChunk.command,
streamId: RTMPChunk.StreamID.command.rawValue,
message: RTMPSharedObjectMessage(
timestamp: UInt32(timestamp * 1000),
objectEncoding: objectEncoding,
@ -216,11 +216,11 @@ open class RTMPSharedObject: EventDispatcher {
)
}
func rtmpStatusHandler(_ notification:Notification) {
internal func rtmpStatusHandler(_ notification:Notification) {
let e:Event = Event.from(notification)
if let data:ASObject = e.data as? ASObject, let code:String = data["code"] as? String {
switch code {
case RTMPConnection.Code.ConnectSuccess.rawValue:
case RTMPConnection.Code.connectSuccess.rawValue:
timestamp = rtmpConnection!.socket.timestamp
rtmpConnection!.socket.doOutput(chunk: createChunk([RTMPSharedObjectEvent(type: .use)]))
default:
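getRemote now takes a withName: label, listeners register with addEventListener(type:...), and sync updates arrive as Event.SYNC. A usage sketch with a placeholder URI; the import lf module name and the observer boilerplate are assumptions.

import lf   // module name assumed

final class SharedObjectObserver: NSObject {
    // Relabeled factory: getRemote(withName:remotePath:persistence:).
    let sharedObject = RTMPSharedObject.getRemote(
        withName: "test", remotePath: "rtmp://localhost/live", persistence: false
    )

    func observe() {
        sharedObject.addEventListener(type: Event.SYNC, selector: #selector(syncHandler(_:)), observer: self)
    }

    @objc func syncHandler(_ notification: Notification) {
        // The event data carries the change list dispatched from on(message:) above.
        print(Event.from(notification).data ?? "no changes")
    }
}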

View File

@ -1,15 +1,34 @@
import Foundation
// MARK: RTMPSocketDelegate
protocol RTMPSocketDelegate: IEventDispatcher {
func listen(_ socket:RTMPSocket, bytes:[UInt8])
func didSetReadyState(_ socket:RTMPSocket, readyState:RTMPSocket.ReadyState)
protocol RTMPSocketCompatible: class {
var timeout:Int64 { get set }
var timestamp:TimeInterval { get }
var chunkSizeC:Int { get set }
var chunkSizeS:Int { get set }
var totalBytesIn:Int64 { get }
var totalBytesOut:Int64 { get }
var inputBuffer:[UInt8] { get set }
var securityLevel:StreamSocketSecurityLevel { get set }
var objectEncoding:UInt8 { get set }
weak var delegate:RTMPSocketDelegate? { get set }
@discardableResult
func doOutput(chunk:RTMPChunk) -> Int
func close(isDisconnected:Bool)
func connect(withName:String, port:Int)
func deinitConnection(isDisconnected:Bool)
}
// MARK: -
final class RTMPSocket: NetSocket {
protocol RTMPSocketDelegate: IEventDispatcher {
func listen(bytes:[UInt8])
func didSet(readyState:RTMPSocket.ReadyState)
}
enum ReadyState: UInt8 {
// MARK: -
final internal class RTMPSocket: NetSocket, RTMPSocketCompatible {
internal enum ReadyState: UInt8 {
case uninitialized = 0
case versionSent = 1
case ackSent = 2
@ -18,20 +37,20 @@ final class RTMPSocket: NetSocket {
case closed = 5
}
static let sigSize:Int = 1536
static let protocolVersion:UInt8 = 3
static let defaultBufferSize:Int = 1024
static internal let sigSize:Int = 1536
static internal let protocolVersion:UInt8 = 3
static internal let defaultBufferSize:Int = 1024
var readyState:ReadyState = .uninitialized {
internal var readyState:ReadyState = .uninitialized {
didSet {
delegate?.didSetReadyState(self, readyState: readyState)
delegate?.didSet(readyState: readyState)
}
}
var chunkSizeC:Int = RTMPChunk.defaultSize
var chunkSizeS:Int = RTMPChunk.defaultSize
var objectEncoding:UInt8 = RTMPConnection.defaultObjectEncoding
weak var delegate:RTMPSocketDelegate? = nil
override var connected:Bool {
internal var chunkSizeC:Int = RTMPChunk.defaultSize
internal var chunkSizeS:Int = RTMPChunk.defaultSize
internal var objectEncoding:UInt8 = RTMPConnection.defaultObjectEncoding
internal weak var delegate:RTMPSocketDelegate? = nil
override internal var connected:Bool {
didSet {
if (connected) {
timestamp = Date().timeIntervalSince1970
@ -48,16 +67,17 @@ final class RTMPSocket: NetSocket {
}
readyState = .closed
for event in events {
delegate?.dispatchEvent(event)
delegate?.dispatch(event: event)
}
events.removeAll()
}
}
fileprivate(set) var timestamp:TimeInterval = 0
internal fileprivate(set) var timestamp:TimeInterval = 0
fileprivate var events:[Event] = []
@discardableResult
func doOutput(chunk:RTMPChunk) -> Int {
internal func doOutput(chunk:RTMPChunk) -> Int {
let chunks:[[UInt8]] = chunk.split(chunkSizeS)
for chunk in chunks {
doOutput(bytes: chunk)
@ -68,10 +88,10 @@ final class RTMPSocket: NetSocket {
return chunk.message!.length
}
func connect(_ hostname:String, port:Int) {
internal func connect(withName:String, port:Int) {
networkQueue.async {
Foundation.Stream.getStreamsToHost(
withName: hostname,
Stream.getStreamsToHost(
withName: withName,
port: port,
inputStream: &self.inputStream,
outputStream: &self.outputStream
@ -80,14 +100,13 @@ final class RTMPSocket: NetSocket {
}
}
override func listen() {
override internal func listen() {
switch readyState {
case .versionSent:
if (inputBuffer.count < RTMPSocket.sigSize + 1) {
break
}
let c2packet:ByteArray = ByteArray()
c2packet
.writeBytes(Array(inputBuffer[1...4]))
.writeInt32(Int32(Date().timeIntervalSince1970 - timestamp))
.writeBytes(Array(inputBuffer[9...RTMPSocket.sigSize]))
@ -106,13 +125,13 @@ final class RTMPSocket: NetSocket {
}
let bytes:[UInt8] = inputBuffer
inputBuffer.removeAll()
delegate?.listen(self, bytes: bytes)
delegate?.listen(bytes: bytes)
default:
break
}
}
override func initConnection() {
override internal func initConnection() {
readyState = .uninitialized
timestamp = 0
chunkSizeS = RTMPChunk.defaultSize
@ -120,19 +139,19 @@ final class RTMPSocket: NetSocket {
super.initConnection()
}
override func deinitConnection(_ disconnect:Bool) {
if (disconnect) {
override internal func deinitConnection(isDisconnected:Bool) {
if (isDisconnected) {
let data:ASObject = (readyState == .handshakeDone) ?
RTMPConnection.Code.ConnectClosed.data("") : RTMPConnection.Code.ConnectFailed.data("")
RTMPConnection.Code.connectClosed.data("") : RTMPConnection.Code.connectFailed.data("")
events.append(Event(type: Event.RTMP_STATUS, bubbles: false, data: data))
}
readyState = .closing
super.deinitConnection(disconnect)
super.deinitConnection(isDisconnected: isDisconnected)
}
override func didTimeout() {
deinitConnection(false)
delegate?.dispatchEventWith(Event.IO_ERROR, bubbles: false, data: nil)
override internal func didTimeout() {
deinitConnection(isDisconnected: false)
delegate?.dispatch(type: Event.IO_ERROR, bubbles: false, data: nil)
logger.warning("connection timedout")
}
}
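The new RTMPSocketCompatible protocol is the seam that lets the connection drive either this TCP socket or the tunneled transport introduced below. A framework-internal, no-op conformer, sketched only to show the required surface; it is not a working transport.

final class NullRTMPSocket: RTMPSocketCompatible {
    var timeout: Int64 = 0
    var timestamp: TimeInterval = 0
    var chunkSizeC: Int = RTMPChunk.defaultSize
    var chunkSizeS: Int = RTMPChunk.defaultSize
    var totalBytesIn: Int64 = 0
    var totalBytesOut: Int64 = 0
    var inputBuffer: [UInt8] = []
    var securityLevel: StreamSocketSecurityLevel = .none
    var objectEncoding: UInt8 = 0x00
    weak var delegate: RTMPSocketDelegate? = nil

    @discardableResult
    func doOutput(chunk: RTMPChunk) -> Int {
        // Count what would have been written instead of touching the network.
        let written: Int = chunk.split(chunkSizeS).reduce(0) { $0 + $1.count }
        totalBytesOut += Int64(written)
        return written
    }
    func close(isDisconnected: Bool) {}
    func connect(withName: String, port: Int) {}
    func deinitConnection(isDisconnected: Bool) {}
}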

View File

@ -11,21 +11,21 @@ public struct RTMPStreamInfo {
fileprivate var previousByteCount:Int64 = 0
mutating func didTimerInterval(_ timer:Timer) {
mutating internal func on(timer:Timer) {
let byteCount:Int64 = self.byteCount
currentBytesPerSecond = Int32(byteCount - previousByteCount)
previousByteCount = byteCount
}
mutating func clear() {
mutating internal func clear() {
byteCount = 0
currentBytesPerSecond = 0
previousByteCount = 0
}
}
// MARK: CustomStringConvertible
extension RTMPStreamInfo: CustomStringConvertible {
// MARK: CustomStringConvertible
public var description:String {
return Mirror(reflecting: self).description
}
@ -35,7 +35,7 @@ extension RTMPStreamInfo: CustomStringConvertible {
/**
flash.net.NetStream for Swift
*/
open class RTMPStream: Stream {
open class RTMPStream: NetStream {
open static var rootPath:String = NSTemporaryDirectory()
@ -43,130 +43,130 @@ open class RTMPStream: Stream {
NetStatusEvent#info.code for NetStream
*/
public enum Code: String {
case BufferEmpty = "NetStream.Buffer.Empty"
case BufferFlush = "NetStream.Buffer.Flush"
case BufferFull = "NetStream.Buffer.Full"
case ConnectClosed = "NetStream.Connect.Closed"
case ConnectFailed = "NetStream.Connect.Failed"
case ConnectRejected = "NetStream.Connect.Rejected"
case ConnectSuccess = "NetStream.Connect.Success"
case DRMUpdateNeeded = "NetStream.DRM.UpdateNeeded"
case Failed = "NetStream.Failed"
case MulticastStreamReset = "NetStream.MulticastStream.Reset"
case PauseNotify = "NetStream.Pause.Notify"
case PlayFailed = "NetStream.Play.Failed"
case PlayFileStructureInvalid = "NetStream.Play.FileStructureInvalid"
case PlayInsufficientBW = "NetStream.Play.InsufficientBW"
case PlayNoSupportedTrackFound = "NetStream.Play.NoSupportedTrackFound"
case PlayReset = "NetStream.Play.Reset"
case PlayStart = "NetStream.Play.Start"
case PlayStop = "NetStream.Play.Stop"
case PlayStreamNotFound = "NetStream.Play.StreamNotFound"
case PlayTransition = "NetStream.Play.Transition"
case PlayUnpublishNotify = "NetStream.Play.UnpublishNotify"
case PublishBadName = "NetStream.Publish.BadName"
case PublishIdle = "NetStream.Publish.Idle"
case PublishStart = "NetStream.Publish.Start"
case RecordAlreadyExists = "NetStream.Record.AlreadyExists"
case RecordFailed = "NetStream.Record.Failed"
case RecordNoAccess = "NetStream.Record.NoAccess"
case RecordStart = "NetStream.Record.Start"
case RecordStop = "NetStream.Record.Stop"
case RecordDiskQuotaExceeded = "NetStream.Record.DiskQuotaExceeded"
case SecondScreenStart = "NetStream.SecondScreen.Start"
case SecondScreenStop = "NetStream.SecondScreen.Stop"
case SeekFailed = "NetStream.Seek.Failed"
case SeekInvalidTime = "NetStream.Seek.InvalidTime"
case SeekNotify = "NetStream.Seek.Notify"
case StepNotify = "NetStream.Step.Notify"
case UnpauseNotify = "NetStream.Unpause.Notify"
case UnpublishSuccess = "NetStream.Unpublish.Success"
case VideoDimensionChange = "NetStream.Video.DimensionChange"
case bufferEmpty = "NetStream.Buffer.Empty"
case bufferFlush = "NetStream.Buffer.Flush"
case bufferFull = "NetStream.Buffer.Full"
case connectClosed = "NetStream.Connect.Closed"
case connectFailed = "NetStream.Connect.Failed"
case connectRejected = "NetStream.Connect.Rejected"
case connectSuccess = "NetStream.Connect.Success"
case drmUpdateNeeded = "NetStream.DRM.UpdateNeeded"
case failed = "NetStream.Failed"
case multicastStreamReset = "NetStream.MulticastStream.Reset"
case pauseNotify = "NetStream.Pause.Notify"
case playFailed = "NetStream.Play.Failed"
case playFileStructureInvalid = "NetStream.Play.FileStructureInvalid"
case playInsufficientBW = "NetStream.Play.InsufficientBW"
case playNoSupportedTrackFound = "NetStream.Play.NoSupportedTrackFound"
case playReset = "NetStream.Play.Reset"
case playStart = "NetStream.Play.Start"
case playStop = "NetStream.Play.Stop"
case playStreamNotFound = "NetStream.Play.StreamNotFound"
case playTransition = "NetStream.Play.Transition"
case playUnpublishNotify = "NetStream.Play.UnpublishNotify"
case publishBadName = "NetStream.Publish.BadName"
case publishIdle = "NetStream.Publish.Idle"
case publishStart = "NetStream.Publish.Start"
case recordAlreadyExists = "NetStream.Record.AlreadyExists"
case recordFailed = "NetStream.Record.Failed"
case recordNoAccess = "NetStream.Record.NoAccess"
case recordStart = "NetStream.Record.Start"
case recordStop = "NetStream.Record.Stop"
case recordDiskQuotaExceeded = "NetStream.Record.DiskQuotaExceeded"
case secondScreenStart = "NetStream.SecondScreen.Start"
case secondScreenStop = "NetStream.SecondScreen.Stop"
case seekFailed = "NetStream.Seek.Failed"
case seekInvalidTime = "NetStream.Seek.InvalidTime"
case seekNotify = "NetStream.Seek.Notify"
case stepNotify = "NetStream.Step.Notify"
case unpauseNotify = "NetStream.Unpause.Notify"
case unpublishSuccess = "NetStream.Unpublish.Success"
case videoDimensionChange = "NetStream.Video.DimensionChange"
public var level:String {
switch self {
case .BufferEmpty:
case .bufferEmpty:
return "status"
case .BufferFlush:
case .bufferFlush:
return "status"
case .BufferFull:
case .bufferFull:
return "status"
case .ConnectClosed:
case .connectClosed:
return "status"
case .ConnectFailed:
case .connectFailed:
return "error"
case .ConnectRejected:
case .connectRejected:
return "error"
case .ConnectSuccess:
case .connectSuccess:
return "status"
case .DRMUpdateNeeded:
case .drmUpdateNeeded:
return "status"
case .Failed:
case .failed:
return "error"
case .MulticastStreamReset:
case .multicastStreamReset:
return "status"
case .PauseNotify:
case .pauseNotify:
return "status"
case .PlayFailed:
case .playFailed:
return "error"
case .PlayFileStructureInvalid:
case .playFileStructureInvalid:
return "error"
case .PlayInsufficientBW:
case .playInsufficientBW:
return "warning"
case .PlayNoSupportedTrackFound:
case .playNoSupportedTrackFound:
return "status"
case .PlayReset:
case .playReset:
return "status"
case .PlayStart:
case .playStart:
return "status"
case .PlayStop:
case .playStop:
return "status"
case .PlayStreamNotFound:
case .playStreamNotFound:
return "status"
case .PlayTransition:
case .playTransition:
return "status"
case .PlayUnpublishNotify:
case .playUnpublishNotify:
return "status"
case .PublishBadName:
case .publishBadName:
return "error"
case .PublishIdle:
case .publishIdle:
return "status"
case .PublishStart:
case .publishStart:
return "status"
case .RecordAlreadyExists:
case .recordAlreadyExists:
return "status"
case .RecordFailed:
case .recordFailed:
return "error"
case .RecordNoAccess:
case .recordNoAccess:
return "error"
case .RecordStart:
case .recordStart:
return "status"
case .RecordStop:
case .recordStop:
return "status"
case .RecordDiskQuotaExceeded:
case .recordDiskQuotaExceeded:
return "error"
case .SecondScreenStart:
case .secondScreenStart:
return "status"
case .SecondScreenStop:
case .secondScreenStop:
return "status"
case .SeekFailed:
case .seekFailed:
return "error"
case .SeekInvalidTime:
case .seekInvalidTime:
return "error"
case .SeekNotify:
case .seekNotify:
return "status"
case .StepNotify:
case .stepNotify:
return "status"
case .UnpauseNotify:
case .unpauseNotify:
return "status"
case .UnpublishSuccess:
case .unpublishSuccess:
return "status"
case .VideoDimensionChange:
case .videoDimensionChange:
return "status"
}
}
func data(_ description:String) -> ASObject {
internal func data(_ description:String) -> ASObject {
return [
"code": rawValue,
"level": level,
@ -179,12 +179,12 @@ open class RTMPStream: Stream {
flash.net.NetStreamPlayTransitions for Swift
*/
public enum PlayTransition: String {
case Append = "append"
case AppendAndWait = "appendAndWait"
case Reset = "reset"
case Resume = "resume"
case Stop = "stop"
case Swap = "swap"
case append = "append"
case appendAndWait = "appendAndWait"
case reset = "reset"
case resume = "resume"
case stop = "stop"
case swap = "swap"
case Switch = "switch"
}
@ -202,14 +202,14 @@ open class RTMPStream: Stream {
}
public enum HowToPublish: String {
case Record = "record"
case Append = "append"
case AppendWithGap = "appendWithGap"
case Live = "live"
case LocalRecord = "localRecord"
case record = "record"
case append = "append"
case appendWithGap = "appendWithGap"
case live = "live"
case localRecord = "localRecord"
}
enum ReadyState: UInt8 {
internal enum ReadyState: UInt8 {
case initilized = 0
case open = 1
case play = 2
@ -219,7 +219,7 @@ open class RTMPStream: Stream {
case closed = 6
}
static let defaultID:UInt32 = 0
static internal let defaultID:UInt32 = 0
open static let defaultAudioBitrate:UInt32 = AACEncoder.defaultBitrate
open static let defaultVideoBitrate:UInt32 = AVCEncoder.defaultBitrate
open internal(set) var info:RTMPStreamInfo = RTMPStreamInfo()
@ -230,8 +230,8 @@ open class RTMPStream: Stream {
set { audioPlayback.soundTransform = newValue }
}
var id:UInt32 = RTMPStream.defaultID
var readyState:ReadyState = .initilized {
internal var id:UInt32 = RTMPStream.defaultID
internal var readyState:ReadyState = .initilized {
didSet {
switch readyState {
case .open:
@ -239,10 +239,10 @@ open class RTMPStream: Stream {
frameCount = 0
info.clear()
case .publishing:
send("@setDataFrame", arguments: "onMetaData", createMetaData())
send(handlerName: "@setDataFrame", arguments: "onMetaData", createMetaData())
mixer.audioIO.encoder.startRunning()
mixer.videoIO.encoder.startRunning()
if (howToPublish == .LocalRecord) {
if (howToPublish == .localRecord) {
mixer.recorder.fileName = info.resourceName
mixer.recorder.startRunning()
}
@ -252,28 +252,28 @@ open class RTMPStream: Stream {
}
}
var audioTimestamp:Double = 0
var videoTimestamp:Double = 0
internal var audioTimestamp:Double = 0
internal var videoTimestamp:Double = 0
internal fileprivate(set) var audioPlayback:RTMPAudioPlayback = RTMPAudioPlayback()
fileprivate(set) var audioPlayback:RTMPAudioPlayback = RTMPAudioPlayback()
fileprivate var muxer:RTMPMuxer = RTMPMuxer()
fileprivate var frameCount:UInt8 = 0
fileprivate var chunkTypes:[FLVTag.TagType:Bool] = [:]
fileprivate var dispatcher:IEventDispatcher!
fileprivate var howToPublish:RTMPStream.HowToPublish = .Live
fileprivate var howToPublish:RTMPStream.HowToPublish = .live
fileprivate var rtmpConnection:RTMPConnection
public init(rtmpConnection: RTMPConnection) {
self.rtmpConnection = rtmpConnection
super.init()
self.dispatcher = EventDispatcher(target: self)
rtmpConnection.addEventListener(Event.RTMP_STATUS, selector: #selector(RTMPStream.rtmpStatusHandler(_:)), observer: self)
rtmpConnection.addEventListener(type: Event.RTMP_STATUS, selector: #selector(RTMPStream.rtmpStatusHandler(_:)), observer: self)
if (rtmpConnection.connected) {
rtmpConnection.createStream(self)
rtmpConnection.create(stream: self)
}
}
open func receiveAudio(_ flag:Bool) {
open func receive(audio:Bool) {
lockQueue.async {
guard self.readyState == .playing else {
return
@ -284,12 +284,12 @@ open class RTMPStream: Stream {
objectEncoding: self.objectEncoding,
commandName: "receiveAudio",
commandObject: nil,
arguments: [flag]
arguments: [audio]
)))
}
}
open func receiveVideo(_ flag:Bool) {
open func receive(video:Bool) {
lockQueue.async {
guard self.readyState == .playing else {
return
@ -300,7 +300,7 @@ open class RTMPStream: Stream {
objectEncoding: self.objectEncoding,
commandName: "receiveVideo",
commandObject: nil,
arguments: [flag]
arguments: [video]
)))
}
}
@ -315,7 +315,7 @@ open class RTMPStream: Stream {
self.audioPlayback.stopRunning()
self.rtmpConnection.socket.doOutput(chunk: RTMPChunk(
type: .zero,
streamId: RTMPChunk.audio,
streamId: RTMPChunk.StreamID.audio.rawValue,
message: RTMPCommandMessage(
streamId: self.id,
transactionId: 0,
@ -366,16 +366,16 @@ open class RTMPStream: Stream {
guard let howToPublish:RTMPStream.HowToPublish = RTMPStream.HowToPublish(rawValue: type) else {
return
}
publish(name, type: howToPublish)
publish(withName: name, type: howToPublish)
}
open func publish(_ name:String?, type:RTMPStream.HowToPublish = .Live) {
open func publish(withName:String?, type:RTMPStream.HowToPublish = .live) {
lockQueue.async {
guard let name:String = name else {
guard let name:String = withName else {
guard self.readyState == .publishing else {
self.howToPublish = type
switch type {
case .LocalRecord:
case .localRecord:
self.mixer.recorder.fileName = self.info.resourceName
self.mixer.recorder.startRunning()
default:
@ -395,7 +395,7 @@ open class RTMPStream: Stream {
self.FCUnpublish()
self.rtmpConnection.socket.doOutput(chunk: RTMPChunk(
type: .zero,
streamId: RTMPChunk.audio,
streamId: RTMPChunk.StreamID.audio.rawValue,
message: RTMPCommandMessage(
streamId: self.id,
transactionId: 0,
@ -425,14 +425,14 @@ open class RTMPStream: Stream {
self.FCPublish()
self.rtmpConnection.socket.doOutput(chunk: RTMPChunk(
type: .zero,
streamId: RTMPChunk.audio,
streamId: RTMPChunk.StreamID.audio.rawValue,
message: RTMPCommandMessage(
streamId: self.id,
transactionId: 0,
objectEncoding: self.objectEncoding,
commandName: "publish",
commandObject: nil,
arguments: [name, type == .LocalRecord ? RTMPStream.HowToPublish.Live.rawValue : type.rawValue]
arguments: [name, type == .localRecord ? RTMPStream.HowToPublish.live.rawValue : type.rawValue]
)))
self.readyState = .publish
@ -444,11 +444,11 @@ open class RTMPStream: Stream {
return
}
play()
publish(nil)
publish(withName: nil)
lockQueue.sync {
self.rtmpConnection.socket.doOutput(chunk: RTMPChunk(
type: .zero,
streamId: RTMPChunk.command,
streamId: RTMPChunk.StreamID.command.rawValue,
message: RTMPCommandMessage(
streamId: 0,
transactionId: 0,
@ -461,7 +461,7 @@ open class RTMPStream: Stream {
}
}
open func send(_ handlerName:String, arguments:Any?...) {
open func send(handlerName:String, arguments:Any?...) {
lockQueue.async {
if (self.readyState == .closed) {
return
@ -492,20 +492,20 @@ open class RTMPStream: Stream {
return metadata
}
func didTimerInterval(_ timer:Timer) {
internal func on(timer:Timer) {
currentFPS = frameCount
frameCount = 0
info.didTimerInterval(timer)
info.on(timer: timer)
}
func rtmpStatusHandler(_ notification:Notification) {
internal func rtmpStatusHandler(_ notification:Notification) {
let e:Event = Event.from(notification)
if let data:ASObject = e.data as? ASObject, let code:String = data["code"] as? String {
switch code {
case RTMPConnection.Code.ConnectSuccess.rawValue:
case RTMPConnection.Code.connectSuccess.rawValue:
readyState = .initilized
rtmpConnection.createStream(self)
case RTMPStream.Code.PublishStart.rawValue:
rtmpConnection.create(stream: self)
case RTMPStream.Code.publishStart.rawValue:
readyState = .publishing
default:
break
@ -530,24 +530,24 @@ extension RTMPStream {
}
}
// MARK: - IEventDispatcher
extension RTMPStream: IEventDispatcher {
public func addEventListener(_ type:String, selector:Selector, observer:AnyObject? = nil, useCapture:Bool = false) {
dispatcher.addEventListener(type, selector: selector, observer: observer, useCapture: useCapture)
// MARK: IEventDispatcher
public func addEventListener(type:String, selector:Selector, observer:AnyObject? = nil, useCapture:Bool = false) {
dispatcher.addEventListener(type: type, selector: selector, observer: observer, useCapture: useCapture)
}
public func removeEventListener(_ type:String, selector:Selector, observer:AnyObject? = nil, useCapture:Bool = false) {
dispatcher.removeEventListener(type, selector: selector, observer: observer, useCapture: useCapture)
public func removeEventListener(type:String, selector:Selector, observer:AnyObject? = nil, useCapture:Bool = false) {
dispatcher.removeEventListener(type: type, selector: selector, observer: observer, useCapture: useCapture)
}
public func dispatchEvent(_ e:Event) {
dispatcher.dispatchEvent(e)
public func dispatch(event:Event) {
dispatcher.dispatch(event: event)
}
public func dispatchEventWith(_ type:String, bubbles:Bool, data:Any?) {
dispatcher.dispatchEventWith(type, bubbles: bubbles, data: data)
public func dispatch(type:String, bubbles:Bool, data:Any?) {
dispatcher.dispatch(type: type, bubbles: bubbles, data: data)
}
}
// MARK: - RTMPMuxerDelegate
extension RTMPStream: RTMPMuxerDelegate {
// MARK: RTMPMuxerDelegate
func sampleOutput(_ muxer:RTMPMuxer, audio buffer:Data, timestamp:Double) {
guard readyState == .publishing else {
return
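The public stream API moves to labeled arguments and lowerCamelCase cases: publish(withName:type:), receive(audio:), receive(video:), send(handlerName:arguments:). A usage sketch; the import lf module name, the parameterless RTMPConnection() initializer, the URI, and the stream key are placeholders or assumptions.

import lf   // module name assumed

let connection = RTMPConnection()                  // parameterless initializer assumed
let stream = RTMPStream(rtmpConnection: connection)

// In practice these run once the connection has reported Code.connectSuccess.
stream.publish(withName: "streamKey", type: .live)          // was publish("streamKey", type: .Live)
stream.receive(audio: true)                                 // was receiveAudio(true)
stream.receive(video: false)                                // was receiveVideo(false)
stream.send(handlerName: "onUserData", arguments: "hello")  // was send("onUserData", "hello")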

View File

@ -0,0 +1,180 @@
import Foundation
final internal class RTMPTSocket: NSObject, RTMPSocketCompatible {
internal var timeout:Int64 = 0
internal var timestamp:TimeInterval = 0
internal var chunkSizeC:Int = RTMPChunk.defaultSize
internal var chunkSizeS:Int = RTMPChunk.defaultSize
internal var totalBytesIn:Int64 = 0
internal var totalBytesOut:Int64 = 0
internal var inputBuffer:[UInt8] = []
internal var securityLevel:StreamSocketSecurityLevel = .none
internal var objectEncoding:UInt8 = 0x00
internal weak var delegate:RTMPSocketDelegate? = nil
internal var connected:Bool = false {
didSet {
if (connected) {
timestamp = Date().timeIntervalSince1970
let c1packet:ByteArray = ByteArray()
.writeUInt8(RTMPSocket.protocolVersion)
.writeInt32(Int32(timestamp))
.writeBytes([0x00, 0x00, 0x00, 0x00])
for _ in 0..<RTMPSocket.sigSize - 8 {
c1packet.writeUInt8(UInt8(arc4random_uniform(0xff)))
}
doOutput(bytes: c1packet.bytes)
readyState = .versionSent
return
}
readyState = .closed
for event in events {
delegate?.dispatch(event: event)
}
events.removeAll()
}
}
internal var readyState:RTMPSocket.ReadyState = .uninitialized {
didSet {
delegate?.didSet(readyState: readyState)
}
}
private var events:[Event] = []
private var index:Int64 = 0
private var baseURL:URL!
private var session:URLSession!
private var connectionID:String!
override internal init() {
super.init()
}
internal func connect(withName:String, port:Int) {
let config:URLSessionConfiguration = URLSessionConfiguration.default
config.httpAdditionalHeaders = [
"Content-Type": "application/x-fcs",
"User-Agent": "Shockwave Flash",
]
let scheme:String = securityLevel == .none ? "http" : "https"
session = URLSession(configuration: config)
baseURL = URL(string: "\(scheme)://\(withName):\(port)")!
doRequest(pathComonent: "/fcs/ident2", data: Data([0x00]), completionHandler:didIdent2)
}
@discardableResult
internal func doOutput(chunk:RTMPChunk) -> Int {
var bytes:[UInt8] = []
let chunks:[[UInt8]] = chunk.split(chunkSizeS)
for chunk in chunks {
bytes.append(contentsOf: chunk)
}
doOutput(bytes: bytes)
return bytes.count
}
internal func close(isDisconnected:Bool) {
}
internal func deinitConnection(isDisconnected:Bool) {
}
internal func listen(data:Data?, response:URLResponse?, error:Error?) {
if let error:Error = error {
logger.error("\(error)")
return
}
guard let data:Data = data else {
return
}
logger.info("\(data.bytes):\(response):\(error)")
var buffer:[UInt8] = data.bytes
buffer.remove(at: 0)
inputBuffer.append(contentsOf: buffer)
switch readyState {
case .versionSent:
if (inputBuffer.count < RTMPSocket.sigSize + 1) {
break
}
let c2packet:ByteArray = ByteArray()
.writeBytes(Array(inputBuffer[1...4]))
.writeInt32(Int32(Date().timeIntervalSince1970 - timestamp))
.writeBytes(Array(inputBuffer[9...RTMPSocket.sigSize]))
doOutput(bytes: c2packet.bytes)
inputBuffer = Array(inputBuffer[RTMPSocket.sigSize + 1..<inputBuffer.count])
readyState = .ackSent
fallthrough
case .ackSent:
if (inputBuffer.count < RTMPSocket.sigSize) {
break
}
inputBuffer.removeAll()
readyState = .handshakeDone
case .handshakeDone:
if (inputBuffer.isEmpty){
break
}
let bytes:[UInt8] = inputBuffer
inputBuffer.removeAll()
delegate?.listen(bytes: bytes)
default:
break
}
}
internal func didIdent2(data:Data?, response:URLResponse?, error:Error?) {
logger.info("\(data?.bytes):\(response):\(error)")
if let error:Error = error {
logger.error("\(error)")
}
doRequest(pathComonent: "/open/1", data: Data([0x00]), completionHandler: didOpen)
}
internal func didOpen(data:Data?, response:URLResponse?, error:Error?) {
logger.info("\(data?.bytes):\(response):\(error)")
if let error:Error = error {
logger.error("\(error)")
}
guard let data:Data = data else {
return
}
connectionID = String(data: data, encoding: String.Encoding.utf8)?.trimmingCharacters(in: .whitespacesAndNewlines)
doRequest(pathComonent: "/idel/\(connectionID!)/0", data: Data([0x00]), completionHandler: didIdel0)
}
internal func didIdel0(data:Data?, response:URLResponse?, error:Error?) {
logger.info("\(data?.bytes):\(response):\(error)")
connected = true
}
@discardableResult
final private func doOutput(bytes:[UInt8]) -> Int {
guard let connectionID:String = connectionID else {
return 0
}
index += 1
doRequest(pathComonent: "/send/\(connectionID)/\(index)", data: Data(bytes), completionHandler: listen)
return bytes.count
}
private func idel() {
guard let connectionID:String = connectionID else {
return
}
index += 1
doRequest(pathComonent: "/idel/\(connectionID)/\(index)", data: Data([0x00]), completionHandler: listen)
}
private func doRequest(pathComonent: String, data:Data, completionHandler: ((Data?, URLResponse?, Error?) -> Void)) {
var request:URLRequest = URLRequest(url: baseURL.appendingPathComponent(pathComonent))
request.httpMethod = "POST"
let task:URLSessionUploadTask = session.uploadTask(with: request, from: data, completionHandler: completionHandler)
task.resume()
logger.verbose("\(request)")
}
}
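RTMPTSocket is a new, still minimal RTMPT transport (RTMP chunks tunneled over HTTP POST requests, see the request paths above) that satisfies the same RTMPSocketCompatible protocol, so the connection layer could swap it in without other changes. The diff does not show how that choice is wired up; the sketch below is only an assumed illustration keyed off the URI scheme, and the parameterless initializers are assumptions.

// Assumed wiring: pick a transport by scheme; both conform to RTMPSocketCompatible.
func makeSocket(for uri: URL) -> RTMPSocketCompatible {
    switch uri.scheme ?? "" {
    case "rtmpt", "rtmpts":
        return RTMPTSocket()   // HTTP-tunneled transport added in this commit
    default:
        return RTMPSocket()    // plain TCP transport
    }
}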

View File

@ -14,24 +14,23 @@ enum RTSPMethod: String {
case record = "RECORD"
}
// MARK: RTSPResponder
protocol RTSPResponder: class {
func onResponse(_ response:RTSPResponse)
func on(response:RTSPResponse)
}
// MARK: -
final class RTSPNullResponder: RTSPResponder {
static let instance:RTSPNullResponder = RTSPNullResponder()
func onResponse(_ response:RTSPResponse) {
internal func on(response:RTSPResponse) {
}
}
// MARK: -
class RTSPConnection: NSObject {
static let defaultRTPPort:Int32 = 8000
static internal let defaultRTPPort:Int32 = 8000
var userAgent:String = "lf.swift"
internal var userAgent:String = "lf.swift"
fileprivate var sequence:Int = 0
fileprivate lazy var socket:RTSPSocket = {
@ -42,7 +41,7 @@ class RTSPConnection: NSObject {
fileprivate var responders:[RTSPResponder] = []
func doMethod(_ method: RTSPMethod, _ uri: String, _ responder:RTSPResponder = RTSPNullResponder.instance, _ headerFields:[String:String] = [:]) {
internal func doMethod(_ method: RTSPMethod, _ uri: String, _ responder:RTSPResponder = RTSPNullResponder.instance, _ headerFields:[String:String] = [:]) {
sequence += 1
var request:RTSPRequest = RTSPRequest()
request.uri = uri
@ -55,13 +54,13 @@ class RTSPConnection: NSObject {
}
}
// MARK: RTSPSocketDelegate
extension RTSPConnection: RTSPSocketDelegate {
func listen(_ response: RTSPResponse) {
// MARK: RTSPSocketDelegate
internal func listen(_ response: RTSPResponse) {
guard let responder:RTSPResponder = responders.first else {
return
}
responder.onResponse(response)
responder.on(response: response)
responders.removeFirst()
}
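RTSPResponder's callback is renamed from onResponse(_:) to on(response:), while doMethod keeps its unlabeled parameters. A framework-internal usage sketch with a placeholder URI:

// Minimal responder using the renamed callback.
final class OptionsResponder: RTSPResponder {
    func on(response: RTSPResponse) {
        print("OPTIONS ->", response.statusCode)
    }
}

let rtsp = RTSPConnection()
rtsp.doMethod(.options, "rtsp://localhost/live", OptionsResponder(), [:])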
}

View File

@ -1,15 +1,15 @@
import Foundation
struct RTSPRequest: HTTPRequestConvertible {
var uri:String = "/"
var method:String = ""
var version:String = "RTSP/1.0"
var headerFields:[String: String] = [:]
internal var uri:String = "/"
internal var method:String = ""
internal var version:String = "RTSP/1.0"
internal var headerFields:[String: String] = [:]
init() {
internal init() {
}
init?(bytes:[UInt8]) {
internal init?(bytes:[UInt8]) {
self.bytes = bytes
}
}

View File

@ -1,15 +1,15 @@
import Foundation
struct RTSPResponse: HTTPResponseConvertible {
var version:String = "RTSP/1.0"
var statusCode:String = ""
var headerFields:[String: String] = [:]
var body:[UInt8] = []
internal var version:String = "RTSP/1.0"
internal var statusCode:String = ""
internal var headerFields:[String: String] = [:]
internal var body:[UInt8] = []
init() {
internal init() {
}
init?(bytes:[UInt8]) {
internal init?(bytes:[UInt8]) {
self.bytes = bytes
}
}

View File

@ -1,18 +1,17 @@
import Foundation
// MARK: RTSPSocketDelegate
protocol RTSPSocketDelegate: class {
func listen(_ response:RTSPResponse)
}
// MARK: -
final class RTSPSocket: NetSocket {
static let defaultPort:Int = 554
static internal let defaultPort:Int = 554
weak var delegate:RTSPSocketDelegate?
weak internal var delegate:RTSPSocketDelegate?
fileprivate var requests:[RTSPRequest] = []
override var connected:Bool {
override internal var connected:Bool {
didSet {
if (connected) {
for request in requests {
@ -26,7 +25,7 @@ final class RTSPSocket: NetSocket {
}
}
func doOutput(_ request:RTSPRequest) {
internal func doOutput(_ request:RTSPRequest) {
if (connected) {
if (logger.isEnabledForLogLevel(.verbose)) {
logger.verbose("\(request)")
@ -41,7 +40,7 @@ final class RTSPSocket: NetSocket {
connect(host, port: (uri as NSURL).port?.intValue ?? RTSPSocket.defaultPort)
}
override func listen() {
override internal func listen() {
guard let response:RTSPResponse = RTSPResponse(bytes: inputBuffer) else {
return
}
@ -54,7 +53,7 @@ final class RTSPSocket: NetSocket {
fileprivate func connect(_ hostname:String, port:Int) {
networkQueue.async {
Foundation.Stream.getStreamsToHost(
Stream.getStreamsToHost(
withName: hostname,
port: port,
inputStream: &self.inputStream,

View File

@ -5,12 +5,12 @@ final class RTSPPlaySequenceResponder: RTSPResponder {
fileprivate var stream:RTSPStream
fileprivate var method:RTSPMethod = .options
init(uri:String, stream:RTSPStream) {
internal init(uri:String, stream:RTSPStream) {
self.uri = uri
self.stream = stream
}
func onResponse(_ response: RTSPResponse) {
internal func on(response: RTSPResponse) {
switch method {
case .options:
method = .describe
@ -34,37 +34,37 @@ final class RTSPRecordSequenceResponder: RTSPResponder {
fileprivate var stream:RTSPStream
fileprivate var method:RTSPMethod = .options
init(uri:String, stream:RTSPStream) {
internal init(uri:String, stream:RTSPStream) {
self.uri = uri
self.stream = stream
}
func onResponse(_ response: RTSPResponse) {
internal func on(response: RTSPResponse) {
}
}
// MARK: -
class RTSPStream: Stream {
var sessionID:String?
class RTSPStream: NetStream {
internal var sessionID:String?
fileprivate var services:[RTPService] = []
fileprivate var connection:RTSPConnection
init(connection: RTSPConnection) {
internal init(connection: RTSPConnection) {
self.connection = connection
}
func play(_ uri:String) {
internal func play(uri:String) {
connection.doMethod(.options, uri, RTSPPlaySequenceResponder(uri: uri, stream: self), [:])
}
func record(_ uri:String) {
internal func record(uri:String) {
connection.doMethod(.options, uri, RTSPRecordSequenceResponder(uri: uri, stream: self), [:])
}
func tearDown() {
internal func tearDown() {
}
func listen() {
internal func listen() {
for i in 0..<2 {
let service:RTPService = RTPService(domain: "", type: "_rtp._udp", name: "", port: RTSPConnection.defaultRTPPort + i)
service.startRunning()

View File

@ -1,28 +1,29 @@
import Foundation
protocol SessionDescriptionConvertible {
mutating func appendLine(_ line:String)
mutating func append(line:String)
}
// MARK: -
struct SessionDescription: SessionDescriptionConvertible {
var protocolVersionNumber:String = "0"
var originatorAndSessionIdentifier:String = ""
var sessionName:String = ""
var URIOfDescription:String = ""
var emailAddressWithOptionalNameOfContacts:[String] = []
var phoneNumberWithOptionalNameOfContacts:[String] = []
var connectionInformation:String = ""
var bandwidthInformation:[String] = []
var sessionAttributes:[String:String] = [:]
var time:[TimeDescription] = []
var medias:[MediaDescription] = []
internal var protocolVersionNumber:String = "0"
internal var originatorAndSessionIdentifier:String = ""
internal var sessionName:String = ""
internal var URIOfDescription:String = ""
internal var emailAddressWithOptionalNameOfContacts:[String] = []
internal var phoneNumberWithOptionalNameOfContacts:[String] = []
internal var connectionInformation:String = ""
internal var bandwidthInformation:[String] = []
internal var sessionAttributes:[String:String] = [:]
internal var time:[TimeDescription] = []
internal var medias:[MediaDescription] = []
fileprivate var media:MediaDescription?
mutating func appendLine(_ line:String) {
mutating internal func append(line:String) {
let character:String = line.substring(to: line.characters.index(line.startIndex, offsetBy: 1))
if (media != nil && character != "m") {
media?.appendLine(line)
media?.append(line: line)
return
}
let value:String = line.substring(from: line.characters.index(line.startIndex, offsetBy: 2))
@ -50,8 +51,8 @@ struct SessionDescription: SessionDescriptionConvertible {
}
}
// MARK: CustomStringConvertible
extension SessionDescription: CustomStringConvertible {
// MARK: CustomStringConvertible
var description:String {
get {
return Mirror(reflecting: self).description
@ -59,7 +60,7 @@ extension SessionDescription: CustomStringConvertible {
set {
let lines:[String] = newValue.components(separatedBy: "\n")
for line in lines {
appendLine(line)
append(line:line)
}
if let media:MediaDescription = media {
medias.append(media)
@ -71,14 +72,14 @@ extension SessionDescription: CustomStringConvertible {
// MARK: -
struct TimeDescription: SessionDescriptionConvertible {
var timeTheSessionIsActive:String = ""
var repeatTimes:[String] = []
init(timeTheSessionIsActive:String) {
internal var timeTheSessionIsActive:String = ""
internal var repeatTimes:[String] = []
internal init(timeTheSessionIsActive:String) {
self.timeTheSessionIsActive = timeTheSessionIsActive
}
mutating func appendLine(_ line:String) {
mutating internal func append(line:String) {
let value:String = line.substring(from: line.characters.index(line.startIndex, offsetBy: 2))
switch line.substring(to: line.characters.index(line.startIndex, offsetBy: 1)) {
case "r":
@ -91,18 +92,18 @@ struct TimeDescription: SessionDescriptionConvertible {
// MARK: -
struct MediaDescription: SessionDescriptionConvertible {
var mediaNameAndTransportAddress:String = ""
var mediaTitleInformationField:[String] = []
var connectionInformation:[String] = []
var bandwidthInformation:[String] = []
var encryptionKey:[String] = []
var mediaAttributes:[String:String] = [:]
internal var mediaNameAndTransportAddress:String = ""
internal var mediaTitleInformationField:[String] = []
internal var connectionInformation:[String] = []
internal var bandwidthInformation:[String] = []
internal var encryptionKey:[String] = []
internal var mediaAttributes:[String:String] = [:]
init(mediaNameAndTransportAddress: String) {
internal init(mediaNameAndTransportAddress: String) {
self.mediaNameAndTransportAddress = mediaNameAndTransportAddress
}
mutating func appendLine(_ line:String) {
mutating internal func append(line:String) {
let value:String = line.substring(from: line.characters.index(line.startIndex, offsetBy: 2))
switch line.substring(to: line.characters.index(line.startIndex, offsetBy: 1)) {
case "i":

View File

@ -1,6 +1,5 @@
import Foundation
// MARK: - ByteArrayConvertible
protocol ByteArrayConvertible {
var bytes:[UInt8] { get }
@ -63,32 +62,32 @@ protocol ByteArrayConvertible {
// MARK: -
open class ByteArray: ByteArrayConvertible {
static let sizeOfInt8:Int = 1
static let sizeOfInt16:Int = 2
static let sizeOfInt24:Int = 3
static let sizeOfInt32:Int = 4
static let sizeOfFloat:Int = 4
static let sizeOfDouble:Int = 8
static internal let sizeOfInt8:Int = 1
static internal let sizeOfInt16:Int = 2
static internal let sizeOfInt24:Int = 3
static internal let sizeOfInt32:Int = 4
static internal let sizeOfFloat:Int = 4
static internal let sizeOfDouble:Int = 8
public enum Error: Swift.Error {
case eof
case parse
}
init() {
internal init() {
}
init(bytes:[UInt8]) {
internal init(bytes:[UInt8]) {
self.bytes = bytes
}
init(data:Data) {
internal init(data:Data) {
bytes = [UInt8](repeating: 0x00, count: data.count)
(data as NSData).getBytes(&bytes, length: data.count)
}
fileprivate(set) var bytes:[UInt8] = []
open var length:Int {
get {
return bytes.count
@ -298,7 +297,7 @@ open class ByteArray: ByteArrayConvertible {
return self
}
func sequence(_ length:Int, lambda:((ByteArray) -> Void)) {
internal func sequence(_ length:Int, lambda:((ByteArray) -> Void)) {
let r:Int = (bytes.count - position) % length
for index in stride(from: bytes.startIndex.advanced(by: position), to: bytes.endIndex.advanced(by: -r), by: length) {
lambda(ByteArray(bytes: Array(bytes[index..<index.advanced(by: length)])))
@ -308,7 +307,7 @@ open class ByteArray: ByteArrayConvertible {
}
}
func toUInt32() -> [UInt32] {
internal func toUInt32() -> [UInt32] {
let size:Int = MemoryLayout<UInt32>.size
if ((bytes.endIndex - position) % size != 0) {
return []
@ -321,8 +320,8 @@ open class ByteArray: ByteArrayConvertible {
}
}
// MARK: CustomStringConvertible
extension ByteArray: CustomStringConvertible {
// MARK: CustomStringConvertible
public var description:String {
return Mirror(reflecting: self).description
}
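
ByteArray's surface in this hunk is small but shows how it is used inside the module: a byte buffer with a read position, fixed-size walking via sequence(_:lambda:), and whole-buffer conversion via toUInt32(). A hedged sketch that touches only members visible above; the initializers are internal after this change, so code like this lives inside the lf module.

    // Uses only API shown in this diff: init(bytes:), length, toUInt32(), sequence(_:lambda:).
    // toUInt32() converts the remaining bytes only when their count is a multiple of
    // MemoryLayout<UInt32>.size; otherwise it returns [].
    let words: [UInt32] = ByteArray(bytes: [0x00, 0x00, 0x00, 0x01,
                                            0x00, 0x00, 0x00, 0x02]).toUInt32()
    print(words.count)   // 2, assuming the read position starts at 0

    // sequence(_:lambda:) walks the buffer in fixed-size slices, handing each one
    // to the closure wrapped in its own ByteArray.
    ByteArray(bytes: [0x47, 0x40, 0x00, 0x10, 0x00, 0x00, 0xb0, 0x0d]).sequence(4) { slice in
        print(slice.length)   // 4, 4
    }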

View File

@ -1,11 +1,11 @@
import Foundation
final class CRC32 {
static let MPEG2:CRC32 = CRC32(polynomial: 0x04c11db7)
static internal let MPEG2:CRC32 = CRC32(polynomial: 0x04c11db7)
let table:[UInt32]
internal let table:[UInt32]
init(polynomial:UInt32) {
internal init(polynomial:UInt32) {
var table:[UInt32] = [UInt32](repeating: 0x00000000, count: 256)
for i in 0..<table.count {
var crc:UInt32 = UInt32(i) << 24
@ -17,11 +17,11 @@ final class CRC32 {
self.table = table
}
func calculate(_ bytes:[UInt8]) -> UInt32 {
internal func calculate(_ bytes:[UInt8]) -> UInt32 {
return calculate(bytes, seed: nil)
}
func calculate(_ bytes:[UInt8], seed:UInt32?) -> UInt32 {
internal func calculate(_ bytes:[UInt8], seed:UInt32?) -> UInt32 {
var crc:UInt32 = seed ?? 0xffffffff
for i in 0..<bytes.count {
crc = (crc << 8) ^ table[Int((crc >> 24) ^ (UInt32(bytes[i]) & 0xff) & 0xff)]
@ -30,8 +30,8 @@ final class CRC32 {
}
}
// MARK: CustomStringConvertible
extension CRC32: CustomStringConvertible {
// MARK: CustomStringConvertible
var description:String {
return Mirror(reflecting: self).description
}
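
CRC32 here is table-driven: the table is derived once from the polynomial (0x04c11db7 for the MPEG-2 variant) and calculate(_:seed:) folds each byte into a running register seeded with 0xffffffff. A hedged usage sketch from inside the module, where these internal members are visible; the chaining relies on there being no final XOR step, which matches the loop shown above.

    // MPEG2 is the shared instance built from the 0x04c11db7 polynomial.
    let header: [UInt8] = [0x47, 0x40, 0x00, 0x10]
    let crc: UInt32 = CRC32.MPEG2.calculate(header)

    // A previous result can be fed back in as the seed to continue the
    // checksum across split buffers.
    let firstHalf = CRC32.MPEG2.calculate([0x47, 0x40])
    let continued = CRC32.MPEG2.calculate([0x00, 0x10], seed: firstHalf)
    print(crc == continued)   // true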

View File

@ -1,17 +1,16 @@
import Foundation
import AVFoundation
// MARK: ClockedQueueDelegate
protocol ClockedQueueDelegate:class {
func queue(_ buffer: CMSampleBuffer)
}
// MARK: -
class ClockedQueue {
var bufferTime:TimeInterval = 0.1 // sec
fileprivate(set) var running:Bool = false
fileprivate(set) var duration:TimeInterval = 0
weak var delegate:ClockedQueueDelegate?
internal var bufferTime:TimeInterval = 0.1 // sec
internal fileprivate(set) var running:Bool = false
internal fileprivate(set) var duration:TimeInterval = 0
internal weak var delegate:ClockedQueueDelegate?
fileprivate var date:Date = Date()
fileprivate var buffers:[CMSampleBuffer] = []
@ -30,7 +29,7 @@ class ClockedQueue {
}
}
func enqueue(_ buffer:CMSampleBuffer) {
internal func enqueue(_ buffer:CMSampleBuffer) {
do {
try mutex.lock()
duration += buffer.duration.seconds
@ -41,12 +40,12 @@ class ClockedQueue {
}
if (timer == nil) {
timer = Timer(
timeInterval: 0.001, target: self, selector: #selector(ClockedQueue.onTimer(_:)), userInfo: nil, repeats: true
timeInterval: 0.001, target: self, selector: #selector(ClockedQueue.on(timer:)), userInfo: nil, repeats: true
)
}
}
@objc func onTimer(_ timer:Timer) {
@objc internal func on(timer:Timer) {
guard let buffer:CMSampleBuffer = buffers.first , bufferTime <= self.duration else {
return
}
@ -67,8 +66,8 @@ class ClockedQueue {
}
}
// MARK: CustomStringConvertible
extension ClockedQueue: CustomStringConvertible {
// MARK: CustomStringConvertible
var description:String {
return Mirror(reflecting: self).description
}
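
ClockedQueue buffers CMSampleBuffers behind a mutex and, once at least bufferTime seconds of media have accumulated, its 1 ms timer starts handing buffers back through ClockedQueueDelegate. A hedged consumer sketch: the type is internal, so this sits inside the lf module, the no-argument initializer is assumed, and producing a CMSampleBuffer is out of scope here.

    import AVFoundation

    // Receives the buffers the queue releases from its timer callback.
    final class QueueConsumer: ClockedQueueDelegate {
        let queue = ClockedQueue()   // assumed default initializer

        init() {
            queue.bufferTime = 0.2   // hold roughly 200 ms of media before draining
            queue.delegate = self
        }

        func push(_ sampleBuffer: CMSampleBuffer) {
            queue.enqueue(sampleBuffer)
        }

        // ClockedQueueDelegate: called once enough duration has been buffered.
        func queue(_ buffer: CMSampleBuffer) {
            // hand the buffer to a decoder or renderer here
        }
    }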

View File

@ -2,7 +2,7 @@ import Foundation
import AVFoundation
public final class DeviceUtil {
fileprivate init() {
private init() {
}
#if os(iOS)
@ -22,31 +22,31 @@ public final class DeviceUtil {
}
#endif
static public func deviceWithPosition(_ position:AVCaptureDevicePosition) -> AVCaptureDevice? {
static public func device(withPosition:AVCaptureDevicePosition) -> AVCaptureDevice? {
for device in AVCaptureDevice.devices() {
guard let device:AVCaptureDevice = device as? AVCaptureDevice else {
continue
}
if (device.hasMediaType(AVMediaTypeVideo) && device.position == position) {
if (device.hasMediaType(AVMediaTypeVideo) && device.position == withPosition) {
return device
}
}
return nil
}
static public func deviceWithLocalizedName(_ localizedName:String, mediaType:String) -> AVCaptureDevice? {
static public func device(withLocalizedName:String, mediaType:String) -> AVCaptureDevice? {
for device in AVCaptureDevice.devices() {
guard let device:AVCaptureDevice = device as? AVCaptureDevice else {
continue
}
if (device.hasMediaType(mediaType) && device.localizedName == localizedName) {
if (device.hasMediaType(mediaType) && device.localizedName == withLocalizedName) {
return device
}
}
return nil
}
static func getActualFPS(_ fps:Float64, device:AVCaptureDevice) -> (fps:Float64, duration:CMTime)? {
static internal func getActualFPS(_ fps:Float64, device:AVCaptureDevice) -> (fps:Float64, duration:CMTime)? {
var durations:[CMTime] = []
var frameRates:[Float64] = []
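
The DeviceUtil renames line up with the call sites in the example app: device(withPosition:) for camera selection and device(withLocalizedName:mediaType:) for lookup by name. A short usage sketch against the Swift 3 era AVFoundation constants used throughout this commit; the localized name below is illustrative and hardware dependent.

    import AVFoundation

    // Camera lookup by position (public API after the rename).
    let back: AVCaptureDevice? = DeviceUtil.device(withPosition: .back)
    let front: AVCaptureDevice? = DeviceUtil.device(withPosition: .front)

    // Lookup by localized device name, e.g. for a specific microphone.
    let mic: AVCaptureDevice? = DeviceUtil.device(
        withLocalizedName: "Built-in Microphone",   // illustrative name
        mediaType: AVMediaTypeAudio
    )
    print(back != nil, front != nil, mic != nil)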

View File

@ -4,10 +4,10 @@ import Foundation
flash.events.IEventDispatcher for Swift
*/
public protocol IEventDispatcher: class {
func addEventListener(_ type:String, selector:Selector, observer:AnyObject?, useCapture:Bool)
func removeEventListener(_ type:String, selector:Selector, observer:AnyObject?, useCapture:Bool)
func dispatchEvent(_ e:Event)
func dispatchEventWith(_ type:String, bubbles:Bool, data:Any?)
func addEventListener(type:String, selector:Selector, observer:AnyObject?, useCapture:Bool)
func removeEventListener(type:String, selector:Selector, observer:AnyObject?, useCapture:Bool)
func dispatch(event:Event)
func dispatch(type:String, bubbles:Bool, data:Any?)
}
public enum EventPhase: UInt8 {
@ -72,27 +72,27 @@ open class EventDispatcher: NSObject, IEventDispatcher {
target = nil
}
public final func addEventListener(_ type:String, selector:Selector, observer:AnyObject? = nil, useCapture:Bool = false) {
public final func addEventListener(type:String, selector:Selector, observer:AnyObject? = nil, useCapture:Bool = false) {
NotificationCenter.default.addObserver(
observer ?? target ?? self, selector: selector, name: NSNotification.Name(rawValue: "\(type)/\(useCapture)"), object: target ?? self
)
}
public final func removeEventListener(_ type:String, selector:Selector, observer:AnyObject? = nil, useCapture:Bool = false) {
public final func removeEventListener(type:String, selector:Selector, observer:AnyObject? = nil, useCapture:Bool = false) {
NotificationCenter.default.removeObserver(
observer ?? target ?? self, name: NSNotification.Name(rawValue: "\(type)/\(useCapture)"), object: target ?? self
)
}
open func dispatchEvent(_ e:Event) {
e.target = target ?? self
open func dispatch(event:Event) {
event.target = target ?? self
NotificationCenter.default.post(
name: Notification.Name(rawValue: "\(e.type)/false"), object: target ?? self, userInfo: ["event": e]
name: Notification.Name(rawValue: "\(event.type)/false"), object: target ?? self, userInfo: ["event": event]
)
e.target = nil
event.target = nil
}
public final func dispatchEventWith(_ type:String, bubbles:Bool, data:Any?) {
dispatchEvent(Event(type: type, bubbles: bubbles, data: data))
public final func dispatch(type:String, bubbles:Bool, data:Any?) {
dispatch(event: Event(type: type, bubbles: bubbles, data: data))
}
}
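
The listener API drops its underscore labels and the two dispatch methods become dispatch(event:) and dispatch(type:bubbles:data:). A hedged sketch of registering and firing a listener with the new labels; the "sample" event type is illustrative, the no-argument EventDispatcher initializer is assumed, and the "event" userInfo key comes from dispatch(event:) above.

    import Foundation

    final class StatusObserver: NSObject {
        let dispatcher = EventDispatcher()   // assumed default initializer

        override init() {
            super.init()
            dispatcher.addEventListener(type: "sample", selector: #selector(StatusObserver.on(status:)), observer: self)
        }

        deinit {
            dispatcher.removeEventListener(type: "sample", selector: #selector(StatusObserver.on(status:)), observer: self)
        }

        @objc func on(status: Notification) {
            // dispatch(event:) posts the Event in userInfo under the "event" key.
            if let event = status.userInfo?["event"] as? Event {
                print(event.type)
            }
        }

        func fire() {
            dispatcher.dispatch(type: "sample", bubbles: false, data: nil)
        }
    }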

View File

@ -10,11 +10,11 @@ final class Mutex {
case perm
}
fileprivate let mutex:UnsafeMutablePointer<pthread_mutex_t>
fileprivate let condition:UnsafeMutablePointer<pthread_cond_t>
fileprivate let attribute:UnsafeMutablePointer<pthread_mutexattr_t>
private let mutex:UnsafeMutablePointer<pthread_mutex_t>
private let condition:UnsafeMutablePointer<pthread_cond_t>
private let attribute:UnsafeMutablePointer<pthread_mutexattr_t>
init() {
internal init() {
mutex = UnsafeMutablePointer<pthread_mutex_t>.allocate(capacity: MemoryLayout<pthread_mutex_t>.size)
condition = UnsafeMutablePointer<pthread_cond_t>.allocate(capacity: MemoryLayout<pthread_cond_t>.size)
attribute = UnsafeMutablePointer<pthread_mutexattr_t>.allocate(capacity: MemoryLayout<pthread_mutexattr_t>.size)
@ -30,7 +30,7 @@ final class Mutex {
pthread_mutex_destroy(mutex)
}
func lock() throws {
internal func lock() throws {
switch pthread_mutex_trylock(mutex) {
case EBUSY:
throw Mutex.Error.busy
@ -41,15 +41,15 @@ final class Mutex {
}
}
func unlock() {
internal func unlock() {
pthread_mutex_unlock(mutex)
}
func wait() -> Bool {
internal func wait() -> Bool {
return pthread_cond_wait(condition, mutex) == 0
}
func signal() -> Bool {
internal func signal() -> Bool {
return pthread_cond_signal(condition) == 0
}
}
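
Mutex.lock() is built on pthread_mutex_trylock, so it throws (Error.busy on EBUSY) instead of blocking, and callers pair it with unlock() and decide how to handle contention. A small internal-use sketch of that pattern, mirroring ClockedQueue.enqueue above.

    // Inside the lf module (Mutex is internal).
    let mutex = Mutex()
    var sharedBuffer: [UInt8] = []

    func appendLocked(_ bytes: [UInt8]) {
        do {
            try mutex.lock()
            defer { mutex.unlock() }
            sharedBuffer.append(contentsOf: bytes)
        } catch {
            // Error.busy (EBUSY) or another pthread failure: retry or drop the work.
        }
    }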

View File

@ -2,7 +2,7 @@ import Foundation
import AVFoundation
final class VideoGravityUtil {
@inline(__always) static func calclute(_ videoGravity:String, inRect:inout CGRect, fromRect:inout CGRect) {
@inline(__always) static internal func calclute(_ videoGravity:String, inRect:inout CGRect, fromRect:inout CGRect) {
switch videoGravity {
case AVLayerVideoGravityResizeAspect:
resizeAspect(&inRect, fromRect: &fromRect)
@ -13,7 +13,7 @@ final class VideoGravityUtil {
}
}
@inline(__always) static func resizeAspect(_ inRect:inout CGRect, fromRect:inout CGRect) {
@inline(__always) static internal func resizeAspect(_ inRect:inout CGRect, fromRect:inout CGRect) {
let xRatio:CGFloat = inRect.width / fromRect.width
let yRatio:CGFloat = inRect.height / fromRect.height
if (yRatio < xRatio) {
@ -25,7 +25,7 @@ final class VideoGravityUtil {
}
}
@inline(__always) static func resizeAspectFill(_ inRect:inout CGRect, fromRect:inout CGRect) {
@inline(__always) static internal func resizeAspectFill(_ inRect:inout CGRect, fromRect:inout CGRect) {
let inRectAspect:CGFloat = inRect.size.width / inRect.size.height
let fromRectAspect:CGFloat = fromRect.size.width / fromRect.size.height
if (inRectAspect < fromRectAspect) {
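
VideoGravityUtil.calclute (the spelling is the source's) maps an AVLayerVideoGravity string onto an in-place rect adjustment: resizeAspect letterboxes, resizeAspectFill scales to fill and crops. A hedged internal-use sketch; the concrete rect values are illustrative.

    import CoreGraphics
    import AVFoundation

    // Inside the lf module (these helpers are internal).
    var inRect = CGRect(x: 0, y: 0, width: 1280, height: 720)    // layer bounds
    var fromRect = CGRect(x: 0, y: 0, width: 640, height: 480)   // video's natural size

    // Adjusts the rects in place so the 4:3 video is aspect-fit into the 16:9 layer.
    VideoGravityUtil.calclute(AVLayerVideoGravityResizeAspect, inRect: &inRect, fromRect: &fromRect)
    print(inRect, fromRect)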

View File

@ -4,4 +4,56 @@ import XCTest
@testable import lf
final class AMF0SerializerTests: XCTestCase {
static let connectionChunk:ASObject = [
"tcUrl": "rtmp://localhost:1935/live",
"flashVer": "FMLE/3.0 (compatible; FMSc/1.0)",
"swfUrl": nil,
"app": "live",
"fpad": false,
"audioCodecs": Double(1024),
"videoCodecs": Double(128),
"videoFunction": Double(1),
"capabilities": Double(239),
"pageUrl": nil,
"objectEncoding": Double(0)
]
func testConnectionChunk() {
var amf:AMFSerializer = AMF0Serializer()
amf.serialize(AMF0SerializerTests.connectionChunk)
amf.position = 0
let result:ASObject = try! amf.deserialize()
for key in AMF0SerializerTests.connectionChunk.keys {
let value:Any? = result[key]
switch key {
case "tcUrl":
XCTAssertEqual(value as? String, "rtmp://localhost:1935/live")
case "flashVer":
XCTAssertEqual(value as? String, "FMLE/3.0 (compatible; FMSc/1.0)")
case "swfUrl":
//XCTAssertNil(value!)
break
case "app":
XCTAssertEqual(value as? String, "live")
case "fpad":
XCTAssertEqual(value as? Bool, false)
case "audioCodecs":
XCTAssertEqual(value as? Double, Double(1024))
case "videoCodecs":
XCTAssertEqual(value as? Double, Double(128))
case "videoFunction":
XCTAssertEqual(value as? Double, Double(1))
case "capabilities":
XCTAssertEqual(value as? Double, Double(239))
case "pageUrl":
//XCTAssertNil(value!)
break
case "objectEncoding":
XCTAssertEqual(value as? Double, Double(0))
default:
XCTFail(key.debugDescription)
}
}
}
}
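
The new test round-trips an RTMP connect-style command object through AMF0 and checks each field by type. Outside XCTest the same round trip looks roughly like this; ASObject is the library's AMF object dictionary, and the overloaded deserialize() is picked by the annotated result type, exactly as in the test above.

    // Minimal round trip mirroring testConnectionChunk, without the assertions.
    let object: ASObject = ["app": "live", "objectEncoding": Double(0)]
    var amf: AMFSerializer = AMF0Serializer()
    amf.serialize(object)
    amf.position = 0

    let result: ASObject = try! amf.deserialize()
    let app: Any? = result["app"]
    print(app as? String ?? "")   // "live"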

View File

@ -1,16 +0,0 @@
import Foundation
import XCTest
@testable import lf
final class AMF3SerializerTests: XCTestCase {
func testBool() {
let amf:AMF3Serializer = AMF3Serializer()
amf.serialize(true)
amf.serialize(false)
amf.position = 0
XCTAssertTrue(try! amf.deserialize())
XCTAssertFalse(try! amf.deserialize())
}
}

View File

@ -8,7 +8,7 @@ final class RTMPChunkTests: XCTestCase {
let bytes:[UInt8] = [130, 0, 0, 0, 0, 4, 9, 104]
let chunk:RTMPChunk? = RTMPChunk(bytes: bytes, size: 128)
if let chunk:RTMPChunk = chunk {
XCTAssertEqual("\(chunk.type)", "Two")
XCTAssertEqual(chunk.type, .two)
}
}
}

View File

@ -28,6 +28,5 @@ final class SessionDescriptionTests: XCTestCase {
func testString() {
var session:SessionDescription = SessionDescription()
session.description = SessionDescriptionTests.contents
print(session.description)
}
}

View File

@ -9,4 +9,14 @@ final class SwiftCoreExtensionTests: XCTestCase {
XCTAssertEqual(Int32.min, Int32(bytes: Int32.min.bytes))
XCTAssertEqual(Int32.max, Int32(bytes: Int32.max.bytes))
}
func testArraySplit() {
let data:[UInt8] = [0, 1, 2, 3, 4, 5, 6, 7, 2, 8, 9]
let result:[[UInt8]] = data.split(by: 3)
let answer:[[UInt8]] = [[0, 1, 2], [3, 4, 5], [6, 7, 2], [8, 9]]
for i in 0..<result.count {
XCTAssertEqual(result[i], answer[i])
}
}
}
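
testArraySplit pins down the chunking contract: split(by: 3) yields consecutive three-element slices and whatever remains as a shorter tail. The extension itself is defined elsewhere in the library; a hypothetical implementation that satisfies exactly this expectation might look like the following.

    // Illustrative only; the shipped extension lives elsewhere in the library
    // and may be implemented differently.
    extension Array {
        func split(by size: Int) -> [[Element]] {
            var chunks: [[Element]] = []
            var index = startIndex
            while index < endIndex {
                let end = Swift.min(index + size, endIndex)
                chunks.append(Array(self[index..<end]))
                index = end
            }
            return chunks
        }
    }

    let data: [UInt8] = [0, 1, 2, 3, 4, 5, 6, 7, 2, 8, 9]
    print(data.split(by: 3))   // [[0, 1, 2], [3, 4, 5], [6, 7, 2], [8, 9]]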

View File

@ -1,7 +1,7 @@
Pod::Spec.new do |s|
s.name = "lf"
s.version = "0.4.1"
s.version = "0.5.0"
s.summary = "Camera and Microphone streaming library via RTMP, HLS for iOS, macOS."
s.description = <<-DESC
@ -21,7 +21,7 @@ Pod::Spec.new do |s|
s.osx.source_files = "Platforms/macOS/*.{h,swift}"
s.source_files = "Sources/**/*.swift"
s.dependency 'XCGLogger', '~> 3.3'
s.dependency 'XCGLogger', '~> 4.0.0'
end

View File

@ -10,6 +10,7 @@
05932E852349517AF5114CB1 /* Pods_Example_iOS.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = FBC39416255B1FE34117A703 /* Pods_Example_iOS.framework */; };
2901A4EE1D437170002BBD23 /* ClockedQueue.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2901A4ED1D437170002BBD23 /* ClockedQueue.swift */; };
2901A4EF1D437662002BBD23 /* ClockedQueue.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2901A4ED1D437170002BBD23 /* ClockedQueue.swift */; };
2915EC4D1D85BB8C00621092 /* RTMPTSocket.swift in Sources */ = {isa = PBXBuildFile; fileRef = 294852551D84BFAD002DE492 /* RTMPTSocket.swift */; };
291F4E381CF206E600F59C51 /* Icon.png in Resources */ = {isa = PBXBuildFile; fileRef = 291F4E361CF206E200F59C51 /* Icon.png */; };
2923A1F31D63004E0019FBCD /* VisualEffect.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2965436A1D62FEB700734698 /* VisualEffect.swift */; };
2923A1F41D6300510019FBCD /* MainWindowController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 296543691D62FEB700734698 /* MainWindowController.swift */; };
@ -26,8 +27,7 @@
293120501D4529FB00B14211 /* RTSPResponse.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2931204D1D4522E400B14211 /* RTSPResponse.swift */; };
2942424D1CF4C01300D65DCB /* MD5.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2942424C1CF4C01300D65DCB /* MD5.swift */; };
2942424F1CF4C02300D65DCB /* MD5Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2942424E1CF4C02300D65DCB /* MD5Tests.swift */; };
294B155F1D32009C004D021D /* Stream.swift in Sources */ = {isa = PBXBuildFile; fileRef = 294B155E1D32009C004D021D /* Stream.swift */; };
294B15601D322318004D021D /* Stream.swift in Sources */ = {isa = PBXBuildFile; fileRef = 294B155E1D32009C004D021D /* Stream.swift */; };
294852571D852499002DE492 /* RTMPTSocket.swift in Sources */ = {isa = PBXBuildFile; fileRef = 294852551D84BFAD002DE492 /* RTMPTSocket.swift */; };
2955F51F1D09EBAD004CC995 /* VisualEffect.swift in Sources */ = {isa = PBXBuildFile; fileRef = 296897461CDB01D20074D5F0 /* VisualEffect.swift */; };
2965434F1D62FAED00734698 /* RTMPConnectionTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 2965434E1D62FAED00734698 /* RTMPConnectionTests.m */; };
296543601D62FE6A00734698 /* AudioUtil.swift in Sources */ = {isa = PBXBuildFile; fileRef = 296543561D62FE6200734698 /* AudioUtil.swift */; };
@ -49,7 +49,6 @@
2976A4871D49045700B53EF2 /* DeviceUtil.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2976A4851D4903C300B53EF2 /* DeviceUtil.swift */; };
29798E521CE5DF1A00F5CBD0 /* MP4File.swift in Sources */ = {isa = PBXBuildFile; fileRef = 29798E511CE5DF1900F5CBD0 /* MP4File.swift */; };
29798E671CE610F500F5CBD0 /* lf.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 29B8761B1CD701F900FC07DA /* lf.framework */; };
29798E681CE6110F00F5CBD0 /* AMF3SerializerTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 29B876CD1CD70CE700FC07DA /* AMF3SerializerTests.swift */; };
29798E691CE6110F00F5CBD0 /* ASClassTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 29B876CE1CD70CE700FC07DA /* ASClassTests.swift */; };
29798E6A1CE6110F00F5CBD0 /* ByteArrayTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 29B876CF1CD70CE700FC07DA /* ByteArrayTests.swift */; };
29798E6B1CE6110F00F5CBD0 /* CRC32Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 29B876D01CD70CE700FC07DA /* CRC32Tests.swift */; };
@ -74,6 +73,8 @@
299AE0E71D45003D00D26A49 /* RTSPSocket.swift in Sources */ = {isa = PBXBuildFile; fileRef = 299AE0E41D44EC7800D26A49 /* RTSPSocket.swift */; };
299B131D1D35272D00A1E8F5 /* ScreenCaptureSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = 299B131C1D35272D00A1E8F5 /* ScreenCaptureSession.swift */; };
299B13271D3B751400A1E8F5 /* LFView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 299B13261D3B751400A1E8F5 /* LFView.swift */; };
29AF3FCF1D7C744C00E41212 /* NetStream.swift in Sources */ = {isa = PBXBuildFile; fileRef = 29AF3FCE1D7C744C00E41212 /* NetStream.swift */; };
29AF3FD01D7C745200E41212 /* NetStream.swift in Sources */ = {isa = PBXBuildFile; fileRef = 29AF3FCE1D7C744C00E41212 /* NetStream.swift */; };
29B8765B1CD70A7900FC07DA /* AACEncoder.swift in Sources */ = {isa = PBXBuildFile; fileRef = 29B876571CD70A7900FC07DA /* AACEncoder.swift */; };
29B8765C1CD70A7900FC07DA /* AVCDecoder.swift in Sources */ = {isa = PBXBuildFile; fileRef = 29B876581CD70A7900FC07DA /* AVCDecoder.swift */; };
29B8765D1CD70A7900FC07DA /* AVCEncoder.swift in Sources */ = {isa = PBXBuildFile; fileRef = 29B876591CD70A7900FC07DA /* AVCEncoder.swift */; };
@ -246,7 +247,7 @@
2942424C1CF4C01300D65DCB /* MD5.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = MD5.swift; path = Sources/Util/MD5.swift; sourceTree = SOURCE_ROOT; };
2942424E1CF4C02300D65DCB /* MD5Tests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MD5Tests.swift; sourceTree = "<group>"; };
2945CBBD1B4BE66000104112 /* lf.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = lf.framework; sourceTree = BUILT_PRODUCTS_DIR; };
294B155E1D32009C004D021D /* Stream.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = Stream.swift; path = Sources/Media/Stream.swift; sourceTree = SOURCE_ROOT; };
294852551D84BFAD002DE492 /* RTMPTSocket.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = RTMPTSocket.swift; path = Sources/RTMP/RTMPTSocket.swift; sourceTree = SOURCE_ROOT; };
2965434E1D62FAED00734698 /* RTMPConnectionTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = RTMPConnectionTests.m; sourceTree = "<group>"; };
296543561D62FE6200734698 /* AudioUtil.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = AudioUtil.swift; path = Platforms/macOS/AudioUtil.swift; sourceTree = "<group>"; };
296543571D62FE6200734698 /* GLLFView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = GLLFView.swift; path = Platforms/macOS/GLLFView.swift; sourceTree = "<group>"; };
@ -286,6 +287,7 @@
299B13261D3B751400A1E8F5 /* LFView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = LFView.swift; path = Platforms/iOS/LFView.swift; sourceTree = "<group>"; };
299F7E3A1CD71A97001E7272 /* Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; name = Info.plist; path = Platforms/iOS/Info.plist; sourceTree = "<group>"; };
299F7E3B1CD71A97001E7272 /* lf.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = lf.h; path = Platforms/iOS/lf.h; sourceTree = "<group>"; };
29AF3FCE1D7C744C00E41212 /* NetStream.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = NetStream.swift; path = Sources/Net/NetStream.swift; sourceTree = SOURCE_ROOT; };
29B8761B1CD701F900FC07DA /* lf.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = lf.framework; sourceTree = BUILT_PRODUCTS_DIR; };
29B876571CD70A7900FC07DA /* AACEncoder.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = AACEncoder.swift; path = Sources/Codec/AACEncoder.swift; sourceTree = SOURCE_ROOT; };
29B876581CD70A7900FC07DA /* AVCDecoder.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = AVCDecoder.swift; path = Sources/Codec/AVCDecoder.swift; sourceTree = SOURCE_ROOT; };
@ -333,7 +335,6 @@
29B876B81CD70B3900FC07DA /* ByteArray.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = ByteArray.swift; path = Sources/Util/ByteArray.swift; sourceTree = SOURCE_ROOT; };
29B876B91CD70B3900FC07DA /* CRC32.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = CRC32.swift; path = Sources/Util/CRC32.swift; sourceTree = SOURCE_ROOT; };
29B876BA1CD70B3900FC07DA /* EventDispatcher.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = EventDispatcher.swift; path = Sources/Util/EventDispatcher.swift; sourceTree = SOURCE_ROOT; };
29B876CD1CD70CE700FC07DA /* AMF3SerializerTests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = AMF3SerializerTests.swift; path = Tests/AMF3SerializerTests.swift; sourceTree = SOURCE_ROOT; };
29B876CE1CD70CE700FC07DA /* ASClassTests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = ASClassTests.swift; path = Tests/ASClassTests.swift; sourceTree = SOURCE_ROOT; };
29B876CF1CD70CE700FC07DA /* ByteArrayTests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = ByteArrayTests.swift; path = Tests/ByteArrayTests.swift; sourceTree = SOURCE_ROOT; };
29B876D01CD70CE700FC07DA /* CRC32Tests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = CRC32Tests.swift; path = Tests/CRC32Tests.swift; sourceTree = SOURCE_ROOT; };
@ -356,6 +357,7 @@
29C932941CD76FD300283FC5 /* Example macOS.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = "Example macOS.app"; sourceTree = BUILT_PRODUCTS_DIR; };
29DD70E31D68CACA0021904A /* RTSPStream.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = RTSPStream.swift; path = Sources/RTSP/RTSPStream.swift; sourceTree = SOURCE_ROOT; };
29DD70E51D68CF020021904A /* RTPPacket.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = RTPPacket.swift; path = Sources/RTP/RTPPacket.swift; sourceTree = SOURCE_ROOT; };
29E258361D7C249B0070DC07 /* lf.podspec */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = lf.podspec; sourceTree = "<group>"; };
29E5C0F91D05B32700407208 /* AudioUtil.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = AudioUtil.swift; path = Platforms/iOS/AudioUtil.swift; sourceTree = "<group>"; };
29FE61461CF7DD2700E51833 /* AMF0SerializerTests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = AMF0SerializerTests.swift; sourceTree = "<group>"; };
29FE61481CF7DD3A00E51833 /* RTMPChunkTests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = RTMPChunkTests.swift; sourceTree = "<group>"; };
@ -486,7 +488,6 @@
isa = PBXGroup;
children = (
29FE61461CF7DD2700E51833 /* AMF0SerializerTests.swift */,
29B876CD1CD70CE700FC07DA /* AMF3SerializerTests.swift */,
29B876CE1CD70CE700FC07DA /* ASClassTests.swift */,
29FE61481CF7DD3A00E51833 /* RTMPChunkTests.swift */,
);
@ -544,6 +545,7 @@
2945CBB31B4BE66000104112 = {
isa = PBXGroup;
children = (
29E258361D7C249B0070DC07 /* lf.podspec */,
2981E1301D646E3F00E8F7CA /* Cartfile */,
2997BDD71D50D3EA000AF900 /* Podfile */,
2997BDD31D50D31B000AF900 /* README.md */,
@ -698,6 +700,7 @@
29B876981CD70B1100FC07DA /* NetClient.swift */,
29B876991CD70B1100FC07DA /* NetService.swift */,
29B8769A1CD70B1100FC07DA /* NetSocket.swift */,
29AF3FCE1D7C744C00E41212 /* NetStream.swift */,
);
name = Net;
sourceTree = "<group>";
@ -775,7 +778,6 @@
2976A47D1D48C5C700B53EF2 /* AVMixerRecorder.swift */,
2976A4801D49025B00B53EF2 /* IOComponent.swift */,
29B8768D1CD70AFE00FC07DA /* SoundTransform.swift */,
294B155E1D32009C004D021D /* Stream.swift */,
29B8768E1CD70AFE00FC07DA /* VideoIOComponent.swift */,
29B8768F1CD70AFE00FC07DA /* VisualEffect.swift */,
);
@ -797,6 +799,7 @@
29B876A81CD70B2800FC07DA /* RTMPSharedObject.swift */,
29B876A91CD70B2800FC07DA /* RTMPSocket.swift */,
29B876AA1CD70B2800FC07DA /* RTMPStream.swift */,
294852551D84BFAD002DE492 /* RTMPTSocket.swift */,
);
name = RTMP;
sourceTree = "<group>";
@ -995,6 +998,7 @@
};
29C9327D1CD76FB800283FC5 = {
CreatedOnToolsVersion = 7.3;
DevelopmentTeam = SUEQ2SZ2L5;
LastSwiftMigration = 0800;
};
29C932931CD76FD300283FC5 = {
@ -1331,6 +1335,7 @@
29B876AE1CD70B2800FC07DA /* RTMPAudioPlayback.swift in Sources */,
29B876B11CD70B2800FC07DA /* RTMPMessage.swift in Sources */,
299B131D1D35272D00A1E8F5 /* ScreenCaptureSession.swift in Sources */,
2915EC4D1D85BB8C00621092 /* RTMPTSocket.swift in Sources */,
29C2631C1D0083B50098D4EF /* VideoIOComponent.swift in Sources */,
29B876B41CD70B2800FC07DA /* RTMPSharedObject.swift in Sources */,
29B8766E1CD70AB300FC07DA /* SiwftCore+Extension.swift in Sources */,
@ -1349,6 +1354,7 @@
2931204E1D4522E400B14211 /* RTSPResponse.swift in Sources */,
29B8765D1CD70A7900FC07DA /* AVCEncoder.swift in Sources */,
29B876B31CD70B2800FC07DA /* FLVTag.swift in Sources */,
29AF3FCF1D7C744C00E41212 /* NetStream.swift in Sources */,
29DD70E41D68CACA0021904A /* RTSPStream.swift in Sources */,
299B13271D3B751400A1E8F5 /* LFView.swift in Sources */,
29B876AF1CD70B2800FC07DA /* RTMPChunk.swift in Sources */,
@ -1363,7 +1369,6 @@
2976A47E1D48C5C700B53EF2 /* AVMixerRecorder.swift in Sources */,
29B876B21CD70B2800FC07DA /* RTMPMuxer.swift in Sources */,
29B876851CD70AE800FC07DA /* NALUnit.swift in Sources */,
294B155F1D32009C004D021D /* Stream.swift in Sources */,
29B876BD1CD70B3900FC07DA /* CRC32.swift in Sources */,
29B876B51CD70B2800FC07DA /* RTMPSocket.swift in Sources */,
29B876AB1CD70B2800FC07DA /* AMF0Serializer.swift in Sources */,
@ -1401,7 +1406,6 @@
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
29798E681CE6110F00F5CBD0 /* AMF3SerializerTests.swift in Sources */,
29798E691CE6110F00F5CBD0 /* ASClassTests.swift in Sources */,
2981B7F61D73476B002FA821 /* SessionDescriptionTests.swift in Sources */,
29798E6A1CE6110F00F5CBD0 /* ByteArrayTests.swift in Sources */,
@ -1445,6 +1449,7 @@
29B876FC1CD70D5A00FC07DA /* TSWriter.swift in Sources */,
29B876FD1CD70D5A00FC07DA /* AudioSpecificConfig.swift in Sources */,
29B876FE1CD70D5A00FC07DA /* H264+AVC.swift in Sources */,
294852571D852499002DE492 /* RTMPTSocket.swift in Sources */,
29245AEE1D32347E00AFFB9A /* VideoGravityUtil.swift in Sources */,
29B876FF1CD70D5A00FC07DA /* NALUnit.swift in Sources */,
29B877001CD70D5A00FC07DA /* PacketizedElementaryStream.swift in Sources */,
@ -1472,7 +1477,7 @@
29B877101CD70D5A00FC07DA /* ASClass.swift in Sources */,
29B877111CD70D5A00FC07DA /* RTMPAudioPlayback.swift in Sources */,
29B877121CD70D5A00FC07DA /* RTMPChunk.swift in Sources */,
294B15601D322318004D021D /* Stream.swift in Sources */,
29AF3FD01D7C745200E41212 /* NetStream.swift in Sources */,
2975EF701D60B32600DABA5A /* Mutex.swift in Sources */,
29BCFFA71D68C75B00F1A726 /* RTPService.swift in Sources */,
29B877131CD70D5A00FC07DA /* RTMPConnection.swift in Sources */,
@ -1847,12 +1852,13 @@
isa = XCBuildConfiguration;
baseConfigurationReference = 533C38C964051C778E6633CC /* Pods-Example iOS.debug.xcconfig */;
buildSettings = {
ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = "$(inherited)";
ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES;
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CLANG_ANALYZER_NONNULL = YES;
CODE_SIGN_IDENTITY = "iPhone Developer";
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
DEBUG_INFORMATION_FORMAT = dwarf;
DEVELOPMENT_TEAM = SUEQ2SZ2L5;
ENABLE_BITCODE = NO;
INFOPLIST_FILE = "$(SRCROOT)/Examples/iOS/Info.plist";
IPHONEOS_DEPLOYMENT_TARGET = 8.0;
@ -1868,11 +1874,12 @@
isa = XCBuildConfiguration;
baseConfigurationReference = A1D8ABC4A4E12BFBD293958E /* Pods-Example iOS.release.xcconfig */;
buildSettings = {
ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = "$(inherited)";
ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES;
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CLANG_ANALYZER_NONNULL = YES;
CODE_SIGN_IDENTITY = "iPhone Developer";
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
DEVELOPMENT_TEAM = SUEQ2SZ2L5;
ENABLE_BITCODE = NO;
INFOPLIST_FILE = "$(SRCROOT)/Examples/iOS/Info.plist";
IPHONEOS_DEPLOYMENT_TARGET = 8.0;