[WIP] Migration to Swift 4.0
commit 07e355a57c
parent fca51c38bc

@@ -1 +1 @@
-3.1
+4.0

Cartfile
@@ -1 +1 @@
-github "shogo4405/Logboard" ~> 1.0.1
+github "shogo4405/Logboard" ~> 1.1.1

LiveViewController.swift
@@ -39,7 +39,7 @@ final class LiveViewController: UIViewController {
     @IBOutlet var fpsControl:UISegmentedControl?
     @IBOutlet var effectSegmentControl:UISegmentedControl?

-    var currentPosition:AVCaptureDevicePosition = AVCaptureDevicePosition.back
+    var currentPosition:AVCaptureDevice.Position = .back

     override func viewDidLoad() {
         super.viewDidLoad()
@@ -47,7 +47,7 @@ final class LiveViewController: UIViewController {
         rtmpStream = RTMPStream(connection: rtmpConnection)
         rtmpStream.syncOrientation = true
         rtmpStream.captureSettings = [
-            "sessionPreset": AVCaptureSessionPreset1280x720,
+            "sessionPreset": AVCaptureSession.Preset.hd1280x720.rawValue,
             "continuousAutofocus": true,
             "continuousExposure": true,
         ]
@@ -67,7 +67,7 @@ final class LiveViewController: UIViewController {
     override func viewWillAppear(_ animated: Bool) {
         logger.info("viewWillAppear")
         super.viewWillAppear(animated)
-        rtmpStream.attachAudio(AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)) { error in
+        rtmpStream.attachAudio(AVCaptureDevice.default(for: .audio)) { error in
             logger.warn(error.description)
         }
         rtmpStream.attachCamera(DeviceUtil.device(withPosition: currentPosition)) { error in
@@ -87,7 +87,7 @@ final class LiveViewController: UIViewController {
     @IBAction func rotateCamera(_ sender:UIButton) {
         logger.info("rotateCamera")
-        let position:AVCaptureDevicePosition = currentPosition == .back ? .front : .back
+        let position:AVCaptureDevice.Position = currentPosition == .back ? .front : .back
         rtmpStream.attachCamera(DeviceUtil.device(withPosition: position)) { error in
             logger.warn(error.description)
         }
@@ -124,18 +124,18 @@ final class LiveViewController: UIViewController {
         if (publish.isSelected) {
             UIApplication.shared.isIdleTimerDisabled = false
             rtmpConnection.close()
-            rtmpConnection.removeEventListener(Event.RTMP_STATUS, selector:#selector(LiveViewController.rtmpStatusHandler(_:)), observer: self)
+            rtmpConnection.removeEventListener(Event.RTMP_STATUS, selector:#selector(self.rtmpStatusHandler(_:)), observer: self)
             publish.setTitle("●", for: UIControlState())
         } else {
             UIApplication.shared.isIdleTimerDisabled = true
-            rtmpConnection.addEventListener(Event.RTMP_STATUS, selector:#selector(LiveViewController.rtmpStatusHandler(_:)), observer: self)
+            rtmpConnection.addEventListener(Event.RTMP_STATUS, selector:#selector(self.rtmpStatusHandler(_:)), observer: self)
             rtmpConnection.connect(Preference.defaultInstance.uri!)
             publish.setTitle("■", for: UIControlState())
         }
         publish.isSelected = !publish.isSelected
     }

-    func rtmpStatusHandler(_ notification:Notification) {
+    @objc func rtmpStatusHandler(_ notification:Notification) {
         let e:Event = Event.from(notification)
         if let data:ASObject = e.data as? ASObject , let code:String = data["code"] as? String {
             switch code {

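Migration note (Swift 4 / iOS 11 SDK): AVFoundation's loose constants moved into nested types, which is what the LiveViewController hunks above track: AVCaptureDevicePosition becomes AVCaptureDevice.Position, AVCaptureSessionPreset1280x720 becomes AVCaptureSession.Preset.hd1280x720, and AVCaptureDevice.defaultDevice(withMediaType:) becomes AVCaptureDevice.default(for:). A minimal sketch of the new spellings (illustrative only, not part of the commit):

    import AVFoundation

    let position: AVCaptureDevice.Position = .back      // was AVCaptureDevicePosition.back
    let preset: AVCaptureSession.Preset = .hd1280x720   // was the String constant AVCaptureSessionPreset1280x720
    let mic: AVCaptureDevice? = AVCaptureDevice.default(for: .audio)

    // Preset now wraps the old String, so String-keyed dictionaries
    // such as captureSettings still take preset.rawValue.
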
Examples/iOS/Screencast/MovieClipHandler.swift (deleted)
@@ -1,27 +0,0 @@
-import HaishinKit
-import Foundation
-import ReplayKit
-
-class MovieClipHandler: RPBroadcastMP4ClipHandler {
-    private var broadcaster:RTMPBroadcaster = RTMPBroadcaster()
-
-    override open func processMP4Clip(with mp4ClipURL: URL?, setupInfo: [String : NSObject]?, finished: Bool) {
-        guard
-            let endpointURL:String = setupInfo?["endpointURL"] as? String,
-            let streamName:String = setupInfo?["streamName"] as? String else {
-            return
-        }
-        broadcaster.streamName = streamName
-        broadcaster.connect(endpointURL, arguments: nil)
-        if (finished) {
-            broadcaster.processMP4Clip(mp4ClipURL: mp4ClipURL) {_ in
-                if (finished) {
-                    self.broadcaster.close()
-                }
-            }
-            return
-        }
-        broadcaster.processMP4Clip(mp4ClipURL: mp4ClipURL)
-    }
-}

RTMPBroadcaster.swift
@@ -19,11 +19,11 @@ public class RTMPBroadcaster : RTMPConnection {

     public override init() {
         super.init()
-        addEventListener(Event.RTMP_STATUS, selector: #selector(RTMPBroadcaster.rtmpStatusEvent(_:)), observer: self)
+        addEventListener(Event.RTMP_STATUS, selector: #selector(self.rtmpStatusEvent(_:)), observer: self)
     }

     deinit {
-        removeEventListener(Event.RTMP_STATUS, selector: #selector(RTMPBroadcaster.rtmpStatusEvent(_:)), observer: self)
+        removeEventListener(Event.RTMP_STATUS, selector: #selector(self.rtmpStatusEvent(_:)), observer: self)
     }

     open override func connect(_ command: String, arguments: Any?...) {
@@ -61,7 +61,7 @@ public class RTMPBroadcaster : RTMPConnection {
         }
     }

-    open func rtmpStatusEvent(_ status:Notification) {
+    @objc open func rtmpStatusEvent(_ status:Notification) {
         let e:Event = Event.from(status)
         guard
             let data:ASObject = e.data as? ASObject,

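Migration note: Swift 4 no longer implicitly exposes every method of an NSObject subclass to Objective-C (SWIFT_SWIFT3_OBJC_INFERENCE is switched to Off in project.pbxproj below), so any method named in a #selector must be marked @objc explicitly, as rtmpStatusEvent(_:) is above. A minimal sketch of the pattern, with hypothetical names:

    import Foundation

    final class StatusListener: NSObject {
        func register() {
            NotificationCenter.default.addObserver(
                self,
                selector: #selector(self.statusDidChange(_:)),  // needs an @objc target
                name: Notification.Name("status"),              // hypothetical notification
                object: nil)
        }

        // Without @objc, Swift 4 rejects the #selector reference above.
        @objc func statusDidChange(_ notification: Notification) {
            print("status:", notification.name)
        }
    }
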
HaishinKit.podspec
@@ -1,7 +1,7 @@
 Pod::Spec.new do |s|

   s.name = "HaishinKit"
-  s.version = "0.7.5"
+  s.version = "0.8.0"
   s.summary = "Camera and Microphone streaming library via RTMP, HLS for iOS, macOS, tvOS."

   s.description = <<-DESC
@@ -25,7 +25,7 @@ Pod::Spec.new do |s|
   s.tvos.source_files = "Platforms/tvOS/*.{h,swift}"

   s.source_files = "Sources/**/*.swift"
-  s.dependency 'Logboard', '~> 1.0.1'
+  s.dependency 'Logboard', '~> 1.1.1'

 end

project.pbxproj
@@ -108,7 +108,6 @@
 2958912E1EEB8F4100CE51E1 /* FLVSoundType.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2958912D1EEB8F4100CE51E1 /* FLVSoundType.swift */; };
 2958912F1EEB8F4100CE51E1 /* FLVSoundType.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2958912D1EEB8F4100CE51E1 /* FLVSoundType.swift */; };
 295891301EEB8F4100CE51E1 /* FLVSoundType.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2958912D1EEB8F4100CE51E1 /* FLVSoundType.swift */; };
-2962425E1D8BFC7B00C451A3 /* MovieClipHandler.swift in Sources */ = {isa = PBXBuildFile; fileRef = 292D8A301D8B233C00DBECE2 /* MovieClipHandler.swift */; };
 296242611D8DB86500C451A3 /* TSReader.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2962425F1D8DB86500C451A3 /* TSReader.swift */; };
 296242621D8DB86500C451A3 /* TSWriter.swift in Sources */ = {isa = PBXBuildFile; fileRef = 296242601D8DB86500C451A3 /* TSWriter.swift */; };
 296242631D8DBA8C00C451A3 /* TSReader.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2962425F1D8DB86500C451A3 /* TSReader.swift */; };
@@ -457,7 +456,6 @@
 2926A9EB1DE6B71D0074E3D2 /* MachUtil.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = MachUtil.swift; path = Sources/Util/MachUtil.swift; sourceTree = SOURCE_ROOT; };
 2926A9F01DE6EAEB0074E3D2 /* TimerDriver.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = TimerDriver.swift; path = Sources/Util/TimerDriver.swift; sourceTree = SOURCE_ROOT; };
 2927A2991E7ED2D70044AF91 /* LICENSE.md */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = net.daringfireball.markdown; path = LICENSE.md; sourceTree = "<group>"; };
-292D8A301D8B233C00DBECE2 /* MovieClipHandler.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = MovieClipHandler.swift; path = Examples/iOS/Screencast/MovieClipHandler.swift; sourceTree = "<group>"; };
 292D8A321D8B293300DBECE2 /* MP4Sampler.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = MP4Sampler.swift; path = Sources/ISO/MP4Sampler.swift; sourceTree = SOURCE_ROOT; };
 292F6DB01EEBB2040097EDBE /* AVFoundation.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = AVFoundation.swift; path = Platforms/tvOS/AVFoundation.swift; sourceTree = "<group>"; };
 2930D03D1E12D12100DA2DC5 /* README.md */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = net.daringfireball.markdown; name = README.md; path = Examples/iOS/Screencast/README.md; sourceTree = "<group>"; };
@@ -511,7 +509,6 @@
 29798E511CE5DF1900F5CBD0 /* MP4Reader.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = MP4Reader.swift; path = Sources/ISO/MP4Reader.swift; sourceTree = SOURCE_ROOT; };
 29798E591CE60E5300F5CBD0 /* Tests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = Tests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };
 29798E5D1CE60E5300F5CBD0 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
-297A6A1B1F3F3146008C2508 /* lf.podspec */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = lf.podspec; sourceTree = "<group>"; };
 2981E1301D646E3F00E8F7CA /* Cartfile */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = Cartfile; sourceTree = "<group>"; };
 298BCF321DD4C44A007FF86A /* AnyUtil.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = AnyUtil.swift; path = Sources/Util/AnyUtil.swift; sourceTree = SOURCE_ROOT; };
 2992D1521ED04A1B008D9DC1 /* VideoIOComponent+Extension.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = "VideoIOComponent+Extension.swift"; path = "Platforms/macOS/VideoIOComponent+Extension.swift"; sourceTree = "<group>"; };
@@ -795,7 +792,6 @@
 2945CBB31B4BE66000104112 = {
     isa = PBXGroup;
     children = (
-        297A6A1B1F3F3146008C2508 /* lf.podspec */,
         29F04FF21F3388B000172706 /* HaishinKit.podspec */,
         2981E1301D646E3F00E8F7CA /* Cartfile */,
         2927A2991E7ED2D70044AF91 /* LICENSE.md */,
@@ -1004,7 +1000,6 @@
     isa = PBXGroup;
     children = (
         29A39C881D85BF30007C27E9 /* Info.plist */,
-        292D8A301D8B233C00DBECE2 /* MovieClipHandler.swift */,
         2930D03D1E12D12100DA2DC5 /* README.md */,
         2957473B1E34F30300EF056E /* RTMPBroadcaster.swift */,
         2930D03F1E12D17C00DA2DC5 /* SampleHandler.swift */,
@@ -1538,7 +1533,6 @@
     buildActionMask = 2147483647;
     files = (
         2930D0411E12D35400DA2DC5 /* SampleHandler.swift in Sources */,
-        2962425E1D8BFC7B00C451A3 /* MovieClipHandler.swift in Sources */,
         2957473E1E34F3DB00EF056E /* RTMPBroadcaster.swift in Sources */,
     );
     runOnlyForDeploymentPostprocessing = 0;
@@ -1961,7 +1955,7 @@
 PRODUCT_NAME = "$(TARGET_NAME)";
 SKIP_INSTALL = YES;
 SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG;
-SWIFT_SWIFT3_OBJC_INFERENCE = On;
+SWIFT_SWIFT3_OBJC_INFERENCE = Off;
 SWIFT_VERSION = 4.0;
 };
 name = Debug;
@@ -1981,7 +1975,7 @@
 PRODUCT_BUNDLE_IDENTIFIER = "com.haishinkit.HaishinKit.Example-iOS.Screencast";
 PRODUCT_NAME = "$(TARGET_NAME)";
 SKIP_INSTALL = YES;
-SWIFT_SWIFT3_OBJC_INFERENCE = On;
+SWIFT_SWIFT3_OBJC_INFERENCE = Off;
 SWIFT_VERSION = 4.0;
 };
 name = Release;
@@ -2003,7 +1997,7 @@
 PRODUCT_NAME = "$(TARGET_NAME)";
 SKIP_INSTALL = YES;
 SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG;
-SWIFT_SWIFT3_OBJC_INFERENCE = On;
+SWIFT_SWIFT3_OBJC_INFERENCE = Off;
 SWIFT_VERSION = 4.0;
 };
 name = Debug;
@@ -2023,7 +2017,7 @@
 PRODUCT_BUNDLE_IDENTIFIER = "com.haishinkit.HaishinKit.Example-iOS.ScreencastUI";
 PRODUCT_NAME = "$(TARGET_NAME)";
 SKIP_INSTALL = YES;
-SWIFT_SWIFT3_OBJC_INFERENCE = On;
+SWIFT_SWIFT3_OBJC_INFERENCE = Off;
 SWIFT_VERSION = 4.0;
 };
 name = Release;
@@ -2163,7 +2157,7 @@
 PROVISIONING_PROFILE = "";
 SKIP_INSTALL = YES;
 SWIFT_INCLUDE_PATHS = "";
-SWIFT_SWIFT3_OBJC_INFERENCE = On;
+SWIFT_SWIFT3_OBJC_INFERENCE = Off;
 SWIFT_VERSION = 4.0;
 TARGETED_DEVICE_FAMILY = "1,2";
 };
@@ -2193,7 +2187,7 @@
 PROVISIONING_PROFILE = "";
 SKIP_INSTALL = YES;
 SWIFT_INCLUDE_PATHS = "";
-SWIFT_SWIFT3_OBJC_INFERENCE = On;
+SWIFT_SWIFT3_OBJC_INFERENCE = Off;
 SWIFT_VERSION = 4.0;
 TARGETED_DEVICE_FAMILY = "1,2";
 };
@@ -2266,7 +2260,7 @@
 PRODUCT_NAME = HaishinKit;
 SDKROOT = macosx;
 SKIP_INSTALL = YES;
-SWIFT_SWIFT3_OBJC_INFERENCE = On;
+SWIFT_SWIFT3_OBJC_INFERENCE = Off;
 SWIFT_VERSION = 4.0;
 };
 name = Debug;
@@ -2295,7 +2289,7 @@
 PRODUCT_NAME = HaishinKit;
 SDKROOT = macosx;
 SKIP_INSTALL = YES;
-SWIFT_SWIFT3_OBJC_INFERENCE = On;
+SWIFT_SWIFT3_OBJC_INFERENCE = Off;
 SWIFT_VERSION = 4.0;
 };
 name = Release;
@@ -2322,7 +2316,7 @@
 ONLY_ACTIVE_ARCH = YES;
 PRODUCT_BUNDLE_IDENTIFIER = "com.haishinkit.HaishinKit.Example-iOS";
 PRODUCT_NAME = "$(TARGET_NAME)";
-SWIFT_SWIFT3_OBJC_INFERENCE = On;
+SWIFT_SWIFT3_OBJC_INFERENCE = Off;
 SWIFT_VERSION = 4.0;
 };
 name = Debug;
@@ -2346,7 +2340,7 @@
 LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
 PRODUCT_BUNDLE_IDENTIFIER = "com.haishinkit.HaishinKit.Example-iOS";
 PRODUCT_NAME = "$(TARGET_NAME)";
-SWIFT_SWIFT3_OBJC_INFERENCE = On;
+SWIFT_SWIFT3_OBJC_INFERENCE = Off;
 SWIFT_VERSION = 4.0;
 };
 name = Release;
@@ -2369,7 +2363,7 @@
 PRODUCT_BUNDLE_IDENTIFIER = "com.haishinkit.HaishinKit.Example-macOS";
 PRODUCT_NAME = "$(TARGET_NAME)";
 SDKROOT = macosx;
-SWIFT_SWIFT3_OBJC_INFERENCE = On;
+SWIFT_SWIFT3_OBJC_INFERENCE = Off;
 SWIFT_VERSION = 4.0;
 };
 name = Debug;
@@ -2391,7 +2385,7 @@
 PRODUCT_BUNDLE_IDENTIFIER = "com.haishinkit.HaishinKit.Example-macOS";
 PRODUCT_NAME = "$(TARGET_NAME)";
 SDKROOT = macosx;
-SWIFT_SWIFT3_OBJC_INFERENCE = On;
+SWIFT_SWIFT3_OBJC_INFERENCE = Off;
 SWIFT_VERSION = 4.0;
 };
 name = Release;
@@ -2421,7 +2415,7 @@
 SDKROOT = appletvos;
 SKIP_INSTALL = YES;
 SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG;
-SWIFT_SWIFT3_OBJC_INFERENCE = On;
+SWIFT_SWIFT3_OBJC_INFERENCE = Off;
 SWIFT_VERSION = 4.0;
 TARGETED_DEVICE_FAMILY = 3;
 TVOS_DEPLOYMENT_TARGET = 10.2;
@@ -2451,7 +2445,7 @@
 PRODUCT_NAME = HaishinKit;
 SDKROOT = appletvos;
 SKIP_INSTALL = YES;
-SWIFT_SWIFT3_OBJC_INFERENCE = On;
+SWIFT_SWIFT3_OBJC_INFERENCE = Off;
 SWIFT_VERSION = 4.0;
 TARGETED_DEVICE_FAMILY = 3;
 TVOS_DEPLOYMENT_TARGET = 10.2;
@@ -2474,7 +2468,7 @@
 PRODUCT_NAME = "$(TARGET_NAME)";
 SDKROOT = appletvos;
 SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG;
-SWIFT_VERSION = 3.0;
+SWIFT_VERSION = 4.0;
 TARGETED_DEVICE_FAMILY = 3;
 TVOS_DEPLOYMENT_TARGET = 10.2;
 };
@@ -2494,7 +2488,7 @@
 PRODUCT_BUNDLE_IDENTIFIER = "com.haishinkit.Example-tvOS";
 PRODUCT_NAME = "$(TARGET_NAME)";
 SDKROOT = appletvos;
-SWIFT_VERSION = 3.0;
+SWIFT_VERSION = 4.0;
 TARGETED_DEVICE_FAMILY = 3;
 TVOS_DEPLOYMENT_TARGET = 10.2;
 };

GLLFView.swift (iOS)
@@ -9,9 +9,9 @@ open class GLLFView: GLKView {
     ]
     open static var defaultBackgroundColor:UIColor = UIColor.black

-    open var videoGravity:String = AVLayerVideoGravityResizeAspect
+    open var videoGravity:AVLayerVideoGravity = .resizeAspect

-    var position:AVCaptureDevicePosition = .back
+    var position:AVCaptureDevice.Position = .back
     var orientation:AVCaptureVideoOrientation = .portrait

     fileprivate var displayImage:CIImage?
@@ -25,13 +25,13 @@ open class GLLFView: GLKView {
     }

     public override init(frame: CGRect) {
-        super.init(frame: frame, context: EAGLContext(api: .openGLES2))
+        super.init(frame: frame, context: EAGLContext(api: .openGLES2)!)
         awakeFromNib()
     }

     required public init?(coder aDecoder: NSCoder) {
         super.init(coder: aDecoder)
-        self.context = EAGLContext(api: .openGLES2)
+        self.context = EAGLContext(api: .openGLES2)!
     }

     open override func awakeFromNib() {
@@ -64,7 +64,7 @@ extension GLLFView: GLKViewDelegate {
         var fromRect:CGRect = displayImage.extent
         VideoGravityUtil.calclute(videoGravity, inRect: &inRect, fromRect: &fromRect)
         if (position == .front) {
-            currentStream?.mixer.videoIO.context?.draw(displayImage.applyingOrientation(2), in: inRect, from: fromRect)
+            currentStream?.mixer.videoIO.context?.draw(displayImage.oriented(forExifOrientation: 2), in: inRect, from: fromRect)
         } else {
             currentStream?.mixer.videoIO.context?.draw(displayImage, in: inRect, from: fromRect)
         }

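Migration note: two SDK changes drive the GLLFView edits. EAGLContext(api:) is now failable, so its result must be unwrapped where a non-optional context is required (the commit force-unwraps; a guard is the defensive alternative), and CIImage.applyingOrientation(_:) is renamed oriented(forExifOrientation:). A sketch under those assumptions:

    import GLKit
    import CoreImage

    guard let context = EAGLContext(api: .openGLES2) else {
        fatalError("OpenGL ES 2 is unavailable")
    }
    let view = GLKView(frame: .zero, context: context)

    // EXIF orientation 2 mirrors the image horizontally, as used for the front camera above.
    let source = CIImage(color: CIColor.red).cropped(to: CGRect(x: 0, y: 0, width: 4, height: 4))
    let mirrored = source.oriented(forExifOrientation: 2)  // was applyingOrientation(2)
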
Info.plist (iOS)
@@ -15,7 +15,7 @@
     <key>CFBundlePackageType</key>
     <string>FMWK</string>
     <key>CFBundleShortVersionString</key>
-    <string>0.7.5</string>
+    <string>0.8.0</string>
     <key>CFBundleSignature</key>
     <string>????</string>
     <key>CFBundleVersion</key>

LFView.swift
@@ -13,7 +13,7 @@ open class LFView: UIView {
         return super.layer as! AVCaptureVideoPreviewLayer
     }

-    public var videoGravity:String = AVLayerVideoGravityResizeAspect {
+    public var videoGravity:AVLayerVideoGravity = .resizeAspect {
         didSet {
             layer.videoGravity = videoGravity
         }
@@ -29,7 +29,7 @@ open class LFView: UIView {
             }
         }
     }
-    var position:AVCaptureDevicePosition = .front
+    var position:AVCaptureDevice.Position = .front

     private weak var currentStream:NetStream? {
         didSet {

GLLFView.swift (macOS)
@@ -20,7 +20,7 @@ open class GLLFView: NSOpenGLView {
         return pixelFormat
     }

-    public var videoGravity:String! = AVLayerVideoGravity.resizeAspect.rawValue
+    public var videoGravity:AVLayerVideoGravity = .resizeAspect
     var orientation:AVCaptureVideoOrientation = .portrait
     var position:AVCaptureDevice.Position = .front
     fileprivate var displayImage:CIImage!

Info.plist (macOS)
@@ -15,7 +15,7 @@
     <key>CFBundlePackageType</key>
     <string>FMWK</string>
     <key>CFBundleShortVersionString</key>
-    <string>0.7.5</string>
+    <string>0.8.0</string>
     <key>CFBundleSignature</key>
     <string>????</string>
     <key>CFBundleVersion</key>

GLLFView.swift (tvOS)
@@ -7,8 +7,8 @@ open class GLLFView: GLKView {
         kCIContextWorkingColorSpace: NSNull(),
         kCIContextUseSoftwareRenderer: NSNumber(value: false),
     ]
-    open static var defaultBackgroundColor:UIColor = UIColor.black
-    open var videoGravity:String = AVLayerVideoGravity.resizeAspect.rawValue
+    open static var defaultBackgroundColor:UIColor = .black
+    open var videoGravity:AVLayerVideoGravity = .resizeAspect
     fileprivate var displayImage:CIImage?
     fileprivate weak var currentStream:NetStream? {
         didSet {

Info.plist (tvOS)
@@ -15,7 +15,7 @@
     <key>CFBundlePackageType</key>
     <string>FMWK</string>
     <key>CFBundleShortVersionString</key>
-    <string>0.7.5</string>
+    <string>0.8.0</string>
     <key>CFBundleVersion</key>
     <string>$(CURRENT_PROJECT_VERSION)</string>
     <key>NSPrincipalClass</key>

H264Encoder.swift
@@ -282,10 +282,10 @@ final class H264Encoder: NSObject {
     }

     #if os(iOS)
-    func applicationWillEnterForeground(_ notification:Notification) {
+    @objc func applicationWillEnterForeground(_ notification:Notification) {
         invalidateSession = true
     }
-    func didAudioSessionInterruption(_ notification:Notification) {
+    @objc func didAudioSessionInterruption(_ notification:Notification) {
         guard
             let userInfo:[AnyHashable: Any] = notification.userInfo,
             let value:NSNumber = userInfo[AVAudioSessionInterruptionTypeKey] as? NSNumber,
@@ -310,14 +310,14 @@ extension H264Encoder: Runnable {
         #if os(iOS)
         NotificationCenter.default.addObserver(
             self,
-            selector: #selector(H264Encoder.didAudioSessionInterruption(_:)),
-            name: NSNotification.Name.AVAudioSessionInterruption,
+            selector: #selector(self.didAudioSessionInterruption(_:)),
+            name: .AVAudioSessionInterruption,
             object: nil
         )
         NotificationCenter.default.addObserver(
             self,
-            selector: #selector(H264Encoder.applicationWillEnterForeground(_:)),
-            name: NSNotification.Name.UIApplicationWillEnterForeground,
+            selector: #selector(self.applicationWillEnterForeground(_:)),
+            name: .UIApplicationWillEnterForeground,
             object: nil
         )
         #endif
@@ -330,8 +330,8 @@ extension H264Encoder: Runnable {
             self.lastImageBuffer = nil;
             self.formatDescription = nil
             #if os(iOS)
-            NotificationCenter.default.removeObserver(self, name: NSNotification.Name.AVAudioSessionInterruption, object: nil)
-            NotificationCenter.default.removeObserver(self, name: NSNotification.Name.UIApplicationWillEnterForeground, object: nil)
+            NotificationCenter.default.removeObserver(self, name: .AVAudioSessionInterruption, object: nil)
+            NotificationCenter.default.removeObserver(self, name: .UIApplicationWillEnterForeground, object: nil)
             #endif
             self.running = false
         }

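Migration note: the H264Encoder hunks combine two idioms. The handler methods gain @objc (required once Swift 3 inference is off), and the notification names shrink to implicit members: because the name: parameter is typed NSNotification.Name?, .AVAudioSessionInterruption resolves without the NSNotification.Name prefix. A condensed sketch of the register/teardown pair (hypothetical watcher class):

    import AVFoundation

    final class InterruptionWatcher: NSObject {
        func start() {
            NotificationCenter.default.addObserver(
                self,
                selector: #selector(self.didInterrupt(_:)),
                name: .AVAudioSessionInterruption,  // shorthand for NSNotification.Name.AVAudioSessionInterruption
                object: nil)
        }

        func stop() {
            NotificationCenter.default.removeObserver(self, name: .AVAudioSessionInterruption, object: nil)
        }

        @objc func didInterrupt(_ notification: Notification) {
            // AVAudioSessionInterruptionTypeKey carries the interruption type as an NSNumber.
            print(notification.userInfo?[AVAudioSessionInterruptionTypeKey] ?? "unknown")
        }
    }
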
MP4TrakReader.swift
@@ -669,10 +669,10 @@ final class MP4TrakReader {
         handle = try FileHandle(forReadingFrom: reader.url)

         if let avcC:MP4Box = trak.getBoxes(byName: "avcC").first {
-            delegate?.didSet(config: reader.readData(ofBox: avcC), withID: id, type: AVMediaType.video.rawValue)
+            delegate?.didSet(config: reader.readData(ofBox: avcC), withID: id, type: .video)
         }
         if let esds:MP4ElementaryStreamDescriptorBox = trak.getBoxes(byName: "esds").first as? MP4ElementaryStreamDescriptorBox {
-            delegate?.didSet(config: Data(esds.audioDecorderSpecificConfig), withID: id, type: AVMediaType.audio.rawValue)
+            delegate?.didSet(config: Data(esds.audioDecorderSpecificConfig), withID: id, type: .audio)
         }

         timerDriver.interval = MachUtil.nanosToAbs(UInt64(currentTimeToSample * 1000 * 1000))

MP4Sampler.swift
@@ -1,8 +1,9 @@
 import Foundation
+import AVFoundation

 protocol MP4SamplerDelegate: class {
     func didOpen(_ reader:MP4Reader)
-    func didSet(config:Data, withID:Int, type:String)
+    func didSet(config:Data, withID:Int, type:AVMediaType)
     func output(data:Data, withID:Int, currentTime:Double, keyframe:Bool)
 }

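Migration note: in the Swift 4 SDK, AVMediaType is a String-backed struct rather than the bare AVMediaTypeVideo/AVMediaTypeAudio string constants, so the MP4SamplerDelegate requirement can carry the typed value end to end (hence the new import AVFoundation). The struct is Equatable and Hashable, which keeps switch statements and dictionary keys working. A small sketch:

    import AVFoundation

    func describe(_ type: AVMediaType) -> String {
        switch type {                      // pattern matching works because AVMediaType is Equatable
        case .video: return "video"
        case .audio: return "audio"
        default:     return type.rawValue  // rawValue recovers the underlying String
        }
    }
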
TSPacket.swift
@@ -23,7 +23,7 @@ struct TSPacket {

     fileprivate var remain:Int {
         var adaptationFieldSize:Int = 0
-        if var adaptationField:TSAdaptationField = adaptationField , adaptationFieldFlag {
+        if let adaptationField:TSAdaptationField = adaptationField , adaptationFieldFlag {
             adaptationField.compute()
             adaptationFieldSize = Int(adaptationField.length) + 1
         }
@@ -189,7 +189,7 @@ extension TSPacket: CustomStringConvertible {
     }

 // MARK: -
-struct TSAdaptationField {
+class TSAdaptationField {
     static let PCRSize:Int = 6
     static let fixedSectionSize:Int = 2

@@ -217,7 +217,7 @@ struct TSAdaptationField {
         self.data = data
     }

-    mutating func compute() {
+    func compute() {
         length = UInt8(truncatingIfNeeded: TSAdaptationField.fixedSectionSize)
         length += UInt8(truncatingIfNeeded: PCR.count)
         length += UInt8(truncatingIfNeeded: OPCR.count)
@@ -229,7 +229,7 @@ struct TSAdaptationField {
         length -= 1
     }

-    mutating func stuffing(_ size:Int) {
+    func stuffing(_ size:Int) {
         stuffingBytes = Data(repeating: 0xff, count: size)
         length += UInt8(size)
     }

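Migration note: turning TSAdaptationField from a struct into a class removes the `mutating` markers and gives it reference semantics, which the TSPacket hunk relies on: after `if let adaptationField = adaptationField`, calling compute() mutates the one shared instance, whereas the old `if var` bound a struct copy. The same hunks show UInt8(truncatingIfNeeded:), the Swift 4 spelling of the truncating integer conversion. A reduced sketch of the difference (hypothetical Field types):

    final class Field {                 // reference type: no `mutating` needed
        var length: UInt8 = 0
        func compute() { length = UInt8(truncatingIfNeeded: 300) }  // 300 truncates to 44
    }

    struct FieldValue {                 // value type
        var length: UInt8 = 0
        mutating func compute() { length = UInt8(truncatingIfNeeded: 300) }
    }

    let shared = Field()
    if let f = Optional(shared) { f.compute() }
    print(shared.length)                // 44: the class instance was mutated

    let value = FieldValue()
    if var copy = Optional(value) { copy.compute() }
    print(value.length)                 // 0: only the local copy changed
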
AVMixerRecorder.swift
@@ -3,9 +3,9 @@ import AVFoundation

 public protocol AVMixerRecorderDelegate: class {
     var moviesDirectory:URL { get }
-    func rotateFile(_ recorder:AVMixerRecorder, withPresentationTimeStamp:CMTime, mediaType:String)
+    func rotateFile(_ recorder:AVMixerRecorder, withPresentationTimeStamp:CMTime, mediaType:AVMediaType)
     func getPixelBufferAdaptor(_ recorder:AVMixerRecorder, withWriterInput: AVAssetWriterInput?) -> AVAssetWriterInputPixelBufferAdaptor?
-    func getWriterInput(_ recorder:AVMixerRecorder, mediaType:String, sourceFormatHint:CMFormatDescription?) -> AVAssetWriterInput?
+    func getWriterInput(_ recorder:AVMixerRecorder, mediaType:AVMediaType, sourceFormatHint:CMFormatDescription?) -> AVAssetWriterInput?
     func didStartRunning(_ recorder: AVMixerRecorder)
     func didStopRunning(_ recorder: AVMixerRecorder)
     func didFinishWriting(_ recorder: AVMixerRecorder)
@@ -14,13 +14,13 @@ public protocol AVMixerRecorderDelegate: class {
 // MARK: -
 open class AVMixerRecorder: NSObject {

-    open static let defaultOutputSettings:[String:[String:Any]] = [
-        AVMediaType.audio.rawValue: [
+    open static let defaultOutputSettings:[AVMediaType:[String:Any]] = [
+        .audio: [
             AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
             AVSampleRateKey: 0,
             AVNumberOfChannelsKey: 0,
         ],
-        AVMediaType.video.rawValue: [
+        .video: [
             AVVideoCodecKey: AVVideoCodecH264,
             AVVideoHeightKey: 0,
             AVVideoWidthKey: 0,
@@ -30,8 +30,8 @@ open class AVMixerRecorder: NSObject {
     open var writer:AVAssetWriter?
     open var fileName:String?
     open var delegate:AVMixerRecorderDelegate?
-    open var writerInputs:[String:AVAssetWriterInput] = [:]
-    open var outputSettings:[String:[String:Any]] = AVMixerRecorder.defaultOutputSettings
+    open var writerInputs:[AVMediaType:AVAssetWriterInput] = [:]
+    open var outputSettings:[AVMediaType:[String:Any]] = AVMixerRecorder.defaultOutputSettings
     open var pixelBufferAdaptor:AVAssetWriterInputPixelBufferAdaptor?
     open let lockQueue:DispatchQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.AVMixerRecorder.lock")
     fileprivate(set) var running:Bool = false
@@ -49,7 +49,7 @@ open class AVMixerRecorder: NSObject {
         delegate = DefaultAVMixerRecorderDelegate()
     }

-    final func appendSampleBuffer(_ sampleBuffer:CMSampleBuffer, mediaType:String) {
+    final func appendSampleBuffer(_ sampleBuffer:CMSampleBuffer, mediaType:AVMediaType) {
         lockQueue.async {
             guard let delegate:AVMixerRecorderDelegate = self.delegate, self.running else {
                 return
@@ -84,10 +84,10 @@ open class AVMixerRecorder: NSObject {
             return
         }

-        delegate.rotateFile(self, withPresentationTimeStamp: withPresentationTime, mediaType: AVMediaType.video.rawValue)
+        delegate.rotateFile(self, withPresentationTimeStamp: withPresentationTime, mediaType: .video)
         guard
             let writer:AVAssetWriter = self.writer,
-            let input:AVAssetWriterInput = delegate.getWriterInput(self, mediaType: AVMediaType.video.rawValue, sourceFormatHint: CMVideoFormatDescription.create(withPixelBuffer: pixelBuffer)),
+            let input:AVAssetWriterInput = delegate.getWriterInput(self, mediaType: .video, sourceFormatHint: CMVideoFormatDescription.create(withPixelBuffer: pixelBuffer)),
             let adaptor:AVAssetWriterInputPixelBufferAdaptor = delegate.getPixelBufferAdaptor(self, withWriterInput: input),
             self.isReadyForStartWriting else {
             return
@@ -153,7 +153,7 @@ open class DefaultAVMixerRecorderDelegate: NSObject {
     open var dateFormat:String = "-yyyyMMdd-HHmmss"

     fileprivate var rotateTime:CMTime = kCMTimeZero
-    fileprivate var clockReference:String = AVMediaType.video.rawValue
+    fileprivate var clockReference:AVMediaType = .video

     #if os(iOS)
     open lazy var moviesDirectory:URL = {
@@ -168,7 +168,7 @@ open class DefaultAVMixerRecorderDelegate: NSObject {

 extension DefaultAVMixerRecorderDelegate: AVMixerRecorderDelegate {
     // MARK: AVMixerRecorderDelegate
-    open func rotateFile(_ recorder:AVMixerRecorder, withPresentationTimeStamp:CMTime, mediaType:String) {
+    open func rotateFile(_ recorder:AVMixerRecorder, withPresentationTimeStamp:CMTime, mediaType:AVMediaType) {
         guard clockReference == mediaType && rotateTime.value < withPresentationTimeStamp.value else {
             return
         }
@@ -195,7 +195,7 @@ extension DefaultAVMixerRecorderDelegate: AVMixerRecorderDelegate {
         return adaptor
     }

-    open func getWriterInput(_ recorder:AVMixerRecorder, mediaType:String, sourceFormatHint:CMFormatDescription?) -> AVAssetWriterInput? {
+    open func getWriterInput(_ recorder:AVMixerRecorder, mediaType:AVMediaType, sourceFormatHint:CMFormatDescription?) -> AVAssetWriterInput? {
         guard recorder.writerInputs[mediaType] == nil else {
             return recorder.writerInputs[mediaType]
         }
@@ -203,7 +203,7 @@ extension DefaultAVMixerRecorderDelegate: AVMixerRecorderDelegate {
         var outputSettings:[String:Any] = [:]
         if let defaultOutputSettings:[String:Any] = recorder.outputSettings[mediaType] {
             switch mediaType {
-            case AVMediaType.audio:
+            case .audio:
                 guard
                     let format:CMAudioFormatDescription = sourceFormatHint,
                     let inSourceFormat:AudioStreamBasicDescription = format.streamBasicDescription?.pointee else {
@@ -219,7 +219,7 @@ extension DefaultAVMixerRecorderDelegate: AVMixerRecorderDelegate {
                     outputSettings[key] = value
                 }
             }
-            case AVMediaType.video:
+            case .video:
                 guard let format:CMVideoFormatDescription = sourceFormatHint else {
                     break
                 }
@@ -238,7 +238,7 @@ extension DefaultAVMixerRecorderDelegate: AVMixerRecorderDelegate {
             }
         }

-        let input:AVAssetWriterInput = AVAssetWriterInput(mediaType: AVMediaType(rawValue: mediaType), outputSettings: outputSettings, sourceFormatHint: sourceFormatHint)
+        let input:AVAssetWriterInput = AVAssetWriterInput(mediaType: mediaType, outputSettings: outputSettings, sourceFormatHint: sourceFormatHint)
         input.expectsMediaDataInRealTime = true
         recorder.writerInputs[mediaType] = input
         recorder.writer?.add(input)

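Migration note: because AVMediaType is Hashable, AVMixerRecorder's settings and writer-input tables move from String keys to typed keys, and AVAssetWriterInput(mediaType:) takes the value directly, dropping the AVMediaType(rawValue:) round-trip. A sketch of the keying pattern (sizes are placeholder values):

    import AVFoundation

    let outputSettings: [AVMediaType: [String: Any]] = [
        .audio: [AVFormatIDKey: Int(kAudioFormatMPEG4AAC)],
        .video: [AVVideoCodecKey: AVVideoCodecH264,
                 AVVideoWidthKey: 640, AVVideoHeightKey: 360],
    ]

    var writerInputs: [AVMediaType: AVAssetWriterInput] = [:]
    if let settings = outputSettings[.video] {
        writerInputs[.video] = AVAssetWriterInput(mediaType: .video, outputSettings: settings)
    }
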
AudioIOComponent.swift
@@ -47,6 +47,11 @@ final class AudioIOComponent: IOComponent {
         encoder.lockQueue = lockQueue
     }

+    func appendSampleBuffer(_ sampleBuffer:CMSampleBuffer) {
+        mixer?.recorder.appendSampleBuffer(sampleBuffer, mediaType: .audio)
+        encoder.encodeSampleBuffer(sampleBuffer)
+    }
+
     #if os(iOS) || os(macOS)
     func attachAudio(_ audio:AVCaptureDevice?, automaticallyConfiguresApplicationAudioSession:Bool) throws {
         guard let mixer:AVMixer = mixer else {
@@ -86,9 +91,8 @@ final class AudioIOComponent: IOComponent {

 extension AudioIOComponent: AVCaptureAudioDataOutputSampleBufferDelegate {
     // MARK: AVCaptureAudioDataOutputSampleBufferDelegate
-    func captureOutput(captureOutput:AVCaptureOutput, didOutput sampleBuffer:CMSampleBuffer, from connection:AVCaptureConnection) {
-        mixer?.recorder.appendSampleBuffer(sampleBuffer, mediaType: AVMediaType.audio.rawValue)
-        encoder.encodeSampleBuffer(sampleBuffer)
+    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
+        appendSampleBuffer(sampleBuffer)
     }
 }

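Migration note: Swift 4 also renames the AVCapture sample-buffer callbacks; the Swift 3 spelling captureOutput(_:didOutputSampleBuffer:from:) becomes captureOutput(_:didOutput:from:). Because these delegate requirements are optional Objective-C methods, a stale signature still compiles but is silently never called, which is why the commit fixes the signature and funnels the body through appendSampleBuffer(_:). A sketch of the Swift 4 delegate shape (hypothetical class):

    import AVFoundation

    final class AudioTap: NSObject, AVCaptureAudioDataOutputSampleBufferDelegate {
        func captureOutput(_ output: AVCaptureOutput,
                           didOutput sampleBuffer: CMSampleBuffer,
                           from connection: AVCaptureConnection) {
            // forward to recorder/encoder, as AudioIOComponent.appendSampleBuffer does
        }
    }
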
VideoIOComponent.swift
@@ -59,12 +59,10 @@ final class VideoIOComponent: IOComponent {
             return
         }
         for connection in output.connections {
-            if let connection:AVCaptureConnection = connection as? AVCaptureConnection {
-                if (connection.isVideoOrientationSupported) {
-                    connection.videoOrientation = orientation
-                    if (torch) {
-                        setTorchMode(.on)
-                    }
-                }
+            if (connection.isVideoOrientationSupported) {
+                connection.videoOrientation = orientation
+                if (torch) {
+                    setTorchMode(.on)
+                }
             }
         }
@@ -252,9 +250,6 @@ final class VideoIOComponent: IOComponent {
             input = try AVCaptureDeviceInput(device: camera)
             mixer.session.addOutput(output)
             for connection in output.connections {
-                guard let connection:AVCaptureConnection = connection as? AVCaptureConnection else {
-                    continue
-                }
                 if (connection.isVideoOrientationSupported) {
                     connection.videoOrientation = orientation
                 }
@@ -290,6 +285,29 @@ final class VideoIOComponent: IOComponent {
     }
     #endif

+    func appendSampleBuffer(_ sampleBuffer:CMSampleBuffer) {
+        guard var buffer:CVImageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
+            return
+        }
+        CVPixelBufferLockBaseAddress(buffer, .readOnly)
+        defer { CVPixelBufferUnlockBaseAddress(buffer, .readOnly) }
+        let image:CIImage = effect(buffer)
+        if !effects.isEmpty {
+            #if os(macOS)
+            // green edge hack for OSX
+            buffer = CVPixelBuffer.create(image)!
+            #endif
+            context?.render(image, to: buffer)
+        }
+        encoder.encodeImageBuffer(
+            buffer,
+            presentationTimeStamp: sampleBuffer.presentationTimeStamp,
+            duration: sampleBuffer.duration
+        )
+        drawable?.draw(image: image)
+        mixer?.recorder.appendSampleBuffer(sampleBuffer, mediaType: .video)
+    }
+
     func effect(_ buffer:CVImageBuffer) -> CIImage {
         var image:CIImage = CIImage(cvPixelBuffer: buffer)
         for effect in effects {
@@ -325,27 +343,8 @@ final class VideoIOComponent: IOComponent {

 extension VideoIOComponent: AVCaptureVideoDataOutputSampleBufferDelegate {
     // MARK: AVCaptureVideoDataOutputSampleBufferDelegate
-    func captureOutput(captureOutput:AVCaptureOutput, didOutput sampleBuffer:CMSampleBuffer, from connection:AVCaptureConnection) {
-        guard var buffer:CVImageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
-            return
-        }
-        CVPixelBufferLockBaseAddress(buffer, .readOnly)
-        defer { CVPixelBufferUnlockBaseAddress(buffer, .readOnly) }
-        let image:CIImage = effect(buffer)
-        if !effects.isEmpty {
-            #if os(macOS)
-            // green edge hack for OSX
-            buffer = CVPixelBuffer.create(image)!
-            #endif
-            context?.render(image, to: buffer)
-        }
-        encoder.encodeImageBuffer(
-            buffer,
-            presentationTimeStamp: sampleBuffer.presentationTimeStamp,
-            duration: sampleBuffer.duration
-        )
-        drawable?.draw(image: image)
-        mixer?.recorder.appendSampleBuffer(sampleBuffer, mediaType: AVMediaType.video.rawValue)
+    func captureOutput(_ captureOutput:AVCaptureOutput, didOutput sampleBuffer:CMSampleBuffer, from connection:AVCaptureConnection) {
+        appendSampleBuffer(sampleBuffer)
     }
 }

NetStream.swift
@@ -110,9 +110,9 @@ open class NetStream: NSObject {
         }
     }

-    open var recorderSettings:[String:[String:Any]] {
+    open var recorderSettings:[AVMediaType:[String:Any]] {
         get {
-            var recorderSettings:[String:[String:Any]]!
+            var recorderSettings:[AVMediaType:[String:Any]]!
             lockQueue.sync {
                 recorderSettings = self.mixer.recorder.outputSettings
             }
@@ -155,9 +155,9 @@ open class NetStream: NSObject {
     open func appendSampleBuffer(_ sampleBuffer:CMSampleBuffer, withType: CMSampleBufferType, options:[NSObject: AnyObject]? = nil) {
         switch withType {
         case .audio:
-            mixer.audioIO.captureOutput(nil, didOutputSampleBuffer: sampleBuffer, from: nil)
+            mixer.audioIO.appendSampleBuffer(sampleBuffer)
         case .video:
-            mixer.videoIO.captureOutput(nil, didOutputSampleBuffer: sampleBuffer, from: nil)
+            mixer.videoIO.appendSampleBuffer(sampleBuffer)
         }
     }

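Migration note: with the capture-callback bodies factored into appendSampleBuffer(_:), NetStream no longer fakes a delegate call with nil arguments; it dispatches on CMSampleBufferType and forwards the buffer directly. A hedged usage sketch, assuming a buffer produced elsewhere (for example by ReplayKit) and a hypothetical forward helper:

    import AVFoundation
    import HaishinKit

    func forward(_ sampleBuffer: CMSampleBuffer, to stream: NetStream) {
        // Video buffers carry an image buffer; audio buffers do not.
        let isVideo = CMSampleBufferGetImageBuffer(sampleBuffer) != nil
        stream.appendSampleBuffer(sampleBuffer, withType: isVideo ? .video : .audio)
    }
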
RTMPMuxer.swift
@@ -97,17 +97,17 @@ extension RTMPMuxer: MP4SamplerDelegate {
         delegate?.metadata(metadata)
     }

-    func didSet(config:Data, withID:Int, type:String) {
+    func didSet(config:Data, withID:Int, type:AVMediaType) {
         guard configs[withID] != config else {
             return
         }
         configs[withID] = config
         switch type {
-        case AVMediaType.video:
+        case .video:
             var buffer:Data = Data([FLVFrameType.key.rawValue << 4 | FLVVideoCodec.avc.rawValue, FLVAVCPacketType.seq.rawValue, 0, 0, 0])
             buffer.append(config)
             delegate?.sampleOutput(video: buffer, withTimestamp: 0, muxer: self)
-        case AVMediaType.audio:
+        case .audio:
             if (withID != 1) {
                 break
             }

DeviceUtil.swift
@@ -8,9 +8,6 @@ public final class DeviceUtil {

     static public func device(withPosition:AVCaptureDevice.Position) -> AVCaptureDevice? {
         for device in AVCaptureDevice.devices() {
-            guard let device:AVCaptureDevice = device as? AVCaptureDevice else {
-                continue
-            }
             if (device.hasMediaType(AVMediaType.video) && device.position == withPosition) {
                 return device
             }
@@ -20,9 +17,6 @@ public final class DeviceUtil {

     static public func device(withLocalizedName:String, mediaType:String) -> AVCaptureDevice? {
         for device in AVCaptureDevice.devices() {
-            guard let device:AVCaptureDevice = device as? AVCaptureDevice else {
-                continue
-            }
             if (device.hasMediaType(AVMediaType(rawValue: mediaType)) && device.localizedName == withLocalizedName) {
                 return device
             }
@@ -35,7 +29,7 @@ public final class DeviceUtil {
         var frameRates:[Float64] = []

         for object:Any in device.activeFormat.videoSupportedFrameRateRanges {
-            guard let range:AVCaptureDevice.Format.FrameRateRange = object as? AVCaptureDevice.Format.FrameRateRange else {
+            guard let range:AVFrameRateRange = object as? AVFrameRateRange else {
                 continue
             }
             if (range.minFrameRate == range.maxFrameRate) {

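Migration note: under the Swift 4 SDK, AVCaptureDevice.devices() returns [AVCaptureDevice] instead of [Any]!, so DeviceUtil's `as? AVCaptureDevice` guards become dead code and are removed, and the supported frame-rate ranges are typed AVFrameRateRange. A sketch of the tightened lookups (hypothetical helper names):

    import AVFoundation

    func camera(at position: AVCaptureDevice.Position) -> AVCaptureDevice? {
        return AVCaptureDevice.devices().first {
            $0.hasMediaType(.video) && $0.position == position
        }
    }

    func maxFrameRates(of device: AVCaptureDevice) -> [Float64] {
        // videoSupportedFrameRateRanges is [AVFrameRateRange] in Swift 4.
        return device.activeFormat.videoSupportedFrameRateRanges.map { $0.maxFrameRate }
    }
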
VideoGravityUtil.swift
@@ -2,11 +2,11 @@ import Foundation
 import AVFoundation

 final class VideoGravityUtil {
-    @inline(__always) static func calclute(_ videoGravity:String, inRect:inout CGRect, fromRect:inout CGRect) {
+    @inline(__always) static func calclute(_ videoGravity:AVLayerVideoGravity, inRect:inout CGRect, fromRect:inout CGRect) {
         switch videoGravity {
-        case AVLayerVideoGravity.resizeAspect:
+        case .resizeAspect:
             resizeAspect(&inRect, fromRect: &fromRect)
-        case AVLayerVideoGravity.resizeAspectFill:
+        case .resizeAspectFill:
             resizeAspectFill(&inRect, fromRect: &fromRect)
         default:
             break

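Migration note: the same constants-into-types move applies to AVLayerVideoGravity, so calclute(_:inRect:fromRect:) takes the typed value and matches on implicit members. Since AVLayerVideoGravity is a struct rather than an enum, the switch still needs a default branch. A sketch:

    import AVFoundation

    func describe(_ gravity: AVLayerVideoGravity) -> String {
        switch gravity {
        case .resizeAspect:     return "aspect fit"
        case .resizeAspectFill: return "aspect fill"
        default:                return gravity.rawValue  // struct, not enum: default required
        }
    }
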
lf.podspec (deleted)
@@ -1,30 +0,0 @@
-Pod::Spec.new do |s|
-
-  s.name = "lf"
-  s.version = "0.7.5"
-  s.summary = "Camera and Microphone streaming library via RTMP, HLS for iOS, macOS, tvOS."
-
-  s.description = <<-DESC
-  HaishinKit (formerly lf). Camera and Microphone streaming library via RTMP, HLS for iOS, macOS, tvOS.
-  DESC
-
-  s.homepage = "https://github.com/shogo4405/HaishinKit.swift"
-  s.license = "New BSD"
-  s.author = { "shogo4405" => "shogo4405@gmail.com" }
-  s.authors = { "shogo4405" => "shogo4405@gmail.com" }
-  s.source = { :git => "https://github.com/shogo4405/HaishinKit.swift.git", :tag => "#{s.version}" }
-  s.social_media_url = "http://twitter.com/shogo4405"
-
-  s.ios.deployment_target = "8.0"
-  s.ios.source_files = "Platforms/iOS/*.{h,swift}"
-
-  s.osx.deployment_target = "10.11"
-  s.osx.source_files = "Platforms/macOS/*.{h,swift}"
-
-  s.tvos.deployment_target = "10.2"
-  s.tvos.source_files = "Platforms/tvOS/*.{h,swift}"
-
-  s.source_files = "Sources/**/*.swift"
-  s.dependency 'Logboard', '~> 1.0.1'
-
-end