Migration Swift 4.0

shogo4405 2017-09-18 00:23:59 +09:00
parent 8e9d41b7cc
commit fca51c38bc
23 changed files with 131 additions and 96 deletions

View File

@@ -15,7 +15,7 @@ class BroadcastViewController: UIViewController {
startButton.addTarget(self, action: #selector(BroadcastViewController.userDidFinishSetup), for: .touchDown)
}
func userDidFinishSetup() {
@objc func userDidFinishSetup() {
let broadcastURL:URL = URL(string: endpointURLField.text!)!

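Migration note: the `@objc` added in this hunk (and again on handlers in MainViewController, ScreenCaptureSession, RTMPConnection, and RTMPSharedObject below) follows from SE-0160, which stops Swift 4 from inferring `@objc` on most members. Anything reached through the Objective-C runtime, such as a `#selector` target, now needs the attribute spelled out. A minimal sketch of the pattern, not taken from the commit:

```swift
import UIKit

class BroadcastViewController: UIViewController {
    let startButton = UIButton(type: .system)

    override func viewDidLoad() {
        super.viewDidLoad()
        // Target-action dispatches through the Objective-C runtime.
        startButton.addTarget(self, action: #selector(userDidFinishSetup), for: .touchDown)
    }

    // Swift 4 no longer infers @objc here; without the annotation,
    // the #selector reference above fails to compile.
    @objc func userDidFinishSetup() {
        // handle the tap...
    }
}
```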
View File

@@ -26,14 +26,14 @@ final class MainViewController: NSViewController {
urlField.stringValue = Preference.defaultInstance.uri ?? ""
let audios:[Any]! = AVCaptureDevice.devices(withMediaType: AVMediaTypeAudio)
let audios:[Any]! = AVCaptureDevice.devices(for: AVMediaType.audio)
for audio in audios {
if let audio:AVCaptureDevice = audio as? AVCaptureDevice {
audioPopUpButton?.addItem(withTitle: audio.localizedName)
}
}
let cameras:[Any]! = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo)
let cameras:[Any]! = AVCaptureDevice.devices(for: AVMediaType.video)
for camera in cameras {
if let camera:AVCaptureDevice = camera as? AVCaptureDevice {
cameraPopUpButton?.addItem(withTitle: camera.localizedName)
@@ -43,8 +43,8 @@ final class MainViewController: NSViewController {
override func viewWillAppear() {
super.viewWillAppear()
rtmpStream.attachAudio(DeviceUtil.device(withLocalizedName: audioPopUpButton.itemTitles[audioPopUpButton.indexOfSelectedItem], mediaType: AVMediaTypeAudio))
rtmpStream.attachCamera(DeviceUtil.device(withLocalizedName: cameraPopUpButton.itemTitles[cameraPopUpButton.indexOfSelectedItem], mediaType: AVMediaTypeVideo))
rtmpStream.attachAudio(DeviceUtil.device(withLocalizedName: audioPopUpButton.itemTitles[audioPopUpButton.indexOfSelectedItem], mediaType: AVMediaType.audio.rawValue))
rtmpStream.attachCamera(DeviceUtil.device(withLocalizedName: cameraPopUpButton.itemTitles[cameraPopUpButton.indexOfSelectedItem], mediaType: AVMediaType.video.rawValue))
lfView?.attachStream(rtmpStream)
}
@@ -101,7 +101,7 @@ final class MainViewController: NSViewController {
@IBAction func selectAudio(_ sender:AnyObject) {
let device:AVCaptureDevice? = DeviceUtil.device(withLocalizedName:
audioPopUpButton.itemTitles[audioPopUpButton.indexOfSelectedItem], mediaType: AVMediaTypeAudio
audioPopUpButton.itemTitles[audioPopUpButton.indexOfSelectedItem], mediaType: AVMediaType.audio.rawValue
)
switch segmentedControl.selectedSegment {
case 0:
@@ -117,7 +117,7 @@ final class MainViewController: NSViewController {
@IBAction func selectCamera(_ sender:AnyObject) {
let device:AVCaptureDevice? = DeviceUtil.device(withLocalizedName:
cameraPopUpButton.itemTitles[cameraPopUpButton.indexOfSelectedItem], mediaType: AVMediaTypeVideo
cameraPopUpButton.itemTitles[cameraPopUpButton.indexOfSelectedItem], mediaType: AVMediaType.video.rawValue
)
switch segmentedControl.selectedSegment {
case 0:
@@ -136,15 +136,15 @@ final class MainViewController: NSViewController {
case 0:
httpStream.attachAudio(nil)
httpStream.attachCamera(nil)
rtmpStream.attachAudio(DeviceUtil.device(withLocalizedName: audioPopUpButton.itemTitles[audioPopUpButton.indexOfSelectedItem], mediaType: AVMediaTypeAudio))
rtmpStream.attachCamera(DeviceUtil.device(withLocalizedName: cameraPopUpButton.itemTitles[cameraPopUpButton.indexOfSelectedItem], mediaType: AVMediaTypeVideo))
rtmpStream.attachAudio(DeviceUtil.device(withLocalizedName: audioPopUpButton.itemTitles[audioPopUpButton.indexOfSelectedItem], mediaType: AVMediaType.audio.rawValue))
rtmpStream.attachCamera(DeviceUtil.device(withLocalizedName: cameraPopUpButton.itemTitles[cameraPopUpButton.indexOfSelectedItem], mediaType: AVMediaType.video.rawValue))
lfView.attachStream(rtmpStream)
urlField.stringValue = Preference.defaultInstance.uri ?? ""
case 1:
rtmpStream.attachAudio(nil)
rtmpStream.attachCamera(nil)
httpStream.attachAudio(DeviceUtil.device(withLocalizedName: audioPopUpButton.itemTitles[audioPopUpButton.indexOfSelectedItem], mediaType: AVMediaTypeAudio))
httpStream.attachCamera(DeviceUtil.device(withLocalizedName: cameraPopUpButton.itemTitles[cameraPopUpButton.indexOfSelectedItem], mediaType: AVMediaTypeVideo))
httpStream.attachAudio(DeviceUtil.device(withLocalizedName: audioPopUpButton.itemTitles[audioPopUpButton.indexOfSelectedItem], mediaType: AVMediaType.audio.rawValue))
httpStream.attachCamera(DeviceUtil.device(withLocalizedName: cameraPopUpButton.itemTitles[cameraPopUpButton.indexOfSelectedItem], mediaType: AVMediaType.video.rawValue))
lfView.attachStream(httpStream)
urlField.stringValue = "http://{ipAddress}:8080/hello/playlist.m3u8"
default:
@@ -152,7 +152,7 @@ final class MainViewController: NSViewController {
}
}
func rtmpStatusHandler(_ notification:Notification) {
@objc func rtmpStatusHandler(_ notification:Notification) {
let e:Event = Event.from(notification)
guard
let data:ASObject = e.data as? ASObject,

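Migration note: the iOS 11 and macOS 10.13 SDKs replace AVFoundation's loose string constants (`AVMediaTypeAudio`, `AVMediaTypeVideo`) with the nested `AVMediaType` struct, and `devices(withMediaType:)` becomes `devices(for:)`. Where the project's own APIs, such as `DeviceUtil`, still take a `String`, the commit bridges through `rawValue`. A hedged sketch of both directions:

```swift
import AVFoundation

// Swift 4 SDK: media types are a RawRepresentable struct, not bare strings.
let microphones = AVCaptureDevice.devices(for: .audio)
let cameras = AVCaptureDevice.devices(for: .video)

// Bridging to and from String for APIs that still traffic in raw values.
let raw: String = AVMediaType.video.rawValue   // the four-char code "vide"
let mediaType = AVMediaType(rawValue: raw)     // back to the struct
```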
View File

@@ -1318,23 +1318,25 @@
attributes = {
LastSwiftMigration = 0700;
LastSwiftUpdateCheck = 0830;
LastUpgradeCheck = 0820;
LastUpgradeCheck = 0900;
ORGANIZATIONNAME = "Shogo Endo";
TargetAttributes = {
2915EC511D85BDF100621092 = {
CreatedOnToolsVersion = 8.0;
DevelopmentTeam = SUEQ2SZ2L5;
LastSwiftMigration = 0900;
ProvisioningStyle = Automatic;
};
2915EC5E1D85BDF100621092 = {
CreatedOnToolsVersion = 8.0;
DevelopmentTeam = SUEQ2SZ2L5;
LastSwiftMigration = 0900;
ProvisioningStyle = Automatic;
};
2945CBBC1B4BE66000104112 = {
CreatedOnToolsVersion = 6.4;
DevelopmentTeam = SUEQ2SZ2L5;
LastSwiftMigration = 0800;
LastSwiftMigration = 0900;
};
29798E581CE60E5300F5CBD0 = {
CreatedOnToolsVersion = 7.3.1;
@@ -1342,20 +1344,21 @@
};
29B8761A1CD701F900FC07DA = {
CreatedOnToolsVersion = 7.3;
LastSwiftMigration = 0800;
LastSwiftMigration = 0900;
};
29C9327D1CD76FB800283FC5 = {
CreatedOnToolsVersion = 7.3;
DevelopmentTeam = SUEQ2SZ2L5;
LastSwiftMigration = 0800;
LastSwiftMigration = 0900;
ProvisioningStyle = Automatic;
};
29C932931CD76FD300283FC5 = {
CreatedOnToolsVersion = 7.3;
LastSwiftMigration = 0800;
LastSwiftMigration = 0900;
};
29D3D4DB1ED0509900DD4AA6 = {
CreatedOnToolsVersion = 8.3.2;
LastSwiftMigration = 0900;
ProvisioningStyle = Automatic;
};
29D3D4EE1ED0531500DD4AA6 = {
@@ -1958,7 +1961,8 @@
PRODUCT_NAME = "$(TARGET_NAME)";
SKIP_INSTALL = YES;
SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG;
SWIFT_VERSION = 3.0;
SWIFT_SWIFT3_OBJC_INFERENCE = On;
SWIFT_VERSION = 4.0;
};
name = Debug;
};
@@ -1977,7 +1981,8 @@
PRODUCT_BUNDLE_IDENTIFIER = "com.haishinkit.HaishinKit.Example-iOS.Screencast";
PRODUCT_NAME = "$(TARGET_NAME)";
SKIP_INSTALL = YES;
SWIFT_VERSION = 3.0;
SWIFT_SWIFT3_OBJC_INFERENCE = On;
SWIFT_VERSION = 4.0;
};
name = Release;
};
@@ -1998,7 +2003,8 @@
PRODUCT_NAME = "$(TARGET_NAME)";
SKIP_INSTALL = YES;
SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG;
SWIFT_VERSION = 3.0;
SWIFT_SWIFT3_OBJC_INFERENCE = On;
SWIFT_VERSION = 4.0;
};
name = Debug;
};
@@ -2017,7 +2023,8 @@
PRODUCT_BUNDLE_IDENTIFIER = "com.haishinkit.HaishinKit.Example-iOS.ScreencastUI";
PRODUCT_NAME = "$(TARGET_NAME)";
SKIP_INSTALL = YES;
SWIFT_VERSION = 3.0;
SWIFT_SWIFT3_OBJC_INFERENCE = On;
SWIFT_VERSION = 4.0;
};
name = Release;
};
@@ -2029,14 +2036,20 @@
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
@@ -2081,14 +2094,20 @@
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
@@ -2144,7 +2163,8 @@
PROVISIONING_PROFILE = "";
SKIP_INSTALL = YES;
SWIFT_INCLUDE_PATHS = "";
SWIFT_VERSION = 3.0;
SWIFT_SWIFT3_OBJC_INFERENCE = On;
SWIFT_VERSION = 4.0;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Debug;
@@ -2173,7 +2193,8 @@
PROVISIONING_PROFILE = "";
SKIP_INSTALL = YES;
SWIFT_INCLUDE_PATHS = "";
SWIFT_VERSION = 3.0;
SWIFT_SWIFT3_OBJC_INFERENCE = On;
SWIFT_VERSION = 4.0;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Release;
@@ -2245,7 +2266,8 @@
PRODUCT_NAME = HaishinKit;
SDKROOT = macosx;
SKIP_INSTALL = YES;
SWIFT_VERSION = 3.0;
SWIFT_SWIFT3_OBJC_INFERENCE = On;
SWIFT_VERSION = 4.0;
};
name = Debug;
};
@@ -2273,7 +2295,8 @@
PRODUCT_NAME = HaishinKit;
SDKROOT = macosx;
SKIP_INSTALL = YES;
SWIFT_VERSION = 3.0;
SWIFT_SWIFT3_OBJC_INFERENCE = On;
SWIFT_VERSION = 4.0;
};
name = Release;
};
@@ -2299,7 +2322,8 @@
ONLY_ACTIVE_ARCH = YES;
PRODUCT_BUNDLE_IDENTIFIER = "com.haishinkit.HaishinKit.Example-iOS";
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_VERSION = 3.0;
SWIFT_SWIFT3_OBJC_INFERENCE = On;
SWIFT_VERSION = 4.0;
};
name = Debug;
};
@@ -2322,7 +2346,8 @@
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
PRODUCT_BUNDLE_IDENTIFIER = "com.haishinkit.HaishinKit.Example-iOS";
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_VERSION = 3.0;
SWIFT_SWIFT3_OBJC_INFERENCE = On;
SWIFT_VERSION = 4.0;
};
name = Release;
};
@@ -2344,7 +2369,8 @@
PRODUCT_BUNDLE_IDENTIFIER = "com.haishinkit.HaishinKit.Example-macOS";
PRODUCT_NAME = "$(TARGET_NAME)";
SDKROOT = macosx;
SWIFT_VERSION = 3.0;
SWIFT_SWIFT3_OBJC_INFERENCE = On;
SWIFT_VERSION = 4.0;
};
name = Debug;
};
@@ -2365,7 +2391,8 @@
PRODUCT_BUNDLE_IDENTIFIER = "com.haishinkit.HaishinKit.Example-macOS";
PRODUCT_NAME = "$(TARGET_NAME)";
SDKROOT = macosx;
SWIFT_VERSION = 3.0;
SWIFT_SWIFT3_OBJC_INFERENCE = On;
SWIFT_VERSION = 4.0;
};
name = Release;
};
@@ -2394,7 +2421,8 @@
SDKROOT = appletvos;
SKIP_INSTALL = YES;
SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG;
SWIFT_VERSION = 3.0;
SWIFT_SWIFT3_OBJC_INFERENCE = On;
SWIFT_VERSION = 4.0;
TARGETED_DEVICE_FAMILY = 3;
TVOS_DEPLOYMENT_TARGET = 10.2;
};
@@ -2423,7 +2451,8 @@
PRODUCT_NAME = HaishinKit;
SDKROOT = appletvos;
SKIP_INSTALL = YES;
SWIFT_VERSION = 3.0;
SWIFT_SWIFT3_OBJC_INFERENCE = On;
SWIFT_VERSION = 4.0;
TARGETED_DEVICE_FAMILY = 3;
TVOS_DEPLOYMENT_TARGET = 10.2;
};

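Migration note: in the project file, every target's `SWIFT_VERSION` moves from 3.0 to 4.0 and gains `SWIFT_SWIFT3_OBJC_INFERENCE = On`, Xcode 9's transition setting that temporarily keeps Swift 3's broad implicit `@objc` inference while call sites are annotated by hand; the new `CLANG_WARN_*` entries are the warning checks Xcode 9 adds during a project upgrade. What the transition setting preserves, as an illustrative sketch:

```swift
import Foundation

class Worker: NSObject {
    // With SWIFT_SWIFT3_OBJC_INFERENCE = On, this method is still
    // implicitly exposed to the Objective-C runtime (Swift 3 behavior),
    // so legacy #selector and key-value call sites keep working.
    func tick() { }

    // The long-term fix, applied piecemeal in this commit, is the
    // explicit attribute; once everything is annotated, the setting
    // can be switched off.
    @objc func tock() { }
}
```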
View File

@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "0830"
LastUpgradeVersion = "0900"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
@@ -26,6 +26,7 @@
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
language = ""
shouldUseLaunchSchemeArgsEnv = "YES">
<Testables>
</Testables>
@@ -36,6 +37,7 @@
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
language = ""
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"

View File

@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "0830"
LastUpgradeVersion = "0900"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
@@ -26,6 +26,7 @@
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
language = ""
shouldUseLaunchSchemeArgsEnv = "YES">
<Testables>
</Testables>
@@ -36,6 +37,7 @@
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
language = ""
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"

View File

@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "0830"
LastUpgradeVersion = "0900"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
@@ -26,6 +26,7 @@
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
language = ""
shouldUseLaunchSchemeArgsEnv = "YES">
<Testables>
</Testables>
@@ -36,6 +37,7 @@
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
language = ""
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"

View File

@@ -80,7 +80,7 @@ open class ScreenCaptureSession: NSObject {
super.init()
}
public func onScreen(_ displayLink:CADisplayLink) {
@objc public func onScreen(_ displayLink:CADisplayLink) {
guard semaphore.wait(timeout: DispatchTime.now()) == .success else {
return
}

View File

@@ -20,9 +20,9 @@ open class GLLFView: NSOpenGLView {
return pixelFormat
}
public var videoGravity:String! = AVLayerVideoGravityResizeAspect
public var videoGravity:String! = AVLayerVideoGravity.resizeAspect.rawValue
var orientation:AVCaptureVideoOrientation = .portrait
var position:AVCaptureDevicePosition = .front
var position:AVCaptureDevice.Position = .front
fileprivate var displayImage:CIImage!
fileprivate var originalFrame:CGRect = CGRect.zero
fileprivate var scale:CGRect = CGRect.zero

View File

@@ -4,13 +4,13 @@ import AVFoundation
open class LFView: NSView {
public static var defaultBackgroundColor:NSColor = NSColor.black
public var videoGravity:String = AVLayerVideoGravityResizeAspect {
public var videoGravity:String = AVLayerVideoGravity.resizeAspect.rawValue {
didSet {
layer?.setValue(videoGravity, forKey: "videoGravity")
}
}
var position:AVCaptureDevicePosition = .front {
var position:AVCaptureDevice.Position = .front {
didSet {
DispatchQueue.main.async {
self.layer?.setNeedsLayout()

View File

@@ -8,7 +8,7 @@ open class GLLFView: GLKView {
kCIContextUseSoftwareRenderer: NSNumber(value: false),
]
open static var defaultBackgroundColor:UIColor = UIColor.black
open var videoGravity:String = AVLayerVideoGravityResizeAspect
open var videoGravity:String = AVLayerVideoGravity.resizeAspect.rawValue
fileprivate var displayImage:CIImage?
fileprivate weak var currentStream:NetStream? {
didSet {
@@ -20,13 +20,13 @@ open class GLLFView: GLKView {
}
public override init(frame: CGRect) {
super.init(frame: frame, context: EAGLContext(api: .openGLES2))
super.init(frame: frame, context: EAGLContext(api: .openGLES2)!)
awakeFromNib()
}
required public init?(coder aDecoder: NSCoder) {
super.init(coder: aDecoder)
self.context = EAGLContext(api: .openGLES2)
self.context = EAGLContext(api: .openGLES2)!
}
open override func awakeFromNib() {

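Migration note: `EAGLContext(api:)` is a failable initializer, and under Swift 4 both `GLKView.init(frame:context:)` and the `context` property want a non-optional context, hence the force unwraps added here. A safer shape for the same unwrap, assuming a missing OpenGL ES 2.0 context should fail loudly:

```swift
import GLKit

final class PreviewView: GLKView {
    override init(frame: CGRect) {
        // EAGLContext(api:) returns nil if the rendering API is unavailable.
        guard let context = EAGLContext(api: .openGLES2) else {
            fatalError("OpenGL ES 2.0 context could not be created")
        }
        super.init(frame: frame, context: context)
    }

    required init?(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)
        self.context = EAGLContext(api: .openGLES2)!
    }
}
```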
View File

@@ -669,10 +669,10 @@ final class MP4TrakReader {
handle = try FileHandle(forReadingFrom: reader.url)
if let avcC:MP4Box = trak.getBoxes(byName: "avcC").first {
delegate?.didSet(config: reader.readData(ofBox: avcC), withID: id, type: AVMediaTypeVideo)
delegate?.didSet(config: reader.readData(ofBox: avcC), withID: id, type: AVMediaType.video.rawValue)
}
if let esds:MP4ElementaryStreamDescriptorBox = trak.getBoxes(byName: "esds").first as? MP4ElementaryStreamDescriptorBox {
delegate?.didSet(config: Data(esds.audioDecorderSpecificConfig), withID: id, type: AVMediaTypeAudio)
delegate?.didSet(config: Data(esds.audioDecorderSpecificConfig), withID: id, type: AVMediaType.audio.rawValue)
}
timerDriver.interval = MachUtil.nanosToAbs(UInt64(currentTimeToSample * 1000 * 1000))

View File

@@ -132,11 +132,11 @@ struct TSTimestamp {
static func encode(_ b:UInt64, _ m:UInt8) -> Data {
var data:Data = Data(count: 5)
data[0] = UInt8(truncatingBitPattern: b >> 29) | 0x01 | m
data[1] = UInt8(truncatingBitPattern: b >> 22)
data[2] = UInt8(truncatingBitPattern: b >> 14) | 0x01
data[3] = UInt8(truncatingBitPattern: b >> 7)
data[4] = UInt8(truncatingBitPattern: b << 1) | 0x01
data[0] = UInt8(truncatingIfNeeded: b >> 29) | 0x01 | m
data[1] = UInt8(truncatingIfNeeded: b >> 22)
data[2] = UInt8(truncatingIfNeeded: b >> 14) | 0x01
data[3] = UInt8(truncatingIfNeeded: b >> 7)
data[4] = UInt8(truncatingIfNeeded: b << 1) | 0x01
return data
}
}
@@ -161,10 +161,10 @@ struct TSProgramClockReference {
static func encode(_ b:UInt64, _ e:UInt16) -> Data {
var data:Data = Data(count: 6)
data[0] = UInt8(truncatingBitPattern: b >> 25)
data[1] = UInt8(truncatingBitPattern: b >> 17)
data[2] = UInt8(truncatingBitPattern: b >> 9)
data[3] = UInt8(truncatingBitPattern: b >> 1)
data[0] = UInt8(truncatingIfNeeded: b >> 25)
data[1] = UInt8(truncatingIfNeeded: b >> 17)
data[2] = UInt8(truncatingIfNeeded: b >> 9)
data[3] = UInt8(truncatingIfNeeded: b >> 1)
data[4] = 0xff
if (b & 1 == 1) {
data[4] |= 0x80
@@ -176,7 +176,7 @@ struct TSProgramClockReference {
} else {
data[4] &= 0xfe
}
data[5] = UInt8(truncatingBitPattern: e)
data[5] = UInt8(truncatingIfNeeded: e)
return data
}
}
@@ -218,14 +218,14 @@ struct TSAdaptationField {
}
mutating func compute() {
length = UInt8(truncatingBitPattern: TSAdaptationField.fixedSectionSize)
length += UInt8(truncatingBitPattern: PCR.count)
length += UInt8(truncatingBitPattern: OPCR.count)
length += UInt8(truncatingBitPattern: transportPrivateData.count)
length = UInt8(truncatingIfNeeded: TSAdaptationField.fixedSectionSize)
length += UInt8(truncatingIfNeeded: PCR.count)
length += UInt8(truncatingIfNeeded: OPCR.count)
length += UInt8(truncatingIfNeeded: transportPrivateData.count)
if let adaptationExtension:TSAdaptationExtensionField = adaptationExtension {
length += adaptationExtension.length + 1
}
length += UInt8(truncatingBitPattern: stuffingBytes.count)
length += UInt8(truncatingIfNeeded: stuffingBytes.count)
length -= 1
}

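Migration note: SE-0104, Swift 4's integer-protocol overhaul, renames `UInt8(truncatingBitPattern:)` to `UInt8(truncatingIfNeeded:)`. The behavior is unchanged: only the low-order 8 bits survive, which is exactly what the PES timestamp and PCR encoders above rely on when slicing a wide value into bytes. For example:

```swift
let pts: UInt64 = 0x1_2345_6789  // a 33-bit presentation timestamp

// Swift 3: UInt8(truncatingBitPattern: pts >> 22)
// Swift 4: same truncation, new label.
let byte = UInt8(truncatingIfNeeded: pts >> 22)
assert(byte == UInt8((pts >> 22) & 0xff))  // keeps the low 8 bits
```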
View File

@@ -15,7 +15,7 @@ final public class AVMixer: NSObject {
kCVPixelBufferPixelFormatTypeKey: NSNumber(value: kCVPixelFormatType_32BGRA)
]
#if os(iOS) || os(macOS)
static let defaultSessionPreset:String = AVCaptureSessionPresetMedium
static let defaultSessionPreset:String = AVCaptureSession.Preset.medium.rawValue
var fps:Float64 {
get { return videoIO.fps }
@@ -38,7 +38,7 @@ final public class AVMixer: NSObject {
return
}
session.beginConfiguration()
session.sessionPreset = sessionPreset
session.sessionPreset = AVCaptureSession.Preset(rawValue: sessionPreset)
session.commitConfiguration()
}
}
@@ -48,7 +48,7 @@ final public class AVMixer: NSObject {
get {
if (_session == nil) {
_session = AVCaptureSession()
_session!.sessionPreset = AVMixer.defaultSessionPreset
_session!.sessionPreset = AVCaptureSession.Preset(rawValue: AVMixer.defaultSessionPreset)
}
return _session!
}

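Migration note: `AVCaptureSessionPresetMedium` likewise becomes `AVCaptureSession.Preset.medium`. Because `AVMixer` keeps its `sessionPreset` property as a `String`, the commit re-wraps the raw value at the assignment. The wrap itself always succeeds, since `AVCaptureSession.Preset` is a `RawRepresentable` struct over `String`; whether a preset is actually usable remains a per-session question. A small sketch:

```swift
import AVFoundation

let session = AVCaptureSession()
// Round-trip: struct -> raw String -> struct, as the commit does.
let preset = AVCaptureSession.Preset(rawValue: AVCaptureSession.Preset.medium.rawValue)

// Wrapping never fails, so validity is checked against the session.
if session.canSetSessionPreset(preset) {
    session.sessionPreset = preset
}
```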
View File

@@ -15,12 +15,12 @@ public protocol AVMixerRecorderDelegate: class {
open class AVMixerRecorder: NSObject {
open static let defaultOutputSettings:[String:[String:Any]] = [
AVMediaTypeAudio: [
AVMediaType.audio.rawValue: [
AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
AVSampleRateKey: 0,
AVNumberOfChannelsKey: 0,
],
AVMediaTypeVideo: [
AVMediaType.video.rawValue: [
AVVideoCodecKey: AVVideoCodecH264,
AVVideoHeightKey: 0,
AVVideoWidthKey: 0,
@@ -84,10 +84,10 @@ open class AVMixerRecorder: NSObject {
return
}
delegate.rotateFile(self, withPresentationTimeStamp: withPresentationTime, mediaType: AVMediaTypeVideo)
delegate.rotateFile(self, withPresentationTimeStamp: withPresentationTime, mediaType: AVMediaType.video.rawValue)
guard
let writer:AVAssetWriter = self.writer,
let input:AVAssetWriterInput = delegate.getWriterInput(self, mediaType: AVMediaTypeVideo, sourceFormatHint: CMVideoFormatDescription.create(withPixelBuffer: pixelBuffer)),
let input:AVAssetWriterInput = delegate.getWriterInput(self, mediaType: AVMediaType.video.rawValue, sourceFormatHint: CMVideoFormatDescription.create(withPixelBuffer: pixelBuffer)),
let adaptor:AVAssetWriterInputPixelBufferAdaptor = delegate.getPixelBufferAdaptor(self, withWriterInput: input),
self.isReadyForStartWriting else {
return
@@ -153,7 +153,7 @@ open class DefaultAVMixerRecorderDelegate: NSObject {
open var dateFormat:String = "-yyyyMMdd-HHmmss"
fileprivate var rotateTime:CMTime = kCMTimeZero
fileprivate var clockReference:String = AVMediaTypeVideo
fileprivate var clockReference:String = AVMediaType.video.rawValue
#if os(iOS)
open lazy var moviesDirectory:URL = {
@@ -203,7 +203,7 @@ extension DefaultAVMixerRecorderDelegate: AVMixerRecorderDelegate {
var outputSettings:[String:Any] = [:]
if let defaultOutputSettings:[String:Any] = recorder.outputSettings[mediaType] {
switch mediaType {
case AVMediaTypeAudio:
case AVMediaType.audio:
guard
let format:CMAudioFormatDescription = sourceFormatHint,
let inSourceFormat:AudioStreamBasicDescription = format.streamBasicDescription?.pointee else {
@@ -219,7 +219,7 @@ extension DefaultAVMixerRecorderDelegate: AVMixerRecorderDelegate {
outputSettings[key] = value
}
}
case AVMediaTypeVideo:
case AVMediaType.video:
guard let format:CMVideoFormatDescription = sourceFormatHint else {
break
}
@@ -238,7 +238,7 @@
}
}
let input:AVAssetWriterInput = AVAssetWriterInput(mediaType: mediaType, outputSettings: outputSettings, sourceFormatHint: sourceFormatHint)
let input:AVAssetWriterInput = AVAssetWriterInput(mediaType: AVMediaType(rawValue: mediaType), outputSettings: outputSettings, sourceFormatHint: sourceFormatHint)
input.expectsMediaDataInRealTime = true
recorder.writerInputs[mediaType] = input
recorder.writer?.add(input)
@@ -270,7 +270,7 @@ extension DefaultAVMixerRecorderDelegate: AVMixerRecorderDelegate {
}
let url:URL = moviesDirectory.appendingPathComponent((fileComponent ?? UUID().uuidString) + ".mp4")
logger.info("\(url)")
return try AVAssetWriter(outputURL: url, fileType: AVFileTypeMPEG4)
return try AVAssetWriter(outputURL: url, fileType: AVFileType.mp4)
} catch {
logger.warn("create an AVAssetWriter")
}

View File

@@ -86,8 +86,8 @@ final class AudioIOComponent: IOComponent {
extension AudioIOComponent: AVCaptureAudioDataOutputSampleBufferDelegate {
// MARK: AVCaptureAudioDataOutputSampleBufferDelegate
func captureOutput(_ captureOutput:AVCaptureOutput!, didOutputSampleBuffer sampleBuffer:CMSampleBuffer!, from connection:AVCaptureConnection!) {
mixer?.recorder.appendSampleBuffer(sampleBuffer, mediaType: AVMediaTypeAudio)
func captureOutput(captureOutput:AVCaptureOutput, didOutput sampleBuffer:CMSampleBuffer, from connection:AVCaptureConnection) {
mixer?.recorder.appendSampleBuffer(sampleBuffer, mediaType: AVMediaType.audio.rawValue)
encoder.encodeSampleBuffer(sampleBuffer)
}
}

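Migration note: the iOS 11 SDK renames the sample-buffer callback from `captureOutput(_:didOutputSampleBuffer:from:)` to `captureOutput(_:didOutput:from:)`. One caveat worth flagging: the protocol requirement keeps the unlabeled first parameter, and because the requirement is optional, a signature that drops the underscore compiles cleanly but is never called. The form AVFoundation expects, as a reference sketch:

```swift
import AVFoundation

final class AudioCapture: NSObject, AVCaptureAudioDataOutputSampleBufferDelegate {
    // Note the leading underscore: without it the selector no longer
    // matches the optional requirement and the delegate goes silent.
    func captureOutput(_ output: AVCaptureOutput,
                       didOutput sampleBuffer: CMSampleBuffer,
                       from connection: AVCaptureConnection) {
        // forward sampleBuffer to the recorder / encoder...
    }
}
```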
View File

@@ -45,11 +45,11 @@ final class VideoIOComponent: IOComponent {
}
}
var position:AVCaptureDevicePosition = .back
var position:AVCaptureDevice.Position = .back
var videoSettings:[NSObject:AnyObject] = AVMixer.defaultVideoSettings {
didSet {
output.videoSettings = videoSettings
output.videoSettings = videoSettings as! [String : Any]
}
}
@@ -86,7 +86,7 @@
guard continuousAutofocus != oldValue else {
return
}
let focusMode:AVCaptureFocusMode = continuousAutofocus ? .continuousAutoFocus : .autoFocus
let focusMode:AVCaptureDevice.FocusMode = continuousAutofocus ? .continuousAutoFocus : .autoFocus
guard let device:AVCaptureDevice = (input as? AVCaptureDeviceInput)?.device,
device.isFocusModeSupported(focusMode) else {
logger.warn("focusMode(\(focusMode.rawValue)) is not supported")
@@ -146,7 +146,7 @@
guard continuousExposure != oldValue else {
return
}
let exposureMode:AVCaptureExposureMode = continuousExposure ? .continuousAutoExposure : .autoExpose
let exposureMode:AVCaptureDevice.ExposureMode = continuousExposure ? .continuousAutoExposure : .autoExpose
guard let device:AVCaptureDevice = (input as? AVCaptureDeviceInput)?.device,
device.isExposureModeSupported(exposureMode) else {
logger.warn("exposureMode(\(exposureMode.rawValue)) is not supported")
@@ -168,7 +168,7 @@
if (_output == nil) {
_output = AVCaptureVideoDataOutput()
_output!.alwaysDiscardsLateVideoFrames = true
_output!.videoSettings = videoSettings
_output!.videoSettings = videoSettings as! [String : Any]
}
return _output!
}
@@ -266,7 +266,7 @@
drawable?.position = camera.position
}
func setTorchMode(_ torchMode:AVCaptureTorchMode) {
func setTorchMode(_ torchMode:AVCaptureDevice.TorchMode) {
guard let device:AVCaptureDevice = (input as? AVCaptureDeviceInput)?.device, device.isTorchModeSupported(torchMode) else {
logger.warn("torchMode(\(torchMode)) is not supported")
return
@@ -325,7 +325,7 @@
extension VideoIOComponent: AVCaptureVideoDataOutputSampleBufferDelegate {
// MARK: AVCaptureVideoDataOutputSampleBufferDelegate
func captureOutput(_ captureOutput:AVCaptureOutput!, didOutputSampleBuffer sampleBuffer:CMSampleBuffer!, from connection:AVCaptureConnection!) {
func captureOutput(captureOutput:AVCaptureOutput, didOutput sampleBuffer:CMSampleBuffer, from connection:AVCaptureConnection) {
guard var buffer:CVImageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
return
}
@@ -345,7 +345,7 @@ extension VideoIOComponent: AVCaptureVideoDataOutputSampleBufferDelegate {
duration: sampleBuffer.duration
)
drawable?.draw(image: image)
mixer?.recorder.appendSampleBuffer(sampleBuffer, mediaType: AVMediaTypeVideo)
mixer?.recorder.appendSampleBuffer(sampleBuffer, mediaType: AVMediaType.video.rawValue)
}
}

View File

@@ -5,7 +5,7 @@ import AVFoundation
protocol NetStreamDrawable: class {
#if os(iOS) || os(macOS)
var orientation:AVCaptureVideoOrientation { get set }
var position:AVCaptureDevicePosition { get set }
var position:AVCaptureDevice.Position { get set }
#endif
func draw(image:CIImage)

View File

@@ -182,11 +182,11 @@ open class RTMPConnection: EventDispatcher {
return streams.count
}
/// The statistics of outgoing queue bytes per second.
dynamic open fileprivate(set) var previousQueueBytesOut:[Int64] = []
@objc dynamic open fileprivate(set) var previousQueueBytesOut:[Int64] = []
/// The statistics of incoming bytes per second.
dynamic open fileprivate(set) var currentBytesInPerSecond:Int32 = 0
@objc dynamic open fileprivate(set) var currentBytesInPerSecond:Int32 = 0
/// The statistics of outgoing bytes per second.
dynamic open fileprivate(set) var currentBytesOutPerSecond:Int32 = 0
@objc dynamic open fileprivate(set) var currentBytesOutPerSecond:Int32 = 0
var socket:RTMPSocketCompatible!
var streams:[UInt32: RTMPStream] = [:]
@@ -311,7 +311,7 @@ open class RTMPConnection: EventDispatcher {
call("createStream", responder: responder)
}
func on(status:Notification) {
@objc func on(status:Notification) {
let e:Event = Event.from(status)
guard

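Migration note: `dynamic` members are another casualty of SE-0160. `dynamic` means Objective-C message dispatch, so Swift 4 requires the `@objc` exposure to be explicit; the same change appears on `RTMPStream.currentFPS` below. These statistics properties are observed with KVO, which only fires for members that are both `@objc` and `dynamic`. A self-contained sketch of the observable pattern:

```swift
import Foundation

final class ConnectionStats: NSObject {
    // KVO interposes via the Objective-C runtime, so the observed
    // property must be @objc-exposed and dynamically dispatched.
    @objc dynamic var currentBytesInPerSecond: Int32 = 0
}

let stats = ConnectionStats()
let token = stats.observe(\.currentBytesInPerSecond, options: [.new]) { _, change in
    print("bytes/s:", change.newValue ?? 0)
}
stats.currentBytesInPerSecond = 1_024  // triggers the observer
```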
View File

@@ -103,11 +103,11 @@ extension RTMPMuxer: MP4SamplerDelegate {
}
configs[withID] = config
switch type {
case AVMediaTypeVideo:
case AVMediaType.video:
var buffer:Data = Data([FLVFrameType.key.rawValue << 4 | FLVVideoCodec.avc.rawValue, FLVAVCPacketType.seq.rawValue, 0, 0, 0])
buffer.append(config)
delegate?.sampleOutput(video: buffer, withTimestamp: 0, muxer: self)
case AVMediaTypeAudio:
case AVMediaType.audio:
if (withID != 1) {
break
}

View File

@@ -216,7 +216,7 @@ open class RTMPSharedObject: EventDispatcher {
)
}
func rtmpStatusHandler(_ notification:Notification) {
@objc func rtmpStatusHandler(_ notification:Notification) {
let e:Event = Event.from(notification)
if let data:ASObject = e.data as? ASObject, let code:String = data["code"] as? String {
switch code {

View File

@@ -222,7 +222,7 @@ open class RTMPStream: NetStream {
open var qosDelegate:RTMPStreamQoSDelegate? = nil
open internal(set) var info:RTMPStreamInfo = RTMPStreamInfo()
open fileprivate(set) var objectEncoding:UInt8 = RTMPConnection.defaultObjectEncoding
open fileprivate(set) dynamic var currentFPS:UInt16 = 0
@objc open fileprivate(set) dynamic var currentFPS:UInt16 = 0
open var soundTransform:SoundTransform {
get { return mixer.audioIO.playback.soundTransform }
set { mixer.audioIO.playback.soundTransform = newValue }

View File

@@ -6,12 +6,12 @@ public final class DeviceUtil {
private init() {
}
static public func device(withPosition:AVCaptureDevicePosition) -> AVCaptureDevice? {
static public func device(withPosition:AVCaptureDevice.Position) -> AVCaptureDevice? {
for device in AVCaptureDevice.devices() {
guard let device:AVCaptureDevice = device as? AVCaptureDevice else {
continue
}
if (device.hasMediaType(AVMediaTypeVideo) && device.position == withPosition) {
if (device.hasMediaType(AVMediaType.video) && device.position == withPosition) {
return device
}
}
@@ -23,7 +23,7 @@ public final class DeviceUtil {
guard let device:AVCaptureDevice = device as? AVCaptureDevice else {
continue
}
if (device.hasMediaType(mediaType) && device.localizedName == withLocalizedName) {
if (device.hasMediaType(AVMediaType(rawValue: mediaType)) && device.localizedName == withLocalizedName) {
return device
}
}
@@ -35,7 +35,7 @@ public final class DeviceUtil {
var frameRates:[Float64] = []
for object:Any in device.activeFormat.videoSupportedFrameRateRanges {
guard let range:AVFrameRateRange = object as? AVFrameRateRange else {
guard let range:AVCaptureDevice.Format.FrameRateRange = object as? AVCaptureDevice.Format.FrameRateRange else {
continue
}
if (range.minFrameRate == range.maxFrameRate) {

View File

@@ -4,9 +4,9 @@ final class VideoGravityUtil {
final class VideoGravityUtil {
@inline(__always) static func calclute(_ videoGravity:String, inRect:inout CGRect, fromRect:inout CGRect) {
switch videoGravity {
case AVLayerVideoGravityResizeAspect:
case AVLayerVideoGravity.resizeAspect:
resizeAspect(&inRect, fromRect: &fromRect)
case AVLayerVideoGravityResizeAspectFill:
case AVLayerVideoGravity.resizeAspectFill:
resizeAspectFill(&inRect, fromRect: &fromRect)
default:
break
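Migration note: one subtlety with these struct wrappers is pattern matching. When the subject of a `switch` stays a plain `String`, as `videoGravity` does in this method's signature, the cases have to be `String` too, so the comparison goes through `rawValue`. A hedged sketch of the String-keyed variant, not taken from the commit:

```swift
import AVFoundation

func resize(_ videoGravity: String) {
    switch videoGravity {
    // Compare String against String via the wrapper's rawValue.
    case AVLayerVideoGravity.resizeAspect.rawValue:
        break // resizeAspect(...)
    case AVLayerVideoGravity.resizeAspectFill.rawValue:
        break // resizeAspectFill(...)
    default:
        break
    }
}
```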