* refactoring

Adam Nemecek 2018-03-11 21:29:48 -07:00
parent dadaf847d4
commit 1fb6927350
12 changed files with 19 additions and 25 deletions

View File

@@ -8,6 +8,12 @@ public protocol ScreenCaptureOutputPixelBufferDelegate: class {
     func output(pixelBuffer: CVPixelBuffer, withPresentationTime: CMTime)
 }
 
+extension CGRect {
+    init(size: CGSize) {
+        self.init(origin: .zero, size: size)
+    }
+}
+
 // MARK: -
 open class ScreenCaptureSession: NSObject {
     static let defaultFrameInterval: Int = 2
@@ -33,13 +39,13 @@ open class ScreenCaptureSession: NSObject {
     public var afterScreenUpdates: Bool = false
     private var context: CIContext = CIContext(options: [kCIContextUseSoftwareRenderer: NSNumber(value: false)])
     private let semaphore: DispatchSemaphore = DispatchSemaphore(value: 1)
-    private let lockQueue: DispatchQueue = DispatchQueue(
-        label: "com.haishinkit.HaishinKit.ScreenCaptureSession.lock", qos: DispatchQoS.userInteractive, attributes: []
+    private let lockQueue = DispatchQueue(
+        label: "com.haishinkit.HaishinKit.ScreenCaptureSession.lock", qos: .userInteractive, attributes: []
     )
     private var colorSpace: CGColorSpace!
     private var displayLink: CADisplayLink!
-    private var size: CGSize = CGSize() {
+    private var size: CGSize = .zero {
         didSet {
             guard size != oldValue else {
                 return
             }
@@ -143,7 +149,7 @@ extension ScreenCaptureSession: Running {
         self.running = true
         self.pixelBufferPool = nil
         self.colorSpace = CGColorSpaceCreateDeviceRGB()
-        self.displayLink = CADisplayLink(target: self, selector: #selector(ScreenCaptureSession.onScreen(_:)))
+        self.displayLink = CADisplayLink(target: self, selector: #selector(onScreen))
         self.displayLink.frameInterval = self.frameInterval
         self.displayLink.add(to: .main, forMode: .commonModes)
     }
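
The edits in this file are inference-driven cleanups: `.zero` and `.userInteractive` replace fully spelled-out values, and the short `#selector(onScreen)` form resolves because the method name is unambiguous on the target. The new `CGRect(size:)` convenience pins the origin at `.zero`, so a full-frame rect can be built from a size alone. A minimal usage sketch (the size value is made up for illustration):

    import CoreGraphics

    extension CGRect {
        init(size: CGSize) {
            self.init(origin: .zero, size: size)
        }
    }

    // A full-frame rect from a size alone; origin is fixed at .zero.
    let frame = CGRect(size: CGSize(width: 1920, height: 1080))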

View File

@@ -97,7 +97,7 @@ final class H264Decoder {
         guard let session: VTDecompressionSession = session else {
             return kVTInvalidSessionErr
         }
-        var flagsOut: VTDecodeInfoFlags = VTDecodeInfoFlags()
+        var flagsOut: VTDecodeInfoFlags = []
         let decodeFlags: VTDecodeFrameFlags = [._EnableAsynchronousDecompression,
             ._EnableTemporalProcessing]
         return VTDecompressionSessionDecodeFrame(session, sampleBuffer, decodeFlags, nil, &flagsOut)
@@ -141,9 +141,9 @@ final class H264Decoder {
             delegate?.sampleOutput(video: buffer)
         } else {
             buffers.append(buffer)
-            buffers.sort(by: { (lhs: CMSampleBuffer, rhs: CMSampleBuffer) -> Bool in
-                return lhs.presentationTimeStamp < rhs.presentationTimeStamp
-            })
+            buffers.sort {
+                $0.presentationTimeStamp < $1.presentationTimeStamp
+            }
             if minimumGroupOfPictures <= buffers.count {
                 delegate?.sampleOutput(video: buffers.removeFirst())
             }
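
Both hunks above are behavior-preserving rewrites: `VTDecodeInfoFlags` is an `OptionSet`, so the empty literal `[]` builds the same empty flag set as `VTDecodeInfoFlags()`, and the sort now uses a trailing closure with `$0`/`$1` shorthand in place of a fully typed closure. A standalone sketch of the sort pattern (the `Frame` struct is hypothetical; `CMTime` is `Comparable` via the CoreMedia Swift overlay):

    import CoreMedia

    struct Frame {
        let presentationTimeStamp: CMTime
    }

    var frames = [
        Frame(presentationTimeStamp: CMTime(value: 2, timescale: 30)),
        Frame(presentationTimeStamp: CMTime(value: 1, timescale: 30))
    ]

    // Trailing closure + shorthand arguments; compiles to the same
    // comparison as the explicit (lhs, rhs) -> Bool form it replaces.
    frames.sort { $0.presentationTimeStamp < $1.presentationTimeStamp }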

View File

@@ -247,7 +247,7 @@ final class H264Encoder: NSObject {
         guard let session: VTCompressionSession = session else {
             return
         }
-        var flags: VTEncodeInfoFlags = VTEncodeInfoFlags()
+        var flags: VTEncodeInfoFlags = []
         VTCompressionSessionEncodeFrame(
             session,
             muted ? lastImageBuffer ?? imageBuffer : imageBuffer,
@@ -304,13 +304,13 @@ extension H264Encoder: Running {
         #if os(iOS)
         NotificationCenter.default.addObserver(
             self,
-            selector: #selector(self.didAudioSessionInterruption(_:)),
+            selector: #selector(self.didAudioSessionInterruption),
             name: .AVAudioSessionInterruption,
             object: nil
         )
         NotificationCenter.default.addObserver(
             self,
-            selector: #selector(self.applicationWillEnterForeground(_:)),
+            selector: #selector(self.applicationWillEnterForeground),
             name: .UIApplicationWillEnterForeground,
             object: nil
         )
@@ -324,8 +324,7 @@ extension H264Encoder: Running {
         self.lastImageBuffer = nil
         self.formatDescription = nil
         #if os(iOS)
-        NotificationCenter.default.removeObserver(self, name: .AVAudioSessionInterruption, object: nil)
-        NotificationCenter.default.removeObserver(self, name: .UIApplicationWillEnterForeground, object: nil)
+        NotificationCenter.default.removeObserver(self)
         #endif
         self.running = false
     }
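
Collapsing the two removeObserver calls into one slightly widens the scope: `removeObserver(self)` unregisters every notification observation held by `self`, not just the two named ones, which is fine as long as these are the only observations the encoder adds. A minimal sketch of the difference (the `didTick` name is hypothetical):

    import Foundation

    final class Listener: NSObject {
        static let didTick = Notification.Name("didTick")

        @objc func onTick(_ notification: Notification) {
            print("tick")
        }

        func start() {
            NotificationCenter.default.addObserver(
                self, selector: #selector(onTick), name: Listener.didTick, object: nil)
        }

        func stop() {
            // Per-name removal drops only the matching registration:
            //   NotificationCenter.default.removeObserver(self, name: Listener.didTick, object: nil)
            // Blanket removal drops every registration for self:
            NotificationCenter.default.removeObserver(self)
        }
    }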

View File

@@ -1,4 +1,3 @@
-import Foundation
 protocol DataConvertible {
     var data: Data { get set }

View File

@@ -1,4 +1,3 @@
-import Foundation
 public enum FLVAACPacketType: UInt8 {
     case seq = 0

View File

@@ -1,4 +1,3 @@
-import Foundation
 public enum FLVAVCPacketType: UInt8 {
     case seq = 0

View File

@@ -1,4 +1,3 @@
-import Foundation
 public enum FLVFrameType: UInt8 {
     case key = 1

View File

@@ -1,5 +1,3 @@
-import Foundation
-
 public enum FLVSoundRate: UInt8 {
     case kHz5_5 = 0
     case kHz11 = 1

View File

@@ -1,5 +1,3 @@
-import Foundation
-
 public enum FLVSoundSize: UInt8 {
     case snd8bit = 0
     case snd16bit = 1

View File

@@ -1,5 +1,3 @@
-import Foundation
-
 public enum FLVSoundType: UInt8 {
     case mono = 0
     case stereo = 1

View File

@@ -1,4 +1,3 @@
-import Foundation
 public enum FLVTagType: UInt8 {
     case audio = 8
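
The repeated `import Foundation` removals across these FLV files are all the same cleanup: a raw-value enum over `UInt8` only touches the Swift standard library, so it compiles with no imports at all. For example, this is a complete, standalone Swift file (using the `FLVSoundType` cases shown above):

    // No import needed: UInt8 and raw-value enums are standard-library features.
    public enum FLVSoundType: UInt8 {
        case mono = 0
        case stereo = 1
    }

    let channels = FLVSoundType(rawValue: 1)  // .stereo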

View File

@@ -307,7 +307,7 @@ final class VideoIOComponent: IOComponent {
         if !effects.isEmpty {
             #if os(macOS)
             // green edge hack for OSX
-            buffer = CVPixelBuffer.create(image)!
+            buffer = CVPixelBuffer(image: image)!
             #endif
             context?.render(image, to: buffer)
         }
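
The last change swaps a static factory for initializer-style syntax. `CVPixelBuffer(image:)` is not a Core Video API, so it is presumably a helper defined elsewhere in HaishinKit; a hypothetical sketch of what such a CIImage-to-pixel-buffer wrapper generally does (the function name and pixel format here are assumptions, not the project's actual code):

    import CoreImage
    import CoreVideo

    // Hypothetical reconstruction for illustration only.
    func makePixelBuffer(from image: CIImage, context: CIContext) -> CVPixelBuffer? {
        var buffer: CVPixelBuffer?
        let status = CVPixelBufferCreate(
            kCFAllocatorDefault,
            Int(image.extent.width),
            Int(image.extent.height),
            kCVPixelFormatType_32BGRA,
            nil,
            &buffer)
        guard status == kCVReturnSuccess, let buffer = buffer else {
            return nil
        }
        // Draw the CIImage into the freshly allocated buffer.
        context.render(image, to: buffer)
        return buffer
    }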