diff --git a/Examples/iOS/Screencast/RTMPBroadcaster.swift b/Examples/iOS/Screencast/RTMPBroadcaster.swift index 9c507a47..9f0a5de9 100644 --- a/Examples/iOS/Screencast/RTMPBroadcaster.swift +++ b/Examples/iOS/Screencast/RTMPBroadcaster.swift @@ -9,7 +9,7 @@ public class RTMPBroadcaster : RTMPConnection { return RTMPStream(connection: self) }() - fileprivate lazy var spliter:SoundSpliter = { + private lazy var spliter:SoundSpliter = { var spliter:SoundSpliter = SoundSpliter() spliter.delegate = self return spliter diff --git a/Platforms/iOS/GLLFView.swift b/Platforms/iOS/GLLFView.swift index 8dad8a8a..88fdad4a 100644 --- a/Platforms/iOS/GLLFView.swift +++ b/Platforms/iOS/GLLFView.swift @@ -14,8 +14,8 @@ open class GLLFView: GLKView { var position:AVCaptureDevice.Position = .back var orientation:AVCaptureVideoOrientation = .portrait - fileprivate var displayImage:CIImage? - fileprivate weak var currentStream:NetStream? { + private var displayImage:CIImage? + private weak var currentStream:NetStream? { didSet { guard let oldValue:NetStream = oldValue else { return diff --git a/Platforms/iOS/ScreenCaptureSession.swift b/Platforms/iOS/ScreenCaptureSession.swift index 9743bebd..ba7730ae 100644 --- a/Platforms/iOS/ScreenCaptureSession.swift +++ b/Platforms/iOS/ScreenCaptureSession.swift @@ -30,18 +30,18 @@ open class ScreenCaptureSession: NSObject { public weak var delegate:ScreenCaptureOutputPixelBufferDelegate? internal(set) var running:Bool = false - fileprivate var shared:UIApplication? - fileprivate var viewToCapture:UIView? + private var shared:UIApplication? + private var viewToCapture:UIView? public var afterScreenUpdates: Bool = false - fileprivate var context:CIContext = CIContext(options: [kCIContextUseSoftwareRenderer: NSNumber(value: false)]) - fileprivate let semaphore:DispatchSemaphore = DispatchSemaphore(value: 1) - fileprivate let lockQueue:DispatchQueue = DispatchQueue( + private var context:CIContext = CIContext(options: [kCIContextUseSoftwareRenderer: NSNumber(value: false)]) + private let semaphore:DispatchSemaphore = DispatchSemaphore(value: 1) + private let lockQueue:DispatchQueue = DispatchQueue( label: "com.haishinkit.HaishinKit.ScreenCaptureSession.lock", qos: DispatchQoS.userInteractive, attributes: [] ) - fileprivate var colorSpace:CGColorSpace! - fileprivate var displayLink:CADisplayLink! + private var colorSpace:CGColorSpace! + private var displayLink:CADisplayLink! - fileprivate var size:CGSize = CGSize() { + private var size:CGSize = CGSize() { didSet { guard size != oldValue else { return @@ -50,12 +50,12 @@ open class ScreenCaptureSession: NSObject { pixelBufferPool = nil } } - fileprivate var scale:CGFloat { + private var scale:CGFloat { return enabledScale ? UIScreen.main.scale : 1.0 } - fileprivate var _pixelBufferPool:CVPixelBufferPool? - fileprivate var pixelBufferPool:CVPixelBufferPool! { + private var _pixelBufferPool:CVPixelBufferPool? + private var pixelBufferPool:CVPixelBufferPool! { get { if (_pixelBufferPool == nil) { var pixelBufferPool:CVPixelBufferPool? 
diff --git a/Platforms/macOS/AudioUtil.swift b/Platforms/macOS/AudioUtil.swift index 82d33088..25284e2a 100644 --- a/Platforms/macOS/AudioUtil.swift +++ b/Platforms/macOS/AudioUtil.swift @@ -4,7 +4,7 @@ import CoreAudio final class AudioUtil { - fileprivate static var defaultDeviceID:AudioObjectID { + private static var defaultDeviceID:AudioObjectID { var deviceID:AudioObjectID = AudioObjectID(0) var size:UInt32 = UInt32(MemoryLayout.size) var address:AudioObjectPropertyAddress = AudioObjectPropertyAddress() @@ -15,7 +15,7 @@ final class AudioUtil { return deviceID } - fileprivate init() { + private init() { } static func setInputGain(_ volume:Float32) -> OSStatus { diff --git a/Platforms/macOS/GLLFView.swift b/Platforms/macOS/GLLFView.swift index 608f0a8b..d839d0d2 100644 --- a/Platforms/macOS/GLLFView.swift +++ b/Platforms/macOS/GLLFView.swift @@ -23,10 +23,10 @@ open class GLLFView: NSOpenGLView { public var videoGravity:AVLayerVideoGravity = .resizeAspect var orientation:AVCaptureVideoOrientation = .portrait var position:AVCaptureDevice.Position = .front - fileprivate var displayImage:CIImage! - fileprivate var originalFrame:CGRect = CGRect.zero - fileprivate var scale:CGRect = CGRect.zero - fileprivate weak var currentStream:NetStream? + private var displayImage:CIImage! + private var originalFrame:CGRect = CGRect.zero + private var scale:CGRect = CGRect.zero + private weak var currentStream:NetStream? open override func prepareOpenGL() { var param:GLint = 1 diff --git a/Platforms/tvOS/GLLFView.swift b/Platforms/tvOS/GLLFView.swift index 56d49bb2..02899449 100644 --- a/Platforms/tvOS/GLLFView.swift +++ b/Platforms/tvOS/GLLFView.swift @@ -9,8 +9,8 @@ open class GLLFView: GLKView { ] open static var defaultBackgroundColor:UIColor = .black open var videoGravity:AVLayerVideoGravity = .resizeAspect - fileprivate var displayImage:CIImage? - fileprivate weak var currentStream:NetStream? { + private var displayImage:CIImage? + private weak var currentStream:NetStream? { didSet { guard let oldValue:NetStream = oldValue else { return diff --git a/Sources/Codec/AACEncoder.swift b/Sources/Codec/AACEncoder.swift index 245936b9..0827c8e9 100644 --- a/Sources/Codec/AACEncoder.swift +++ b/Sources/Codec/AACEncoder.swift @@ -73,10 +73,10 @@ final class AACEncoder: NSObject { var lockQueue:DispatchQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.AACEncoder.lock") weak var delegate:AudioEncoderDelegate? internal(set) var running:Bool = false - fileprivate var maximumBuffers:Int = AACEncoder.defaultMaximumBuffers - fileprivate var bufferListSize:Int = AACEncoder.defaultBufferListSize - fileprivate var currentBufferList:UnsafeMutableAudioBufferListPointer? = nil - fileprivate var inSourceFormat:AudioStreamBasicDescription? { + private var maximumBuffers:Int = AACEncoder.defaultMaximumBuffers + private var bufferListSize:Int = AACEncoder.defaultBufferListSize + private var currentBufferList:UnsafeMutableAudioBufferListPointer? = nil + private var inSourceFormat:AudioStreamBasicDescription? { didSet { logger.info("\(String(describing: self.inSourceFormat))") guard let inSourceFormat:AudioStreamBasicDescription = self.inSourceFormat else { @@ -87,8 +87,8 @@ final class AACEncoder: NSObject { bufferListSize = nonInterleaved ? AudioBufferList.sizeInBytes(maximumBuffers: maximumBuffers) : AACEncoder.defaultBufferListSize } } - fileprivate var _inDestinationFormat:AudioStreamBasicDescription? 
- fileprivate var inDestinationFormat:AudioStreamBasicDescription { + private var _inDestinationFormat:AudioStreamBasicDescription? + private var inDestinationFormat:AudioStreamBasicDescription { get { if (_inDestinationFormat == nil) { _inDestinationFormat = AudioStreamBasicDescription() @@ -113,7 +113,7 @@ final class AACEncoder: NSObject { } } - fileprivate var inputDataProc:AudioConverterComplexInputDataProc = {( + private var inputDataProc:AudioConverterComplexInputDataProc = {( converter:AudioConverterRef, ioNumberDataPackets:UnsafeMutablePointer, ioData:UnsafeMutablePointer, @@ -126,8 +126,8 @@ final class AACEncoder: NSObject { ) } - fileprivate var _converter:AudioConverterRef? - fileprivate var converter:AudioConverterRef { + private var _converter:AudioConverterRef? + private var converter:AudioConverterRef { var status:OSStatus = noErr if (_converter == nil) { var converter:AudioConverterRef? = nil diff --git a/Sources/Codec/H264Encoder.swift b/Sources/Codec/H264Encoder.swift index c3b6c8c0..4b2c5be3 100644 --- a/Sources/Codec/H264Encoder.swift +++ b/Sources/Codec/H264Encoder.swift @@ -167,18 +167,18 @@ final class H264Encoder: NSObject { } weak var delegate:VideoEncoderDelegate? internal(set) var running:Bool = false - fileprivate(set) var status:OSStatus = noErr - fileprivate var attributes:[NSString: AnyObject] { + private(set) var status:OSStatus = noErr + private var attributes:[NSString: AnyObject] { var attributes:[NSString: AnyObject] = H264Encoder.defaultAttributes attributes[kCVPixelBufferWidthKey] = NSNumber(value: width) attributes[kCVPixelBufferHeightKey] = NSNumber(value: height) return attributes } - fileprivate var invalidateSession:Bool = true - fileprivate var lastImageBuffer:CVImageBuffer? = nil; + private var invalidateSession:Bool = true + private var lastImageBuffer:CVImageBuffer? = nil; // @see: https://developer.apple.com/library/mac/releasenotes/General/APIDiffsMacOSX10_8/VideoToolbox.html - fileprivate var properties:[NSString: NSObject] { + private var properties:[NSString: NSObject] { let isBaseline:Bool = profileLevel.contains("Baseline") var properties:[NSString: NSObject] = [ kVTCompressionPropertyKey_RealTime: kCFBooleanTrue, @@ -209,7 +209,7 @@ final class H264Encoder: NSObject { return properties } - fileprivate var callback:VTCompressionOutputCallback = {( + private var callback:VTCompressionOutputCallback = {( outputCallbackRefCon:UnsafeMutableRawPointer?, sourceFrameRefCon:UnsafeMutableRawPointer?, status:OSStatus, @@ -223,8 +223,8 @@ final class H264Encoder: NSObject { encoder.delegate?.sampleOutput(video: sampleBuffer) } - fileprivate var _session:VTCompressionSession? = nil - fileprivate var session:VTCompressionSession? { + private var _session:VTCompressionSession? = nil + private var session:VTCompressionSession? { get { if (_session == nil) { guard VTCompressionSessionCreate( diff --git a/Sources/FLV/FLVReader.swift b/Sources/FLV/FLVReader.swift index 8e89290e..d06ef246 100644 --- a/Sources/FLV/FLVReader.swift +++ b/Sources/FLV/FLVReader.swift @@ -7,8 +7,8 @@ final class FLVReader { private(set) var url:URL private(set) var hasAudio:Bool = false private(set) var hasVideo:Bool = false - fileprivate var currentOffSet:UInt64 = 0 - fileprivate var fileHandle:FileHandle? = nil + private var currentOffSet:UInt64 = 0 + private var fileHandle:FileHandle? 
= nil init(url:URL) { do { diff --git a/Sources/HTTP/HTTPService.swift b/Sources/HTTP/HTTPService.swift index 28c3df7e..2347e7f1 100644 --- a/Sources/HTTP/HTTPService.swift +++ b/Sources/HTTP/HTTPService.swift @@ -245,7 +245,7 @@ open class HTTPService: NetService { } open class HLSService: HTTPService { - fileprivate(set) var streams:[HTTPStream] = [] + private(set) var streams:[HTTPStream] = [] open func addHTTPStream(_ stream:HTTPStream) { for i in 0.. Bool { + private func hasNext() -> Bool { return cursor + 1 < offset.count } - fileprivate func next() { + private func next() { defer { cursor += 1 } diff --git a/Sources/ISO/MP4Sampler.swift b/Sources/ISO/MP4Sampler.swift index 9beb30e6..08e7f5fb 100644 --- a/Sources/ISO/MP4Sampler.swift +++ b/Sources/ISO/MP4Sampler.swift @@ -13,12 +13,12 @@ public class MP4Sampler { weak var delegate:MP4SamplerDelegate? - fileprivate var files:[URL] = [] - fileprivate var handlers:[URL:Handler?] = [:] - fileprivate let lockQueue:DispatchQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.MP4Sampler.lock") - fileprivate let loopQueue:DispatchQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.MP4Sampler.loop") - fileprivate let operations:OperationQueue = OperationQueue() - fileprivate(set) var running:Bool = false + private var files:[URL] = [] + private var handlers:[URL:Handler?] = [:] + private let lockQueue:DispatchQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.MP4Sampler.lock") + private let loopQueue:DispatchQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.MP4Sampler.loop") + private let operations:OperationQueue = OperationQueue() + private(set) var running:Bool = false func appendFile(_ file:URL, completionHandler: Handler? = nil) { lockQueue.async { @@ -27,7 +27,7 @@ public class MP4Sampler { } } - fileprivate func execute(url:URL) { + private func execute(url:URL) { let reader:MP4Reader = MP4Reader(url: url) do { @@ -51,7 +51,7 @@ public class MP4Sampler { reader.close() } - fileprivate func run() { + private func run() { if (files.isEmpty) { return } diff --git a/Sources/ISO/TSReader.swift b/Sources/ISO/TSReader.swift index c0f7d142..aed6ca16 100644 --- a/Sources/ISO/TSReader.swift +++ b/Sources/ISO/TSReader.swift @@ -27,14 +27,14 @@ class TSReader { } } } - fileprivate(set) var numberOfPackets:Int = 0 + private(set) var numberOfPackets:Int = 0 - fileprivate var eof:UInt64 = 0 - fileprivate var cursor:Int = 0 - fileprivate var fileHandle:FileHandle? - fileprivate var dictionaryForPrograms:[UInt16:UInt16] = [:] - fileprivate var dictionaryForESSpecData:[UInt16:ElementaryStreamSpecificData] = [:] - fileprivate var packetizedElementaryStreams:[UInt16:PacketizedElementaryStream] = [:] + private var eof:UInt64 = 0 + private var cursor:Int = 0 + private var fileHandle:FileHandle? 
+ private var dictionaryForPrograms:[UInt16:UInt16] = [:] + private var dictionaryForESSpecData:[UInt16:ElementaryStreamSpecificData] = [:] + private var packetizedElementaryStreams:[UInt16:PacketizedElementaryStream] = [:] init(url:URL) throws { fileHandle = try FileHandle(forReadingFrom: url) diff --git a/Sources/ISO/TSWriter.swift b/Sources/ISO/TSWriter.swift index 022ab9c9..a747d470 100644 --- a/Sources/ISO/TSWriter.swift +++ b/Sources/ISO/TSWriter.swift @@ -27,24 +27,24 @@ class TSWriter { var segmentMaxCount:Int = TSWriter.defaultSegmentMaxCount var segmentDuration:Double = TSWriter.defaultSegmentDuration - fileprivate(set) var PAT:ProgramAssociationSpecific = { + private(set) var PAT:ProgramAssociationSpecific = { let PAT:ProgramAssociationSpecific = ProgramAssociationSpecific() PAT.programs = [1: TSWriter.defaultPMTPID] return PAT }() - fileprivate(set) var PMT:ProgramMapSpecific = ProgramMapSpecific() - fileprivate(set) var files:[M3UMediaInfo] = [] - fileprivate(set) var running:Bool = false - fileprivate var PCRPID:UInt16 = TSWriter.defaultVideoPID - fileprivate var sequence:Int = 0 - fileprivate var timestamps:[UInt16:CMTime] = [:] - fileprivate var audioConfig:AudioSpecificConfig? - fileprivate var videoConfig:AVCConfigurationRecord? - fileprivate var PCRTimestamp:CMTime = kCMTimeZero - fileprivate var currentFileURL:URL? - fileprivate var rotatedTimestamp:CMTime = kCMTimeZero - fileprivate var currentFileHandle:FileHandle? - fileprivate var continuityCounters:[UInt16:UInt8] = [:] + private(set) var PMT:ProgramMapSpecific = ProgramMapSpecific() + private(set) var files:[M3UMediaInfo] = [] + private(set) var running:Bool = false + private var PCRPID:UInt16 = TSWriter.defaultVideoPID + private var sequence:Int = 0 + private var timestamps:[UInt16:CMTime] = [:] + private var audioConfig:AudioSpecificConfig? + private var videoConfig:AVCConfigurationRecord? + private var PCRTimestamp:CMTime = kCMTimeZero + private var currentFileURL:URL? + private var rotatedTimestamp:CMTime = kCMTimeZero + private var currentFileHandle:FileHandle? + private var continuityCounters:[UInt16:UInt8] = [:] func getFilePath(_ fileName:String) -> String? { for info in files { diff --git a/Sources/ISO/TransportStream.swift b/Sources/ISO/TransportStream.swift index 4a918443..deac5949 100644 --- a/Sources/ISO/TransportStream.swift +++ b/Sources/ISO/TransportStream.swift @@ -21,7 +21,7 @@ struct TSPacket { var adaptationField:TSAdaptationField? var payload:Data = Data() - fileprivate var remain:Int { + private var remain:Int { var adaptationFieldSize:Int = 0 if let adaptationField:TSAdaptationField = adaptationField , adaptationFieldFlag { adaptationField.compute() diff --git a/Sources/Media/AVMixer.swift b/Sources/Media/AVMixer.swift index 8aeadf6f..edb7594c 100644 --- a/Sources/Media/AVMixer.swift +++ b/Sources/Media/AVMixer.swift @@ -43,7 +43,7 @@ final public class AVMixer: NSObject { } } - fileprivate var _session:AVCaptureSession? + private var _session:AVCaptureSession? public var session:AVCaptureSession { get { if (_session == nil) { diff --git a/Sources/Media/AVMixerRecorder.swift b/Sources/Media/AVMixerRecorder.swift index ae65f8e9..dd2bd957 100644 --- a/Sources/Media/AVMixerRecorder.swift +++ b/Sources/Media/AVMixerRecorder.swift @@ -34,7 +34,7 @@ open class AVMixerRecorder: NSObject { open var outputSettings:[AVMediaType:[String:Any]] = AVMixerRecorder.defaultOutputSettings open var pixelBufferAdaptor:AVAssetWriterInputPixelBufferAdaptor? 
open let lockQueue:DispatchQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.AVMixerRecorder.lock") - fileprivate(set) var running:Bool = false + private(set) var running:Bool = false fileprivate(set) var sourceTime:CMTime = kCMTimeZero var isReadyForStartWriting:Bool { @@ -152,8 +152,8 @@ open class DefaultAVMixerRecorderDelegate: NSObject { open var duration:Int64 = 0 open var dateFormat:String = "-yyyyMMdd-HHmmss" - fileprivate var rotateTime:CMTime = kCMTimeZero - fileprivate var clockReference:AVMediaType = .video + private var rotateTime:CMTime = kCMTimeZero + private var clockReference:AVMediaType = .video #if os(iOS) open lazy var moviesDirectory:URL = { diff --git a/Sources/Media/AudioStreamPlayback.swift b/Sources/Media/AudioStreamPlayback.swift index c6abf2d2..1bc01148 100644 --- a/Sources/Media/AudioStreamPlayback.swift +++ b/Sources/Media/AudioStreamPlayback.swift @@ -22,7 +22,7 @@ class AudioStreamPlayback { } } - fileprivate(set) var running:Bool = false + private(set) var running:Bool = false var formatDescription:AudioStreamBasicDescription? = nil var fileTypeHint:AudioFileTypeID? = nil { didSet { @@ -41,8 +41,8 @@ class AudioStreamPlayback { } } let lockQueue:DispatchQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.AudioStreamPlayback.lock") - fileprivate var bufferSize:UInt32 = AudioStreamPlayback.defaultBufferSize - fileprivate var queue:AudioQueueRef? = nil { + private var bufferSize:UInt32 = AudioStreamPlayback.defaultBufferSize + private var queue:AudioQueueRef? = nil { didSet { guard let oldValue:AudioQueueRef = oldValue else { return @@ -51,13 +51,13 @@ class AudioStreamPlayback { AudioQueueDispose(oldValue, true) } } - fileprivate var inuse:[Bool] = [] - fileprivate var buffers:[AudioQueueBufferRef] = [] - fileprivate var current:Int = 0 - fileprivate var started:Bool = false - fileprivate var filledBytes:UInt32 = 0 - fileprivate var packetDescriptions:[AudioStreamPacketDescription] = [] - fileprivate var fileStreamID:AudioFileStreamID? = nil { + private var inuse:[Bool] = [] + private var buffers:[AudioQueueBufferRef] = [] + private var current:Int = 0 + private var started:Bool = false + private var filledBytes:UInt32 = 0 + private var packetDescriptions:[AudioStreamPacketDescription] = [] + private var fileStreamID:AudioFileStreamID? 
= nil { didSet { guard let oldValue:AudioFileStreamID = oldValue else { return @@ -65,11 +65,11 @@ class AudioStreamPlayback { AudioFileStreamClose(oldValue) } } - fileprivate var isPacketDescriptionsFull:Bool { + private var isPacketDescriptionsFull:Bool { return packetDescriptions.count == maxPacketDescriptions } - fileprivate var outputCallback:AudioQueueOutputCallback = {( + private var outputCallback:AudioQueueOutputCallback = {( inUserData: UnsafeMutableRawPointer?, inAQ: AudioQueueRef, inBuffer:AudioQueueBufferRef) -> Void in @@ -77,7 +77,7 @@ class AudioStreamPlayback { playback.onOutputForQueue(inAQ, inBuffer) } - fileprivate var packetsProc:AudioFileStream_PacketsProc = {( + private var packetsProc:AudioFileStream_PacketsProc = {( inClientData:UnsafeMutableRawPointer, inNumberBytes:UInt32, inNumberPackets:UInt32, @@ -88,7 +88,7 @@ class AudioStreamPlayback { playback.onAudioPacketsForFileStream(inNumberBytes, inNumberPackets, inInputData, inPacketDescriptions) } - fileprivate var propertyListenerProc:AudioFileStream_PropertyListenerProc = {( + private var propertyListenerProc:AudioFileStream_PropertyListenerProc = {( inClientData:UnsafeMutableRawPointer, inAudioFileStream:AudioFileStreamID, inPropertyID:AudioFileStreamPropertyID, diff --git a/Sources/Media/IOComponent.swift b/Sources/Media/IOComponent.swift index 9dedff28..2e46cc56 100644 --- a/Sources/Media/IOComponent.swift +++ b/Sources/Media/IOComponent.swift @@ -2,7 +2,7 @@ import CoreMedia import Foundation class IOComponent: NSObject { - fileprivate(set) weak var mixer:AVMixer? + private(set) weak var mixer:AVMixer? init(mixer: AVMixer) { self.mixer = mixer diff --git a/Sources/Media/VideoIOComponent.swift b/Sources/Media/VideoIOComponent.swift index 0302cb99..ae7b2c76 100644 --- a/Sources/Media/VideoIOComponent.swift +++ b/Sources/Media/VideoIOComponent.swift @@ -160,7 +160,7 @@ final class VideoIOComponent: IOComponent { } } - fileprivate var _output:AVCaptureVideoDataOutput? = nil + private var _output:AVCaptureVideoDataOutput? = nil var output:AVCaptureVideoDataOutput! { get { if (_output == nil) { diff --git a/Sources/Net/NetService.swift b/Sources/Net/NetService.swift index cfeacfc9..cc9441d7 100644 --- a/Sources/Net/NetService.swift +++ b/Sources/Net/NetService.swift @@ -9,14 +9,14 @@ open class NetService: NSObject { let lockQueue:DispatchQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.NetService.lock") var networkQueue:DispatchQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.NetService.network") - fileprivate(set) var domain:String - fileprivate(set) var name:String - fileprivate(set) var port:Int32 - fileprivate(set) var type:String - fileprivate(set) var running:Bool = false - fileprivate(set) var clients:[NetClient] = [] - fileprivate(set) var service:Foundation.NetService! - fileprivate var runloop:RunLoop! + private(set) var domain:String + private(set) var name:String + private(set) var port:Int32 + private(set) var type:String + private(set) var running:Bool = false + private(set) var clients:[NetClient] = [] + private(set) var service:Foundation.NetService! + private var runloop:RunLoop! 
public init(domain:String, type:String, name:String, port:Int32) { self.domain = domain @@ -53,7 +53,7 @@ open class NetService: NSObject { runloop = nil } - fileprivate func initService() { + private func initService() { runloop = RunLoop.current service = Foundation.NetService(domain: domain, type: type, name: name, port: port) service.delegate = self diff --git a/Sources/Net/NetSocket.swift b/Sources/Net/NetSocket.swift index db2ef69e..ce2958bf 100644 --- a/Sources/Net/NetSocket.swift +++ b/Sources/Net/NetSocket.swift @@ -20,7 +20,7 @@ public class NetSocket: NSObject { private var buffer:UnsafeMutablePointer? = nil private var runloop:RunLoop? private let outputQueue:DispatchQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.NetSocket.output") - fileprivate var timeoutHandler:(() -> Void)? + private var timeoutHandler:(() -> Void)? @discardableResult final public func doOutput(data:Data, locked:UnsafeMutablePointer? = nil) -> Int { @@ -152,7 +152,7 @@ public class NetSocket: NSObject { func didTimeout() { } - fileprivate func doInput() { + private func doInput() { guard let inputStream:InputStream = inputStream, let buffer:UnsafeMutablePointer = buffer else { return } diff --git a/Sources/RTMP/AMF0Serializer.swift b/Sources/RTMP/AMF0Serializer.swift index a5d2f4ed..60df6eef 100644 --- a/Sources/RTMP/AMF0Serializer.swift +++ b/Sources/RTMP/AMF0Serializer.swift @@ -1,7 +1,7 @@ import Foundation class AMFSerializerUtil { - fileprivate static var classes:[String: AnyClass] = [:] + private static var classes:[String: AnyClass] = [:] static func getClassByAlias(_ name:String) -> AnyClass? { objc_sync_enter(classes) @@ -372,7 +372,7 @@ extension AMF0Serializer: AMFSerializer { } @discardableResult - fileprivate func serializeUTF8(_ value:String, _ isLong: Bool) -> Self { + private func serializeUTF8(_ value:String, _ isLong: Bool) -> Self { let utf8:Data = Data(value.utf8) if (isLong) { writeUInt32(UInt32(utf8.count)) @@ -382,7 +382,7 @@ extension AMF0Serializer: AMFSerializer { return writeBytes(utf8) } - fileprivate func deserializeUTF8(_ isLong:Bool) throws -> String { + private func deserializeUTF8(_ isLong:Bool) throws -> String { let length:Int = isLong ? 
Int(try readUInt32()) : Int(try readUInt16()) return try readUTF8Bytes(length) } diff --git a/Sources/RTMP/AMF3Serializer.swift b/Sources/RTMP/AMF3Serializer.swift index 9e530b94..090c2c50 100644 --- a/Sources/RTMP/AMF3Serializer.swift +++ b/Sources/RTMP/AMF3Serializer.swift @@ -500,7 +500,7 @@ extension AMF3Serializer: AMFSerializer { - seealso: 1.3.1 Variable Length Unsigned 29-bit Integer Encoding */ @discardableResult - fileprivate func serializeU29(_ value:Int) -> Self { + private func serializeU29(_ value:Int) -> Self { if (value < Int(Int32.min) || Int(Int32.max) < value) { return serialize(Double(value)) } @@ -523,7 +523,7 @@ extension AMF3Serializer: AMFSerializer { } } - fileprivate func deserializeU29() throws -> Int { + private func deserializeU29() throws -> Int { var count:Int = 1 var result:Int = 0 var byte:UInt8 = try readUInt8() @@ -550,7 +550,7 @@ extension AMF3Serializer: AMFSerializer { - seealso: 1.3.2 Strings and UTF-8 */ @discardableResult - fileprivate func serializeUTF8(_ value:String) -> Self { + private func serializeUTF8(_ value:String) -> Self { if (value.isEmpty) { return serializeU29(0x01) } @@ -562,7 +562,7 @@ extension AMF3Serializer: AMFSerializer { return serializeU29(utf8.count << 1 | 0x01).writeBytes(utf8) } - fileprivate func deserializeUTF8() throws -> String { + private func deserializeUTF8() throws -> String { let ref:Int = try deserializeU29() if (ref & 0x01) == 0 { return try reference.getString(ref >> 1) diff --git a/Sources/RTMP/ASClass.swift b/Sources/RTMP/ASClass.swift index 437b69ea..533477d8 100644 --- a/Sources/RTMP/ASClass.swift +++ b/Sources/RTMP/ASClass.swift @@ -14,8 +14,8 @@ public final class ASUndefined: NSObject { // MARK: - public struct ASArray { - fileprivate(set) var data:[Any?] - fileprivate(set) var dict:[String: Any?] = [:] + private(set) var data:[Any?] + private(set) var dict:[String: Any?] = [:] public var length:Int { return data.count @@ -98,7 +98,7 @@ public final class ASXMLDocument: NSObject { return data } - fileprivate var data:String + private var data:String public init(data:String) { self.data = data @@ -116,7 +116,7 @@ public final class ASXML: NSObject { return data } - fileprivate var data:String + private var data:String public init(data:String) { self.data = data diff --git a/Sources/RTMP/RTMPChunk.swift b/Sources/RTMP/RTMPChunk.swift index 7d266b5e..9e1f66a6 100644 --- a/Sources/RTMP/RTMPChunk.swift +++ b/Sources/RTMP/RTMPChunk.swift @@ -180,9 +180,9 @@ final class RTMPChunk { } } - fileprivate(set) var message:RTMPMessage? - fileprivate(set) var fragmented:Bool = false - fileprivate var _data:Data = Data() + private(set) var message:RTMPMessage? + private(set) var fragmented:Bool = false + private var _data:Data = Data() init(type:RTMPChunkType, streamId:UInt16, message:RTMPMessage) { self.type = type diff --git a/Sources/RTMP/RTMPConnection.swift b/Sources/RTMP/RTMPConnection.swift index d011ab9a..cfbca992 100644 --- a/Sources/RTMP/RTMPConnection.swift +++ b/Sources/RTMP/RTMPConnection.swift @@ -125,7 +125,7 @@ open class RTMPConnection: EventDispatcher { case clientSeek = 1 } - fileprivate static func createSanJoseAuthCommand(_ url:URL, description:String) -> String { + private static func createSanJoseAuthCommand(_ url:URL, description:String) -> String { var command:String = url.absoluteString guard let index:String.CharacterView.Index = description.characters.index(of: "?") else { @@ -164,9 +164,9 @@ open class RTMPConnection: EventDispatcher { /// The outgoing RTMPChunkSize. 
open var chunkSize:Int = RTMPConnection.defaultChunkSizeS /// The URI passed to the RTMPConnection.connect() method. - open fileprivate(set) var uri:URL? = nil + open private(set) var uri:URL? = nil /// This instance connected to server(true) or not(false). - open fileprivate(set) var connected:Bool = false + open private(set) var connected:Bool = false /// The object encoding for this RTMPConnection instance. open var objectEncoding:UInt8 = RTMPConnection.defaultObjectEncoding /// The statistics of total incoming bytes. @@ -182,11 +182,11 @@ open class RTMPConnection: EventDispatcher { return streams.count } /// The statistics of outgoing queue bytes per second. - @objc dynamic open fileprivate(set) var previousQueueBytesOut:[Int64] = [] + @objc dynamic open private(set) var previousQueueBytesOut:[Int64] = [] /// The statistics of incoming bytes per second. - @objc dynamic open fileprivate(set) var currentBytesInPerSecond:Int32 = 0 + @objc dynamic open private(set) var currentBytesInPerSecond:Int32 = 0 /// The statistics of outgoing bytes per second. - @objc dynamic open fileprivate(set) var currentBytesOutPerSecond:Int32 = 0 + @objc dynamic open private(set) var currentBytesOutPerSecond:Int32 = 0 var socket:RTMPSocketCompatible! var streams:[UInt32: RTMPStream] = [:] @@ -209,7 +209,7 @@ open class RTMPConnection: EventDispatcher { var windowSizeS:Int64 = RTMPConnection.defaultWindowSizeS var currentTransactionId:Int = 0 - fileprivate var timer:Timer? { + private var timer:Timer? { didSet { if let oldValue:Timer = oldValue { oldValue.invalidate() @@ -219,13 +219,13 @@ open class RTMPConnection: EventDispatcher { } } } - fileprivate var messages:[UInt16:RTMPMessage] = [:] - fileprivate var arguments:[Any?] = [] - fileprivate var currentChunk:RTMPChunk? = nil - fileprivate var measureInterval:Int = 3 - fileprivate var fragmentedChunks:[UInt16:RTMPChunk] = [:] - fileprivate var previousTotalBytesIn:Int64 = 0 - fileprivate var previousTotalBytesOut:Int64 = 0 + private var messages:[UInt16:RTMPMessage] = [:] + private var arguments:[Any?] = [] + private var currentChunk:RTMPChunk? = nil + private var measureInterval:Int = 3 + private var fragmentedChunks:[UInt16:RTMPChunk] = [:] + private var previousTotalBytesIn:Int64 = 0 + private var previousTotalBytesOut:Int64 = 0 override public init() { super.init() @@ -367,7 +367,7 @@ open class RTMPConnection: EventDispatcher { } } - fileprivate func createConnectionChunk() -> RTMPChunk? { + private func createConnectionChunk() -> RTMPChunk? 
{ guard let uri:URL = uri else { return nil } diff --git a/Sources/RTMP/RTMPMessage.swift b/Sources/RTMP/RTMPMessage.swift index 5444ad45..f46e4d80 100644 --- a/Sources/RTMP/RTMPMessage.swift +++ b/Sources/RTMP/RTMPMessage.swift @@ -325,7 +325,7 @@ final class RTMPCommandMessage: RTMPMessage { } } - fileprivate var serializer:AMFSerializer = AMF0Serializer() + private var serializer:AMFSerializer = AMF0Serializer() init(objectEncoding:UInt8) { self.objectEncoding = objectEncoding @@ -507,7 +507,7 @@ final class RTMPSharedObjectMessage: RTMPMessage { } } - fileprivate var serializer:AMFSerializer = AMF0Serializer() + private var serializer:AMFSerializer = AMF0Serializer() init(objectEncoding:UInt8) { self.objectEncoding = objectEncoding diff --git a/Sources/RTMP/RTMPMuxer.swift b/Sources/RTMP/RTMPMuxer.swift index 61c842f8..8f462e58 100644 --- a/Sources/RTMP/RTMPMuxer.swift +++ b/Sources/RTMP/RTMPMuxer.swift @@ -12,9 +12,9 @@ final class RTMPMuxer { static let aac:UInt8 = FLVAudioCodec.aac.rawValue << 4 | FLVSoundRate.kHz44.rawValue << 2 | FLVSoundSize.snd16bit.rawValue << 1 | FLVSoundType.stereo.rawValue weak var delegate:RTMPMuxerDelegate? = nil - fileprivate var configs:[Int:Data] = [:] - fileprivate var audioTimestamp:CMTime = kCMTimeZero - fileprivate var videoTimestamp:CMTime = kCMTimeZero + private var configs:[Int:Data] = [:] + private var audioTimestamp:CMTime = kCMTimeZero + private var videoTimestamp:CMTime = kCMTimeZero func dispose() { configs.removeAll() diff --git a/Sources/RTMP/RTMPSharedObject.swift b/Sources/RTMP/RTMPSharedObject.swift index 2b1dcc7e..c0b6459c 100644 --- a/Sources/RTMP/RTMPSharedObject.swift +++ b/Sources/RTMP/RTMPSharedObject.swift @@ -82,7 +82,7 @@ extension RTMPSharedObjectEvent: CustomStringConvertible { */ open class RTMPSharedObject: EventDispatcher { - static fileprivate var remoteSharedObjects:[String: RTMPSharedObject] = [:] + static private var remoteSharedObjects:[String: RTMPSharedObject] = [:] static open func getRemote(withName: String, remotePath: String, persistence: Bool) -> RTMPSharedObject { let key:String = remotePath + "/" + withName + "?persistence=" + persistence.description objc_sync_enter(remoteSharedObjects) @@ -99,10 +99,10 @@ open class RTMPSharedObject: EventDispatcher { var persistence:Bool var currentVersion:UInt32 = 0 - open fileprivate(set) var objectEncoding:UInt8 = RTMPConnection.defaultObjectEncoding - open fileprivate(set) var data:[String: Any?] = [:] + open private(set) var objectEncoding:UInt8 = RTMPConnection.defaultObjectEncoding + open private(set) var data:[String: Any?] = [:] - fileprivate var succeeded:Bool = false { + private var succeeded:Bool = false { didSet { guard succeeded else { return @@ -117,7 +117,7 @@ open class RTMPSharedObject: EventDispatcher { return data.description } - fileprivate var rtmpConnection:RTMPConnection? = nil + private var rtmpConnection:RTMPConnection? = nil init(name:String, path:String, persistence:Bool) { self.name = name diff --git a/Sources/RTMP/RTMPStream.swift b/Sources/RTMP/RTMPStream.swift index c74e1b21..4242af63 100644 --- a/Sources/RTMP/RTMPStream.swift +++ b/Sources/RTMP/RTMPStream.swift @@ -221,8 +221,8 @@ open class RTMPStream: NetStream { open static let defaultVideoBitrate:UInt32 = H264Encoder.defaultBitrate weak open var qosDelegate:RTMPStreamQoSDelegate? 
= nil open internal(set) var info:RTMPStreamInfo = RTMPStreamInfo() - open fileprivate(set) var objectEncoding:UInt8 = RTMPConnection.defaultObjectEncoding - @objc open fileprivate(set) dynamic var currentFPS:UInt16 = 0 + open private(set) var objectEncoding:UInt8 = RTMPConnection.defaultObjectEncoding + @objc open private(set) dynamic var currentFPS:UInt16 = 0 open var soundTransform:SoundTransform { get { return mixer.audioIO.playback.soundTransform } set { mixer.audioIO.playback.soundTransform = newValue } @@ -290,15 +290,15 @@ open class RTMPStream: NetStream { var audioTimestamp:Double = 0 var videoTimestamp:Double = 0 - fileprivate(set) var muxer:RTMPMuxer = RTMPMuxer() - fileprivate var paused:Bool = false - fileprivate var sampler:MP4Sampler? = nil - fileprivate var frameCount:UInt16 = 0 - fileprivate var dispatcher:IEventDispatcher! - fileprivate var audioWasSent:Bool = false - fileprivate var videoWasSent:Bool = false - fileprivate var howToPublish:RTMPStream.HowToPublish = .live - fileprivate var rtmpConnection:RTMPConnection + private(set) var muxer:RTMPMuxer = RTMPMuxer() + private var paused:Bool = false + private var sampler:MP4Sampler? = nil + private var frameCount:UInt16 = 0 + private var dispatcher:IEventDispatcher! + private var audioWasSent:Bool = false + private var videoWasSent:Bool = false + private var howToPublish:RTMPStream.HowToPublish = .live + private var rtmpConnection:RTMPConnection public init(connection: RTMPConnection) { self.rtmpConnection = connection diff --git a/Sources/RTMP/RTMPTSocket.swift b/Sources/RTMP/RTMPTSocket.swift index 5b36bf17..aefadece 100644 --- a/Sources/RTMP/RTMPTSocket.swift +++ b/Sources/RTMP/RTMPTSocket.swift @@ -36,10 +36,10 @@ final class RTMPTSocket: NSObject, RTMPSocketCompatible { } } - fileprivate(set) var totalBytesIn:Int64 = 0 - fileprivate(set) var totalBytesOut:Int64 = 0 - fileprivate(set) var queueBytesOut:Int64 = 0 - fileprivate var timer:Timer? { + private(set) var totalBytesIn:Int64 = 0 + private(set) var totalBytesOut:Int64 = 0 + private(set) var queueBytesOut:Int64 = 0 + private var timer:Timer? { didSet { if let oldValue:Timer = oldValue { oldValue.invalidate() diff --git a/Sources/Util/ByteArray.swift b/Sources/Util/ByteArray.swift index 79a719b5..e607d861 100644 --- a/Sources/Util/ByteArray.swift +++ b/Sources/Util/ByteArray.swift @@ -83,7 +83,7 @@ open class ByteArray: ByteArrayConvertible { self.data = data } - fileprivate(set) var data:Data = Data() + private(set) var data:Data = Data() open var length:Int { get { diff --git a/Sources/Util/ClockedQueue.swift b/Sources/Util/ClockedQueue.swift index 1ab57476..33f35a55 100644 --- a/Sources/Util/ClockedQueue.swift +++ b/Sources/Util/ClockedQueue.swift @@ -8,17 +8,17 @@ protocol ClockedQueueDelegate:class { // MARK: - final class ClockedQueue { var bufferTime:TimeInterval = 0.1 // sec - fileprivate(set) var duration:TimeInterval = 0 + private(set) var duration:TimeInterval = 0 weak var delegate:ClockedQueueDelegate? 
- fileprivate var isReady:Bool = false - fileprivate var buffers:[CMSampleBuffer] = [] - fileprivate lazy var driver:TimerDriver = { + private var isReady:Bool = false + private var buffers:[CMSampleBuffer] = [] + private lazy var driver:TimerDriver = { var driver:TimerDriver = TimerDriver() driver.setDelegate(self, withQueue: self.lockQueue) return driver }() - fileprivate let lockQueue:DispatchQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.ClockedQueue.lock") + private let lockQueue:DispatchQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.ClockedQueue.lock") func enqueue(_ buffer:CMSampleBuffer) { lockQueue.async { diff --git a/Sources/Util/EventDispatcher.swift b/Sources/Util/EventDispatcher.swift index 1e0baa65..89ca5558 100644 --- a/Sources/Util/EventDispatcher.swift +++ b/Sources/Util/EventDispatcher.swift @@ -58,7 +58,7 @@ open class Event: NSObject { */ open class EventDispatcher: NSObject, IEventDispatcher { - fileprivate weak var target:AnyObject? = nil + private weak var target:AnyObject? = nil override public init() { super.init() diff --git a/Sources/Util/TimerDriver.swift b/Sources/Util/TimerDriver.swift index ca95b345..8f0d2712 100644 --- a/Sources/Util/TimerDriver.swift +++ b/Sources/Util/TimerDriver.swift @@ -11,9 +11,9 @@ public class TimerDriver: NSObject { var queue:DispatchQueue? weak var delegate:TimerDriverDelegate? - fileprivate var runloop:RunLoop? - fileprivate var nextFire:UInt64 = 0 - fileprivate weak var timer:Timer? { + private var runloop:RunLoop? + private var nextFire:UInt64 = 0 + private weak var timer:Timer? { didSet { if let oldValue:Timer = oldValue { oldValue.invalidate()
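The changes above are a mechanical access-control sweep: members and helpers that were fileprivate in the Swift 3 sources become private, and fileprivate(set) becomes private(set). A minimal sketch of the Swift 4 rule that makes this safe follows; the Counter type and its members are illustrative, not HaishinKit API. Under SE-0169, a private member declared on a type is also visible from extensions of that type in the same file, so the fileprivate workaround from Swift 3 is no longer needed for the common pattern of keeping state in the type and adding protocol conformances in same-file extensions.

import Foundation

final class Counter {
    private var count: Int = 0          // visible to the same-file extension below (SE-0169, Swift 4)
    private(set) var lastReset: Date?   // readable at the property's own access level, settable only
                                        // within Counter and its same-file extensions
}

extension Counter {
    func increment() {
        count += 1                      // legal in Swift 4+; Swift 3 would have required fileprivate
    }

    func reset() {
        count = 0
        lastReset = Date()              // the private(set) setter is reachable here for the same reason
    }
}

Note that narrowing fileprivate(set) to private(set) only restricts the setter; the getter keeps the access level written on the property (internal by default, or open in declarations such as open private(set) var uri above), so callers outside the file see no difference.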