Change declarations from fileprivate to private

This commit is contained in:
shogo4405 2017-11-06 00:02:20 +09:00
parent 6320e9a530
commit ae592c65fd
36 changed files with 176 additions and 176 deletions

View File

@ -9,7 +9,7 @@ public class RTMPBroadcaster : RTMPConnection {
return RTMPStream(connection: self)
}()
fileprivate lazy var spliter:SoundSpliter = {
private lazy var spliter:SoundSpliter = {
var spliter:SoundSpliter = SoundSpliter()
spliter.delegate = self
return spliter

View File

@ -14,8 +14,8 @@ open class GLLFView: GLKView {
var position:AVCaptureDevice.Position = .back
var orientation:AVCaptureVideoOrientation = .portrait
fileprivate var displayImage:CIImage?
fileprivate weak var currentStream:NetStream? {
private var displayImage:CIImage?
private weak var currentStream:NetStream? {
didSet {
guard let oldValue:NetStream = oldValue else {
return

View File

@ -30,18 +30,18 @@ open class ScreenCaptureSession: NSObject {
public weak var delegate:ScreenCaptureOutputPixelBufferDelegate?
internal(set) var running:Bool = false
fileprivate var shared:UIApplication?
fileprivate var viewToCapture:UIView?
private var shared:UIApplication?
private var viewToCapture:UIView?
public var afterScreenUpdates: Bool = false
fileprivate var context:CIContext = CIContext(options: [kCIContextUseSoftwareRenderer: NSNumber(value: false)])
fileprivate let semaphore:DispatchSemaphore = DispatchSemaphore(value: 1)
fileprivate let lockQueue:DispatchQueue = DispatchQueue(
private var context:CIContext = CIContext(options: [kCIContextUseSoftwareRenderer: NSNumber(value: false)])
private let semaphore:DispatchSemaphore = DispatchSemaphore(value: 1)
private let lockQueue:DispatchQueue = DispatchQueue(
label: "com.haishinkit.HaishinKit.ScreenCaptureSession.lock", qos: DispatchQoS.userInteractive, attributes: []
)
fileprivate var colorSpace:CGColorSpace!
fileprivate var displayLink:CADisplayLink!
private var colorSpace:CGColorSpace!
private var displayLink:CADisplayLink!
fileprivate var size:CGSize = CGSize() {
private var size:CGSize = CGSize() {
didSet {
guard size != oldValue else {
return
@ -50,12 +50,12 @@ open class ScreenCaptureSession: NSObject {
pixelBufferPool = nil
}
}
fileprivate var scale:CGFloat {
private var scale:CGFloat {
return enabledScale ? UIScreen.main.scale : 1.0
}
fileprivate var _pixelBufferPool:CVPixelBufferPool?
fileprivate var pixelBufferPool:CVPixelBufferPool! {
private var _pixelBufferPool:CVPixelBufferPool?
private var pixelBufferPool:CVPixelBufferPool! {
get {
if (_pixelBufferPool == nil) {
var pixelBufferPool:CVPixelBufferPool?

View File

@ -4,7 +4,7 @@ import CoreAudio
final class AudioUtil {
fileprivate static var defaultDeviceID:AudioObjectID {
private static var defaultDeviceID:AudioObjectID {
var deviceID:AudioObjectID = AudioObjectID(0)
var size:UInt32 = UInt32(MemoryLayout<AudioObjectID>.size)
var address:AudioObjectPropertyAddress = AudioObjectPropertyAddress()
@ -15,7 +15,7 @@ final class AudioUtil {
return deviceID
}
fileprivate init() {
private init() {
}
static func setInputGain(_ volume:Float32) -> OSStatus {

View File

@ -23,10 +23,10 @@ open class GLLFView: NSOpenGLView {
public var videoGravity:AVLayerVideoGravity = .resizeAspect
var orientation:AVCaptureVideoOrientation = .portrait
var position:AVCaptureDevice.Position = .front
fileprivate var displayImage:CIImage!
fileprivate var originalFrame:CGRect = CGRect.zero
fileprivate var scale:CGRect = CGRect.zero
fileprivate weak var currentStream:NetStream?
private var displayImage:CIImage!
private var originalFrame:CGRect = CGRect.zero
private var scale:CGRect = CGRect.zero
private weak var currentStream:NetStream?
open override func prepareOpenGL() {
var param:GLint = 1

View File

@ -9,8 +9,8 @@ open class GLLFView: GLKView {
]
open static var defaultBackgroundColor:UIColor = .black
open var videoGravity:AVLayerVideoGravity = .resizeAspect
fileprivate var displayImage:CIImage?
fileprivate weak var currentStream:NetStream? {
private var displayImage:CIImage?
private weak var currentStream:NetStream? {
didSet {
guard let oldValue:NetStream = oldValue else {
return

View File

@ -73,10 +73,10 @@ final class AACEncoder: NSObject {
var lockQueue:DispatchQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.AACEncoder.lock")
weak var delegate:AudioEncoderDelegate?
internal(set) var running:Bool = false
fileprivate var maximumBuffers:Int = AACEncoder.defaultMaximumBuffers
fileprivate var bufferListSize:Int = AACEncoder.defaultBufferListSize
fileprivate var currentBufferList:UnsafeMutableAudioBufferListPointer? = nil
fileprivate var inSourceFormat:AudioStreamBasicDescription? {
private var maximumBuffers:Int = AACEncoder.defaultMaximumBuffers
private var bufferListSize:Int = AACEncoder.defaultBufferListSize
private var currentBufferList:UnsafeMutableAudioBufferListPointer? = nil
private var inSourceFormat:AudioStreamBasicDescription? {
didSet {
logger.info("\(String(describing: self.inSourceFormat))")
guard let inSourceFormat:AudioStreamBasicDescription = self.inSourceFormat else {
@ -87,8 +87,8 @@ final class AACEncoder: NSObject {
bufferListSize = nonInterleaved ? AudioBufferList.sizeInBytes(maximumBuffers: maximumBuffers) : AACEncoder.defaultBufferListSize
}
}
fileprivate var _inDestinationFormat:AudioStreamBasicDescription?
fileprivate var inDestinationFormat:AudioStreamBasicDescription {
private var _inDestinationFormat:AudioStreamBasicDescription?
private var inDestinationFormat:AudioStreamBasicDescription {
get {
if (_inDestinationFormat == nil) {
_inDestinationFormat = AudioStreamBasicDescription()
@ -113,7 +113,7 @@ final class AACEncoder: NSObject {
}
}
fileprivate var inputDataProc:AudioConverterComplexInputDataProc = {(
private var inputDataProc:AudioConverterComplexInputDataProc = {(
converter:AudioConverterRef,
ioNumberDataPackets:UnsafeMutablePointer<UInt32>,
ioData:UnsafeMutablePointer<AudioBufferList>,
@ -126,8 +126,8 @@ final class AACEncoder: NSObject {
)
}
fileprivate var _converter:AudioConverterRef?
fileprivate var converter:AudioConverterRef {
private var _converter:AudioConverterRef?
private var converter:AudioConverterRef {
var status:OSStatus = noErr
if (_converter == nil) {
var converter:AudioConverterRef? = nil

View File

@ -167,18 +167,18 @@ final class H264Encoder: NSObject {
}
weak var delegate:VideoEncoderDelegate?
internal(set) var running:Bool = false
fileprivate(set) var status:OSStatus = noErr
fileprivate var attributes:[NSString: AnyObject] {
private(set) var status:OSStatus = noErr
private var attributes:[NSString: AnyObject] {
var attributes:[NSString: AnyObject] = H264Encoder.defaultAttributes
attributes[kCVPixelBufferWidthKey] = NSNumber(value: width)
attributes[kCVPixelBufferHeightKey] = NSNumber(value: height)
return attributes
}
fileprivate var invalidateSession:Bool = true
fileprivate var lastImageBuffer:CVImageBuffer? = nil;
private var invalidateSession:Bool = true
private var lastImageBuffer:CVImageBuffer? = nil;
// @see: https://developer.apple.com/library/mac/releasenotes/General/APIDiffsMacOSX10_8/VideoToolbox.html
fileprivate var properties:[NSString: NSObject] {
private var properties:[NSString: NSObject] {
let isBaseline:Bool = profileLevel.contains("Baseline")
var properties:[NSString: NSObject] = [
kVTCompressionPropertyKey_RealTime: kCFBooleanTrue,
@ -209,7 +209,7 @@ final class H264Encoder: NSObject {
return properties
}
fileprivate var callback:VTCompressionOutputCallback = {(
private var callback:VTCompressionOutputCallback = {(
outputCallbackRefCon:UnsafeMutableRawPointer?,
sourceFrameRefCon:UnsafeMutableRawPointer?,
status:OSStatus,
@ -223,8 +223,8 @@ final class H264Encoder: NSObject {
encoder.delegate?.sampleOutput(video: sampleBuffer)
}
fileprivate var _session:VTCompressionSession? = nil
fileprivate var session:VTCompressionSession? {
private var _session:VTCompressionSession? = nil
private var session:VTCompressionSession? {
get {
if (_session == nil) {
guard VTCompressionSessionCreate(

View File

@ -7,8 +7,8 @@ final class FLVReader {
private(set) var url:URL
private(set) var hasAudio:Bool = false
private(set) var hasVideo:Bool = false
fileprivate var currentOffSet:UInt64 = 0
fileprivate var fileHandle:FileHandle? = nil
private var currentOffSet:UInt64 = 0
private var fileHandle:FileHandle? = nil
init(url:URL) {
do {

View File

@ -245,7 +245,7 @@ open class HTTPService: NetService {
}
open class HLSService: HTTPService {
fileprivate(set) var streams:[HTTPStream] = []
private(set) var streams:[HTTPStream] = []
open func addHTTPStream(_ stream:HTTPStream) {
for i in 0..<streams.count {

View File

@ -580,26 +580,26 @@ final class MP4TrakReader {
var bufferTime:Double = MP4TrakReader.defaultBufferTime
weak var delegate:MP4SamplerDelegate?
fileprivate var id:Int = 0
fileprivate var handle:FileHandle?
private var id:Int = 0
private var handle:FileHandle?
private lazy var timerDriver:TimerDriver = {
var timerDriver:TimerDriver = TimerDriver()
timerDriver.delegate = self
return timerDriver
}()
fileprivate var currentOffset:UInt64 {
private var currentOffset:UInt64 {
return UInt64(offset[cursor])
}
fileprivate var currentIsKeyframe:Bool {
private var currentIsKeyframe:Bool {
return keyframe[cursor] != nil
}
fileprivate var currentDuration:Double {
private var currentDuration:Double {
return Double(totalTimeToSample) * 1000 / Double(timeScale)
}
fileprivate var currentTimeToSample:Double {
private var currentTimeToSample:Double {
return Double(timeToSample[cursor]) * 1000 / Double(timeScale)
}
fileprivate var currentSampleSize:Int {
private var currentSampleSize:Int {
return Int((sampleSize.count == 1) ? sampleSize[0] : sampleSize[cursor])
}
private var cursor:Int = 0
@ -685,11 +685,11 @@ final class MP4TrakReader {
}
}
fileprivate func hasNext() -> Bool {
private func hasNext() -> Bool {
return cursor + 1 < offset.count
}
fileprivate func next() {
private func next() {
defer {
cursor += 1
}

View File

@ -13,12 +13,12 @@ public class MP4Sampler {
weak var delegate:MP4SamplerDelegate?
fileprivate var files:[URL] = []
fileprivate var handlers:[URL:Handler?] = [:]
fileprivate let lockQueue:DispatchQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.MP4Sampler.lock")
fileprivate let loopQueue:DispatchQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.MP4Sampler.loop")
fileprivate let operations:OperationQueue = OperationQueue()
fileprivate(set) var running:Bool = false
private var files:[URL] = []
private var handlers:[URL:Handler?] = [:]
private let lockQueue:DispatchQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.MP4Sampler.lock")
private let loopQueue:DispatchQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.MP4Sampler.loop")
private let operations:OperationQueue = OperationQueue()
private(set) var running:Bool = false
func appendFile(_ file:URL, completionHandler: Handler? = nil) {
lockQueue.async {
@ -27,7 +27,7 @@ public class MP4Sampler {
}
}
fileprivate func execute(url:URL) {
private func execute(url:URL) {
let reader:MP4Reader = MP4Reader(url: url)
do {
@ -51,7 +51,7 @@ public class MP4Sampler {
reader.close()
}
fileprivate func run() {
private func run() {
if (files.isEmpty) {
return
}

View File

@ -27,14 +27,14 @@ class TSReader {
}
}
}
fileprivate(set) var numberOfPackets:Int = 0
private(set) var numberOfPackets:Int = 0
fileprivate var eof:UInt64 = 0
fileprivate var cursor:Int = 0
fileprivate var fileHandle:FileHandle?
fileprivate var dictionaryForPrograms:[UInt16:UInt16] = [:]
fileprivate var dictionaryForESSpecData:[UInt16:ElementaryStreamSpecificData] = [:]
fileprivate var packetizedElementaryStreams:[UInt16:PacketizedElementaryStream] = [:]
private var eof:UInt64 = 0
private var cursor:Int = 0
private var fileHandle:FileHandle?
private var dictionaryForPrograms:[UInt16:UInt16] = [:]
private var dictionaryForESSpecData:[UInt16:ElementaryStreamSpecificData] = [:]
private var packetizedElementaryStreams:[UInt16:PacketizedElementaryStream] = [:]
init(url:URL) throws {
fileHandle = try FileHandle(forReadingFrom: url)

View File

@ -27,24 +27,24 @@ class TSWriter {
var segmentMaxCount:Int = TSWriter.defaultSegmentMaxCount
var segmentDuration:Double = TSWriter.defaultSegmentDuration
fileprivate(set) var PAT:ProgramAssociationSpecific = {
private(set) var PAT:ProgramAssociationSpecific = {
let PAT:ProgramAssociationSpecific = ProgramAssociationSpecific()
PAT.programs = [1: TSWriter.defaultPMTPID]
return PAT
}()
fileprivate(set) var PMT:ProgramMapSpecific = ProgramMapSpecific()
fileprivate(set) var files:[M3UMediaInfo] = []
fileprivate(set) var running:Bool = false
fileprivate var PCRPID:UInt16 = TSWriter.defaultVideoPID
fileprivate var sequence:Int = 0
fileprivate var timestamps:[UInt16:CMTime] = [:]
fileprivate var audioConfig:AudioSpecificConfig?
fileprivate var videoConfig:AVCConfigurationRecord?
fileprivate var PCRTimestamp:CMTime = kCMTimeZero
fileprivate var currentFileURL:URL?
fileprivate var rotatedTimestamp:CMTime = kCMTimeZero
fileprivate var currentFileHandle:FileHandle?
fileprivate var continuityCounters:[UInt16:UInt8] = [:]
private(set) var PMT:ProgramMapSpecific = ProgramMapSpecific()
private(set) var files:[M3UMediaInfo] = []
private(set) var running:Bool = false
private var PCRPID:UInt16 = TSWriter.defaultVideoPID
private var sequence:Int = 0
private var timestamps:[UInt16:CMTime] = [:]
private var audioConfig:AudioSpecificConfig?
private var videoConfig:AVCConfigurationRecord?
private var PCRTimestamp:CMTime = kCMTimeZero
private var currentFileURL:URL?
private var rotatedTimestamp:CMTime = kCMTimeZero
private var currentFileHandle:FileHandle?
private var continuityCounters:[UInt16:UInt8] = [:]
func getFilePath(_ fileName:String) -> String? {
for info in files {

View File

@ -21,7 +21,7 @@ struct TSPacket {
var adaptationField:TSAdaptationField?
var payload:Data = Data()
fileprivate var remain:Int {
private var remain:Int {
var adaptationFieldSize:Int = 0
if let adaptationField:TSAdaptationField = adaptationField , adaptationFieldFlag {
adaptationField.compute()

View File

@ -43,7 +43,7 @@ final public class AVMixer: NSObject {
}
}
fileprivate var _session:AVCaptureSession?
private var _session:AVCaptureSession?
public var session:AVCaptureSession {
get {
if (_session == nil) {

View File

@ -34,7 +34,7 @@ open class AVMixerRecorder: NSObject {
open var outputSettings:[AVMediaType:[String:Any]] = AVMixerRecorder.defaultOutputSettings
open var pixelBufferAdaptor:AVAssetWriterInputPixelBufferAdaptor?
open let lockQueue:DispatchQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.AVMixerRecorder.lock")
fileprivate(set) var running:Bool = false
private(set) var running:Bool = false
fileprivate(set) var sourceTime:CMTime = kCMTimeZero
var isReadyForStartWriting:Bool {
@ -152,8 +152,8 @@ open class DefaultAVMixerRecorderDelegate: NSObject {
open var duration:Int64 = 0
open var dateFormat:String = "-yyyyMMdd-HHmmss"
fileprivate var rotateTime:CMTime = kCMTimeZero
fileprivate var clockReference:AVMediaType = .video
private var rotateTime:CMTime = kCMTimeZero
private var clockReference:AVMediaType = .video
#if os(iOS)
open lazy var moviesDirectory:URL = {

View File

@ -22,7 +22,7 @@ class AudioStreamPlayback {
}
}
fileprivate(set) var running:Bool = false
private(set) var running:Bool = false
var formatDescription:AudioStreamBasicDescription? = nil
var fileTypeHint:AudioFileTypeID? = nil {
didSet {
@ -41,8 +41,8 @@ class AudioStreamPlayback {
}
}
let lockQueue:DispatchQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.AudioStreamPlayback.lock")
fileprivate var bufferSize:UInt32 = AudioStreamPlayback.defaultBufferSize
fileprivate var queue:AudioQueueRef? = nil {
private var bufferSize:UInt32 = AudioStreamPlayback.defaultBufferSize
private var queue:AudioQueueRef? = nil {
didSet {
guard let oldValue:AudioQueueRef = oldValue else {
return
@ -51,13 +51,13 @@ class AudioStreamPlayback {
AudioQueueDispose(oldValue, true)
}
}
fileprivate var inuse:[Bool] = []
fileprivate var buffers:[AudioQueueBufferRef] = []
fileprivate var current:Int = 0
fileprivate var started:Bool = false
fileprivate var filledBytes:UInt32 = 0
fileprivate var packetDescriptions:[AudioStreamPacketDescription] = []
fileprivate var fileStreamID:AudioFileStreamID? = nil {
private var inuse:[Bool] = []
private var buffers:[AudioQueueBufferRef] = []
private var current:Int = 0
private var started:Bool = false
private var filledBytes:UInt32 = 0
private var packetDescriptions:[AudioStreamPacketDescription] = []
private var fileStreamID:AudioFileStreamID? = nil {
didSet {
guard let oldValue:AudioFileStreamID = oldValue else {
return
@ -65,11 +65,11 @@ class AudioStreamPlayback {
AudioFileStreamClose(oldValue)
}
}
fileprivate var isPacketDescriptionsFull:Bool {
private var isPacketDescriptionsFull:Bool {
return packetDescriptions.count == maxPacketDescriptions
}
fileprivate var outputCallback:AudioQueueOutputCallback = {(
private var outputCallback:AudioQueueOutputCallback = {(
inUserData: UnsafeMutableRawPointer?,
inAQ: AudioQueueRef,
inBuffer:AudioQueueBufferRef) -> Void in
@ -77,7 +77,7 @@ class AudioStreamPlayback {
playback.onOutputForQueue(inAQ, inBuffer)
}
fileprivate var packetsProc:AudioFileStream_PacketsProc = {(
private var packetsProc:AudioFileStream_PacketsProc = {(
inClientData:UnsafeMutableRawPointer,
inNumberBytes:UInt32,
inNumberPackets:UInt32,
@ -88,7 +88,7 @@ class AudioStreamPlayback {
playback.onAudioPacketsForFileStream(inNumberBytes, inNumberPackets, inInputData, inPacketDescriptions)
}
fileprivate var propertyListenerProc:AudioFileStream_PropertyListenerProc = {(
private var propertyListenerProc:AudioFileStream_PropertyListenerProc = {(
inClientData:UnsafeMutableRawPointer,
inAudioFileStream:AudioFileStreamID,
inPropertyID:AudioFileStreamPropertyID,

View File

@ -2,7 +2,7 @@ import CoreMedia
import Foundation
class IOComponent: NSObject {
fileprivate(set) weak var mixer:AVMixer?
private(set) weak var mixer:AVMixer?
init(mixer: AVMixer) {
self.mixer = mixer

View File

@ -160,7 +160,7 @@ final class VideoIOComponent: IOComponent {
}
}
fileprivate var _output:AVCaptureVideoDataOutput? = nil
private var _output:AVCaptureVideoDataOutput? = nil
var output:AVCaptureVideoDataOutput! {
get {
if (_output == nil) {

View File

@ -9,14 +9,14 @@ open class NetService: NSObject {
let lockQueue:DispatchQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.NetService.lock")
var networkQueue:DispatchQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.NetService.network")
fileprivate(set) var domain:String
fileprivate(set) var name:String
fileprivate(set) var port:Int32
fileprivate(set) var type:String
fileprivate(set) var running:Bool = false
fileprivate(set) var clients:[NetClient] = []
fileprivate(set) var service:Foundation.NetService!
fileprivate var runloop:RunLoop!
private(set) var domain:String
private(set) var name:String
private(set) var port:Int32
private(set) var type:String
private(set) var running:Bool = false
private(set) var clients:[NetClient] = []
private(set) var service:Foundation.NetService!
private var runloop:RunLoop!
public init(domain:String, type:String, name:String, port:Int32) {
self.domain = domain
@ -53,7 +53,7 @@ open class NetService: NSObject {
runloop = nil
}
fileprivate func initService() {
private func initService() {
runloop = RunLoop.current
service = Foundation.NetService(domain: domain, type: type, name: name, port: port)
service.delegate = self

View File

@ -20,7 +20,7 @@ public class NetSocket: NSObject {
private var buffer:UnsafeMutablePointer<UInt8>? = nil
private var runloop:RunLoop?
private let outputQueue:DispatchQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.NetSocket.output")
fileprivate var timeoutHandler:(() -> Void)?
private var timeoutHandler:(() -> Void)?
@discardableResult
final public func doOutput(data:Data, locked:UnsafeMutablePointer<UInt32>? = nil) -> Int {
@ -152,7 +152,7 @@ public class NetSocket: NSObject {
func didTimeout() {
}
fileprivate func doInput() {
private func doInput() {
guard let inputStream:InputStream = inputStream, let buffer:UnsafeMutablePointer<UInt8> = buffer else {
return
}

View File

@ -1,7 +1,7 @@
import Foundation
class AMFSerializerUtil {
fileprivate static var classes:[String: AnyClass] = [:]
private static var classes:[String: AnyClass] = [:]
static func getClassByAlias(_ name:String) -> AnyClass? {
objc_sync_enter(classes)
@ -372,7 +372,7 @@ extension AMF0Serializer: AMFSerializer {
}
@discardableResult
fileprivate func serializeUTF8(_ value:String, _ isLong: Bool) -> Self {
private func serializeUTF8(_ value:String, _ isLong: Bool) -> Self {
let utf8:Data = Data(value.utf8)
if (isLong) {
writeUInt32(UInt32(utf8.count))
@ -382,7 +382,7 @@ extension AMF0Serializer: AMFSerializer {
return writeBytes(utf8)
}
fileprivate func deserializeUTF8(_ isLong:Bool) throws -> String {
private func deserializeUTF8(_ isLong:Bool) throws -> String {
let length:Int = isLong ? Int(try readUInt32()) : Int(try readUInt16())
return try readUTF8Bytes(length)
}

View File

@ -500,7 +500,7 @@ extension AMF3Serializer: AMFSerializer {
- seealso: 1.3.1 Variable Length Unsigned 29-bit Integer Encoding
*/
@discardableResult
fileprivate func serializeU29(_ value:Int) -> Self {
private func serializeU29(_ value:Int) -> Self {
if (value < Int(Int32.min) || Int(Int32.max) < value) {
return serialize(Double(value))
}
@ -523,7 +523,7 @@ extension AMF3Serializer: AMFSerializer {
}
}
fileprivate func deserializeU29() throws -> Int {
private func deserializeU29() throws -> Int {
var count:Int = 1
var result:Int = 0
var byte:UInt8 = try readUInt8()
@ -550,7 +550,7 @@ extension AMF3Serializer: AMFSerializer {
- seealso: 1.3.2 Strings and UTF-8
*/
@discardableResult
fileprivate func serializeUTF8(_ value:String) -> Self {
private func serializeUTF8(_ value:String) -> Self {
if (value.isEmpty) {
return serializeU29(0x01)
}
@ -562,7 +562,7 @@ extension AMF3Serializer: AMFSerializer {
return serializeU29(utf8.count << 1 | 0x01).writeBytes(utf8)
}
fileprivate func deserializeUTF8() throws -> String {
private func deserializeUTF8() throws -> String {
let ref:Int = try deserializeU29()
if (ref & 0x01) == 0 {
return try reference.getString(ref >> 1)

View File

@ -14,8 +14,8 @@ public final class ASUndefined: NSObject {
// MARK: -
public struct ASArray {
fileprivate(set) var data:[Any?]
fileprivate(set) var dict:[String: Any?] = [:]
private(set) var data:[Any?]
private(set) var dict:[String: Any?] = [:]
public var length:Int {
return data.count
@ -98,7 +98,7 @@ public final class ASXMLDocument: NSObject {
return data
}
fileprivate var data:String
private var data:String
public init(data:String) {
self.data = data
@ -116,7 +116,7 @@ public final class ASXML: NSObject {
return data
}
fileprivate var data:String
private var data:String
public init(data:String) {
self.data = data

View File

@ -180,9 +180,9 @@ final class RTMPChunk {
}
}
fileprivate(set) var message:RTMPMessage?
fileprivate(set) var fragmented:Bool = false
fileprivate var _data:Data = Data()
private(set) var message:RTMPMessage?
private(set) var fragmented:Bool = false
private var _data:Data = Data()
init(type:RTMPChunkType, streamId:UInt16, message:RTMPMessage) {
self.type = type

View File

@ -125,7 +125,7 @@ open class RTMPConnection: EventDispatcher {
case clientSeek = 1
}
fileprivate static func createSanJoseAuthCommand(_ url:URL, description:String) -> String {
private static func createSanJoseAuthCommand(_ url:URL, description:String) -> String {
var command:String = url.absoluteString
guard let index:String.CharacterView.Index = description.characters.index(of: "?") else {
@ -164,9 +164,9 @@ open class RTMPConnection: EventDispatcher {
/// The outgoing RTMPChunkSize.
open var chunkSize:Int = RTMPConnection.defaultChunkSizeS
/// The URI passed to the RTMPConnection.connect() method.
open fileprivate(set) var uri:URL? = nil
open private(set) var uri:URL? = nil
/// This instance connected to server(true) or not(false).
open fileprivate(set) var connected:Bool = false
open private(set) var connected:Bool = false
/// The object encoding for this RTMPConnection instance.
open var objectEncoding:UInt8 = RTMPConnection.defaultObjectEncoding
/// The statistics of total incoming bytes.
@ -182,11 +182,11 @@ open class RTMPConnection: EventDispatcher {
return streams.count
}
/// The statistics of outgoing queue bytes per second.
@objc dynamic open fileprivate(set) var previousQueueBytesOut:[Int64] = []
@objc dynamic open private(set) var previousQueueBytesOut:[Int64] = []
/// The statistics of incoming bytes per second.
@objc dynamic open fileprivate(set) var currentBytesInPerSecond:Int32 = 0
@objc dynamic open private(set) var currentBytesInPerSecond:Int32 = 0
/// The statistics of outgoing bytes per second.
@objc dynamic open fileprivate(set) var currentBytesOutPerSecond:Int32 = 0
@objc dynamic open private(set) var currentBytesOutPerSecond:Int32 = 0
var socket:RTMPSocketCompatible!
var streams:[UInt32: RTMPStream] = [:]
@ -209,7 +209,7 @@ open class RTMPConnection: EventDispatcher {
var windowSizeS:Int64 = RTMPConnection.defaultWindowSizeS
var currentTransactionId:Int = 0
fileprivate var timer:Timer? {
private var timer:Timer? {
didSet {
if let oldValue:Timer = oldValue {
oldValue.invalidate()
@ -219,13 +219,13 @@ open class RTMPConnection: EventDispatcher {
}
}
}
fileprivate var messages:[UInt16:RTMPMessage] = [:]
fileprivate var arguments:[Any?] = []
fileprivate var currentChunk:RTMPChunk? = nil
fileprivate var measureInterval:Int = 3
fileprivate var fragmentedChunks:[UInt16:RTMPChunk] = [:]
fileprivate var previousTotalBytesIn:Int64 = 0
fileprivate var previousTotalBytesOut:Int64 = 0
private var messages:[UInt16:RTMPMessage] = [:]
private var arguments:[Any?] = []
private var currentChunk:RTMPChunk? = nil
private var measureInterval:Int = 3
private var fragmentedChunks:[UInt16:RTMPChunk] = [:]
private var previousTotalBytesIn:Int64 = 0
private var previousTotalBytesOut:Int64 = 0
override public init() {
super.init()
@ -367,7 +367,7 @@ open class RTMPConnection: EventDispatcher {
}
}
fileprivate func createConnectionChunk() -> RTMPChunk? {
private func createConnectionChunk() -> RTMPChunk? {
guard let uri:URL = uri else {
return nil
}

View File

@ -325,7 +325,7 @@ final class RTMPCommandMessage: RTMPMessage {
}
}
fileprivate var serializer:AMFSerializer = AMF0Serializer()
private var serializer:AMFSerializer = AMF0Serializer()
init(objectEncoding:UInt8) {
self.objectEncoding = objectEncoding
@ -507,7 +507,7 @@ final class RTMPSharedObjectMessage: RTMPMessage {
}
}
fileprivate var serializer:AMFSerializer = AMF0Serializer()
private var serializer:AMFSerializer = AMF0Serializer()
init(objectEncoding:UInt8) {
self.objectEncoding = objectEncoding

View File

@ -12,9 +12,9 @@ final class RTMPMuxer {
static let aac:UInt8 = FLVAudioCodec.aac.rawValue << 4 | FLVSoundRate.kHz44.rawValue << 2 | FLVSoundSize.snd16bit.rawValue << 1 | FLVSoundType.stereo.rawValue
weak var delegate:RTMPMuxerDelegate? = nil
fileprivate var configs:[Int:Data] = [:]
fileprivate var audioTimestamp:CMTime = kCMTimeZero
fileprivate var videoTimestamp:CMTime = kCMTimeZero
private var configs:[Int:Data] = [:]
private var audioTimestamp:CMTime = kCMTimeZero
private var videoTimestamp:CMTime = kCMTimeZero
func dispose() {
configs.removeAll()

View File

@ -82,7 +82,7 @@ extension RTMPSharedObjectEvent: CustomStringConvertible {
*/
open class RTMPSharedObject: EventDispatcher {
static fileprivate var remoteSharedObjects:[String: RTMPSharedObject] = [:]
static private var remoteSharedObjects:[String: RTMPSharedObject] = [:]
static open func getRemote(withName: String, remotePath: String, persistence: Bool) -> RTMPSharedObject {
let key:String = remotePath + "/" + withName + "?persistence=" + persistence.description
objc_sync_enter(remoteSharedObjects)
@ -99,10 +99,10 @@ open class RTMPSharedObject: EventDispatcher {
var persistence:Bool
var currentVersion:UInt32 = 0
open fileprivate(set) var objectEncoding:UInt8 = RTMPConnection.defaultObjectEncoding
open fileprivate(set) var data:[String: Any?] = [:]
open private(set) var objectEncoding:UInt8 = RTMPConnection.defaultObjectEncoding
open private(set) var data:[String: Any?] = [:]
fileprivate var succeeded:Bool = false {
private var succeeded:Bool = false {
didSet {
guard succeeded else {
return
@ -117,7 +117,7 @@ open class RTMPSharedObject: EventDispatcher {
return data.description
}
fileprivate var rtmpConnection:RTMPConnection? = nil
private var rtmpConnection:RTMPConnection? = nil
init(name:String, path:String, persistence:Bool) {
self.name = name

View File

@ -221,8 +221,8 @@ open class RTMPStream: NetStream {
open static let defaultVideoBitrate:UInt32 = H264Encoder.defaultBitrate
weak open var qosDelegate:RTMPStreamQoSDelegate? = nil
open internal(set) var info:RTMPStreamInfo = RTMPStreamInfo()
open fileprivate(set) var objectEncoding:UInt8 = RTMPConnection.defaultObjectEncoding
@objc open fileprivate(set) dynamic var currentFPS:UInt16 = 0
open private(set) var objectEncoding:UInt8 = RTMPConnection.defaultObjectEncoding
@objc open private(set) dynamic var currentFPS:UInt16 = 0
open var soundTransform:SoundTransform {
get { return mixer.audioIO.playback.soundTransform }
set { mixer.audioIO.playback.soundTransform = newValue }
@ -290,15 +290,15 @@ open class RTMPStream: NetStream {
var audioTimestamp:Double = 0
var videoTimestamp:Double = 0
fileprivate(set) var muxer:RTMPMuxer = RTMPMuxer()
fileprivate var paused:Bool = false
fileprivate var sampler:MP4Sampler? = nil
fileprivate var frameCount:UInt16 = 0
fileprivate var dispatcher:IEventDispatcher!
fileprivate var audioWasSent:Bool = false
fileprivate var videoWasSent:Bool = false
fileprivate var howToPublish:RTMPStream.HowToPublish = .live
fileprivate var rtmpConnection:RTMPConnection
private(set) var muxer:RTMPMuxer = RTMPMuxer()
private var paused:Bool = false
private var sampler:MP4Sampler? = nil
private var frameCount:UInt16 = 0
private var dispatcher:IEventDispatcher!
private var audioWasSent:Bool = false
private var videoWasSent:Bool = false
private var howToPublish:RTMPStream.HowToPublish = .live
private var rtmpConnection:RTMPConnection
public init(connection: RTMPConnection) {
self.rtmpConnection = connection

View File

@ -36,10 +36,10 @@ final class RTMPTSocket: NSObject, RTMPSocketCompatible {
}
}
fileprivate(set) var totalBytesIn:Int64 = 0
fileprivate(set) var totalBytesOut:Int64 = 0
fileprivate(set) var queueBytesOut:Int64 = 0
fileprivate var timer:Timer? {
private(set) var totalBytesIn:Int64 = 0
private(set) var totalBytesOut:Int64 = 0
private(set) var queueBytesOut:Int64 = 0
private var timer:Timer? {
didSet {
if let oldValue:Timer = oldValue {
oldValue.invalidate()

View File

@ -83,7 +83,7 @@ open class ByteArray: ByteArrayConvertible {
self.data = data
}
fileprivate(set) var data:Data = Data()
private(set) var data:Data = Data()
open var length:Int {
get {

View File

@ -8,17 +8,17 @@ protocol ClockedQueueDelegate:class {
// MARK: -
final class ClockedQueue {
var bufferTime:TimeInterval = 0.1 // sec
fileprivate(set) var duration:TimeInterval = 0
private(set) var duration:TimeInterval = 0
weak var delegate:ClockedQueueDelegate?
fileprivate var isReady:Bool = false
fileprivate var buffers:[CMSampleBuffer] = []
fileprivate lazy var driver:TimerDriver = {
private var isReady:Bool = false
private var buffers:[CMSampleBuffer] = []
private lazy var driver:TimerDriver = {
var driver:TimerDriver = TimerDriver()
driver.setDelegate(self, withQueue: self.lockQueue)
return driver
}()
fileprivate let lockQueue:DispatchQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.ClockedQueue.lock")
private let lockQueue:DispatchQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.ClockedQueue.lock")
func enqueue(_ buffer:CMSampleBuffer) {
lockQueue.async {

View File

@ -58,7 +58,7 @@ open class Event: NSObject {
*/
open class EventDispatcher: NSObject, IEventDispatcher {
fileprivate weak var target:AnyObject? = nil
private weak var target:AnyObject? = nil
override public init() {
super.init()

View File

@ -11,9 +11,9 @@ public class TimerDriver: NSObject {
var queue:DispatchQueue?
weak var delegate:TimerDriverDelegate?
fileprivate var runloop:RunLoop?
fileprivate var nextFire:UInt64 = 0
fileprivate weak var timer:Timer? {
private var runloop:RunLoop?
private var nextFire:UInt64 = 0
private weak var timer:Timer? {
didSet {
if let oldValue:Timer = oldValue {
oldValue.invalidate()