support SwiftPM

omochimetaru 2019-11-27 16:10:53 +09:00
parent c87916b31e
commit 110de942e5
31 changed files with 139 additions and 12 deletions

View File

@ -5,21 +5,26 @@ import PackageDescription
let package = Package(
name: "HaishinKit",
products: [
.library(name: "RTMP", targets: ["HTTP"]),
.library(name: "HTTP", targets: ["RTMP"])
.library(name: "HaishinKit", targets: ["HaishinKit"])
],
dependencies: [
.Package(url: "https://github.com/shogo4405/Logboard.git", from: "2.1.2")
.package(url: "https://github.com/shogo4405/Logboard.git", from: "2.1.2")
],
targets: [
.target(name: "Codec", dependencies: [])
.target(name: "Extension", dependencied: [])
.target(name: "HTTP", dependencies: [])
.target(name: "ISO", dependencies: [])
.target(name: "Media", dependencies: [])
.target(name: "Util", dependencies: [])
.target(name: "Net", dependencies: ["Codec", "Extension", "ISO", "Media", "Util"])
.target(name: "HTTP", dependencies: ["Net"])
.target(name: "RTMP", dependencies: ["Net", "FLV"])
.target(name: "SwiftPMSupport"),
.target(name: "HaishinKit", dependencies: ["Logboard", "SwiftPMSupport"],
path: "Sources",
sources: [
"Codec",
"Extension",
"FLV",
"ISO",
"Media",
"Net",
"Util",
"RTMP",
"HTTP",
"Platforms"
])
]
)
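With the new manifest, downstream projects can consume HaishinKit directly through SwiftPM. A minimal consumer-side sketch, assuming a Swift 5.1 toolchain and the shogo4405/HaishinKit.swift repository; the URL, branch, and app target name are illustrative, not part of this commit:

// swift-tools-version:5.1
import PackageDescription

let package = Package(
    name: "MyApp",
    dependencies: [
        // Hypothetical source location; pin to a tagged release once one ships with SwiftPM support.
        .package(url: "https://github.com/shogo4405/HaishinKit.swift.git", .branch("master"))
    ],
    targets: [
        .target(name: "MyApp", dependencies: ["HaishinKit"])
    ]
)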

View File

@ -1,4 +1,7 @@
#if os(iOS)
import AVFoundation
import UIKit
extension DeviceUtil {
public static func videoOrientation(by notification: Notification) -> AVCaptureVideoOrientation? {
@ -38,3 +41,5 @@ extension DeviceUtil {
}
}
}
#endif
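This and the hunks that follow fence each platform-specific source file in an #if os(...) guard. SwiftPM compiles every file under a target's path on every platform and has no per-file target membership, so the iOS-, macOS-, and tvOS-only files must exclude themselves at compile time. A minimal sketch of the pattern, with a hypothetical PlatformInfo helper standing in for the real file body:

#if os(iOS)
import AVFoundation
import UIKit

// Hypothetical helper; on macOS or tvOS the whole body compiles away,
// which is exactly what the guards added in this commit achieve.
enum PlatformInfo {
    static var systemName: String { UIDevice.current.systemName }
}
#endif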

View File

@ -1,3 +1,5 @@
#if os(iOS)
import AVFoundation
import GLKit
@ -77,3 +79,5 @@ extension GLHKView: NetStreamDrawable {
}
}
}
#endif

View File

@ -1,3 +1,5 @@
#if os(iOS)
import AVFoundation
import UIKit
@ -80,3 +82,5 @@ extension HKView: NetStreamDrawable {
func draw(image: CIImage) {
}
}
#endif

View File

@ -1,3 +1,5 @@
#if os(iOS)
#if canImport(MetalKit)
import AVFoundation
import MetalKit
@ -110,3 +112,5 @@ extension MTHKView: NetStreamDrawable {
}
}
#endif
#endif

View File

@ -1,3 +1,5 @@
#if os(iOS)
import AVFoundation
import Foundation
@ -25,3 +27,5 @@ extension NetStream {
self.mixer.videoIO.setZoomFactor(zoomFactor, ramping: ramping, withRate: withRate)
}
}
#endif

View File

@ -1,6 +1,11 @@
#if os(iOS)
import AVFoundation
import CoreImage
#if os(iOS)
import UIKit
#endif
public protocol ScreenCaptureOutputPixelBufferDelegate: class {
func didSet(size: CGSize)
@ -168,3 +173,5 @@ extension ScreenCaptureSession: Running {
}
}
}
#endif

View File

@ -1,3 +1,5 @@
#if os(iOS)
import AVFoundation
extension VideoIOComponent {
@ -62,3 +64,5 @@ extension VideoIOComponent: ScreenCaptureOutputPixelBufferDelegate {
mixer?.recorder.appendPixelBuffer(pixelBuffer, withPresentationTime: withPresentationTime)
}
}
#endif

View File

@ -1,3 +1,5 @@
#if os(macOS)
import CoreVideo
import Foundation
@ -41,3 +43,5 @@ final class DisplayLink: NSObject {
status = CVDisplayLinkStop(displayLink)
}
}
#endif

View File

@ -1,3 +1,5 @@
#if os(macOS)
import AVFoundation
import GLUT
import OpenGL.GL3
@ -111,3 +113,5 @@ extension GLHKView: NetStreamDrawable {
}
}
}
#endif

View File

@ -1,3 +1,5 @@
#if os(macOS)
import AVFoundation
open class HKView: NSView {
@ -60,3 +62,5 @@ extension HKView: NetStreamDrawable {
func draw(image: CIImage) {
}
}
#endif

View File

@ -1,3 +1,5 @@
#if os(macOS)
import AVFoundation
import MetalKit
@ -100,3 +102,5 @@ extension MTHKView: NetStreamDrawable {
}
}
}
#endif

View File

@ -1,3 +1,5 @@
#if os(macOS)
import AVFoundation
import Foundation
@ -8,3 +10,5 @@ extension NetStream {
}
}
}
#endif

View File

@ -1,3 +1,5 @@
#if os(macOS)
import AVFoundation
extension VideoIOComponent {
@ -17,3 +19,5 @@ extension VideoIOComponent {
}
}
}
#endif

View File

@ -1,3 +1,5 @@
#if os(tvOS)
import CoreMedia
import Foundation
@ -11,3 +13,5 @@ protocol AVCaptureVideoDataOutputSampleBufferDelegate: class {
protocol AVCaptureAudioDataOutputSampleBufferDelegate: class {
func captureOutput(_ captureOutput: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection)
}
#endif

View File

@ -1,3 +1,5 @@
#if os(tvOS)
import AVFoundation
import GLKit
@ -65,3 +67,5 @@ extension GLHKView: NetStreamDrawable {
}
}
}
#endif

View File

@ -1,3 +1,5 @@
#if os(tvOS)
import AVFoundation
import MetalKit
@ -102,3 +104,5 @@ extension MTHKView: NetStreamDrawable {
}
}
}
#endif

View File

@ -3,6 +3,10 @@ import CoreFoundation
import CoreVideo
import VideoToolbox
#if os(iOS)
import UIKit
#endif
protocol VideoDecoderDelegate: class {
func sampleOutput(video sampleBuffer: CMSampleBuffer)
}

View File

@ -2,6 +2,10 @@ import AVFoundation
import CoreFoundation
import VideoToolbox
#if os(iOS)
import UIKit
#endif
protocol VideoEncoderDelegate: class {
func didSetFormatDescription(video formatDescription: CMFormatDescription?)
func sampleOutput(video sampleBuffer: CMSampleBuffer)

View File

@ -1,4 +1,5 @@
import Foundation
import CoreVideo
extension CVPixelBuffer {
var width: Int {

View File

@ -1,3 +1,5 @@
import Foundation
public enum FLVTagType: UInt8 {
case audio = 8
case video = 9

View File

@ -1,3 +1,5 @@
import Foundation
/**
- seealso: https://tools.ietf.org/html/draft-pantos-http-live-streaming-19
*/

View File

@ -2,6 +2,10 @@ import AVFoundation
import CoreMedia
import Foundation
#if canImport(SwiftPMSupport)
import SwiftPMSupport
#endif
/// MPEG-2 TS (Transport Stream) Writer delegate
public protocol TSWriterDelegate: class {
func didOutput(_ data: Data)

View File

@ -1,5 +1,9 @@
import AVFoundation
#if canImport(SwiftPMSupport)
import SwiftPMSupport
#endif
final class AudioIOComponent: IOComponent {
lazy var encoder = AudioConverter()
let lockQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.AudioIOComponent.lock")

View File

@ -1,4 +1,5 @@
import AVFoundation
import CoreImage
final class VideoIOComponent: IOComponent {
#if os(macOS)

View File

@ -1,4 +1,5 @@
import AVFoundation
import CoreImage
protocol NetStreamDrawable: class {
#if os(iOS) || os(macOS)

Sources/Platforms Symbolic link
View File

@ -0,0 +1 @@
../Platforms

View File

@ -0,0 +1,3 @@
#import "HaishinKit.h"
void HaishinKit_SwiftPMSupport_dummy_symbol() {}

View File

@ -0,0 +1,21 @@
#import <UIKit/UIKit.h>
static NSString *const __nonnull HaishinKitIdentifier = @"com.haishinkit.HaishinKit";
FOUNDATION_EXPORT double HaishinKitVersionNumber;
FOUNDATION_EXPORT const unsigned char HaishinKitVersionString[];
// @see http://stackoverflow.com/questions/35119531/catch-objective-c-exception-in-swift
NS_INLINE void nstry(void(^_Nonnull lambda)(void), void(^_Nullable error)(NSException *_Nonnull exception)) {
    @try {
        lambda();
    }
    @catch (NSException *exception) {
        if (error != NULL) {
            @try {
                error(exception);
            } @catch (NSException *exception) {
            }
        }
    }
}
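The nstry helper above exists because Objective-C NSExceptions pass straight through Swift's do/try/catch, so an Objective-C-level @try/@catch is needed to intercept them. A hedged sketch of calling it from Swift once SwiftPMSupport (or the umbrella header) is imported; raising NSException directly here just stands in for any Objective-C API that can throw one:

import Foundation
#if canImport(SwiftPMSupport)
import SwiftPMSupport
#endif

nstry({
    // Deliberately raise an Objective-C exception that Swift cannot catch natively.
    NSException(name: .genericException, reason: "demo", userInfo: nil).raise()
}, { exception in
    print("caught:", exception.name.rawValue, exception.reason ?? "")
})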

View File

@ -1,3 +1,7 @@
import Logboard
#if canImport(SwiftPMSupport)
import SwiftPMSupport
#endif
let logger = Logboard.with(HaishinKitIdentifier)

View File

@ -1,3 +1,5 @@
import Foundation
protocol DataConvertible {
var data: Data { get set }
}