This commit is contained in:
shogo4405 2017-02-02 00:21:20 +09:00
parent aed6102bd9
commit 865cdacead
7 changed files with 89 additions and 22 deletions

View File

@ -10,6 +10,7 @@ struct Preference {
}
final class LiveViewController: UIViewController {
/*
var rtmpConnection:RTMPConnection = RTMPConnection()
var rtmpStream:RTMPStream!
var sharedObject:RTMPSharedObject!
@ -293,4 +294,20 @@ final class LiveViewController: UIViewController {
currentFPSLabel.text = "\(rtmpStream.currentFPS)"
}
}
*/
override func viewDidLoad() {
    super.viewDidLoad()
    // Button that transitions to the publishing screen (LfViewController).
    let transitionButton = UIButton()
    transitionButton.frame = CGRect(x: 130, y: 200, width: 180, height: 100)
    transitionButton.setTitle("toLfViewController", for: UIControlState.normal)
    transitionButton.setTitleColor(UIColor.red, for: UIControlState.normal)
    transitionButton.addTarget(self, action: #selector(LiveViewController.toLfViewController), for: UIControlEvents.touchUpInside)
    view.addSubview(transitionButton)
}
// Target of the transition button; `dynamic` keeps it visible to the
// Objective-C selector machinery used by addTarget(_:action:for:).
dynamic private func toLfViewController() {
    // Present the publishing screen modally.
    present(LfViewController(), animated: true, completion: nil)
}
}

View File

@ -20,11 +20,19 @@ class LfViewController: UIViewController {
super.viewWillAppear(animated)
self.setCameraStream()
}
/*
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
rtmpStream.dispose()
}
*/
private func setCameraStream() {
rtmpConnection = RTMPConnection()
rtmpStream = RTMPStream(connection: rtmpConnection)
rtmpStream.syncOrientation = true
let sampleRate:Double = 48_000 // or 44_100
do {
try AVAudioSession.sharedInstance().setPreferredSampleRate(sampleRate)
@ -66,11 +74,24 @@ class LfViewController: UIViewController {
closeButton.setTitle("Close", for: UIControlState.normal)
closeButton.setTitleColor(UIColor.red, for: UIControlState.normal)
closeButton.addTarget(self, action: #selector(LfViewController.close), for: UIControlEvents.touchUpInside)
let tButton = UIButton(frame: CGRect(x: 150, y: 500, width: 100, height: 100))
tButton.setTitle("T", for: UIControlState.normal)
tButton.setTitleColor(UIColor.red, for: UIControlState.normal)
tButton.addTarget(self, action: #selector(LfViewController.toggle), for: UIControlEvents.touchUpInside)
lfView.addSubview(tButton)
lfView.addSubview(closeButton)
view.addSubview(lfView)
}
// Currently attached camera; starts on the back camera.
var cameraPosition:AVCaptureDevicePosition = AVCaptureDevicePosition.back
// Target of the "T" button: flips between back and front cameras and
// reattaches the capture device to the stream.
dynamic private func toggle() {
    if cameraPosition == .back {
        cameraPosition = .front
    } else {
        cameraPosition = .back
    }
    rtmpStream.attachCamera(DeviceUtil.device(withPosition: cameraPosition))
}
// Target of the "Close" button: dismisses this view controller.
dynamic private func close() {
    dismiss(animated: true, completion: nil)
}

View File

@ -45,15 +45,27 @@ open class LFView: UIView {
super.init(coder: aDecoder)
}
deinit {
// Detach any attached stream so the underlying capture session stops
// referencing this view before it is deallocated.
attachStream(nil)
// NOTE(review): debug logging left in deinit — consider removing or
// routing through the project's logger.
print(self)
}
override open func awakeFromNib() {
    // UIKit requires overrides of awakeFromNib() to call the superclass
    // implementation at some point; the original override omitted it.
    super.awakeFromNib()
    // Apply the default background to both the view and its backing layer
    // so the color survives layer-level rendering.
    backgroundColor = LFView.defaultBackgroundColor
    layer.backgroundColor = LFView.defaultBackgroundColor.cgColor
}
/// Attaches a NetStream to this preview view, or detaches the current one
/// when passed nil.
open func attachStream(_ stream:NetStream?) {
// NOTE(review): this span comes from a diff whose +/- markers were
// stripped. The next three lines look like the pre-refactor body that the
// guard/async version below replaces — confirm against the repository
// before treating this as one function.
layer.setValue(stream?.mixer.session, forKey: "session")
stream?.mixer.videoIO.drawable = self
currentStream = stream
// Detach path: clear the layer's capture session and drop the reference.
guard let stream:NetStream = stream else {
layer.setValue(nil, forKey: "session")
currentStream = nil
return
}
// Attach path: hop onto the stream's lock queue before touching its
// mixer, presumably to serialize with other stream mutations — verify.
stream.lockQueue.async {
self.layer.setValue(stream.mixer.session, forKey: "session")
stream.mixer.videoIO.drawable = self
self.currentStream = stream
}
}
}

View File

@ -45,20 +45,15 @@ final public class AVMixer: NSObject {
}
}
public var session:AVCaptureSession! = nil
public lazy var session:AVCaptureSession = {
var session = AVCaptureSession()
session.sessionPreset = AVMixer.defaultSessionPreset
return session
}()
public private(set) lazy var recorder:AVMixerRecorder = AVMixerRecorder()
override init() {
session = AVCaptureSession()
session.sessionPreset = AVMixer.defaultSessionPreset
}
deinit {
print(self)
if (session.isRunning) {
session.stopRunning()
}
dispose()
}
@ -71,8 +66,12 @@ final public class AVMixer: NSObject {
}()
public func dispose() {
if (session.isRunning) {
session.stopRunning()
}
audioIO.dispose()
videoIO.dispose()
print(session)
}
}
@ -95,6 +94,7 @@ extension AVMixer: Runnable {
guard !running else {
return
}
print("startRunning")
session.startRunning()
}

View File

@ -45,10 +45,12 @@ final class AudioIOComponent: IOComponent {
}
func attachAudio(_ audio:AVCaptureDevice?, automaticallyConfiguresApplicationAudioSession:Bool) {
mixer?.session.beginConfiguration()
output = nil
encoder.invalidate()
guard let audio:AVCaptureDevice = audio else {
input = nil
mixer?.session.commitConfiguration()
return
}
do {
@ -61,6 +63,7 @@ final class AudioIOComponent: IOComponent {
} catch let error as NSError {
logger.error("\(error)")
}
mixer?.session.commitConfiguration()
}
func dispose() {

View File

@ -231,9 +231,11 @@ final class VideoIOComponent: IOComponent {
}
func attachCamera(_ camera:AVCaptureDevice?) {
mixer?.session.beginConfiguration()
output = nil
guard let camera:AVCaptureDevice = camera else {
input = nil
mixer?.session.commitConfiguration()
return
}
#if os(iOS)
@ -268,19 +270,24 @@ final class VideoIOComponent: IOComponent {
} catch let error as NSError {
logger.error("\(error)")
}
mixer?.session.commitConfiguration()
}
#if os(OSX)
// Attaches an AVCaptureScreenInput as the video source (macOS only), or
// detaches the current input when passed nil.
func attachScreen(_ screen:AVCaptureScreenInput?) {
mixer?.session.beginConfiguration()
output = nil
guard let _:AVCaptureScreenInput = screen else {
input = nil
// NOTE(review): this early return skips commitConfiguration(), unlike
// the guard paths in attachCamera/attachAudio — an unbalanced
// begin/commit pair; confirm and fix upstream.
return
}
input = screen
// NOTE(review): the next two lines appear to be the removed (`mixer.`)
// and added (`mixer?.`) versions of the same statement from a diff with
// stripped +/- markers — only one should exist in the real file.
mixer.session.addOutput(output)
mixer?.session.addOutput(output)
output.setSampleBufferDelegate(self, queue: lockQueue)
// NOTE(review): likewise, the unconditional startRunning() below looks
// like the pre-change line superseded by the commit/isRunning block.
mixer.session.startRunning()
mixer?.session.commitConfiguration()
if (mixer?.session.isRunning ?? false) {
mixer?.session.startRunning()
}
}
#else
func attachScreen(_ screen:ScreenCaptureSession?, useScreenSize:Bool = true) {

View File

@ -19,9 +19,7 @@ open class NetStream: NSObject {
public let lockQueue:DispatchQueue = DispatchQueue(label: "com.github.shogo4405.lf.NetStream.lock")
deinit {
#if os(iOS)
syncOrientation = false
#endif
NotificationCenter.default.removeObserver(self)
}
open var torch:Bool {
@ -53,8 +51,8 @@ open class NetStream: NSObject {
self.mixer.videoIO.orientation = newValue
}
}
}
}
open var syncOrientation:Bool = false {
didSet {
guard syncOrientation != oldValue else {
@ -130,14 +128,17 @@ open class NetStream: NSObject {
}
open func attachCamera(_ camera:AVCaptureDevice?) {
DispatchQueue.main.async {
self.mixer.videoIO.attachCamera(camera)
lockQueue.async {
logger.warning("attachCamera")
self.mixer.startRunning()
self.mixer.videoIO.attachCamera(camera)
logger.warning("attachCamera-End")
}
}
open func attachAudio(_ audio:AVCaptureDevice?, automaticallyConfiguresApplicationAudioSession:Bool = true) {
lockQueue.async {
logger.warning("attachAudio")
self.mixer.audioIO.attachAudio(audio,
automaticallyConfiguresApplicationAudioSession: automaticallyConfiguresApplicationAudioSession
)
@ -185,6 +186,12 @@ open class NetStream: NSObject {
mixer.videoIO.exposurePointOfInterest = exposure
}
open func dispose() {
lockQueue.async {
self.mixer.dispose()
}
}
#if os(iOS)
@objc private func on(uiDeviceOrientationDidChange:Notification) {
var deviceOrientation:UIDeviceOrientation = .unknown