// HaishinKit.swift/Examples/iOS/LiveViewController.swift

import AVFoundation
import HaishinKit
import Photos
import UIKit
import VideoToolbox
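
/// A sample view controller that publishes the camera and microphone over
/// RTMP with HaishinKit, previews the stream in an MTHKView, and exposes
/// bitrate, zoom, FPS, and video-effect controls.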
final class LiveViewController: UIViewController {
    private static let maxRetryCount: Int = 5

    @IBOutlet private weak var lfView: MTHKView!
    @IBOutlet private weak var currentFPSLabel: UILabel!
    @IBOutlet private weak var publishButton: UIButton!
    @IBOutlet private weak var pauseButton: UIButton!
    @IBOutlet private weak var videoBitrateLabel: UILabel!
    @IBOutlet private weak var videoBitrateSlider: UISlider!
    @IBOutlet private weak var audioBitrateLabel: UILabel!
    @IBOutlet private weak var zoomSlider: UISlider!
    @IBOutlet private weak var audioBitrateSlider: UISlider!
    @IBOutlet private weak var fpsControl: UISegmentedControl!
    @IBOutlet private weak var effectSegmentControl: UISegmentedControl!

    private var pipIntentView = UIView()
    private var rtmpConnection = RTMPConnection()
    private var rtmpStream: RTMPStream!
    private var sharedObject: RTMPSharedObject!
    private var currentEffect: VideoEffect?
    private var currentPosition: AVCaptureDevice.Position = .back
    private var retryCount: Int = 0
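
    /// Configures the stream: creates the RTMPStream on the shared connection,
    /// sets up the draggable picture-in-picture frame, applies the initial
    /// orientation and a 720x1280 output size, and seeds the bitrate sliders
    /// from the codec defaults.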
    override func viewDidLoad() {
        super.viewDidLoad()
        pipIntentView.layer.borderWidth = 1.0
        pipIntentView.layer.borderColor = UIColor.white.cgColor
        pipIntentView.bounds = MultiCamCaptureSettings.default.regionOfInterest
        pipIntentView.isUserInteractionEnabled = true
        view.addSubview(pipIntentView)
        rtmpStream = RTMPStream(connection: rtmpConnection)
        if let orientation = DeviceUtil.videoOrientation(by: UIApplication.shared.statusBarOrientation) {
            rtmpStream.videoOrientation = orientation
        }
        rtmpStream.videoSettings.videoSize = .init(width: 720, height: 1280)
        rtmpStream.mixer.recorder.delegate = self
        videoBitrateSlider?.value = Float(VideoCodecSettings.default.bitRate) / 1000
        audioBitrateSlider?.value = Float(AudioCodecSettings.default.bitRate) / 1000
        NotificationCenter.default.addObserver(self, selector: #selector(on(_:)), name: UIDevice.orientationDidChangeNotification, object: nil)
    }
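
    /// Attaches the microphone and the wide-angle camera on each appearance,
    /// adds the front camera as a second (multi-cam) source on iOS 13+, and
    /// starts observing `currentFPS` and audio-session notifications.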
    override func viewWillAppear(_ animated: Bool) {
        logger.info("viewWillAppear")
        super.viewWillAppear(animated)
        rtmpStream.attachAudio(AVCaptureDevice.default(for: .audio)) { error in
            logger.warn(error)
        }
        let back = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: currentPosition)
        rtmpStream.attachCamera(back) { error in
            logger.warn(error)
        }
        if #available(iOS 13.0, *) {
            let front = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front)
            rtmpStream.videoCapture(for: 1)?.isVideoMirrored = true
            rtmpStream.attachMultiCamera(front)
        }
        rtmpStream.addObserver(self, forKeyPath: "currentFPS", options: .new, context: nil)
        lfView?.attachStream(rtmpStream)
        NotificationCenter.default.addObserver(self, selector: #selector(didInterruptionNotification(_:)), name: AVAudioSession.interruptionNotification, object: nil)
        NotificationCenter.default.addObserver(self, selector: #selector(didRouteChangeNotification(_:)), name: AVAudioSession.routeChangeNotification, object: nil)
    }
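
    /// Tears capture down in reverse: stops KVO, closes the stream, and
    /// detaches the audio, camera, and multi-camera inputs.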
    override func viewWillDisappear(_ animated: Bool) {
        logger.info("viewWillDisappear")
        super.viewWillDisappear(animated)
        rtmpStream.removeObserver(self, forKeyPath: "currentFPS")
        rtmpStream.close()
        rtmpStream.attachAudio(nil)
        rtmpStream.attachCamera(nil)
        if #available(iOS 13.0, *) {
            rtmpStream.attachMultiCamera(nil)
        }
        // swiftlint:disable notification_center_detachment
        NotificationCenter.default.removeObserver(self)
    }
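
    /// KVO callback for `currentFPS`; the label is only updated when the
    /// change is delivered on the main thread.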
    // swiftlint:disable block_based_kvo
    override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey: Any]?, context: UnsafeMutableRawPointer?) {
        if Thread.isMainThread {
            currentFPSLabel?.text = "\(rtmpStream.currentFPS)"
        }
    }
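
    /// Drags the picture-in-picture view with the finger and mirrors its new
    /// frame into `multiCamCaptureSettings` as the region of interest.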
    override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?) {
        guard let touch = touches.first else {
            return
        }
        if touch.view == pipIntentView {
            let destLocation = touch.location(in: view)
            let prevLocation = touch.previousLocation(in: view)
            var currentFrame = pipIntentView.frame
            let deltaX = destLocation.x - prevLocation.x
            let deltaY = destLocation.y - prevLocation.y
            currentFrame.origin.x += deltaX
            currentFrame.origin.y += deltaY
            pipIntentView.frame = currentFrame
            rtmpStream.multiCamCaptureSettings = MultiCamCaptureSettings(
                mode: rtmpStream.multiCamCaptureSettings.mode,
                cornerRadius: 16.0,
                regionOfInterest: currentFrame,
                direction: .east
            )
        }
    }
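
    /// Swaps the main capture between the front and back cameras, mirroring
    /// the picture for the front camera, and moves the multi-cam source to
    /// the opposite position.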
    @IBAction func rotateCamera(_ sender: UIButton) {
        logger.info("rotateCamera")
        let position: AVCaptureDevice.Position = currentPosition == .back ? .front : .back
        rtmpStream.videoCapture(for: 0)?.isVideoMirrored = position == .front
        rtmpStream.attachCamera(AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: position)) { error in
            logger.warn(error)
        }
        if #available(iOS 13.0, *) {
            rtmpStream.videoCapture(for: 1)?.isVideoMirrored = currentPosition == .front
            rtmpStream.attachMultiCamera(AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: currentPosition)) { error in
                logger.warn(error)
            }
        }
        currentPosition = position
    }

    @IBAction func toggleTorch(_ sender: UIButton) {
        rtmpStream.torch.toggle()
    }
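
    /// Shared handler for the audio-bitrate, video-bitrate, and zoom sliders;
    /// zoom ramps the capture device after locking it for configuration.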
    @IBAction func on(slider: UISlider) {
        if slider == audioBitrateSlider {
            audioBitrateLabel?.text = "audio \(Int(slider.value))/kbps"
            rtmpStream.audioSettings.bitRate = Int(slider.value * 1000)
        }
        if slider == videoBitrateSlider {
            videoBitrateLabel?.text = "video \(Int(slider.value))/kbps"
            rtmpStream.videoSettings.bitRate = UInt32(slider.value * 1000)
        }
        if slider == zoomSlider {
            let zoomFactor = CGFloat(slider.value)
            guard let device = rtmpStream.videoCapture(for: 0)?.device, 1 <= zoomFactor && zoomFactor < device.activeFormat.videoMaxZoomFactor else {
                return
            }
            do {
                try device.lockForConfiguration()
                device.ramp(toVideoZoomFactor: zoomFactor, withRate: 5.0)
                device.unlockForConfiguration()
            } catch let error as NSError {
                logger.error("while locking device for ramp: \(error)")
            }
        }
    }

    @IBAction func on(pause: UIButton) {
        rtmpStream.paused.toggle()
    }

    @IBAction func on(close: UIButton) {
        self.dismiss(animated: true, completion: nil)
    }
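
    /// Toggles publishing: on start it registers the RTMP event listeners and
    /// connects; on stop it closes the connection and removes them. The idle
    /// timer is disabled while live so the screen stays on.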
    @IBAction func on(publish: UIButton) {
        if publish.isSelected {
            UIApplication.shared.isIdleTimerDisabled = false
            rtmpConnection.close()
            rtmpConnection.removeEventListener(.rtmpStatus, selector: #selector(rtmpStatusHandler), observer: self)
            rtmpConnection.removeEventListener(.ioError, selector: #selector(rtmpErrorHandler), observer: self)
            publish.setTitle("●", for: [])
        } else {
            UIApplication.shared.isIdleTimerDisabled = true
            rtmpConnection.addEventListener(.rtmpStatus, selector: #selector(rtmpStatusHandler), observer: self)
            rtmpConnection.addEventListener(.ioError, selector: #selector(rtmpErrorHandler), observer: self)
            rtmpConnection.connect(Preference.defaultInstance.uri!)
            publish.setTitle("■", for: [])
        }
        publish.isSelected.toggle()
    }
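
    /// Handles RTMP status events: publishes the stream once connected, and
    /// retries failed or closed connections with exponential backoff
    /// (2^retryCount seconds, up to maxRetryCount attempts).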
    @objc
    private func rtmpStatusHandler(_ notification: Notification) {
        let e = Event.from(notification)
        guard let data: ASObject = e.data as? ASObject, let code: String = data["code"] as? String else {
            return
        }
        logger.info(code)
        switch code {
        case RTMPConnection.Code.connectSuccess.rawValue:
            retryCount = 0
            rtmpStream.publish(Preference.defaultInstance.streamName!)
            // sharedObject!.connect(rtmpConnection)
        case RTMPConnection.Code.connectFailed.rawValue, RTMPConnection.Code.connectClosed.rawValue:
            guard retryCount <= LiveViewController.maxRetryCount else {
                return
            }
            Thread.sleep(forTimeInterval: pow(2.0, Double(retryCount)))
            rtmpConnection.connect(Preference.defaultInstance.uri!)
            retryCount += 1
        default:
            break
        }
    }
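
    /// On an I/O error the connection is simply re-established; the status
    /// handler above then republishes once the connection succeeds.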
    @objc
    private func rtmpErrorHandler(_ notification: Notification) {
        logger.error(notification)
        rtmpConnection.connect(Preference.defaultInstance.uri!)
    }
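
    /// Tap-to-focus: converts the tap into a normalized point of interest and
    /// applies continuous autofocus if the device supports it.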
    @IBAction private func tapScreen(_ gesture: UIGestureRecognizer) {
        if let gestureView = gesture.view, gesture.state == .ended {
            let touchPoint: CGPoint = gesture.location(in: gestureView)
            let pointOfInterest = CGPoint(x: touchPoint.x / gestureView.bounds.size.width, y: touchPoint.y / gestureView.bounds.size.height)
            guard
                let device = rtmpStream.videoCapture(for: 0)?.device, device.isFocusPointOfInterestSupported else {
                return
            }
            do {
                try device.lockForConfiguration()
                device.focusPointOfInterest = pointOfInterest
                device.focusMode = .continuousAutoFocus
                device.unlockForConfiguration()
            } catch let error as NSError {
                logger.error("while locking device for focusPointOfInterest: \(error)")
            }
        }
    }
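
    /// Maps the FPS segmented control (15/30/60) onto the stream's frame rate.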
    @IBAction private func onFPSValueChanged(_ segment: UISegmentedControl) {
        switch segment.selectedSegmentIndex {
        case 0:
            rtmpStream.frameRate = 15
        case 1:
            rtmpStream.frameRate = 30
        case 2:
            rtmpStream.frameRate = 60
        default:
            break
        }
    }
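
    /// Swaps the registered video effect; index 1 is monochrome, index 2 is
    /// the Pronama overlay, and index 0 clears any effect.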
    @IBAction private func onEffectValueChanged(_ segment: UISegmentedControl) {
        if let currentEffect: VideoEffect = currentEffect {
            _ = rtmpStream.unregisterVideoEffect(currentEffect)
        }
        switch segment.selectedSegmentIndex {
        case 1:
            currentEffect = MonochromeEffect()
            _ = rtmpStream.registerVideoEffect(currentEffect!)
        case 2:
            currentEffect = PronamaEffect()
            _ = rtmpStream.registerVideoEffect(currentEffect!)
        default:
            break
        }
    }
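
    // AVAudioSession interruption/route-change logging and device-orientation sync.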
    @objc
    private func didInterruptionNotification(_ notification: Notification) {
        logger.info(notification)
    }

    @objc
    private func didRouteChangeNotification(_ notification: Notification) {
        logger.info(notification)
    }

    @objc
    private func on(_ notification: Notification) {
        guard let orientation = DeviceUtil.videoOrientation(by: UIApplication.shared.statusBarOrientation) else {
            return
        }
        rtmpStream.videoOrientation = orientation
    }
}
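
/// IORecorder delegate: logs recorder errors and exports finished recordings
/// to the Photos library, removing the temporary file once saved.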
extension LiveViewController: IORecorderDelegate {
    // MARK: IORecorderDelegate
    func recorder(_ recorder: IORecorder, errorOccured error: IORecorder.Error) {
        logger.error(error)
    }

    func recorder(_ recorder: IORecorder, finishWriting writer: AVAssetWriter) {
        PHPhotoLibrary.shared().performChanges({ () -> Void in
            PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: writer.outputURL)
        }, completionHandler: { _, error -> Void in
            do {
                try FileManager.default.removeItem(at: writer.outputURL)
            } catch {
                print(error)
            }
        })
    }
}