shogo4405 2016-11-15 08:52:08 +09:00
parent a82dfc410c
commit a00c896453
3 changed files with 71 additions and 12 deletions

View File

@@ -5,7 +5,7 @@ import AVFoundation
 struct Preference {
     static let defaultInstance:Preference = Preference()
-    var uri:String? = "rtmp://test:test@192.168.179.4/live"
+    var uri:String? = "rtmp://test:test@192.168.179.3/live"
     var streamName:String? = "live"
 }
@@ -34,6 +34,15 @@ final class LiveViewController: UIViewController {
         return button
     }()
+    var pauseButton:UIButton = {
+        let button:UIButton = UIButton()
+        button.backgroundColor = UIColor.blue
+        button.setTitle("P", for: UIControlState())
+        button.layer.masksToBounds = true
+        return button
+    }()
     var videoBitrateLabel:UILabel = {
         let label:UILabel = UILabel()
         label.textColor = UIColor.white
@@ -124,7 +133,9 @@ final class LiveViewController: UIViewController {
             "height": 720,
         ]
-        publishButton.addTarget(self, action: #selector(LiveViewController.onClickPublish(_:)), for: .touchUpInside)
+        publishButton.addTarget(self, action: #selector(LiveViewController.on(publish:)), for: .touchUpInside)
+        pauseButton.addTarget(self, action: #selector(LiveViewController.on(pause:)), for: .touchUpInside)
         let tapGesture = UITapGestureRecognizer(target: self, action: #selector(LiveViewController.tapScreen(_:)))
         touchView.addGestureRecognizer(tapGesture)
@@ -146,6 +157,7 @@ final class LiveViewController: UIViewController {
         view.addSubview(fpsControl)
         view.addSubview(currentFPSLabel)
         view.addSubview(effectSegmentControl)
+        view.addSubview(pauseButton)
         view.addSubview(publishButton)
     }
@@ -155,6 +167,7 @@ final class LiveViewController: UIViewController {
         lfView.frame = view.bounds
         fpsControl.frame = CGRect(x: view.bounds.width - 200 - 10 , y: navigationHeight + 40, width: 200, height: 30)
         effectSegmentControl.frame = CGRect(x: view.bounds.width - 200 - 10 , y: navigationHeight, width: 200, height: 30)
+        pauseButton.frame = CGRect(x: view.bounds.width - 44 - 20, y: view.bounds.height - 44 * 2 - 20 * 2, width: 44, height: 44)
         publishButton.frame = CGRect(x: view.bounds.width - 44 - 20, y: view.bounds.height - 44 - 20, width: 44, height: 44)
         currentFPSLabel.frame = CGRect(x: 10, y: 10, width: 40, height: 40)
         zoomSlider.frame = CGRect(x: 20, y: view.frame.height - 44 * 3 - 22, width: view.frame.width - 44 - 60, height: 44)
@@ -199,19 +212,24 @@ final class LiveViewController: UIViewController {
         }
     }
-    func onClickPublish(_ sender:UIButton) {
-        if (sender.isSelected) {
+    func on(pause:UIButton) {
+        print("pause")
+        rtmpStream.togglePause()
+    }
+    func on(publish:UIButton) {
+        if (publish.isSelected) {
             UIApplication.shared.isIdleTimerDisabled = false
             rtmpConnection.close()
             rtmpConnection.removeEventListener(Event.RTMP_STATUS, selector:#selector(LiveViewController.rtmpStatusHandler(_:)), observer: self)
-            sender.setTitle("", for: UIControlState())
+            publish.setTitle("", for: UIControlState())
         } else {
             UIApplication.shared.isIdleTimerDisabled = true
             rtmpConnection.addEventListener(Event.RTMP_STATUS, selector:#selector(LiveViewController.rtmpStatusHandler(_:)), observer: self)
             rtmpConnection.connect(Preference.defaultInstance.uri!)
-            sender.setTitle("", for: UIControlState())
+            publish.setTitle("", for: UIControlState())
         }
-        sender.isSelected = !sender.isSelected
+        publish.isSelected = !publish.isSelected
     }
     func rtmpStatusHandler(_ notification:Notification) {

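The example wires the new button with plain target-action. A minimal, self-contained sketch of the same pattern, assuming `import lf` for this era of the library; the class, property, and button names here are hypothetical and not part of the commit:

import UIKit
import lf   // module name assumed for this version of lf.swift

final class PauseDemoViewController: UIViewController {
    var rtmpStream: RTMPStream!   // assumed to be set up and publishing elsewhere

    let pauseButton: UIButton = {
        let button = UIButton()
        button.backgroundColor = UIColor.blue
        button.setTitle("P", for: UIControlState())
        return button
    }()

    override func viewDidLoad() {
        super.viewDidLoad()
        // Same Swift 3 selector style as the diff: #selector(Type.on(pause:)).
        pauseButton.addTarget(self, action: #selector(PauseDemoViewController.on(pause:)), for: .touchUpInside)
        view.addSubview(pauseButton)
    }

    func on(pause: UIButton) {
        rtmpStream.togglePause()   // flips the paused state on the stream's lock queue
    }
}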
View File

@@ -12,6 +12,7 @@ protocol VideoEncoderDelegate: class {
 final class AVCEncoder: NSObject {
     static let supportedSettingsKeys:[String] = [
+        "muted",
         "width",
         "height",
         "bitrate",
@@ -38,6 +39,8 @@ final class AVCEncoder: NSObject {
 #endif
     static let defaultDataRateLimits:[Int] = [0, 0]
+    var muted:Bool = false
     var width:Int32 = AVCEncoder.defaultWidth {
         didSet {
             guard width != oldValue else {
@@ -244,7 +247,7 @@ final class AVCEncoder: NSObject {
     }
     func encodeImageBuffer(_ imageBuffer:CVImageBuffer, presentationTimeStamp:CMTime, duration:CMTime) {
-        guard running else {
+        guard running && !muted else {
             return
         }
         if (invalidateSession) {

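The encoder-side change is deliberately light: muting does not tear down the compression session, it just stops feeding frames into it. A sketch of that gate pattern in isolation; the FrameGate type is illustrative only, not part of the library:

import CoreVideo

final class FrameGate {
    var running: Bool = false
    var muted: Bool = false   // while true, incoming frames are silently dropped

    func encodeImageBuffer(_ imageBuffer: CVImageBuffer) {
        // Mirrors the new guard in AVCEncoder above: a paused stream keeps
        // its session alive but submits nothing to it.
        guard running && !muted else {
            return
        }
        // ... hand imageBuffer to the compression session here ...
    }
}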
View File

@@ -257,6 +257,7 @@ open class RTMPStream: NetStream {
     var videoTimestamp:Double = 0
     fileprivate(set) var audioPlayback:RTMPAudioPlayback = RTMPAudioPlayback()
     fileprivate var muxer:RTMPMuxer = RTMPMuxer()
+    fileprivate var paused:Bool = false
     fileprivate var sampler:MP4Sampler? = nil
     fileprivate var frameCount:UInt16 = 0
     fileprivate var chunkTypes:[FLVTagType:Bool] = [:]
@@ -310,9 +311,7 @@ open class RTMPStream: NetStream {
         lockQueue.async {
             guard let name:String = arguments.first as? String else {
                 switch self.readyState {
-                case .play:
-                    fallthrough
-                case .playing:
+                case .play, .playing:
                     self.audioPlayback.stopRunning()
                     self.rtmpConnection.socket.doOutput(chunk: RTMPChunk(
                         type: .zero,
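The hunk above also collapses a fallthrough chain into a single multi-pattern case, which matches the same values with less noise. A tiny standalone illustration with a hypothetical enum:

// `case .play, .playing:` is equivalent to `case .play: fallthrough case .playing:`.
enum ReadyState { case initialized, play, playing }

func stopIfNeeded(_ state: ReadyState) {
    switch state {
    case .play, .playing:
        print("stop playback")
    default:
        break
    }
}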
@@ -479,7 +478,46 @@ open class RTMPStream: NetStream {
         }
     }
-    open func createMetaData() -> ASObject {
+    open func pause() {
+        lockQueue.async {
+            self.paused = true
+            switch self.readyState {
+            case .publish, .publishing:
+                self.mixer.audioIO.encoder.muted = true
+                self.mixer.videoIO.encoder.muted = true
+            default:
+                break
+            }
+        }
+    }
+    open func resume() {
+        lockQueue.async {
+            self.paused = false
+            switch self.readyState {
+            case .publish, .publishing:
+                self.mixer.audioIO.encoder.muted = false
+                self.mixer.videoIO.encoder.muted = false
+            default:
+                break
+            }
+        }
+    }
+    open func togglePause() {
+        lockQueue.async {
+            switch self.readyState {
+            case .publish, .publishing:
+                self.paused = !self.paused
+                self.mixer.audioIO.encoder.muted = self.paused
+                self.mixer.videoIO.encoder.muted = self.paused
+            default:
+                break
+            }
+        }
+    }
+    func createMetaData() -> ASObject {
         var metadata:ASObject = [:]
         if let _:AVCaptureInput = mixer.videoIO.input {
             metadata["width"] = mixer.videoIO.encoder.width
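Taken together, pause() and resume() set the state explicitly while togglePause() flips it, all serialized on lockQueue and effective only while publishing. A hedged usage sketch; the initializer name and call order are assumed from the example app of this era, and in practice publish is issued from rtmpStatusHandler after NetConnection.Connect.Success rather than inline:

import lf   // module name assumed for this version of lf.swift

let rtmpConnection = RTMPConnection()
let rtmpStream = RTMPStream(connection: rtmpConnection)

rtmpConnection.connect(Preference.defaultInstance.uri!)
rtmpStream.publish(Preference.defaultInstance.streamName!)

rtmpStream.pause()        // both encoders stop emitting frames
rtmpStream.resume()       // frames flow again
rtmpStream.togglePause()  // or flip the current state, as the example's P button does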