How to live stream an application screen that has a camera view with other UIViews overlaid on the camera view
I want to live stream a live match with some overlays on top of the video, such as sponsor images in the top corners of the screen and a scorecard at the bottom. Can someone help me or point me toward a way to do this? I am using the HaishinKit pod, but it has not served the purpose. I use the rtmpStream.attachScreen function to broadcast my UIView, and while the scorecard and sponsor images are broadcast, the function does not pick up my camera view (an AVCaptureVideoPreviewLayer). I want to broadcast my camera view together with the scorecard and the other images, along with audio.
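From what I can tell, ScreenCaptureSession renders the UIView hierarchy through CALayer drawing, and the contents of an AVCaptureVideoPreviewLayer are composited by the system rather than drawn by the layer itself, so the camera area comes out black. A stripped-down sketch of what I am doing today (the stream key is a placeholder; previewView holds the AVCaptureVideoPreviewLayer plus the WKWebView scorecard):

rtmpStream = RTMPStream(connection: rtmpConnection)
// Captures the UIView hierarchy: the scorecard and sponsor images show up
// in the broadcast, but the AVCaptureVideoPreviewLayer renders as black.
rtmpStream.attachScreen(ScreenCaptureSession(viewToCapture: previewView))
rtmpStream.publish("my-stream-key") // placeholder

My full view controller is below.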
import UIKit
import HaishinKit
import AVFoundation
import VideoToolbox
import Loaf
import WebKit
class BroadcastViewController: UIViewController, RTMPStreamDelegate {
// Camera Preview View
@IBOutlet private weak var previewView: UIView!
@IBOutlet weak var videoView: UIView!
// Camera Selector
@IBOutlet weak var cameraSelector: UISegmentedControl!
@IBOutlet weak var webview: WKWebView!
// Go Live Button
@IBOutlet weak var startStopButton: UIButton!
// FPS and Bitrate Labels
@IBOutlet weak var fpsLabel: UILabel!
@IBOutlet weak var bitrateLabel: UILabel!
// RTMP Connection & RTMP Stream
private var rtmpConnection = RTMPConnection()
private var rtmpStream: RTMPStream!
// Default Camera
private var defaultCamera: AVCaptureDevice.Position = .back
// Flag indicates if we should be attempting to go live
private var liveDesired = false
// Reconnect attempt tracker
private var reconnectAttempt = 0
// The RTMP Stream key to broadcast to.
public var streamKey: String!
// The Preset to use
public var preset: Preset!
// A tracker of the last time we changed the bitrate in ABR
private var lastBwChange = 0
// The RTMP endpoint
let rtmpEndpoint = "rtmps://live-api-s.facebook.com:443/rtmp/"
// Camera capture required properties
var videoDataOutput: AVCaptureVideoDataOutput!
var videoDataOutputQueue: DispatchQueue!
var previewLayer:AVCaptureVideoPreviewLayer!
var captureDevice : AVCaptureDevice!
let session = AVCaptureSession()
var isPublic = false
// Some basic presets for live streaming
enum Preset {
case hd_1080p_30fps_5mbps
case hd_720p_30fps_3mbps
case sd_540p_30fps_2mbps
case sd_360p_30fps_1mbps
}
// An encoding profile - width, height, framerate, video bitrate
private class Profile {
public var width : Int = 0
public var height : Int = 0
public var frameRate : Int = 0
public var bitrate : Int = 0
init(width: Int, height: Int, frameRate: Int, bitrate: Int) {
self.width = width
self.height = height
self.frameRate = frameRate
self.bitrate = bitrate
}
}
// Converts a Preset to a Profile
private func presetToProfile(preset: Preset) -> Profile {
switch preset {
case .hd_1080p_30fps_5mbps:
return Profile(width: 1920, height: 1080, frameRate: 30, bitrate: 5000000)
case .hd_720p_30fps_3mbps:
return Profile(width: 1280, height: 720, frameRate: 30, bitrate: 3000000)
case .sd_540p_30fps_2mbps:
return Profile(width: 960, height: 540, frameRate: 30, bitrate: 2000000)
case .sd_360p_30fps_1mbps:
return Profile(width: 640, height: 360, frameRate: 30, bitrate: 1000000)
}
}
// Configures the live stream
private func configureStream(preset: Preset) {
let profile = presetToProfile(preset: preset)
// Configure the capture settings from the camera
rtmpStream.captureSettings = [
.sessionPreset: AVCaptureSession.Preset.hd1920x1080,
.continuousAutofocus: true,
.continuousExposure: true,
.fps: profile.frameRate
]
// Get the orientation of the app, and set the video orientation appropriately
if #available(iOS 13.0, *) {
if let orientation = UIApplication.shared.windows.first?.windowScene?.interfaceOrientation {
// let videoOrientation = DeviceUtil.videoOrientation(by: orientation)
rtmpStream.orientation = .landscapeRight
rtmpStream.videoSettings = [
.width: (orientation.isPortrait) ? profile.height : profile.width,
.height: (orientation.isPortrait) ? profile.width : profile.height,
.bitrate: profile.bitrate,
.profileLevel: kVTProfileLevel_H264_Main_AutoLevel,
.maxKeyFrameIntervalDuration: 2, // 2 seconds
]
}
} else {
// Fallback on earlier versions
}
// Configure the RTMP audio stream
// rtmpStream.audioSettings = [
// .bitrate: 128000 // Always use 128kbps
// ]
}
// Publishes the live stream
private func publishStream() {
print("Calling publish()")
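// NOTE: ScreenCaptureSession captures the UIView hierarchy only; the
// AVCaptureVideoPreviewLayer inside previewView is not rendered by this
// path, which is why the camera is missing from the broadcast.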
rtmpStream.attachScreen(ScreenCaptureSession(viewToCapture: previewView))
rtmpStream.publish("minestreamkey")
DispatchQueue.main.async {
self.startStopButton.setTitle("Stop Streaming!", for: .normal)
}
}
// Triggers an attempt to connect to an RTMP hostname
private func connectRTMP() {
print("Calling connect()")
rtmpConnection.connect(rtmpEndpoint)
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
// videoView.startSession()
}
override func viewDidLoad() {
super.viewDidLoad()
self.setupAVCapture()
previewView.bringSubviewToFront(webview)
webview.load(URLRequest(url: URL(string: "https://graphics.crickslab.com/scorecard/0865e840-f147-11eb-95cb-65228ef0512c/Blitzz-vs-Crickslab-Officials-Fri30Jul2021-1201AM-")!))
print("Broadcast View Controller Init")
print("Stream Key: " + "FB-3940543509404805-0-AbxeU6r48NpFcasH")
// Work out the orientation of the device, and set this on the RTMP Stream
rtmpStream = RTMPStream(connection: rtmpConnection)
// Get the orientation of the app, and set the video orientation appropriately
if #available(iOS 13.0, *) {
if let orientation = UIApplication.shared.windows.first?.windowScene?.interfaceOrientation {
let videoOrientation = DeviceUtil.videoOrientation(by: orientation)
rtmpStream.orientation = videoOrientation!
}
} else {
// Fallback on earlier versions
}
// And a listener for orientation changes
// Note: Changing the orientation once the stream has been started will not change the orientation of the live stream, only the preview.
NotificationCenter.default.addObserver(self, selector: #selector(on(_:)), name: UIDevice.orientationDidChangeNotification, object: nil)
// Configure the encoder profile
configureStream(preset: self.preset)
// Attach to the default audio device
// rtmpStream.attachAudio(AVCaptureDevice.default(for: .audio)) { error in
// print(error.description)
// }
//
// // Attach to the default camera
// rtmpStream.attachCamera(DeviceUtil.device(withPosition: defaultCamera)) { error in
// print(error.description)
// }
// Register a tap gesture recogniser so we can use tap to focus
let tap = UITapGestureRecognizer(target: self, action: #selector(self.handleTap(_:)))
previewView.addGestureRecognizer(tap)
previewView.isUserInteractionEnabled = true
// Attach the preview view
// previewView?.attachStream(rtmpStream)
// Add event listeners for RTMP status changes and IO Errors
rtmpConnection.addEventListener(.rtmpStatus, selector: #selector(rtmpStatusHandler), observer: self)
rtmpConnection.addEventListener(.ioError, selector: #selector(rtmpErrorHandler), observer: self)
rtmpStream.delegate = self
startStopButton.setTitle("Go Live!", for: .normal)
}
// Tap to focus / exposure
@objc func handleTap(_ sender: UITapGestureRecognizer) {
if sender.state == UIGestureRecognizer.State.ended {
let point = sender.location(in: previewView)
let pointOfInterest = CGPoint(x: point.x / previewView.bounds.size.width, y: point.y / previewView.bounds.size.height)
rtmpStream.setPointOfInterest(pointOfInterest, exposure: pointOfInterest)
}
}
// Triggered when the user tries to change camera
@IBAction func changeCameraToggle(_ sender: UISegmentedControl) {
switch cameraSelector.selectedSegmentIndex {
case 0:
rtmpStream.attachCamera(DeviceUtil.device(withPosition: AVCaptureDevice.Position.back))
case 1:
rtmpStream.attachCamera(DeviceUtil.device(withPosition: AVCaptureDevice.Position.front))
default:
rtmpStream.attachCamera(DeviceUtil.device(withPosition: defaultCamera))
}
}
// Triggered when the user taps the go live button
@IBAction func goLiveButton(_ sender: UIButton) {
print("Go Live Button tapped!")
if !liveDesired {
if rtmpConnection.connected {
// If we're already connected to the RTMP server, we can just call publish() to start the stream
publishStream()
} else {
// Otherwise, we need to setup the RTMP connection and wait for a callback before we can safely
// call publish() to start the stream
connectRTMP()
}
// Modify application state to streaming
liveDesired = true
startStopButton.setTitle("Connecting...", for: .normal)
} else {
// Unpublish the live stream
rtmpStream.close()
// Modify application state to idle
liveDesired = false
startStopButton.setTitle("Go Live!", for: .normal)
}
}
// Called when the RTMPStream or RTMPConnection changes status
@objc
private func rtmpStatusHandler(_ notification: Notification) {
print("RTMP Status Handler called.")
let e = Event.from(notification)
guard let data: ASObject = e.data as? ASObject, let code: String = data["code"] as? String else {
return
}
// Send a nicely styled notification about the RTMP Status
var loafStyle = Loaf.State.info
switch code {
case RTMPConnection.Code.connectSuccess.rawValue, RTMPStream.Code.publishStart.rawValue, RTMPStream.Code.unpublishSuccess.rawValue:
loafStyle = Loaf.State.success
case RTMPConnection.Code.connectFailed.rawValue:
loafStyle = Loaf.State.error
case RTMPConnection.Code.connectClosed.rawValue:
loafStyle = Loaf.State.warning
default:
break
}
DispatchQueue.main.async {
Loaf("RTMP Status: " + code, state: loafStyle, location: .top, sender: self).show(.short)
}
switch code {
case RTMPConnection.Code.connectSuccess.rawValue:
reconnectAttempt = 0
if liveDesired {
// Publish our stream to our stream key
publishStream()
}
case RTMPConnection.Code.connectFailed.rawValue, RTMPConnection.Code.connectClosed.rawValue:
print("RTMP Connection was not successful.")
// Retry the connection if "live" is still the desired state
if liveDesired {
reconnectAttempt += 1
DispatchQueue.main.async {
self.startStopButton.setTitle("Reconnect attempt " + String(self.reconnectAttempt) + " (Cancel)" , for: .normal)
}
// Retry the RTMP connection after 5 seconds
DispatchQueue.main.asyncAfter(deadline: .now() + 5) {
self.connectRTMP()
}
}
default:
break
}
}
// Called when there's an RTMP Error
@objc
private func rtmpErrorHandler(_ notification: Notification) {
print("RTMP Error Handler called.")
}
// Called when the device changes rotation
@objc
private func on(_ notification: Notification) {
if #available(iOS 13.0, *) {
if let orientation = UIApplication.shared.windows.first?.windowScene?.interfaceOrientation {
let videoOrientation = DeviceUtil.videoOrientation(by: orientation)
rtmpStream.orientation = videoOrientation!
// Do not change the output rotation if the stream has already started.
if liveDesired == false {
let profile = presetToProfile(preset: self.preset)
rtmpStream.videoSettings = [
.width: (orientation.isPortrait) ? profile.height : profile.width,
.height: (orientation.isPortrait) ? profile.width : profile.height
]
}
}
} else {
// Fallback on earlier versions
}
}
// Button tapped to return to the configuration screen
@IBAction func closeButton(_ sender: Any) {
self.dismiss(animated: true, completion: nil)
}
// RTMPStreamDelegate callbacks
func rtmpStreamDidClear(_ stream: RTMPStream) {
}
// Statistics callback
func rtmpStream(_ stream: RTMPStream, didStatics connection: RTMPConnection) {
DispatchQueue.main.async {
self.fpsLabel.text = String(stream.currentFPS) + " fps"
self.bitrateLabel.text = String((connection.currentBytesOutPerSecond / 125)) + " kbps"
}
}
// Insufficient bandwidth callback
func rtmpStream(_ stream: RTMPStream, didPublishInsufficientBW connection: RTMPConnection) {
print("ABR: didPublishInsufficientBW")
// If we last changed the bitrate more than 5 seconds ago
if (Int(NSDate().timeIntervalSince1970) - lastBwChange) > 5 {
print("ABR: Will try to change bitrate")
// Reduce the bitrate by 30%
let b = Double(stream.videoSettings[.bitrate] as! UInt32) * Double(0.7)
print("ABR: Proposed bandwidth: " + String(b))
stream.videoSettings[.bitrate] = b
lastBwChange = Int(NSDate().timeIntervalSince1970)
DispatchQueue.main.async {
Loaf("Insufficient bandwidth, changing video bitrate to: " + String(b), state: Loaf.State.warning, location: .top, sender: self).show(.short)
}
} else {
print("ABR: Still giving grace time for last bandwidth change")
}
}
// Today this example doesn't attempt to increase bandwidth to find a sweet spot.
// An implementation might be to gently increase bandwidth by a few percent, but that's hard without getting into an aggressive cycle.
func rtmpStream(_ stream: RTMPStream, didPublishSufficientBW connection: RTMPConnection) {
}
}
// AVCaptureVideoDataOutputSampleBufferDelegate protocol and related methods
extension BroadcastViewController: AVCaptureVideoDataOutputSampleBufferDelegate{
func setupAVCapture(){
session.sessionPreset = AVCaptureSession.Preset.vga640x480
guard let device = AVCaptureDevice
.default(AVCaptureDevice.DeviceType.builtInWideAngleCamera,
for: .video,
position: AVCaptureDevice.Position.back) else {
return
}
captureDevice = device
beginSession()
}
func beginSession(){
var deviceInput: AVCaptureDeviceInput!
do {
deviceInput = try AVCaptureDeviceInput(device: captureDevice)
guard deviceInput != nil else {
print("error: can't get deviceInput")
return
}
if self.session.canAddInput(deviceInput){
self.session.addInput(deviceInput)
}
videoDataOutput = AVCaptureVideoDataOutput()
videoDataOutput.alwaysDiscardsLateVideoFrames = true
videoDataOutputQueue = DispatchQueue(label: "VideoDataOutputQueue")
videoDataOutput.setSampleBufferDelegate(self, queue:self.videoDataOutputQueue)
if session.canAddOutput(self.videoDataOutput){
session.addOutput(self.videoDataOutput)
}
videoDataOutput.connection(with: .video)?.isEnabled = true
previewLayer = AVCaptureVideoPreviewLayer(session: self.session)
previewLayer.videoGravity = AVLayerVideoGravity.resizeAspect
// let rootLayer :CALayer = self.previewView.layer
self.videoView.layer.masksToBounds = true
previewLayer.frame = videoView.bounds
videoView.layer.addSublayer(self.previewLayer)
session.startRunning()
} catch let error as NSError {
deviceInput = nil
print("error: \(error.localizedDescription)")
}
}
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
// Forward each raw camera frame to the RTMP stream, matching the encoder size to the incoming buffer
if let description = CMSampleBufferGetFormatDescription(sampleBuffer) {
let dimensions = CMVideoFormatDescriptionGetDimensions(description)
rtmpStream.videoSettings = [
.width: dimensions.width,
.height: dimensions.height ,
.profileLevel: kVTProfileLevel_H264_Baseline_AutoLevel
]
}
rtmpStream.appendSampleBuffer(sampleBuffer, withType: .video)
}
// clean up AVCapture
func stopCamera(){
session.stopRunning()
}
}
I found a way to live stream the camera view with overlays by creating two RTMPStream objects: one to attach the camera, and a second to attach the screen. The code is below.
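The essential wiring, reduced to a sketch (HaishinKit 1.1-era API; the stream keys are placeholders, the view names match the full code below, and both streams share one RTMPConnection):

let connection = RTMPConnection()
let cameraStream = RTMPStream(connection: connection)   // camera + microphone
let overlayStream = RTMPStream(connection: connection)  // on-screen overlays

cameraStream.attachAudio(AVCaptureDevice.default(for: .audio))
cameraStream.attachCamera(DeviceUtil.device(withPosition: .back))
// Capturing the whole root view picks up the MTHKView camera preview
// together with the WKWebView scorecard and sponsor images drawn over it.
overlayStream.attachScreen(ScreenCaptureSession(viewToCapture: view))
overlayStream.receiveAudio = false

// Preview locally, then publish each stream once the connection succeeds.
lfView.attachStream(cameraStream)
lfView.attachStream(overlayStream)

The full controller: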
import AVFoundation
import HaishinKit
import Photos
import UIKit
import VideoToolbox
import WebKit
final class ExampleRecorderDelegate: DefaultAVRecorderDelegate {
static let `default` = ExampleRecorderDelegate()
override func didFinishWriting(_ recorder: AVRecorder) {
guard let writer: AVAssetWriter = recorder.writer else {
return
}
PHPhotoLibrary.shared().performChanges({() -> Void in
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: writer.outputURL)
}, completionHandler: { _, error -> Void in
do {
try FileManager.default.removeItem(at: writer.outputURL)
} catch {
print(error)
}
})
}
}
final class LiveViewController: UIViewController {
private static let maxRetryCount: Int = 5
@IBOutlet private weak var lfView: MTHKView!
@IBOutlet private weak var currentFPSLabel: UILabel!
@IBOutlet private weak var publishButton: UIButton!
@IBOutlet private weak var pauseButton: UIButton!
@IBOutlet private weak var videoBitrateLabel: UILabel!
@IBOutlet private weak var videoBitrateSlider: UISlider!
@IBOutlet private weak var audioBitrateLabel: UILabel!
@IBOutlet private weak var zoomSlider: UISlider!
@IBOutlet private weak var audioBitrateSlider: UISlider!
@IBOutlet private weak var fpsControl: UISegmentedControl!
@IBOutlet private weak var effectSegmentControl: UISegmentedControl!
@IBOutlet weak var webview: WKWebView!
private var rtmpConnection = RTMPConnection()
private var rtmpStream: RTMPStream!
private var rtmpStreamLayer: RTMPStream!
private var sharedObject: RTMPSharedObject!
private var currentEffect: VideoEffect?
private var currentPosition: AVCaptureDevice.Position = .back
private var retryCount: Int = 0
override func viewDidLoad() {
super.viewDidLoad()
rtmpStream = RTMPStream(connection: rtmpConnection)
rtmpStreamLayer = RTMPStream(connection: rtmpConnection)
if let orientation = DeviceUtil.videoOrientation(by: UIApplication.shared.statusBarOrientation) {
rtmpStream.orientation = orientation
}
rtmpStream.captureSettings = [
.sessionPreset: AVCaptureSession.Preset.hd1280x720,
.continuousAutofocus: true,
.continuousExposure: true
// .preferredVideoStabilizationMode: AVCaptureVideoStabilizationMode.auto
]
rtmpStreamLayer.captureSettings = [
.sessionPreset: AVCaptureSession.Preset.hd1280x720,
.continuousAutofocus: true,
.continuousExposure: true
// .preferredVideoStabilizationMode: AVCaptureVideoStabilizationMode.auto
]
rtmpStream.videoSettings = [
.width: 720,
.height: 1280
]
rtmpStream.mixer.recorder.delegate = ExampleRecorderDelegate.default
rtmpStreamLayer.videoSettings = [
.width: 720,
.height: 1280
]
rtmpStreamLayer.mixer.recorder.delegate = ExampleRecorderDelegate.default
videoBitrateSlider?.value = Float(RTMPStream.defaultVideoBitrate) / 1000
audioBitrateSlider?.value = Float(RTMPStream.defaultAudioBitrate) / 1000
NotificationCenter.default.addObserver(self, selector: #selector(on(_:)), name: UIDevice.orientationDidChangeNotification, object: nil)
NotificationCenter.default.addObserver(self, selector: #selector(didEnterBackground(_:)), name: UIApplication.didEnterBackgroundNotification, object: nil)
NotificationCenter.default.addObserver(self, selector: #selector(didBecomeActive(_:)), name: UIApplication.didBecomeActiveNotification, object: nil)
}
override func viewWillAppear(_ animated: Bool) {
logger.info("viewWillAppear")
super.viewWillAppear(animated)
rtmpStream.attachAudio(AVCaptureDevice.default(for: .audio)) { error in
logger.warn(error.description)
}
rtmpStream.attachScreen(ScreenCaptureSession(viewToCapture: view))
rtmpStream.attachCamera(DeviceUtil.device(withPosition: currentPosition)) { error in
logger.warn(error.description)
}
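// The second stream captures the whole view hierarchy: the MTHKView
// camera preview plus the scorecard and sponsor overlays on top of it.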
rtmpStreamLayer.attachScreen(ScreenCaptureSession(viewToCapture: view))
rtmpStreamLayer.receiveAudio = false
rtmpStream.addObserver(self, forKeyPath: "currentFPS", options: .new, context: nil)
lfView?.attachStream(rtmpStream)
lfView?.attachStream(rtmpStreamLayer)
}
override func viewWillDisappear(_ animated: Bool) {
logger.info("viewWillDisappear")
super.viewWillDisappear(animated)
rtmpStream.removeObserver(self, forKeyPath: "currentFPS")
rtmpStream.close()
rtmpStream.dispose()
}
@IBAction func rotateCamera(_ sender: UIButton) {
logger.info("rotateCamera")
let position: AVCaptureDevice.Position = currentPosition == .back ? .front : .back
rtmpStream.captureSettings[.isVideoMirrored] = position == .front
rtmpStream.attachCamera(DeviceUtil.device(withPosition: position)) { error in
logger.warn(error.description)
}
currentPosition = position
}
@IBAction func toggleTorch(_ sender: UIButton) {
rtmpStream.torch.toggle()
}
@IBAction func on(slider: UISlider) {
if slider == audioBitrateSlider {
audioBitrateLabel?.text = "audio \(Int(slider.value))/kbps"
rtmpStream.audioSettings[.bitrate] = slider.value * 1000
}
if slider == videoBitrateSlider {
videoBitrateLabel?.text = "video \(Int(slider.value))/kbps"
rtmpStream.videoSettings[.bitrate] = slider.value * 1000
}
if slider == zoomSlider {
rtmpStream.setZoomFactor(CGFloat(slider.value), ramping: true, withRate: 5.0)
}
}
@IBAction func on(pause: UIButton) {
rtmpStream.paused.toggle()
}
@IBAction func on(close: UIButton) {
self.dismiss(animated: true, completion: nil)
}
@IBAction func on(publish: UIButton) {
if publish.isSelected {
UIApplication.shared.isIdleTimerDisabled = false
rtmpConnection.close()
rtmpConnection.removeEventListener(.rtmpStatus, selector: #selector(rtmpStatusHandler), observer: self)
rtmpConnection.removeEventListener(.ioError, selector: #selector(rtmpErrorHandler), observer: self)
publish.setTitle("●", for: [])
} else {
UIApplication.shared.isIdleTimerDisabled = true
rtmpConnection.addEventListener(.rtmpStatus, selector: #selector(rtmpStatusHandler), observer: self)
rtmpConnection.addEventListener(.ioError, selector: #selector(rtmpErrorHandler), observer: self)
rtmpConnection.connect(Preference.defaultInstance.uri!)
publish.setTitle("■", for: [])
}
publish.isSelected.toggle()
}
@objc
private func rtmpStatusHandler(_ notification: Notification) {
let e = Event.from(notification)
guard let data: ASObject = e.data as? ASObject, let code: String = data["code"] as? String else {
return
}
logger.info(code)
switch code {
case RTMPConnection.Code.connectSuccess.rawValue:
retryCount = 0
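// Publish the camera stream first and the overlay stream a moment later;
// the 0.2 s stagger is empirical, giving the first publish time to start.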
rtmpStream!.publish("yourstreamkey")
DispatchQueue.main.asyncAfter(deadline: .now() + 0.2) {
self.rtmpStreamLayer!.publish("yourstreamkey")
}
// sharedObject!.connect(rtmpConnection)
case RTMPConnection.Code.connectFailed.rawValue, RTMPConnection.Code.connectClosed.rawValue:
guard retryCount <= LiveViewController.maxRetryCount else {
return
}
Thread.sleep(forTimeInterval: pow(2.0, Double(retryCount)))
rtmpConnection.connect(Preference.defaultInstance.uri!)
retryCount += 1
default:
break
}
}
@objc
private func rtmpErrorHandler(_ notification: Notification) {
logger.error(notification)
rtmpConnection.connect(Preference.defaultInstance.uri!)
}
func tapScreen(_ gesture: UIGestureRecognizer) {
if let gestureView = gesture.view, gesture.state == .ended {
let touchPoint: CGPoint = gesture.location(in: gestureView)
let pointOfInterest = CGPoint(x: touchPoint.x / gestureView.bounds.size.width, y: touchPoint.y / gestureView.bounds.size.height)
print("pointOfInterest: \(pointOfInterest)")
rtmpStream.setPointOfInterest(pointOfInterest, exposure: pointOfInterest)
}
}
@IBAction private func onFPSValueChanged(_ segment: UISegmentedControl) {
switch segment.selectedSegmentIndex {
case 0:
rtmpStream.captureSettings[.fps] = 15.0
case 1:
rtmpStream.captureSettings[.fps] = 30.0
case 2:
rtmpStream.captureSettings[.fps] = 60.0
default:
break
}
}
@IBAction private func onEffectValueChanged(_ segment: UISegmentedControl) {
if let currentEffect: VideoEffect = currentEffect {
_ = rtmpStream.unregisterVideoEffect(currentEffect)
}
switch segment.selectedSegmentIndex {
case 1:
currentEffect = MonochromeEffect()
_ = rtmpStream.registerVideoEffect(currentEffect!)
case 2:
currentEffect = PronamaEffect()
_ = rtmpStream.registerVideoEffect(currentEffect!)
default:
break
}
}
@objc
private func on(_ notification: Notification) {
guard let orientation = DeviceUtil.videoOrientation(by: UIApplication.shared.statusBarOrientation) else {
return
}
rtmpStream.orientation = orientation
}
@objc
private func didEnterBackground(_ notification: Notification) {
// rtmpStream.receiveVideo = false
}
@objc
private func didBecomeActive(_ notification: Notification) {
// rtmpStream.receiveVideo = true
}
override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey: Any]?, context: UnsafeMutableRawPointer?) {
if Thread.isMainThread {
currentFPSLabel?.text = "\(rtmpStream.currentFPS)"
}
}
}
extension LiveViewController: WKNavigationDelegate {
// The web view's navigationDelegate must be set to self for this callback to fire.
func webView(_ webView: WKWebView, didFinish navigation: WKNavigation!) {
webview.scrollView.zoomScale = 10
}
}
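A few caveats if you adapt this. The sample publishes both streams with the same placeholder key, so substitute the real keys for your ingest endpoint. The reconnect path backs off exponentially (Thread.sleep(forTimeInterval: pow(2.0, Double(retryCount))), i.e. roughly 1, 2, 4, 8... seconds between attempts) and blocks whichever thread delivers the status event. The code also assumes camera and microphone access have already been granted; a minimal sketch of requesting both up front with the standard AVFoundation calls (place it wherever your flow starts):

import AVFoundation

// Request camera and microphone access before wiring up HaishinKit.
// The completion handlers arrive on an arbitrary queue.
AVCaptureDevice.requestAccess(for: .video) { videoGranted in
    AVCaptureDevice.requestAccess(for: .audio) { audioGranted in
        DispatchQueue.main.async {
            guard videoGranted && audioGranted else {
                print("Camera or microphone permission denied")
                return
            }
            // Safe to attach devices and start streaming from here.
        }
    }
}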