ReplayKit saving video fails first try with mic
Scenario 1:
- Start an AVAssetWriter with audio and video writer inputs.
- Start capture with RPScreenRecorder without the microphone and process the sample buffers.
- On the first try, the file saves to Photos just fine.
Scenario 2:
- Start an AVAssetWriter with audio and video writer inputs.
- Start capture with RPScreenRecorder with the microphone enabled and process the sample buffers.
- On the first try, writing the file fails with:
UserInfo={NSLocalizedRecoverySuggestion=Try saving again., NSLocalizedDescription=Cannot Save, NSUnderlyingError=0x1c464f3c0 {Error Domain=NSOSStatusErrorDomain Code=-12412 "(null)"}}
2017-10-26 23:25:16.896673-0400 [2135:771655] Status Failed!: 3 Error Domain=AVFoundationErrorDomain Code=-11823 "Cannot Save"
The second attempt works fine.
What am I doing wrong?
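One common cause of AVFoundationErrorDomain -11823 "Cannot Save" is a leftover file at the writer's outputURL: AVAssetWriter will not overwrite an existing file. A minimal sketch of guarding against that, assuming it applies here (makeWriter is a hypothetical helper, not part of the code below):

import AVFoundation
import Foundation

func makeWriter(outputURL: URL) throws -> AVAssetWriter {
    // A stale file from a failed first attempt makes the next write fail,
    // so remove it before creating the writer.
    if FileManager.default.fileExists(atPath: outputURL.path) {
        try FileManager.default.removeItem(at: outputURL)
    }
    return try AVAssetWriter(outputURL: outputURL, fileType: .mp4)
}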
I added retry logic to work around the problem. Not the best solution, but it works.
[self.screenRecorder startCaptureWithHandler:^(CMSampleBufferRef _Nonnull sampleBuffer, RPSampleBufferType bufferType, NSError * _Nullable error) {
    if (!CMSampleBufferDataIsReady(sampleBuffer) || self.assetWriter == nil) {
        return;
    }
    if (self.assetWriter.status == AVAssetWriterStatusFailed) {
        NSLog(@"AVWriter Failed!");
        return;
    }
    if (self.assetWriter.status != AVAssetWriterStatusWriting) {
        return;
    }
    if (bufferType == RPSampleBufferTypeVideo) {
        if (!self.startedSession) {
            dispatch_async(dispatch_get_main_queue(), ^{
                _startDate = [NSDate date];
                _recordingTimer = [NSTimer scheduledTimerWithTimeInterval:1 target:self selector:@selector(updateRecordingTime) userInfo:nil repeats:YES];
                // Disable the idle timer while recording
                [UIApplication sharedApplication].idleTimerDisabled = YES;
            });
            // Start the writer session on the first video frame's timestamp.
            CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
            [self.assetWriter startSessionAtSourceTime:pts];
            self.startedSession = YES;
            NSLog(@"MP4Writer: started session in appendVideoSample");
        }
        if (CMTimeCompare(kCMTimeInvalid, self.firstVideoFrameTime) == 0) {
            self.firstVideoFrameTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
        }
        if (self.assetWriterVideoInput.readyForMoreMediaData) {
            @try {
                [self.assetWriterVideoInput appendSampleBuffer:sampleBuffer];
            }
            @catch (NSException *exception) {
                NSLog(@"Missed Video Buffer: %@", self.assetWriter.error);
            }
        }
    }
    if (bufferType == RPSampleBufferTypeAudioMic) {
        // Drop mic buffers until the first video frame has arrived, so the
        // session never starts on (or appends) audio that predates the video.
        if (CMTimeCompare(kCMTimeInvalid, self.firstVideoFrameTime) == 0 ||
            CMTimeCompare(self.firstVideoFrameTime, CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) == 1) {
            return;
        }
        if (!self.startedSession) {
            CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
            [self.assetWriter startSessionAtSourceTime:pts];
            self.startedSession = YES;
            NSLog(@"MP4Writer: started session in appendAudioSample");
        }
        if (self.assetWriterAudioInput.isReadyForMoreMediaData) {
            @try {
                [self.assetWriterAudioInput appendSampleBuffer:sampleBuffer];
            }
            @catch (NSException *exception) {
                NSLog(@"Missed Audio Buffer: %@", self.assetWriter.error);
            }
        }
    }
} completionHandler:^(NSError * _Nullable error) {
    if (!error) {
        NSLog(@"Recording started successfully.");
    }
}];
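The key idea in the handler above is to start the writer session on the first video frame and drop any mic buffers stamped earlier than that frame. A minimal Swift sketch of the same gating (MicGate and its members are hypothetical names, not part of the code above):

import CoreMedia

// Minimal sketch of the mic-gating idea: remember the first video PTS and
// refuse mic samples that predate it.
final class MicGate {
    private(set) var firstVideoPTS = CMTime.invalid

    // Call for every video buffer; records the first video timestamp.
    func noteVideoSample(_ sample: CMSampleBuffer) {
        if !firstVideoPTS.isValid {
            firstVideoPTS = CMSampleBufferGetPresentationTimeStamp(sample)
        }
    }

    // Call for every mic buffer; append only when this returns true.
    func shouldAppendMicSample(_ sample: CMSampleBuffer) -> Bool {
        guard firstVideoPTS.isValid else { return false }
        return CMSampleBufferGetPresentationTimeStamp(sample) >= firstVideoPTS
    }
}

Note that the Swift recorder below never appends .audioMic buffers at all, so it sidesteps this ordering problem (and mic audio) entirely.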
import Foundation
import ReplayKit
import AVKit
import Photos

class ScreenRecorder
{
    var assetWriter: AVAssetWriter!
    var videoInput: AVAssetWriterInput!
    var audioInput: AVAssetWriterInput!
    var sessionStarted = false

    //MARK: Screen Recording
    func startRecording(withFileName fileName: String, recordingHandler: @escaping (Error?) -> Void)
    {
        if #available(iOS 11.0, *)
        {
            let fileURL = URL(fileURLWithPath: ReplayFileUtil.filePath(fileName))
            assetWriter = try! AVAssetWriter(outputURL: fileURL, fileType: AVFileType.mp4)

            // Note: these are points, not pixels; multiply by UIScreen.main.scale
            // for full-resolution output.
            let videoOutputSettings: [String: Any] = [
                AVVideoCodecKey: AVVideoCodecType.h264,
                AVVideoWidthKey: UIScreen.main.bounds.size.width,
                AVVideoHeightKey: UIScreen.main.bounds.size.height,
            ]

            // 5.1-channel HE-AAC, as in the original; plain stereo AAC
            // (kAudioFormatMPEG4AAC, 2 channels) is a safer choice for app audio.
            var channelLayout = AudioChannelLayout()
            channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_MPEG_5_1_D
            let audioOutputSettings: [String: Any] = [
                AVNumberOfChannelsKey: 6,
                AVFormatIDKey: kAudioFormatMPEG4AAC_HE,
                AVSampleRateKey: 44100,
                AVChannelLayoutKey: NSData(bytes: &channelLayout, length: MemoryLayout.size(ofValue: channelLayout)),
            ]

            videoInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: videoOutputSettings)
            audioInput = AVAssetWriterInput(mediaType: AVMediaType.audio, outputSettings: audioOutputSettings)
            videoInput.expectsMediaDataInRealTime = true
            audioInput.expectsMediaDataInRealTime = true
            assetWriter.add(videoInput)
            assetWriter.add(audioInput)

            RPScreenRecorder.shared().startCapture(handler: { (sample, bufferType, error) in
                if let error = error {
                    recordingHandler(error)
                    return
                }
                guard CMSampleBufferDataIsReady(sample) else { return }

                // Start writing on the first ready buffer. Doing this synchronously
                // on the capture queue (rather than via DispatchQueue.main.async)
                // guarantees startSession runs before any append.
                if self.assetWriter.status == AVAssetWriterStatus.unknown {
                    guard self.assetWriter.startWriting() else { return }
                    self.assetWriter.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sample))
                    self.sessionStarted = true
                }
                if self.assetWriter.status == AVAssetWriterStatus.failed {
                    print("Error occurred, status = \(self.assetWriter.status.rawValue), \(String(describing: self.assetWriter.error))")
                    recordingHandler(self.assetWriter.error)
                    return
                }

                if bufferType == .video {
                    if self.videoInput.isReadyForMoreMediaData && self.sessionStarted {
                        self.videoInput.append(sample)
                    }
                }
                if bufferType == .audioApp {
                    if self.audioInput.isReadyForMoreMediaData && self.sessionStarted {
                        self.audioInput.append(sample)
                    }
                }
                // .audioMic buffers are ignored; recording them would need a
                // separate AVAssetWriterInput.
            }) { (error) in
                recordingHandler(error)
            }
        } else {
            // Fallback on earlier versions
        }
    }

    func stopRecording(isBack: Bool, aPathName: String, handler: @escaping (Error?) -> Void)
    {
        if #available(iOS 11.0, *)
        {
            self.sessionStarted = false
            RPScreenRecorder.shared().stopCapture { (error) in
                self.videoInput.markAsFinished()
                self.audioInput.markAsFinished()
                handler(error)
                if error == nil {
                    self.assetWriter.finishWriting {
                        print(ReplayFileUtil.fetchAllReplays())
                        if !isBack {
                            self.photosSaveWithAuthorization(aPathName: aPathName)
                        } else {
                            self.deleteDirectory()
                        }
                    }
                } else {
                    self.deleteDirectory()
                }
            }
        } else {
            // Fallback on earlier versions
        }
    }

    func photosSaveWithAuthorization(aPathName: String) {
        if PHPhotoLibrary.authorizationStatus() == .authorized {
            self.saveToCamera(aPathName: aPathName)
        } else {
            PHPhotoLibrary.requestAuthorization({ (status) in
                if status == .authorized {
                    self.saveToCamera(aPathName: aPathName)
                }
            })
        }
    }

    func saveToCamera(aPathName: String) {
        PHPhotoLibrary.shared().performChanges({
            PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: ReplayFileUtil.fetchAllReplays().last!)
        }) { saved, error in
            if saved {
                addScreenCaptureVideo(aPath: aPathName)
                print("Save")
            } else {
                NotificationCenter.default.post(name: NSNotification.Name(rawValue: "isScreenRecordFaildToSave"), object: nil)
                print("error to save - \(String(describing: error))")
            }
        }
    }

    func deleteDirectory() {
        ReplayFileUtil.delete()
    }
}
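For completeness, calling this class looks like the following (the file name "demo.mp4" and the aPathName value are hypothetical):

let recorder = ScreenRecorder()

// Start capturing; the handler receives any capture or writer error.
recorder.startRecording(withFileName: "demo.mp4") { error in
    if let error = error {
        print("recording failed: \(error)")
    }
}

// Later: stop, finish the file, and save it to Photos.
// Passing isBack: true would delete the recording instead.
recorder.stopRecording(isBack: false, aPathName: "demo") { error in
    print("stopped, error: \(String(describing: error))")
}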