AVAssetWriter First Frames Are Either Blank or Black

Question: I am recording video by receiving audio and video buffers as CMSampleBuffers. Once the AVAssetWriter finishes writing the buffers, the final video starts with black or blank frames (presumably because only audio frames are present at the beginning). However, at random, some videos come out completely fine with no black frames.

What I've tried: I tried waiting until I received the first video frame before starting to record, but I still got the same erratic behavior.

What I want: a normal video with no blank frames.

Below is the code that may be useful.

Capture Session

func configureSession() {
    sessionQueue.async {
        print("SFC - Session Configuring")
        if self.setupResult != .success { return }
        
        self.session.beginConfiguration()
        self.session.sessionPreset = .high
        
        do {
            var defaultVideoDevice: AVCaptureDevice?

            if let frontCameraDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front){
                defaultVideoDevice = frontCameraDevice
            } else if let backCameraDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back){
                defaultVideoDevice = backCameraDevice
            } else if let dualCameraDevice = AVCaptureDevice.default(.builtInDualWideCamera, for: .video, position: .back) {
                defaultVideoDevice = dualCameraDevice
            }

            guard let videoDevice = defaultVideoDevice else {
                print("CAM - Camera unavailable")
                self.setupResult = .configurationFailed
                self.session.commitConfiguration()
                return
            }

            let videoInputDevice = try AVCaptureDeviceInput(device: videoDevice)

            if self.session.canAddInput(videoInputDevice) {
                self.session.addInput(videoInputDevice)
                self.videoDeviceInput = videoInputDevice
            } else {
                print("CAM - Couldn't add input to the session")
                self.setupResult = .configurationFailed
                self.session.commitConfiguration()
                return
            }
        } catch {
            print("CAM - Couldn't create device input. Error - ", error.localizedDescription)
            self.setupResult = .configurationFailed
            self.session.commitConfiguration()
            return
        }
        
        
        self.videoOutput.setSampleBufferDelegate(self, queue: self.videoQueue)
        if self.session.canAddOutput(self.videoOutput) {
            self.session.addOutput(self.videoOutput)
            self.photoQualityPrioritizationMode = .balanced
        } else {
            print("Could not add photo output to the session")
            self.setupResult = .configurationFailed
            self.session.commitConfiguration()
            return
        }

        self.videoOutput.connections.first?.videoOrientation = .portrait
        self.videoOutput.videoSettings = [ kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA ]
        self.videoOutput.alwaysDiscardsLateVideoFrames = true
        
        
        // Configure for photos
        if self.session.canAddOutput(self.photoOutput) {
            self.session.addOutput(self.photoOutput)
        }
        
        do {
            if let audioDevice = AVCaptureDevice.default(for: .audio) {
                let audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)
                if self.session.canAddInput(audioDeviceInput) {
                    self.session.addInput(audioDeviceInput)
                } else { print("CAM - Couldn't add audio input device to session.") }
            } else { print("CAM - No audio device available.") }
        } catch { print("CAM - Couldn't create audio input device. Error - ", error.localizedDescription) }
    
        
        self.audioOutput.setSampleBufferDelegate(self, queue: self.videoQueue)
        if self.session.canAddOutput(self.audioOutput) {
            print("SFC - registered audio output with type")
            self.session.addOutput(self.audioOutput)
        } else {
            print("Couldn't add audio output")
            self.setupResult = .configurationFailed
            self.session.commitConfiguration()
            return
        }
    
        self.session.commitConfiguration()

    }
}

func startSession() {
    if SMConstants.currentDevice.isSimulator { return }
    sessionQueue.async {
        self.configureSession()
        print("SFC - Frame Buffers Session Starting")
        self.session.startRunning()
        self.isSessionRunning = self.session.isRunning
        self.sessionQueue.asyncAfter(deadline: .now() + 1) {
            self.addObservers()
        }
    }
}

Buffer Writer

func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    if !frameCaptureRunning { return }
    write(output: output, buffer: sampleBuffer)
}

public func write(output: AVCaptureOutput, buffer: CMSampleBuffer) {
    writerQueue.sync {
        
        if assetWriter == nil { self.setupWriter() }

        if self.assetWriter?.status == .unknown {
            self.assetWriter?.startWriting()
            self.assetWriter?.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(buffer))
            printDone(item: "Started AssetWriter")
        }
        if self.assetWriter?.status == .failed {
            printError(item: "Asset Writer Failed with Error: \(String(describing: self.assetWriter?.error))")
            return
        }

    
        if CMSampleBufferDataIsReady(buffer) {
            if output == videoOutput {
                if let videoInput = self.videoInput, videoInput.isReadyForMoreMediaData {
                    videoInput.append(buffer)
                    printLog(item: " Pulling video only \(output)")
                    video_frames_written = true
                }
            }
            
            if output == audioOutput {
                if !video_frames_written { return }
                if let audioInput = self.audioInput, audioInput.isReadyForMoreMediaData {
                    audioInput.append(buffer)
                    printLog(item: " Pulling Audio only \(output)")
                }
            }
        }
    }
}

private func setupWriter() {
    
    clearAndResetFilesInDirectory()
    
    self.assetWriter = try? AVAssetWriter(outputURL: self.url, fileType: AVFileType.mp4)
    
    let videoOutputSettings = [
        AVVideoCodecKey: AVVideoCodecType.h264,
        AVVideoHeightKey: 1920,
        AVVideoWidthKey:1080
    ] as [String : Any]


    self.videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoOutputSettings)
    self.videoInput?.expectsMediaDataInRealTime = true
    if let videoInput = self.videoInput, self.assetWriter?.canAdd(videoInput) == true {
        self.assetWriter?.add(videoInput)
    }
    
    
    let audioOutputSettings = [
        AVFormatIDKey: kAudioFormatMPEG4AAC,
        AVNumberOfChannelsKey: 1,
        AVSampleRateKey: 44100,
        AVEncoderBitRateKey: 64000
    ] as [String: Any]
    
    
    self.audioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: audioOutputSettings)
    self.audioInput?.expectsMediaDataInRealTime = true
    if let audioInput = self.audioInput, self.assetWriter?.canAdd(audioInput) == true {
        printDone(item: "Added Input")
        self.assetWriter?.add(audioInput)
    } else { printError(item: "No audio Input") }
    
    
}

ViewWillAppear in CameraController

override func viewWillAppear(_ animated: Bool) {
    super.viewWillAppear(animated)
    frameCapture = FrameCapture(filename: "test.mp4", delegate: self)
    frameCapture.startSession()
    previewView.session = frameCapture.session
}

That's everything I thought would be useful to you. If you think I need to provide more information, please let me know and I'll add it promptly. Thank you for your time.

Answer: You probably want to call startSession(atSourceTime:) with a video buffer's timestamp. If an audio buffer arrives first, with a timestamp earlier than the first video buffer's, you will get blank or black frames at the start of the video.
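
A minimal sketch of that change, applied to the write(output:buffer:) method above: drop any audio samples that arrive before the first video buffer, and only call startWriting()/startSession(atSourceTime:) once a video buffer is in hand. It reuses the properties and helpers from the question (videoOutput, audioOutput, video_frames_written, printDone, printError) and assumes the rest of the writer setup is unchanged.

public func write(output: AVCaptureOutput, buffer: CMSampleBuffer) {
    writerQueue.sync {

        if assetWriter == nil { self.setupWriter() }

        if self.assetWriter?.status == .unknown {
            // Ignore audio until the first video buffer arrives, so the
            // session's source time matches the first video frame.
            guard output == videoOutput else { return }
            self.assetWriter?.startWriting()
            self.assetWriter?.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(buffer))
            printDone(item: "Started AssetWriter on first video buffer")
        }

        if self.assetWriter?.status == .failed {
            printError(item: "Asset Writer Failed with Error: \(String(describing: self.assetWriter?.error))")
            return
        }

        guard CMSampleBufferDataIsReady(buffer) else { return }

        if output == videoOutput, let videoInput = self.videoInput, videoInput.isReadyForMoreMediaData {
            videoInput.append(buffer)
            video_frames_written = true
        } else if output == audioOutput, video_frames_written,
                  let audioInput = self.audioInput, audioInput.isReadyForMoreMediaData {
            audioInput.append(buffer)
        }
    }
}

Because audio arriving before the first video frame is discarded instead of appended, the movie's start time lines up with the first video frame, so the writer has no leading stretch of audio-only content to pad with blank video.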