Recording videos with real-time filters in Swift

I'm new to Swift and I'm trying to build a camera app that can apply a filter in real time and save the video with the filter applied.

So far I can show a live preview with the filter applied, but when I save the video, it's all black.

import UIKit
import AVFoundation
import AssetsLibrary
import CoreMedia
import Photos

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

    var captureSession: AVCaptureSession!

    @IBOutlet weak var previewView: UIView!
    @IBOutlet weak var recordButtton: UIButton!
    @IBOutlet weak var imageView: UIImageView!

    var assetWriter: AVAssetWriter?
    var assetWriterPixelBufferInput: AVAssetWriterInputPixelBufferAdaptor?
    var isWriting = false
    var currentSampleTime: CMTime?
    var currentVideoDimensions: CMVideoDimensions?

    override func viewDidLoad() {
        super.viewDidLoad()
        FilterVendor.register()
        setupCaptureSession()
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
    }

    func setupCaptureSession() {
        captureSession = AVCaptureSession()
        captureSession.sessionPreset = AVCaptureSessionPresetPhoto

        guard let captureDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo), let input = try? AVCaptureDeviceInput(device: captureDevice) else {
            print("Can't access the camera")
            return
        }

        if captureSession.canAddInput(input) {
            captureSession.addInput(input)
        }

        let videoOutput = AVCaptureVideoDataOutput()

        videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue.main)
        if captureSession.canAddOutput(videoOutput) {
            captureSession.addOutput(videoOutput)
        }

        let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        if let previewLayer = previewLayer {
            view.layer.addSublayer(previewLayer)
        }

        captureSession.startRunning()
    }

    @IBAction func record(_ sender: Any) {
        if isWriting {
            print("stop record")
            self.isWriting = false
            assetWriterPixelBufferInput = nil
            assetWriter?.finishWriting(completionHandler: {[unowned self] () -> Void in
                self.saveMovieToCameraRoll()
            })
        } else {
            print("start record")
            createWriter()
            assetWriter?.startWriting()
            assetWriter?.startSession(atSourceTime: currentSampleTime!)
            isWriting = true
        }
    }

    func saveMovieToCameraRoll() {
        PHPhotoLibrary.shared().performChanges({
            PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: self.movieURL() as URL)
        }) { saved, error in
            if saved {
                print("saved")
            }
        }
    }

    func movieURL() -> NSURL {
        let tempDir = NSTemporaryDirectory()
        let url = NSURL(fileURLWithPath: tempDir).appendingPathComponent("tmpMov.mov")
        return url! as NSURL
    }

    func checkForAndDeleteFile() {
        let fm = FileManager.default
        let url = movieURL()
        let exist = fm.fileExists(atPath: url.path!)

        if exist {
            do {
                try fm.removeItem(at: url as URL)
            } catch let error as NSError {
                print(error.localizedDescription)
            }
        }
    }

    func createWriter() {
        self.checkForAndDeleteFile()

        do {
            assetWriter = try AVAssetWriter(outputURL: movieURL() as URL, fileType: AVFileTypeQuickTimeMovie)
        } catch let error as NSError {
            print(error.localizedDescription)
            return
        }

        let outputSettings = [
            AVVideoCodecKey : AVVideoCodecH264,
            AVVideoWidthKey : Int(currentVideoDimensions!.width),
            AVVideoHeightKey : Int(currentVideoDimensions!.height)
        ] as [String : Any]

        let assetWriterVideoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: outputSettings)
        assetWriterVideoInput.expectsMediaDataInRealTime = true
        assetWriterVideoInput.transform = CGAffineTransform(rotationAngle: CGFloat(M_PI / 2.0))

        let sourcePixelBufferAttributesDictionary = [
            String(kCVPixelBufferPixelFormatTypeKey) : Int(kCVPixelFormatType_32BGRA),
            String(kCVPixelBufferWidthKey) : Int(currentVideoDimensions!.width),
            String(kCVPixelBufferHeightKey) : Int(currentVideoDimensions!.height),
            String(kCVPixelBufferOpenGLESCompatibilityKey) : kCFBooleanTrue
        ] as [String : Any]

        assetWriterPixelBufferInput = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: assetWriterVideoInput,
                                                                           sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary)

        if assetWriter!.canAdd(assetWriterVideoInput) {
            assetWriter!.add(assetWriterVideoInput)
        } else {
            print("no way\(assetWriterVideoInput)")
        }
    }

    func captureOutput(_ captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection) {
        autoreleasepool {

            connection.videoOrientation = AVCaptureVideoOrientation.landscapeLeft;

            guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
            let cameraImage = CIImage(cvPixelBuffer: pixelBuffer)

            let filter = CIFilter(name: "Fİlter")!
            filter.setValue(cameraImage, forKey: kCIInputImageKey)


            let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer)!
            self.currentVideoDimensions = CMVideoFormatDescriptionGetDimensions(formatDescription)
            self.currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer)

            if self.isWriting {
                if self.assetWriterPixelBufferInput?.assetWriterInput.isReadyForMoreMediaData == true {
                    var newPixelBuffer: CVPixelBuffer? = nil

                    CVPixelBufferPoolCreatePixelBuffer(nil, self.assetWriterPixelBufferInput!.pixelBufferPool!, &newPixelBuffer)

                    let success = self.assetWriterPixelBufferInput?.append(newPixelBuffer!, withPresentationTime: self.currentSampleTime!)

                    if success == false {
                        print("Pixel Buffer failed")
                    }
                }
            }

            DispatchQueue.main.async {

                if let outputValue = filter.value(forKey: kCIOutputImageKey) as? CIImage {
                    let filteredImage = UIImage(ciImage: outputValue)
                    self.imageView.image = filteredImage
                }
            }
        }
    }
}

I've added some comments to the critical section below:

func captureOutput(_ captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection) {
    autoreleasepool {

        connection.videoOrientation = AVCaptureVideoOrientation.landscapeLeft;

        // COMMENT: This line makes sense - this is your pixelbuffer from the camera.
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }

        // COMMENT: OK, so you turn pixelBuffer into a CIImage...
        let cameraImage = CIImage(cvPixelBuffer: pixelBuffer)

        // COMMENT: And now you've created a CIImage with a Filter instruction...
        let filter = CIFilter(name: "Fİlter")!
        filter.setValue(cameraImage, forKey: kCIInputImageKey)


        let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer)!
        self.currentVideoDimensions = CMVideoFormatDescriptionGetDimensions(formatDescription)
        self.currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer)

        if self.isWriting {
            if self.assetWriterPixelBufferInput?.assetWriterInput.isReadyForMoreMediaData == true {
                // COMMENT: Here's where it gets weird. You've declared a new, empty pixelBuffer... but you already have one (pixelBuffer) that contains the image you want to write...
                var newPixelBuffer: CVPixelBuffer? = nil

                // COMMENT: And you grabbed memory from the pool.
                CVPixelBufferPoolCreatePixelBuffer(nil, self.assetWriterPixelBufferInput!.pixelBufferPool!, &newPixelBuffer)

                // COMMENT: And now you wrote an empty pixelBuffer back <-- this is what's causing the black frame.
                let success = self.assetWriterPixelBufferInput?.append(newPixelBuffer!, withPresentationTime: self.currentSampleTime!)

                if success == false {
                    print("Pixel Buffer failed")
                }
            }
        }

        // COMMENT: And now you're sending the filtered image back to the screen.
        DispatchQueue.main.async {

            if let outputValue = filter.value(forKey: kCIOutputImageKey) as? CIImage {
                let filteredImage = UIImage(ciImage: outputValue)
                self.imageView.image = filteredImage
            }
        }
    }
}

As far as I can tell, you're basically taking the screen image, creating a filtered copy, and then making a new pixel buffer which is empty and writing that out.

If you wrote out the pixelBuffer you grabbed instead of the new pixel buffer you're creating, you'd successfully write the image.
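
For instance (just a sketch, reusing the variables already in scope in your captureOutput), appending the buffer you already have would record real - though unfiltered - frames:

if self.isWriting && self.assetWriterPixelBufferInput?.assetWriterInput.isReadyForMoreMediaData == true {
    // Appending the camera's own pixel buffer writes actual frames, just without the filter.
    let success = self.assetWriterPixelBufferInput?.append(pixelBuffer, withPresentationTime: self.currentSampleTime!)
    if success == false {
        print("Pixel Buffer failed")
    }
}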

To successfully write out the filtered video, you need to create a new CVPixelBuffer from the CIImage - that solution already exists on Stack Overflow, I know, because I needed that step myself!
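
Here's a minimal sketch of that step, assuming you keep one reusable CIContext around (creating a context per frame is expensive) and the same writer setup as above - you render the filtered CIImage into the pooled pixel buffer before appending it:

let ciContext = CIContext() // create once, e.g. as a property on the view controller

// Inside captureOutput, in place of the block that appends the empty buffer:
if self.isWriting {
    if self.assetWriterPixelBufferInput?.assetWriterInput.isReadyForMoreMediaData == true,
        let filteredImage = filter.outputImage {

        var newPixelBuffer: CVPixelBuffer? = nil
        CVPixelBufferPoolCreatePixelBuffer(nil, self.assetWriterPixelBufferInput!.pixelBufferPool!, &newPixelBuffer)

        if let newPixelBuffer = newPixelBuffer {
            // The missing step: draw the filtered image into the pooled buffer,
            // so it's no longer empty (black) when it gets appended.
            ciContext.render(filteredImage, to: newPixelBuffer)

            let success = self.assetWriterPixelBufferInput?.append(newPixelBuffer, withPresentationTime: self.currentSampleTime!)
            if success == false {
                print("Pixel Buffer failed")
            }
        }
    }
}

With that in place, the preview path and the recording path both consume the same filtered image, so what you see on screen is what ends up in the saved movie.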