无法使用 AVVideoCompositionCoreAnimationTool 在视频中显示动画 CALayer

Can't show animated CALayer in video using AVVideoCompositionCoreAnimationTool

更新 6: 我已经设法完全解决了我的问题,但我仍然希望得到一个比我自己的猜测更好的解释,说明它之前为什么不起作用(以防我的猜测并不正确)。

我一直在尝试为视频制作精灵 sheet 动画,但每次导出视频时,最终结果都是我开始时使用的示例视频。

这是我的代码:

首先使用我的自定义 CALayer 来处理我自己的精灵 sheets

/// A `CALayer` subclass that shows one frame of a vertical sprite sheet by
/// shifting `contentsRect` whenever the animatable `frameIndex` property changes.
class SpriteLayer: CALayer {
    var frameIndex: Int

    override init() {
        // 0 is the "no frame selected" default state.
        frameIndex = 0
        super.init()
    }

    required init?(coder aDecoder: NSCoder) {
        frameIndex = 0
        super.init(coder: aDecoder)
    }

    /// Maps the current frame index to the matching vertical slice of the sheet.
    /// Frame 0 means "nothing to show", so the layer is left untouched.
    override func display() {
        let index = frameIndex
        guard index != 0 else { return }
        let sliceSize = contentsRect.size
        contentsRect = CGRect(x: 0,
                              y: CGFloat(index - 1) * sliceSize.height,
                              width: sliceSize.width,
                              height: sliceSize.height)
    }

    /// Suppresses the implicit animation for `contentsRect` so frame changes
    /// happen instantly instead of cross-fading.
    override func action(forKey event: String) -> CAAction? {
        return event == "contentsRect" ? nil : super.action(forKey: event)
    }

    /// Declares `frameIndex` as a display-triggering (and hence animatable) key.
    override class func needsDisplay(forKey key: String) -> Bool {
        return key == "frameIndex"
    }
}

Gif 是一个基本的 class,没有什么花哨的东西,效果很好。gif.strip 是一个 UIImage,代表该 gif 的垂直精灵 sheet。

现在是应该导出新视频的方法(它是一个用于导出的更大的 class 的一部分)。

/// Exports the bundled "sample.mp4" to `url`, compositing the gif's sprite-sheet
/// overlay on top via AVVideoCompositionCoreAnimationTool.
/// (Part of a larger export class: reads `self.gif.strip`, stores `self.outputURL`.)
/// - Parameters:
///   - url: Destination file URL; any file already there is deleted first.
///   - completion: Invoked only when the export session reports `.completed`.
func convertAndExport(to url :URL , completion: @escaping () -> Void ) {
        // Get Initial info and make sure our destination is available
        self.outputURL = url
        // Force-unwraps assume the sprite strip was already generated; crashes otherwise.
        let stripCgImage = self.gif.strip!.cgImage!
        // This is used to time how long the export took
        let start = DispatchTime.now()
        // Best-effort delete of a previous output; a failure (e.g. file not present)
        // is only logged because the export can still proceed.
        do {
            try FileManager.default.removeItem(at: outputURL)
        } catch {
            print("Remove Error: \(error.localizedDescription)")
            print(error)
        }
        // Find and load "sample.mp4" as a AVAsset
        let videoPath = Bundle.main.path(forResource: "sample", ofType: "mp4")!
        let videoUrl = URL(fileURLWithPath: videoPath)
        let videoAsset = AVAsset(url: videoUrl)
        // Start a new mutable Composition with the same base video track
        let mixComposition = AVMutableComposition()
        let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)!
        let clipVideoTrack = videoAsset.tracks(withMediaType: .video).first!
        do {
            try compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: clipVideoTrack, at: kCMTimeZero)
        } catch {
            print("Insert Error: \(error.localizedDescription)")
            print(error)
            return
        }
        compositionVideoTrack.preferredTransform = clipVideoTrack.preferredTransform
        // Quick access to the video size
        let videoSize = clipVideoTrack.naturalSize
        // Setup CALayer and it's animation
        let aLayer = SpriteLayer()
        aLayer.contents = stripCgImage
        aLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
        aLayer.opacity = 1.0
        aLayer.masksToBounds = true
        aLayer.bounds = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
        // Show only the top third of the strip initially (3 frames stacked vertically).
        aLayer.contentsRect = CGRect(x: 0, y: 0, width: 1, height: 1.0 / 3.0)
        // Animate SpriteLayer's custom "frameIndex" property from 1 to 4 forever.
        // NOTE(review): per this post's resolution, isRemovedOnCompletion = false was
        // later found to be required for the animation to survive export — missing here.
        let spriteAnimation = CABasicAnimation(keyPath: "frameIndex")
        spriteAnimation.fromValue = 1
        spriteAnimation.toValue = 4
        spriteAnimation.duration = 2.25
        spriteAnimation.repeatCount = .infinity
        spriteAnimation.autoreverses = false
        // AVCoreAnimationBeginTimeAtZero anchors the animation to the video timeline.
        spriteAnimation.beginTime = AVCoreAnimationBeginTimeAtZero
        aLayer.add(spriteAnimation, forKey: nil)
        // Setup Layers for AVVideoCompositionCoreAnimationTool:
        // parentLayer hosts videoLayer (where video frames render) plus the overlay.
        let parentLayer = CALayer()
        let videoLayer = CALayer()
        parentLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
        videoLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
        parentLayer.addSublayer(videoLayer)
        parentLayer.addSublayer(aLayer)
        // Create the mutable video composition
        let videoComp = AVMutableVideoComposition()
        videoComp.renderSize = videoSize
        videoComp.frameDuration = CMTimeMake(1, 30) // 30 fps
        videoComp.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentLayer)
        // Set the video composition to apply to the composition's video track
        let instruction = AVMutableVideoCompositionInstruction()
        instruction.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration)
        let videoTrack = mixComposition.tracks(withMediaType: .video).first!
        let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
        instruction.layerInstructions = [layerInstruction]
        videoComp.instructions = [instruction]
        // Initialize export session.
        // NOTE(review): per this post's resolution (Update 6), AVAssetExportPresetPassthrough
        // skips re-encoding, so the videoComposition overlay is never rendered;
        // AVAssetExportPresetHighestQuality was the fix.
        let assetExport = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetPassthrough)!
        assetExport.videoComposition = videoComp
        assetExport.outputFileType = AVFileType.mp4
        assetExport.outputURL = self.outputURL
        assetExport.shouldOptimizeForNetworkUse = true
        // Export asynchronously; only .completed invokes the caller's completion.
        assetExport.exportAsynchronously {
            let status = assetExport.status
            switch status {
            case .failed:
                print("Export Failed")
                print("Export Error: \(assetExport.error!.localizedDescription)")
                print(assetExport.error!)
            case .unknown:
                print("Export Unknown")
            case .exporting:
                print("Export Exporting")
            case .waiting:
                print("Export Waiting")
            case .cancelled:
                print("Export Cancelled")
            case .completed:
                let end = DispatchTime.now()
                let nanoTime = end.uptimeNanoseconds - start.uptimeNanoseconds
                let timeInterval = Double(nanoTime) / 1_000_000_000
                // Function is now over, we can print how long it took
                print("Time to generate video: \(timeInterval) seconds")
                completion()
            }
        }
}

编辑: 我的代码基于以下链接

更新 1: 我尝试删除代码中的 CABasicAnimation 部分,只使用 CALayer,但无济于事:我甚至无法显示图像。为了测试,我尝试在 Xcode 游乐场中使用作用于 contentsRect 的 CAKeyframeAnimation 为这个精灵 sheet 设置动画,它运行良好,所以我认为问题不在于 CABasicAnimation,甚至可能不在于 CALayer 本身。在这方面我真的需要一些帮助,因为我不明白为什么我甚至无法在导出的示例视频上显示图像。

更新 2: 为了回应马特的评论,我试着暂时忘记精灵 sheet 并将其更改为 CATextLayer 但我的视频中仍然没有看到任何东西(它有深色图像,所以白色文本应该是完美的可见)

let aLayer = CATextLayer()
aLayer.string = "This is a test"
aLayer.fontSize = videoSize.height / 6
aLayer.alignmentMode = kCAAlignmentCenter
aLayer.foregroundColor = UIColor.white.cgColor
aLayer.bounds = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height / 6)

更新 3: 根据 Matt 的要求,我尝试将 parentLayer.addSublayer(aLayer) 更改为 videoLayer.addSublayer(aLayer),但仍然没有任何改变。不过我觉得这也在意料之中,因为 AVVideoCompositionCoreAnimationTool 的文档如下

convenience init(postProcessingAsVideoLayer videoLayer: CALayer, 
              in animationLayer: CALayer)

意思是我的 parentLayer 就是 animationLayer,这可能意味着任何动画都应该在这一层完成。

更新 4: 我在这里开始发疯,我暂时放弃了显示文本或动画图像的想法我只想以任何可能的方式影响我的视频所以我将 aLayer 更改为:

let aLayer = CALayer()
aLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
aLayer.backgroundColor = UIColor.white.cgColor

好吧,这绝对没有任何作用,我仍然可以在我的 outputUrl 上获得我的示例视频(如果你想 "play" 一起,我开始在操场上使用以下代码测试它)

import PlaygroundSupport
import UIKit
import Foundation
import AVFoundation

/// Playground test harness: the same export pipeline as the app version, but with
/// a plain white CALayer overlay instead of the sprite layer, to check whether
/// AVVideoCompositionCoreAnimationTool affects the exported video at all.
/// - Parameters:
///   - url: Destination file URL; any file already there is deleted first.
///   - completion: Invoked only when the export session reports `.completed`.
func convertAndExport(to url :URL , completion: @escaping () -> Void ) {
    // Timestamp used to report the total export duration.
    let start = DispatchTime.now()
    // Best-effort delete of a previous output; a failure is only logged.
    do {
        try FileManager.default.removeItem(at: url)
    } catch {
        print("Remove Error: \(error.localizedDescription)")
        print(error)
    }

    // Load the bundled sample video.
    let videoPath = Bundle.main.path(forResource: "sample", ofType: "mp4")!
    let videoUrl = URL(fileURLWithPath: videoPath)
    let videoAsset = AVURLAsset(url: videoUrl)
    // Copy the sample's video track into a fresh mutable composition.
    let mixComposition = AVMutableComposition()
    let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)!
    let clipVideoTrack = videoAsset.tracks(withMediaType: .video).first!

    do {
        try compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: clipVideoTrack, at: kCMTimeZero)
    } catch {
        print("Insert Error: \(error.localizedDescription)")
        print(error)
        return
    }
    compositionVideoTrack.preferredTransform = clipVideoTrack.preferredTransform
    let videoSize = clipVideoTrack.naturalSize
    print("Video Size Detected: \(videoSize.width) x \(videoSize.height)")

    // Full-frame white layer: if the tool works, the export should be all white.
    let aLayer = CALayer()
    aLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
    aLayer.backgroundColor = UIColor.white.cgColor

    // parentLayer hosts videoLayer (render target for video frames) plus the overlay.
    let parentLayer = CALayer()
    let videoLayer = CALayer()
    parentLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
    videoLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
    parentLayer.addSublayer(videoLayer)
    parentLayer.addSublayer(aLayer)
    aLayer.setNeedsDisplay()
    let videoComp = AVMutableVideoComposition()
    videoComp.renderSize = videoSize
    videoComp.frameDuration = CMTimeMake(1, 30) // 30 fps
    videoComp.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentLayer)

    // One instruction spanning the whole composition, targeting its video track.
    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration)
    let videoTrack = mixComposition.tracks(withMediaType: .video).first!
    let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
    instruction.layerInstructions = [layerInstruction]
    videoComp.instructions = [instruction]

    // NOTE(review): per this post's resolution (Update 6), AVAssetExportPresetPassthrough
    // skips re-encoding, so the videoComposition (and the white overlay) is never
    // rendered; switching to AVAssetExportPresetHighestQuality was the fix.
    let assetExport = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetPassthrough)!
    assetExport.videoComposition = videoComp
    assetExport.outputFileType = AVFileType.mp4
    assetExport.outputURL = url
    assetExport.shouldOptimizeForNetworkUse = true

    // Export asynchronously; only .completed invokes the caller's completion.
    assetExport.exportAsynchronously {
        let status = assetExport.status
        switch status {
        case .failed:
            print("Export Failed")
            print("Export Error: \(assetExport.error!.localizedDescription)")
            print(assetExport.error!)
        case .unknown:
            print("Export Unknown")
        case .exporting:
            print("Export Exporting")
        case .waiting:
            print("Export Waiting")
        case .cancelled:
            print("Export Cancelled")
        case .completed:
            let end = DispatchTime.now()
            let nanoTime = end.uptimeNanoseconds - start.uptimeNanoseconds
            let timeInterval = Double(nanoTime) / 1_000_000_000
            print("Time to generate video: \(timeInterval) seconds")
            completion()
        }
    }
}

// Kick off a test export into the temp directory; print the output URL on
// success so the result can be inspected from the playground.
let outputUrl = FileManager.default.temporaryDirectory.appendingPathComponent("test.mp4")
convertAndExport(to: outputUrl) {
    print(outputUrl)
}

请有人帮助我理解我做错了什么...

更新 5: 除了游乐场测试之外,我所有的东西都是在 iPad Air 2 上运行的(所以没有模拟器),因为我使用相机拍照,然后将它们拼接成精灵 sheet,之后我计划在要通过电子邮件发送的视频上制作动画。我开始做游乐场测试,是因为在 iPad 上每次测试都需要我走完整个应用流程(倒计时、拍照、表单、电子邮件的发送/接收)。

好的,终于让它按我一直希望的那样工作了。

首先,要感谢 Matt(即使他已经删除了他的评论)提供的链接,那里有一个工作示例,帮助我拼凑出了我代码中的错误。

  • 首发
let assetExport = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetPassthrough)!

我需要使用 AVAssetExportPresetHighestQuality 而不是 AVAssetExportPresetPassthrough。我的猜测是,直通预设意味着您不进行任何重新编码,因此将其设置为最高(不是中等,因为我导出的视频超过 400x400),这样我就可以真正重新编码我的视频。我猜这就是阻止导出的视频包含我正在尝试的任何 CALayer 的原因(甚至用白色覆盖视频)。

  • 其次(不确定这是否真的影响但我稍后会尝试)
parentLayer.addSublayer(aLayer)

我将其替换为:

videoLayer.addSublayer(aLayer)

不确定这是否真的重要,但我的理解是这实际上是 AVVideoCompositionCoreAnimationTool 的动画层,而 parentLayer 只是一个容器,并不意味着包含更多内容,但我可能错了。

  • 我做了第三个改变
let spriteAnimation = CABasicAnimation(keyPath: "frameIndex")
spriteAnimation.fromValue = 1
spriteAnimation.toValue = 4
spriteAnimation.duration = 2.25
spriteAnimation.repeatCount = .infinity
spriteAnimation.autoreverses = false
spriteAnimation.beginTime = AVCoreAnimationBeginTimeAtZero
aLayer.add(spriteAnimation, forKey: nil)

我改成了这样:

let animation = CAKeyframeAnimation(keyPath: #keyPath(CALayer.contentsRect))
animation.duration = 2.25
animation.calculationMode = kCAAnimationDiscrete
animation.repeatCount = .infinity
animation.values = [
    CGRect(x: 0, y: 0, width: 1, height: 1/3.0),
    CGRect(x: 0, y: 1/3.0, width: 1, height: 1/3.0),
    CGRect(x: 0, y: 2/3.0, width: 1, height: 1/3.0)
    ] as [CGRect]
animation.beginTime = AVCoreAnimationBeginTimeAtZero
animation.fillMode = kCAFillModeBackwards
animation.isRemovedOnCompletion = false
aLayer.add(animation, forKey: nil)

此更改主要是删除了我为精灵 sheet 自定义的动画(因为它总是相同的,所以我首先想要一个可以工作的示例,之后我会将其泛化,并可能将它添加到我的私有 Pod 中)。但最重要的是 animation.isRemovedOnCompletion = false:我注意到删除这一行会使动画在导出的视频上完全不播放。因此,对于任何 CABasicAnimation 在导出后没有在视频上播放动画的人,请检查你的动画上的 isRemovedOnCompletion 是否设置正确。

我认为这几乎就是我所做的所有更改。

虽然我在技术上回答了我的问题,但我的赏金仍然是为了了解 AVVideoCompositionCoreAnimationToolAVAssetExport 是如何工作的,以及为什么我必须做我所做的改变才能最终让它工作,如果有人感兴趣的话解释。

再次感谢马特,你向我展示了你是如何做到的,从而帮助了我。