AVAssetExportSession export video AVFoundationErrorDomain Code=-11841 error

I am trying to export a video using the code below. It works fine the first three times, then fails on every attempt after that. I am trying to add recorded audio on top of the video. I am new to all of these concepts, so any help would be greatly appreciated.

open func generate(video url: URL,
                   with frame: CGRect? = nil,
                   byApplying transformation: CGAffineTransform? = nil,
                   in previewArea: CGRect? = nil,
                   previewCornerRadius: Float = 0,
                   overlayImage: UIImage? = nil,
                   setOverlayAsBackground: Bool = false,
                   gifLayer: CALayer? = nil,
                   audioUrl: URL? = nil,
                   muteAudio: Bool = false,
                   success: @escaping ((URL) -> Void),
                   failure: @escaping ((Error) -> Void)) {
    let mixComposition: AVMutableComposition = AVMutableComposition()
    var mutableCompositionVideoTrack: AVMutableCompositionTrack? = nil
    var mutableCompositionOriginalAudioTrack: AVMutableCompositionTrack? = nil
    var mutableCompositionAudioTrack: AVMutableCompositionTrack? = nil
    let totalVideoCompositionInstruction : AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()
    
    let aVideoAsset: AVAsset = AVAsset(url: url)
    var aAudioAsset: AVAsset? = nil
    
    if let url = audioUrl {
        aAudioAsset = AVAsset(url: url)
    }
    
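    /// Create the destination tracks on the mix composition: one video track, plus
    /// optional audio tracks for the recorded audio and the clip's own audio.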
    if let videoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid) {
        mutableCompositionVideoTrack = videoTrack
        if aAudioAsset != nil, let audioTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid) {
            mutableCompositionAudioTrack = audioTrack
        }
        
        if !muteAudio, aVideoAsset.hasAudio, let originalAudioTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid) {
            /// If original audio present
            mutableCompositionOriginalAudioTrack = originalAudioTrack
        }
    }
    
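    /// Copy the source media into the composition tracks, then build the video composition and its layer tree.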
    do {
        var originalVideoSize: CGSize = self.prefferedVideoSize
        let ratio = self.prefferedVideoSize.width / Utility.get9By16ScreenSize().width
        
        if let aVideoAssetTrack: AVAssetTrack = aVideoAsset.tracks(withMediaType: .video).first  {
            originalVideoSize = aVideoAssetTrack.naturalSize
            var transforms = aVideoAssetTrack.preferredTransform
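            /// For landscape clips, scale to the preferred width and vertically center the frame in the render area.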
            if aVideoAsset.videoOrientation().orientation == .landscapeLeft || aVideoAsset.videoOrientation().orientation == .landscapeRight {
                let ratio = self.prefferedVideoSize.width / originalVideoSize.width
                let centerY: CGFloat = (self.prefferedVideoSize.height - (originalVideoSize.height * ratio)) / 2
                transforms = transforms.concatenating(CGAffineTransform(translationX: 0, y: centerY).scaledBy(x: ratio, y: ratio))
            }
                            
            try mutableCompositionVideoTrack?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: aVideoAssetTrack.timeRange.duration), of: aVideoAssetTrack, at: CMTime.zero)
            
            if !muteAudio, aVideoAsset.hasAudio, let audioAssetTrack: AVAssetTrack = aVideoAsset.tracks(withMediaType: .audio).first  {
                try mutableCompositionOriginalAudioTrack?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: audioAssetTrack.timeRange.duration), of: audioAssetTrack, at: CMTime.zero)
            }
            
            if let audioAsset = aAudioAsset, let aAudioAssetTrack: AVAssetTrack = audioAsset.tracks(withMediaType: .audio).first  {
                try mutableCompositionAudioTrack?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: aAudioAssetTrack.timeRange.duration), of: aAudioAssetTrack, at: CMTime.zero)
            }
            
            totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: aVideoAssetTrack.timeRange.duration)
            let mixVideoTrack = mixComposition.tracks(withMediaType: AVMediaType.video)[0]
            
            let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: mixVideoTrack)
            layerInstruction.setTransform(transforms, at: CMTime.zero)
            totalVideoCompositionInstruction.layerInstructions = [layerInstruction]
        }
        
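        /// Configure the video composition: 12 fps frame duration, the preferred render size, and the instruction built above.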
        let mutableVideoComposition: AVMutableVideoComposition = AVMutableVideoComposition()
        mutableVideoComposition.frameDuration = CMTimeMake(value: 1, timescale: 12)
        mutableVideoComposition.renderSize =  self.prefferedVideoSize
        mutableVideoComposition.instructions = [totalVideoCompositionInstruction]
        
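        /// Build the layer tree used by the animation tool: parentLayer hosts the video layer and any overlay layers.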
        let parentLayer = CALayer()
        parentLayer.frame = self.prefferedVideoRect
        parentLayer.isGeometryFlipped = true
        
        let videoLayer = CALayer()
        videoLayer.contentsGravity = .resizeAspect
        videoLayer.contentsScale = 1
        videoLayer.frame = self.prefferedVideoRect
        
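        /// When a crop frame is supplied, wrap the video layer in a container layer so it can be masked and transformed independently.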
        if let frame = frame {
            let scalledFrame = frame.scale(by: ratio)
            videoLayer.frame = scalledFrame
            
            let videoContainerLayer = CALayer()
            parentLayer.frame = self.prefferedVideoRect
            parentLayer.addSublayer(videoContainerLayer)
            videoContainerLayer.addSublayer(videoLayer)
            
            if let transformation = transformation {
                if let previewFrame = previewArea {
                    let maskLayer = CALayer()
                    maskLayer.backgroundColor = UIColor.black.cgColor
                    let scalledMaskFrame = previewFrame.scale(by: ratio)
                    maskLayer.frame = scalledMaskFrame
                    maskLayer.cornerRadius = previewCornerRadius.cgFloat
                    maskLayer.masksToBounds = true
                    videoContainerLayer.mask = maskLayer
                }
                videoLayer.transform = CATransform3DMakeAffineTransform(transformation)
            }
        } else {
            parentLayer.addSublayer(videoLayer)
        }
        
        /// Add the overlay image if present
        if let image = overlayImage {
            let imageLayer = CALayer()
            imageLayer.contents = image.cgImage
            imageLayer.frame = self.prefferedVideoRect
            imageLayer.masksToBounds = true
            if setOverlayAsBackground {
                parentLayer.insertSublayer(imageLayer, at: 0)
            } else {
                parentLayer.addSublayer(imageLayer)
            }
        }
        
        /// Add the GIF overlay layer if present
        if let overlay = gifLayer {
            overlay.frame = CGRect(origin: CGPoint(x: (self.prefferedVideoSize.width - overlay.frame.width) / 2, y: (self.prefferedVideoSize.height - overlay.frame.height) / 2), size: overlay.frame.size)
            
            overlay.transform = CATransform3DMakeAffineTransform(CGAffineTransform(scaleX: ratio, y: ratio))
            parentLayer.addSublayer(overlay)
        }
        
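        /// Hand the layer tree to the video composition so the Core Animation content is rendered into the exported frames.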
        mutableVideoComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentLayer)

        let outputURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("tmp-rendered-video-R6S9K2B4.m4v")
        self.exportVideo(from: mixComposition, toFile: outputURL, with: mutableVideoComposition,  success: success, failure: failure)
    } catch {
        DCDebug.print(error)
        failure(error)
    }
}

func exportVideo(from composition: AVComposition,
                 toFile output: URL,
                 with videoComposition: AVVideoComposition? = nil,
                 success: @escaping ((URL) -> Void),
                 failure: @escaping ((Error) -> Void)) {
    do {
        if FileManager.default.fileExists(atPath: output.path) {
            try FileManager.default.removeItem(at: output)
        }
        
        if let exportSession = AVAssetExportSession(asset: composition, presetName: self.presetName ?? AVAssetExportPresetHighestQuality) {
            exportSession.outputURL = output
            exportSession.outputFileType = AVFileType.mp4
            exportSession.shouldOptimizeForNetworkUse = true
            if let videoComposition = videoComposition {
                exportSession.videoComposition = videoComposition
            }
            
            /// try to export the file and handle the status cases
            exportSession.exportAsynchronously(completionHandler: {
                DispatchQueue.main.async {
                    switch exportSession.status {
                    case .completed:
                        success(output)
                    case .failed:
                        if let _error = exportSession.error {
                            failure(_error)
                        }
                    case .cancelled:
                        if let _error = exportSession.error {
                            failure(_error)
                        }
                    default:
                        success(output)
                    }
                }
            })
        } else {
            failure(VideoMakerError(error: .kFailedToStartAssetExportSession))
        }
    } catch {
        DCDebug.print(error)
        failure(error)
    }
}
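
For context, I call generate roughly like this (the URLs here are placeholders for my recorded files):

    generate(video: recordedVideoURL,      // placeholder: URL of the recorded video
             audioUrl: recordedAudioURL,   // placeholder: URL of the separately recorded audio
             muteAudio: false,
             success: { exportedURL in
                 print("Exported to \(exportedURL)")
             },
             failure: { error in
                 print("Export failed: \(error)")
             })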

I get the following error: Error Domain=AVFoundationErrorDomain Code=-11841 "Operation Stopped" UserInfo={NSLocalizedDescription=Operation Stopped, NSLocalizedFailureReason=Cannot Compose Video.}

Here are the time-range values when the export fails:

(lldb) po aVideoAsset.tracks(withMediaType: .audio).first?.timeRange.duration
▿ Optional<CMTime>
  ▿ some : CMTime
    - value : 1852
    - timescale : 600
    ▿ flags : CMTimeFlags
      - rawValue : 1
    - epoch : 0

(lldb) po aVideoAsset.tracks(withMediaType: .video).first?.timeRange.duration
▿ Optional<CMTime>
  ▿ some : CMTime
    - value : 1800
    - timescale : 600
    ▿ flags : CMTimeFlags
      - rawValue : 1
    - epoch : 0
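
Converting those durations to seconds makes the mismatch visible: the clip's own audio track is slightly longer than its video track. A quick check, using the values copied from the dumps above:

    import CoreMedia

    let audioDuration = CMTime(value: 1852, timescale: 600)  // ≈ 3.087 s
    let videoDuration = CMTime(value: 1800, timescale: 600)  // = 3.000 s
    print(CMTimeGetSeconds(audioDuration - videoDuration))   // ≈ 0.087 s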

I fixed the issue by replacing this line:

totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: aVideoAssetTrack.timeRange.duration)

with:

if let originalAudioTrack = mutableCompositionOriginalAudioTrack, originalAudioTrack.timeRange.duration > aVideoAssetTrack.timeRange.duration, !muteAudio, aVideoAsset.hasAudio {
    totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: originalAudioTrack.timeRange.duration)
} else {
    totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: aVideoAssetTrack.timeRange.duration)
}
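
An alternative I am considering (an untested sketch) is to derive the instruction's time range from the composition itself, after all the insertTimeRange calls, so it always covers the longest inserted track:

    totalVideoCompositionInstruction.timeRange = CMTimeRange(start: .zero, duration: mixComposition.duration)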

This fixed my problem, but I am not sure whether it is the correct solution or just a hack, so if anyone can give me a proper explanation of this issue and a solid solution beyond it, the bounty is yours.