How to merge mp4 and MOV type videos using AVMutableComposition iOS Swift?

I am merging videos using AVMutableComposition. When both videos are of the same type (for example, two mp4 videos or two mov videos), it works fine.

But if I try to merge an mp4 video and a mov video, the merged video stops after the first video finishes.

You can use this code. It determines a common output size from the input tracks, inserts each clip into an AVMutableComposition back to back, scales and centers every clip with a layer instruction, and exports the result as mp4, so mixed mp4 and mov inputs end up in a single container:
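
    // Note: this listing references MergedVideoCompletionHandler, defaultSize,
    // callback, progressCallback, getAppError, and FileManager.removeItemIfExisted,
    // which the post does not define; assumed declarations are sketched after the listing.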

    private func mergeAssets(arrayAssets: [AVAsset], audioAsset: AVAsset, completionHandler: @escaping MergedVideoCompletionHandler) {
        var insertTime = kCMTimeZero
        let animation = true
        var arrayLayerInstructions: [AVMutableVideoCompositionLayerInstruction] = []
        var outputSize = CGSize(width: 0, height: 0)
        // Determine video output size
        for videoAsset in arrayAssets {
            let videoTrack = videoAsset.tracks(withMediaType: AVMediaType.video)[0]

            let assetInfo = orientationFromTransform(transform: videoTrack.preferredTransform)

            var videoSize = videoTrack.naturalSize
            if assetInfo.isPortrait == true {
                videoSize.width = videoTrack.naturalSize.height
                videoSize.height = videoTrack.naturalSize.width
            }

            if videoSize.height > outputSize.height {
                outputSize = videoSize
            }
        }

        if outputSize.width == 0 || outputSize.height == 0 {
            outputSize = defaultSize
        }

        let audioSoundTrack = audioAsset.tracks(withMediaType: AVMediaType.audio).first
        // Init composition
        let mixComposition = AVMutableComposition.init()
        for videoAsset in arrayAssets {
            // Get video track
            guard let videoTrack = videoAsset.tracks(withMediaType: AVMediaType.video).first else { continue }

            // Get audio track

            // Init video & audio composition track
            let videoCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.video,
                                                                       preferredTrackID: Int32(kCMPersistentTrackID_Invalid))

            let audioCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio,
                                                                       preferredTrackID: Int32(kCMPersistentTrackID_Invalid))

            do {
                let startTime = kCMTimeZero
                let duration = videoAsset.duration

                // Add video track to video composition at specific time
                try videoCompositionTrack?.insertTimeRange(CMTimeRangeMake(startTime, duration),
                                                           of: videoTrack,
                                                           at: insertTime)

                // Add audio track to audio composition at specific time
                if let audioTrack = audioSoundTrack {
                    try audioCompositionTrack?.insertTimeRange(CMTimeRangeMake(startTime, duration),
                                                               of: audioTrack,
                                                               at: insertTime)
                }

                // Add instruction for video track
                let layerInstruction = videoCompositionInstructionForTrack(track: videoCompositionTrack!,
                                                                           asset: videoAsset,
                                                                           standardSize: outputSize,
                                                                           atTime: insertTime)

                // Hide video track before changing to new track
                let endTime = CMTimeAdd(insertTime, duration)

                if animation {
                    let timeScale = videoAsset.duration.timescale
                    let durationAnimation = CMTime.init(seconds: 1, preferredTimescale: timeScale)

                    layerInstruction.setOpacityRamp(fromStartOpacity: 1.0, toEndOpacity: 0.0, timeRange: CMTimeRange.init(start: endTime, duration: durationAnimation))
                }
                else {
                    layerInstruction.setOpacity(0, at: endTime)
                }

                arrayLayerInstructions.append(layerInstruction)

                // Increase the insert time
                insertTime = CMTimeAdd(insertTime, duration)
            }
            catch {
                let appError = getAppError(error, message: "Failed to load video track")
                completionHandler(nil, appError)
                return
            }
        }

        // Main video composition instruction
        let mainInstruction = AVMutableVideoCompositionInstruction()
        mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, insertTime)
        mainInstruction.layerInstructions = arrayLayerInstructions

        // Main video composition
        let mainComposition = AVMutableVideoComposition()
        mainComposition.instructions = [mainInstruction]
        mainComposition.frameDuration = CMTimeMake(1, 30)
        mainComposition.renderSize = outputSize
        let path = NSTemporaryDirectory().appending("mergedVideo.mp4")
        let exportURL = URL.init(fileURLWithPath: path)

        // Remove file if existed
        FileManager.default.removeItemIfExisted(exportURL)

        let exporter = AVAssetExportSession.init(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
        exporter?.outputURL = exportURL
        exporter?.outputFileType = AVFileType.mp4
        exporter?.shouldOptimizeForNetworkUse = true
        exporter?.videoComposition = mainComposition

        // Do export
        exporter?.exportAsynchronously(completionHandler: {
            DispatchQueue.main.async {
                self.exportDidFinish(exporter: exporter, videoURL: exportURL)
            }
        })
    }
    fileprivate func exportDidFinish(exporter: AVAssetExportSession?, videoURL: URL) {
        var progressValue: Float = 0
        if let status = exporter?.status {
            switch status {
            case .exporting:
                progressValue = exporter?.progress ?? 0

            case .failed, .cancelled, .unknown:
                progressValue = 1.0
                let appError = self.getAppError(exporter?.error, message: "Failed to create Data")
                print("AVExport failed: \(exporter?.error?.localizedDescription ?? "No Error")")
                if let exportBlock = self.callback {
                    exportBlock(nil, appError)
                }

            case .waiting:
                break
            case .completed:
                progressValue = 1.0
                print("Exported file: \(videoURL.absoluteString)")

                if let exportBlock = self.callback {
                    exportBlock(videoURL, nil)
                }
            }

            if let progressBlock = self.progressCallback {
                DispatchQueue.main.async {
                    progressBlock(progressValue)
                }
            }
        }
    }
    }

    extension MediaAudioMergerServiceManager {
        fileprivate func orientationFromTransform(transform: CGAffineTransform) -> (orientation: UIImageOrientation, isPortrait: Bool) {
            var assetOrientation = UIImageOrientation.up
            var isPortrait = false
            if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
                assetOrientation = .right
                isPortrait = true
            } else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
                assetOrientation = .left
                isPortrait = true
            } else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
                assetOrientation = .up
            } else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
                assetOrientation = .down
            }
            return (assetOrientation, isPortrait)
        }

        fileprivate func videoCompositionInstructionForTrack(track: AVCompositionTrack, asset: AVAsset, standardSize: CGSize, atTime: CMTime) -> AVMutableVideoCompositionLayerInstruction {
            let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
            let assetTrack = asset.tracks(withMediaType: AVMediaType.video)[0]

            let transform = assetTrack.preferredTransform
            let assetInfo = orientationFromTransform(transform: transform)

            var aspectFillRatio: CGFloat = 1
            if assetTrack.naturalSize.height < assetTrack.naturalSize.width {
                aspectFillRatio = standardSize.height / assetTrack.naturalSize.height
            } else {
                aspectFillRatio = standardSize.width / assetTrack.naturalSize.width
            }

            if assetInfo.isPortrait {
                let scaleFactor = CGAffineTransform(scaleX: aspectFillRatio, y: aspectFillRatio)

                let posX = standardSize.width / 2 - (assetTrack.naturalSize.height * aspectFillRatio) / 2
                let posY = standardSize.height / 2 - (assetTrack.naturalSize.width * aspectFillRatio) / 2
                let moveFactor = CGAffineTransform(translationX: posX, y: posY)

                instruction.setTransform(assetTrack.preferredTransform.concatenating(scaleFactor).concatenating(moveFactor), at: atTime)
            } else {
                let scaleFactor = CGAffineTransform(scaleX: aspectFillRatio, y: aspectFillRatio)

                let posX = standardSize.width / 2 - (assetTrack.naturalSize.width * aspectFillRatio) / 2
                let posY = standardSize.height / 2 - (assetTrack.naturalSize.height * aspectFillRatio) / 2
                let moveFactor = CGAffineTransform(translationX: posX, y: posY)

                var concat = assetTrack.preferredTransform.concatenating(scaleFactor).concatenating(moveFactor)

                if assetInfo.orientation == .down {
                    let fixUpsideDown = CGAffineTransform(rotationAngle: CGFloat(Double.pi))
                    concat = fixUpsideDown.concatenating(scaleFactor).concatenating(moveFactor)
                }

                instruction.setTransform(concat, at: atTime)
            }
            return instruction
        }

    }
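
The listing references several members that the post does not show: MergedVideoCompletionHandler, defaultSize, callback, progressCallback, getAppError, and FileManager.removeItemIfExisted. Below is a minimal sketch of how they might be declared; the types, values, and bodies here are assumptions made so the snippet compiles, not the original author's code:

    import AVFoundation
    import UIKit

    // Completion signature assumed from how mergeAssets and exportDidFinish call it.
    typealias MergedVideoCompletionHandler = (URL?, Error?) -> Void

    extension FileManager {
        // Assumed helper, inferred from the call FileManager.default.removeItemIfExisted(exportURL).
        func removeItemIfExisted(_ url: URL) {
            if fileExists(atPath: url.path) {
                try? removeItem(at: url)
            }
        }
    }

    class MediaAudioMergerServiceManager {
        // Assumed stored callbacks used by exportDidFinish.
        var callback: MergedVideoCompletionHandler?
        var progressCallback: ((Float) -> Void)?

        // Assumed fallback render size, used when no track reports a usable size.
        let defaultSize = CGSize(width: 1280, height: 720)

        // Assumed error wrapper; the original presumably builds an app-specific error type.
        fileprivate func getAppError(_ error: Error?, message: String) -> Error {
            return NSError(domain: "MediaAudioMergerServiceManager",
                           code: -1,
                           userInfo: [NSLocalizedDescriptionKey: error?.localizedDescription ?? message])
        }
    }

Since mergeAssets is private, you would call it from inside the class, for example (mp4URL, movURL, and audioURL are placeholders for your own file URLs):

    let videoAssets = [AVAsset(url: mp4URL), AVAsset(url: movURL)] // one mp4 and one mov clip
    let backgroundAudio = AVAsset(url: audioURL)

    mergeAssets(arrayAssets: videoAssets, audioAsset: backgroundAudio) { url, error in
        if let url = url {
            print("Merged video written to \(url)")
        } else if let error = error {
            print("Merge failed: \(error.localizedDescription)")
        }
    }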