Composing videos into a single movie with transitions
I have several H.264 QuickTime videos that I want to concatenate into a single video with transitions (fade in/out) between them. The code below is supposed to take an array of AVAssets and insert them into an AVMutableVideoComposition.
But I keep getting -11841 from AVAssetExportSession! Please help me figure out what's going wrong.
var index = 0
let mutableComposition = AVMutableComposition()
var currentTime = kCMTimeZero
// Transition related
let timeOffsetBetweenVideos = CMTimeMakeWithSeconds(1, 30)
let videoComposition = AVMutableVideoComposition()
var videoCompInstructions: [AVMutableVideoCompositionInstruction] = []
let compositionAudioTrack = mutableComposition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
var lastCompositionVideoTrack: AVMutableCompositionTrack? = nil
for asset in movieAssets as! [AVAsset] {
    // Add video track into composition
    let compositionVideoTrack = mutableComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
    let videoTrack = asset.tracksWithMediaType(AVMediaTypeVideo)[0]
    try compositionVideoTrack.insertTimeRange(videoTrack.timeRange, ofTrack: videoTrack, atTime: currentTime)
    compositionVideoTrack.scaleTimeRange(videoTrack.timeRange, toDuration: CMTimeAdd(asset.duration, timeOffsetBetweenVideos))
    // Add audio track into composition
    let audioTrack = asset.tracksWithMediaType(AVMediaTypeAudio)[0]
    try compositionAudioTrack.insertTimeRange(audioTrack.timeRange, ofTrack: audioTrack, atTime: currentTime)
    if index > 0 {
        // Cross-fade between the previous track and the current one
        let fromLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: lastCompositionVideoTrack!)
        let transitionTimeRange = CMTimeRangeMake(currentTime, timeOffsetBetweenVideos)
        fromLayerInstruction.setOpacityRampFromStartOpacity(1.0, toEndOpacity: 0.5, timeRange: transitionTimeRange)
        let toLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: compositionVideoTrack)
        toLayerInstruction.setOpacityRampFromStartOpacity(0.5, toEndOpacity: 1.0, timeRange: transitionTimeRange)
        let videoCompositionInstruction = AVMutableVideoCompositionInstruction()
        videoCompositionInstruction.timeRange = transitionTimeRange
        videoCompositionInstruction.layerInstructions = [fromLayerInstruction, toLayerInstruction]
        videoCompInstructions.append(videoCompositionInstruction)
    }
    lastCompositionVideoTrack = compositionVideoTrack
    currentTime = CMTimeAdd(currentTime, asset.duration)
    index += 1
}
let exportSession = AVAssetExportSession(asset: mutableComposition, presetName: AVAssetExportPresetHighestQuality)
exportSession?.outputFileType = AVFileTypeQuickTimeMovie
exportSession?.outputURL = retFileUrl
if movieAssets.count > 1 {
    videoComposition.instructions = videoCompInstructions
    videoComposition.renderSize = mutableComposition.naturalSize
    print("frame size: \(mutableComposition.naturalSize)")
    videoComposition.renderScale = 1.0
    videoComposition.frameDuration = CMTimeMake(1, 30)
    exportSession?.videoComposition = videoComposition
}
exportSession?.exportAsynchronouslyWithCompletionHandler({ () -> Void in
    if exportSession?.status == AVAssetExportSessionStatus.Completed {
        print("Video file exported: \(retFileUrl)")
    } else {
        print("Failed exporting video: \(exportSession?.error?.localizedDescription)")
        print(exportSession?.error?.localizedFailureReason)
    }
})
Finally solved the problem! It seems I have to put all of the AVMutableVideoCompositionLayerInstructions into a single AVMutableVideoCompositionInstruction, because their time ranges overlap one another. (For the record, -11841 is AVErrorInvalidVideoComposition: the per-transition instructions in my original code overlapped the clips and left gaps in the timeline, which makes the video composition invalid.)
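The shape of the fix, as a minimal sketch (allLayerInstructions is a hypothetical placeholder for the layer instructions that the loop in the full code below builds up):

// One instruction that spans the entire composition and holds every layer
// instruction, instead of one short instruction per transition.
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, mutableComposition.duration)
instruction.layerInstructions = allLayerInstructions // hypothetical placeholder
videoComposition.instructions = [instruction]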
I'll post the full working code below in case anyone runs into the same problem.
var index = 0
let mutableComposition = AVMutableComposition()
// Transition related
let timeOffsetBetweenVideos = CMTimeMakeWithSeconds(0.3, 30)
// A single instruction holds every layer instruction; separate per-transition
// instructions with overlapping time ranges make the composition invalid.
let videoCompositionInstruction = AVMutableVideoCompositionInstruction()
let compositionAudioTrack = mutableComposition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
var lastVideoEndTime = kCMTimeZero
for asset in movieAssets as! [AVAsset] {
    // Add video track into composition
    let videoStartTime = CMTimeCompare(lastVideoEndTime, kCMTimeZero) == 0 ? kCMTimeZero : CMTimeSubtract(lastVideoEndTime, timeOffsetBetweenVideos)
    let compositionVideoTrack = mutableComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
    let videoTrack = asset.tracksWithMediaType(AVMediaTypeVideo)[0]
    try compositionVideoTrack.insertTimeRange(videoTrack.timeRange, ofTrack: videoTrack, atTime: videoStartTime)
    if index == (movieAssets.count - 1) {
        compositionVideoTrack.scaleTimeRange(videoTrack.timeRange, toDuration: CMTimeAdd(asset.duration, timeOffsetBetweenVideos))
    }
    // Add audio track into composition
    let audioTrack = asset.tracksWithMediaType(AVMediaTypeAudio)[0]
    try compositionAudioTrack.insertTimeRange(audioTrack.timeRange, ofTrack: audioTrack, atTime: videoStartTime)
    if movieAssets.count == 1 {
        break
    }
    if index == 0 {
        // The first movie has only an ending animation
        let transitionTimeRange = CMTimeRangeMake(CMTimeSubtract(compositionVideoTrack.timeRange.end, timeOffsetBetweenVideos), timeOffsetBetweenVideos)
        let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: compositionVideoTrack)
        let transform = CGAffineTransformTranslate(videoTrack.preferredTransform, movieFrameSize.width / -1.0, 0)
        layerInstruction.setTransformRampFromStartTransform(videoTrack.preferredTransform, toEndTransform: transform, timeRange: transitionTimeRange)
        layerInstruction.setOpacity(0.0, atTime: compositionVideoTrack.timeRange.end)
        videoCompositionInstruction.layerInstructions.append(layerInstruction)
    } else if index == (movieAssets.count - 1) {
        // The last movie has only a beginning animation
        let transitionTimeRange = CMTimeRangeMake(lastVideoEndTime, timeOffsetBetweenVideos)
        let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: compositionVideoTrack)
        var transform = CGAffineTransformScale(videoTrack.preferredTransform, 0.5, 0.5)
        transform = CGAffineTransformTranslate(transform, movieFrameSize.width / 2, movieFrameSize.height / 2)
        layerInstruction.setTransformRampFromStartTransform(transform, toEndTransform: videoTrack.preferredTransform, timeRange: transitionTimeRange)
        videoCompositionInstruction.layerInstructions.append(layerInstruction)
    } else {
        // Movies in between have both beginning and ending animations
        let transitionTimeRangeBegin = CMTimeRangeMake(lastVideoEndTime, timeOffsetBetweenVideos)
        let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: compositionVideoTrack)
        var transformBegin = CGAffineTransformScale(videoTrack.preferredTransform, 0.5, 0.5)
        transformBegin = CGAffineTransformTranslate(transformBegin, movieFrameSize.width / 2, movieFrameSize.height / 2)
        layerInstruction.setTransformRampFromStartTransform(transformBegin, toEndTransform: videoTrack.preferredTransform, timeRange: transitionTimeRangeBegin)
        let transitionTimeRangeEnd = CMTimeRangeMake(CMTimeSubtract(compositionVideoTrack.timeRange.end, timeOffsetBetweenVideos), timeOffsetBetweenVideos)
        let transform = CGAffineTransformTranslate(videoTrack.preferredTransform, movieFrameSize.width / -1.0, 0)
        layerInstruction.setTransformRampFromStartTransform(videoTrack.preferredTransform, toEndTransform: transform, timeRange: transitionTimeRangeEnd)
        layerInstruction.setOpacity(0.0, atTime: compositionVideoTrack.timeRange.end)
        videoCompositionInstruction.layerInstructions.append(layerInstruction)
    }
    lastVideoEndTime = CMTimeSubtract(compositionVideoTrack.timeRange.end, timeOffsetBetweenVideos)
    index += 1
}
let exportSession = AVAssetExportSession(asset: mutableComposition, presetName: AVAssetExportPresetHighestQuality)
exportSession?.outputFileType = AVFileTypeQuickTimeMovie
exportSession?.outputURL = retFileUrl
if movieAssets.count > 1 {
    // The single instruction spans the entire composition
    videoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, mutableComposition.duration)
    videoCompositionInstruction.enablePostProcessing = false
    let videoComposition = AVMutableVideoComposition(propertiesOfAsset: mutableComposition)
    videoComposition.instructions = [videoCompositionInstruction]
    videoComposition.renderSize = mutableComposition.naturalSize
    videoComposition.renderScale = 1.0
    videoComposition.frameDuration = CMTimeMake(1, 30)
    exportSession?.videoComposition = videoComposition
}
exportSession?.exportAsynchronouslyWithCompletionHandler({ () -> Void in
    if exportSession?.status == AVAssetExportSessionStatus.Completed {
        print("Video file exported: \(retFileUrl)")
    } else {
        print("Failed exporting video: \(exportSession?.error?.localizedDescription)")
        print(exportSession?.error?.localizedFailureReason)
    }
})
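One more tip for anyone chasing -11841: AVVideoComposition can validate itself against an asset before you export, so an invalid instruction layout can be caught early. A minimal sketch, reusing videoComposition and mutableComposition from the code above (pass a delegate conforming to AVVideoCompositionValidationHandling instead of nil if you want detailed diagnostics):

// isValidForAsset returns false when the instruction time ranges overlap,
// leave gaps, or reference missing tracks (the usual causes of -11841).
let fullRange = CMTimeRangeMake(kCMTimeZero, mutableComposition.duration)
if !videoComposition.isValidForAsset(mutableComposition, timeRange: fullRange, validationDelegate: nil) {
    print("Invalid video composition: export would fail with -11841")
}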