iOS 合并三个视频 - 旋转中心视频
iOS Combine three videos - rotate the center video
我有三个视频。第一个来自后置摄像头。第二个来自前置摄像头,第三个来自后置摄像头。视频始终以横向模式拍摄,主页按钮在右侧。
后置视频的方向正确。使用前置摄像头拍摄的中心视频旋转 180 度(上下颠倒)。我一直在研究并尝试多种方法来转换中心视频,但没有成功。我每次都得到相同的结果。
我对整个过程感到非常沮丧。我在网上阅读的所有内容以及此处审阅者的 comments/suggestions 都应该有效,但它不起作用。无论我尝试进行何种转换,视频都是一样的。它一直表现得好像我没有应用任何转换一样。没有什么。我不明白为什么在此忽略转换。我在这上面花了几个星期,但我已经结束了——它根本行不通。
这是我的代码的当前迭代:
- (void)mergeVideos2:(NSMutableArray *)assets withCompletion:(void (^)(NSString *))completion {
AVMutableComposition *mutableComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *videoCompositionTrack = [mutableComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
__block NSMutableArray *instructions = [[NSMutableArray alloc] init];
__block CGSize size = CGSizeZero;
__block CMTime time = kCMTimeZero;
__block AVMutableVideoComposition *mutableVideoComposition = [AVMutableVideoComposition videoComposition];
__block CGAffineTransform transformflip = CGAffineTransformMakeScale(1, -1);
// __block CGAffineTransform transformflip = CGAffineTransformMakeRotation(M_PI);
__block int32_t commontimescale = 600;
[assets enumerateObjectsUsingBlock:^(id _Nonnull obj, NSUInteger idx, BOOL * _Nonnull stop) {
NSURL *assetUrl = (NSURL *)obj;
AVAsset *asset = [AVAsset assetWithURL:assetUrl];
CMTime cliptime = CMTimeConvertScale(asset.duration, commontimescale, kCMTimeRoundingMethod_QuickTime);
NSLog(@"%s: Number of tracks: %lu", __PRETTY_FUNCTION__, (unsigned long)[[asset tracks] count]);
AVAssetTrack *assetTrack = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject;
NSError *error;
[videoCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, cliptime)
ofTrack:assetTrack
atTime:time
error:&error];
if (error) {
NSLog(@"%s: Error - %@", __PRETTY_FUNCTION__, error.debugDescription);
}
AVMutableVideoCompositionLayerInstruction *videoLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoCompositionTrack];
CGAffineTransform transform = assetTrack.preferredTransform;
[videoLayerInstruction setTransform:CGAffineTransformConcat(transform, transformflip) atTime:time];
// the main instruction set - this is wrapping the time
AVMutableVideoCompositionInstruction *videoCompositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
videoCompositionInstruction.timeRange = CMTimeRangeMake(time, assetTrack.timeRange.duration);
if (videoLayerInstruction != nil)
videoCompositionInstruction.layerInstructions = @[videoLayerInstruction];
[instructions addObject:videoCompositionInstruction];
// time increment variables
time = CMTimeAdd(time, cliptime);
if (CGSizeEqualToSize(size, CGSizeZero)) {
size = assetTrack.naturalSize;;
}
}];
mutableVideoComposition.instructions = instructions;
// set the frame rate to 9fps
mutableVideoComposition.frameDuration = CMTimeMake(1, 12);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths firstObject];
int number = arc4random_uniform(10000);
self.outputFile = [documentsDirectory stringByAppendingFormat:@"/export_%i.mov",number];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mutableComposition
presetName:AVAssetExportPreset1280x720];
exporter.outputURL = [NSURL fileURLWithPath:self.outputFile];
//Set the output file type
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = YES;
dispatch_group_t group = dispatch_group_create();
dispatch_group_enter(group);
[exporter exportAsynchronouslyWithCompletionHandler:^{
dispatch_group_leave(group);
}];
dispatch_group_notify(group, dispatch_get_main_queue(), ^{
// get the size of the file
unsigned long long size= ([[[NSFileManager defaultManager] attributesOfItemAtPath:self.outputFile error:nil] fileSize]);
NSString *filesize = [NSByteCountFormatter stringFromByteCount:size countStyle:NSByteCountFormatterCountStyleFile];
NSString *thereturn = [NSString stringWithFormat:@"%@: %@", self.outputFile, filesize];
NSLog(@"Export File (Final) - %@", self.outputFile);
completion(thereturn);
});
}
有什么想法或建议吗?
每个 AVAssetTrack 都有一个 preferredTransform
属性。它包含有关如何旋转和平移视频以正确显示视频的信息,因此您不必猜测。在每个图层指令中使用每个视频的 preferredTransform。
不设置"videoCompositionTrack.preferredTransform = ..."
移除变换渐变“[videoLayerInstruction setTransformRampFromStartTransform:...”
在该枚举中,只需使用:
CGAffineTransform transform = assetTrack.preferredTransform;
[videoLayerInstruction setTransform:transform atTime:time];
我假设您的视频是用与输出相同的尺寸拍摄的,中间视频的宽度和高度颠倒了。如果不是,则必须添加适当的缩放比例:
float scaleFactor = ...// i.e. (outputWidth / videoWidth)
CGAffineTransform scale = CGAffineTransformMakeScale(scaleFactor,scaleFactor)
transform = CGAffineTransformConcat(transform, scale);
[videoLayerInstruction setTransform:transform atTime:time];
编辑:似乎在合成中出现倒置的源视频开始时是倒置的,但具有身份 CGAffineTransform。此代码用于以正确的方向显示它们:
- (void)mergeVideos2:(NSMutableArray *)assets withCompletion:(void (^)(NSString *))completion {
AVMutableComposition *mutableComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *videoCompositionTrack = [mutableComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
__block NSMutableArray *instructions = [[NSMutableArray alloc] init];
__block CMTime time = kCMTimeZero;
__block AVMutableVideoComposition *mutableVideoComposition = [AVMutableVideoComposition videoComposition];
__block int32_t commontimescale = 600;
// Create one layer instruction. We have one video track, and there should be one layer instruction per video track.
AVMutableVideoCompositionLayerInstruction *videoLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoCompositionTrack];
[assets enumerateObjectsUsingBlock:^(id _Nonnull obj, NSUInteger idx, BOOL * _Nonnull stop) {
NSURL *assetUrl = (NSURL *)obj;
AVAsset *asset = [AVAsset assetWithURL:assetUrl];
CMTime cliptime = CMTimeConvertScale(asset.duration, commontimescale, kCMTimeRoundingMethod_QuickTime);
NSLog(@"%s: Number of tracks: %lu", __PRETTY_FUNCTION__, (unsigned long)[[asset tracks] count]);
AVAssetTrack *assetTrack = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject;
CGSize naturalSize = assetTrack.naturalSize;
NSError *error;
//insert the video from the assetTrack into the composition track
[videoCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, cliptime)
ofTrack:assetTrack
atTime:time
error:&error];
if (error) {
NSLog(@"%s: Error - %@", __PRETTY_FUNCTION__, error.debugDescription);
}
CGAffineTransform transform = assetTrack.preferredTransform;
//set the layer to have this videos transform at the time that this video starts
if (<* the video is an intermediate video - has the wrong orientation*>) {
//these videos have the identity transform, yet they are upside down.
//we need to rotate them by M_PI radians (180 degrees) and shift the video back into place
CGAffineTransform rotateTransform = CGAffineTransformMakeRotation(M_PI);
CGAffineTransform translateTransform = CGAffineTransformMakeTranslation(naturalSize.width, naturalSize.height);
[videoLayerInstruction setTransform:CGAffineTransformConcat(rotateTransform, translateTransform) atTime:time];
} else {
[videoLayerInstruction setTransform:transform atTime:time];
}
// time increment variables
time = CMTimeAdd(time, cliptime);
}];
// the main instruction set - this is wrapping the time
AVMutableVideoCompositionInstruction *videoCompositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
videoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero,mutableComposition.duration); //make the instruction last for the entire composition
videoCompositionInstruction.layerInstructions = @[videoLayerInstruction];
[instructions addObject:videoCompositionInstruction];
mutableVideoComposition.instructions = instructions;
// set the frame rate to 9fps
mutableVideoComposition.frameDuration = CMTimeMake(1, 12);
//set the rendersize for the video we're about to write
mutableVideoComposition.renderSize = CGSizeMake(1280,720);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths firstObject];
int number = arc4random_uniform(10000);
self.outputFile = [documentsDirectory stringByAppendingFormat:@"/export_%i.mov",number];
//let the rendersize of the video composition dictate size. use quality preset here
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mutableComposition
presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL = [NSURL fileURLWithPath:self.outputFile];
//Set the output file type
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = YES;
exporter.videoComposition = mutableVideoComposition;
dispatch_group_t group = dispatch_group_create();
dispatch_group_enter(group);
[exporter exportAsynchronouslyWithCompletionHandler:^{
dispatch_group_leave(group);
}];
dispatch_group_notify(group, dispatch_get_main_queue(), ^{
// get the size of the file
unsigned long long size= ([[[NSFileManager defaultManager] attributesOfItemAtPath:self.outputFile error:nil] fileSize]);
NSString *filesize = [NSByteCountFormatter stringFromByteCount:size countStyle:NSByteCountFormatterCountStyleFile];
NSString *thereturn = [NSString stringWithFormat:@"%@: %@", self.outputFile, filesize];
NSLog(@"Export File (Final) - %@", self.outputFile);
completion(thereturn);
});
}
我有三个视频。第一个来自后置摄像头。第二个来自前置摄像头,第三个来自后置摄像头。视频始终以横向模式拍摄,主页按钮在右侧。
后置视频的方向正确。使用前置摄像头拍摄的中心视频旋转 180 度(上下颠倒)。我一直在研究并尝试多种方法来转换中心视频,但没有成功。我每次都得到相同的结果。
我对整个过程感到非常沮丧。我在网上阅读的所有内容以及此处审阅者的 comments/suggestions 都应该有效,但它不起作用。无论我尝试进行何种转换,视频都是一样的。它一直表现得好像我没有应用任何转换一样。没有什么。我不明白为什么在此忽略转换。我在这上面花了几个星期,但我已经结束了——它根本行不通。
这是我的代码的当前迭代:
- (void)mergeVideos2:(NSMutableArray *)assets withCompletion:(void (^)(NSString *))completion {
AVMutableComposition *mutableComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *videoCompositionTrack = [mutableComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
__block NSMutableArray *instructions = [[NSMutableArray alloc] init];
__block CGSize size = CGSizeZero;
__block CMTime time = kCMTimeZero;
__block AVMutableVideoComposition *mutableVideoComposition = [AVMutableVideoComposition videoComposition];
__block CGAffineTransform transformflip = CGAffineTransformMakeScale(1, -1);
// __block CGAffineTransform transformflip = CGAffineTransformMakeRotation(M_PI);
__block int32_t commontimescale = 600;
[assets enumerateObjectsUsingBlock:^(id _Nonnull obj, NSUInteger idx, BOOL * _Nonnull stop) {
NSURL *assetUrl = (NSURL *)obj;
AVAsset *asset = [AVAsset assetWithURL:assetUrl];
CMTime cliptime = CMTimeConvertScale(asset.duration, commontimescale, kCMTimeRoundingMethod_QuickTime);
NSLog(@"%s: Number of tracks: %lu", __PRETTY_FUNCTION__, (unsigned long)[[asset tracks] count]);
AVAssetTrack *assetTrack = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject;
NSError *error;
[videoCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, cliptime)
ofTrack:assetTrack
atTime:time
error:&error];
if (error) {
NSLog(@"%s: Error - %@", __PRETTY_FUNCTION__, error.debugDescription);
}
AVMutableVideoCompositionLayerInstruction *videoLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoCompositionTrack];
CGAffineTransform transform = assetTrack.preferredTransform;
[videoLayerInstruction setTransform:CGAffineTransformConcat(transform, transformflip) atTime:time];
// the main instruction set - this is wrapping the time
AVMutableVideoCompositionInstruction *videoCompositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
videoCompositionInstruction.timeRange = CMTimeRangeMake(time, assetTrack.timeRange.duration);
if (videoLayerInstruction != nil)
videoCompositionInstruction.layerInstructions = @[videoLayerInstruction];
[instructions addObject:videoCompositionInstruction];
// time increment variables
time = CMTimeAdd(time, cliptime);
if (CGSizeEqualToSize(size, CGSizeZero)) {
size = assetTrack.naturalSize;;
}
}];
mutableVideoComposition.instructions = instructions;
// set the frame rate to 9fps
mutableVideoComposition.frameDuration = CMTimeMake(1, 12);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths firstObject];
int number = arc4random_uniform(10000);
self.outputFile = [documentsDirectory stringByAppendingFormat:@"/export_%i.mov",number];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mutableComposition
presetName:AVAssetExportPreset1280x720];
exporter.outputURL = [NSURL fileURLWithPath:self.outputFile];
//Set the output file type
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = YES;
dispatch_group_t group = dispatch_group_create();
dispatch_group_enter(group);
[exporter exportAsynchronouslyWithCompletionHandler:^{
dispatch_group_leave(group);
}];
dispatch_group_notify(group, dispatch_get_main_queue(), ^{
// get the size of the file
unsigned long long size= ([[[NSFileManager defaultManager] attributesOfItemAtPath:self.outputFile error:nil] fileSize]);
NSString *filesize = [NSByteCountFormatter stringFromByteCount:size countStyle:NSByteCountFormatterCountStyleFile];
NSString *thereturn = [NSString stringWithFormat:@"%@: %@", self.outputFile, filesize];
NSLog(@"Export File (Final) - %@", self.outputFile);
completion(thereturn);
});
}
有什么想法或建议吗?
每个 AVAssetTrack 都有一个 preferredTransform
属性。它包含有关如何旋转和平移视频以正确显示视频的信息,因此您不必猜测。在每个图层指令中使用每个视频的 preferredTransform。
不设置"videoCompositionTrack.preferredTransform = ..."
移除变换渐变“[videoLayerInstruction setTransformRampFromStartTransform:...”
在该枚举中,只需使用:
CGAffineTransform transform = assetTrack.preferredTransform;
[videoLayerInstruction setTransform:transform atTime:time];
我假设您的视频是用与输出相同的尺寸拍摄的,中间视频的宽度和高度颠倒了。如果不是,则必须添加适当的缩放比例:
float scaleFactor = ...// i.e. (outputWidth / videoWidth)
CGAffineTransform scale = CGAffineTransformMakeScale(scaleFactor,scaleFactor)
transform = CGAffineTransformConcat(transform, scale);
[videoLayerInstruction setTransform:transform atTime:time];
编辑:似乎在合成中出现倒置的源视频开始时是倒置的,但具有身份 CGAffineTransform。此代码用于以正确的方向显示它们:
- (void)mergeVideos2:(NSMutableArray *)assets withCompletion:(void (^)(NSString *))completion {
AVMutableComposition *mutableComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *videoCompositionTrack = [mutableComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
__block NSMutableArray *instructions = [[NSMutableArray alloc] init];
__block CMTime time = kCMTimeZero;
__block AVMutableVideoComposition *mutableVideoComposition = [AVMutableVideoComposition videoComposition];
__block int32_t commontimescale = 600;
// Create one layer instruction. We have one video track, and there should be one layer instruction per video track.
AVMutableVideoCompositionLayerInstruction *videoLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoCompositionTrack];
[assets enumerateObjectsUsingBlock:^(id _Nonnull obj, NSUInteger idx, BOOL * _Nonnull stop) {
NSURL *assetUrl = (NSURL *)obj;
AVAsset *asset = [AVAsset assetWithURL:assetUrl];
CMTime cliptime = CMTimeConvertScale(asset.duration, commontimescale, kCMTimeRoundingMethod_QuickTime);
NSLog(@"%s: Number of tracks: %lu", __PRETTY_FUNCTION__, (unsigned long)[[asset tracks] count]);
AVAssetTrack *assetTrack = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject;
CGSize naturalSize = assetTrack.naturalSize;
NSError *error;
//insert the video from the assetTrack into the composition track
[videoCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, cliptime)
ofTrack:assetTrack
atTime:time
error:&error];
if (error) {
NSLog(@"%s: Error - %@", __PRETTY_FUNCTION__, error.debugDescription);
}
CGAffineTransform transform = assetTrack.preferredTransform;
//set the layer to have this videos transform at the time that this video starts
if (<* the video is an intermediate video - has the wrong orientation*>) {
//these videos have the identity transform, yet they are upside down.
//we need to rotate them by M_PI radians (180 degrees) and shift the video back into place
CGAffineTransform rotateTransform = CGAffineTransformMakeRotation(M_PI);
CGAffineTransform translateTransform = CGAffineTransformMakeTranslation(naturalSize.width, naturalSize.height);
[videoLayerInstruction setTransform:CGAffineTransformConcat(rotateTransform, translateTransform) atTime:time];
} else {
[videoLayerInstruction setTransform:transform atTime:time];
}
// time increment variables
time = CMTimeAdd(time, cliptime);
}];
// the main instruction set - this is wrapping the time
AVMutableVideoCompositionInstruction *videoCompositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
videoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero,mutableComposition.duration); //make the instruction last for the entire composition
videoCompositionInstruction.layerInstructions = @[videoLayerInstruction];
[instructions addObject:videoCompositionInstruction];
mutableVideoComposition.instructions = instructions;
// set the frame rate to 9fps
mutableVideoComposition.frameDuration = CMTimeMake(1, 12);
//set the rendersize for the video we're about to write
mutableVideoComposition.renderSize = CGSizeMake(1280,720);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths firstObject];
int number = arc4random_uniform(10000);
self.outputFile = [documentsDirectory stringByAppendingFormat:@"/export_%i.mov",number];
//let the rendersize of the video composition dictate size. use quality preset here
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mutableComposition
presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL = [NSURL fileURLWithPath:self.outputFile];
//Set the output file type
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = YES;
exporter.videoComposition = mutableVideoComposition;
dispatch_group_t group = dispatch_group_create();
dispatch_group_enter(group);
[exporter exportAsynchronouslyWithCompletionHandler:^{
dispatch_group_leave(group);
}];
dispatch_group_notify(group, dispatch_get_main_queue(), ^{
// get the size of the file
unsigned long long size= ([[[NSFileManager defaultManager] attributesOfItemAtPath:self.outputFile error:nil] fileSize]);
NSString *filesize = [NSByteCountFormatter stringFromByteCount:size countStyle:NSByteCountFormatterCountStyleFile];
NSString *thereturn = [NSString stringWithFormat:@"%@: %@", self.outputFile, filesize];
NSLog(@"Export File (Final) - %@", self.outputFile);
completion(thereturn);
});
}