AVMutableComposition output freezes at the last frame of the first video
I'm trying to merge multiple clips (videos) into one with AVMutableComposition, and I've already got the merging working, along with rotating and translating each instruction. However, one problem remains.
When the first clip finishes, the output freezes on its last frame (the last frame of the first clip). This only happens when another clip is supposed to become visible; for example, if I set the opacity of the second and third clips to 0 at CMTime.Zero and the first clip's opacity to 0 at firstClip.Duration, the result is a video that shows the first clip and then, once it ends, shows a black background.
The audio of the clips plays perfectly.
Here is my code:
public void TESTING()
{
    //microphone
    AVCaptureDevice microphone = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Audio);

    AVMutableComposition mixComposition = AVMutableComposition.Create();
    AVVideoCompositionLayerInstruction[] Instruction_Array = new AVVideoCompositionLayerInstruction[Clips.Count];

    foreach (string clip in Clips)
    {
        var asset = AVUrlAsset.FromUrl(new NSUrl(clip, false)) as AVUrlAsset;

        #region HoldVideoTrack
        //This range applies to the video, not to the mixcomposition
        CMTimeRange range = new CMTimeRange()
        {
            Start = CMTime.Zero,
            Duration = asset.Duration
        };

        var duration = mixComposition.Duration;
        NSError error;

        AVMutableCompositionTrack videoTrack = mixComposition.AddMutableTrack(AVMediaType.Video, 0);
        AVAssetTrack assetVideoTrack = asset.TracksWithMediaType(AVMediaType.Video)[0];
        videoTrack.InsertTimeRange(range, assetVideoTrack, duration, out error);
        videoTrack.PreferredTransform = assetVideoTrack.PreferredTransform;

        if (microphone != null)
        {
            AVMutableCompositionTrack audioTrack = mixComposition.AddMutableTrack(AVMediaType.Audio, 0);
            AVAssetTrack assetAudioTrack = asset.TracksWithMediaType(AVMediaType.Audio)[0];
            audioTrack.InsertTimeRange(range, assetAudioTrack, duration, out error);
        }
        #endregion

        AVAssetTrack videoTrackWithMediaType = mixComposition.TracksWithMediaType(AVMediaType.Video)[0];
        var instruction = AVMutableVideoCompositionLayerInstruction.FromAssetTrack(videoTrackWithMediaType);

        #region Instructions
        int counter = Clips.IndexOf(clip);
        Instruction_Array[counter] = TestingInstruction(asset, mixComposition.Duration, videoTrackWithMediaType);
        #endregion
    }

    // 6
    AVMutableVideoCompositionInstruction mainInstruction = AVMutableVideoCompositionInstruction.Create() as AVMutableVideoCompositionInstruction;

    CMTimeRange rangeIns = new CMTimeRange()
    {
        Start = new CMTime(0, 0),
        Duration = mixComposition.Duration
    };
    mainInstruction.TimeRange = rangeIns;
    mainInstruction.LayerInstructions = Instruction_Array;

    var mainComposition = AVMutableVideoComposition.Create();
    mainComposition.Instructions = new AVVideoCompositionInstruction[1] { mainInstruction };
    mainComposition.FrameDuration = new CMTime(1, 30);
    mainComposition.RenderSize = new CGSize(mixComposition.NaturalSize.Height, mixComposition.NaturalSize.Width);

    finalVideo_path = NSUrl.FromFilename(Path.Combine(Path.GetTempPath(), "Whole2.mov"));
    if (File.Exists(Path.GetTempPath() + "Whole2.mov"))
    {
        File.Delete(Path.GetTempPath() + "Whole2.mov");
    }

    //... export video ...
    AVAssetExportSession exportSession = new AVAssetExportSession(mixComposition, AVAssetExportSessionPreset.HighestQuality)
    {
        OutputUrl = NSUrl.FromFilename(Path.Combine(Path.GetTempPath(), "Whole2.mov")),
        OutputFileType = AVFileType.QuickTimeMovie,
        ShouldOptimizeForNetworkUse = true,
        VideoComposition = mainComposition
    };
    exportSession.ExportAsynchronously(_OnExportDone);

    FinalVideo = Path.Combine(Path.GetTempPath(), "Whole2.mov");
}

private AVMutableVideoCompositionLayerInstruction TestingInstruction(AVAsset asset, CMTime currentTime, AVAssetTrack mixComposition_video_Track)
{
    var instruction = AVMutableVideoCompositionLayerInstruction.FromAssetTrack(mixComposition_video_Track);
    var startTime = CMTime.Subtract(currentTime, asset.Duration);

    //NaturalSize.Height is passed as a width parameter because iOS stores the video recording horizontally
    CGAffineTransform translateToCenter = CGAffineTransform.MakeTranslation(mixComposition_video_Track.NaturalSize.Height, 0);
    //Angle in radians, not in degrees
    CGAffineTransform rotate = CGAffineTransform.Rotate(translateToCenter, (nfloat)(Math.PI / 2));

    instruction.SetTransform(rotate, (CMTime.Subtract(currentTime, asset.Duration)));
    instruction.SetOpacity(1, startTime);
    instruction.SetOpacity(0, currentTime);

    return instruction;
}
}
Does anyone know how to fix this?
If you need more information, I'll provide it as soon as I see your request. Thank you all for your time, and have a great day. (:
I believe I've found the problem in your code. You are only creating instructions for the first track. Take a look at these two lines:
AVAssetTrack videoTrackWithMediaType = mixComposition.TracksWithMediaType(AVMediaType.Video)[0];
var instruction = AVMutableVideoCompositionLayerInstruction.FromAssetTrack(videoTrackWithMediaType);
AVMutableComposition.tracksWithMediaType returns an array of tracks, so the [0] at the end of the first line grabs only the first track in the composition, which is the first video. As you loop, you are just creating instructions for that first video over and over again.
Your code and my unfamiliarity with Xamarin confuse me a bit, but I believe you can do the following and it should work.
Change these lines:
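To make that concrete, here is a rough walk-through (my own sketch, assuming Clips holds three entries) of what each pass of your loop sees:

// Inside the loop, after AddMutableTrack has run for the current clip:
var compositionVideoTracks = mixComposition.TracksWithMediaType(AVMediaType.Video);
// Pass 1: compositionVideoTracks has 1 track; [0] is clip 1's track (correct by coincidence).
// Pass 2: compositionVideoTracks has 2 tracks; [0] is still clip 1's track.
// Pass 3: compositionVideoTracks has 3 tracks; [0] is still clip 1's track,
//         so all three layer instructions end up targeting the first clip only.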
AVAssetTrack videoTrackWithMediaType = mixComposition.TracksWithMediaType(AVMediaType.Video)[0];
var instruction = AVMutableVideoCompositionLayerInstruction.FromAssetTrack(videoTrackWithMediaType);
#region Instructions
int counter = Clips.IndexOf(clip);
Instruction_Array[counter] = TestingInstruction(asset, mixComposition.Duration, videoTrackWithMediaType);
#endregion
to this:
var instruction = AVMutableVideoCompositionLayerInstruction.FromAssetTrack(videoTrack);
#region Instructions
int counter = Clips.IndexOf(clip);
Instruction_Array[counter] = TestingInstruction(asset, mixComposition.Duration, videoTrack);
#endregion
All I did here was remove the videoTrackWithMediaType variable you created and use videoTrack instead. There is no need to fetch the corresponding track from the composition, because you already created it, and it is still in scope in the block where you create the instruction.
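For clarity, this is roughly how the loop body would look with that change applied. It's a sketch based on your code above (I haven't compiled it against Xamarin.iOS, and the unused local instruction from the original is omitted):

foreach (string clip in Clips)
{
    var asset = AVUrlAsset.FromUrl(new NSUrl(clip, false)) as AVUrlAsset;

    //This range applies to the video, not to the mixcomposition
    CMTimeRange range = new CMTimeRange()
    {
        Start = CMTime.Zero,
        Duration = asset.Duration
    };

    var duration = mixComposition.Duration;
    NSError error;

    // The composition track created for THIS clip...
    AVMutableCompositionTrack videoTrack = mixComposition.AddMutableTrack(AVMediaType.Video, 0);
    AVAssetTrack assetVideoTrack = asset.TracksWithMediaType(AVMediaType.Video)[0];
    videoTrack.InsertTimeRange(range, assetVideoTrack, duration, out error);
    videoTrack.PreferredTransform = assetVideoTrack.PreferredTransform;

    if (microphone != null)
    {
        AVMutableCompositionTrack audioTrack = mixComposition.AddMutableTrack(AVMediaType.Audio, 0);
        AVAssetTrack assetAudioTrack = asset.TracksWithMediaType(AVMediaType.Audio)[0];
        audioTrack.InsertTimeRange(range, assetAudioTrack, duration, out error);
    }

    // ...is also the track the layer instruction should be built from.
    int counter = Clips.IndexOf(clip);
    Instruction_Array[counter] = TestingInstruction(asset, mixComposition.Duration, videoTrack);
}

The only functional change from your original loop is that the last argument to TestingInstruction is now videoTrack, the track that was just inserted for the current clip.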