在 iOS 中创建具有模糊背景的 AVComposition
Creating AVComposition with a Blurry Background in iOS
我不太确定如何提出这个问题,如果有任何有助于改进这个问题的反馈,我将不胜感激。我正在尝试制作一个接受视频 URL 作为输入(本地视频)的函数,而这个函数又试图创建一个背景模糊的视频,原始视频位于其中心并按比例缩小。我的问题是我的代码工作正常,除了当我使用直接从 iPhone 相机录制的视频时。
我试图实现的一个例子如下(取自我的代码):
这里输入的视频是mp4。我已经能够使代码与我在线下载的 mov 文件一起工作。但是当我使用从 iOS 相机录制的 mov 文件时,我得到以下结果:
(顺便请教:在问题中发布拍摄的截图时,怎样才能少占一些版面空间?)
现在,我不确定如何提出这个问题的原因是:这个过程涉及相当多的代码,我还不能完全缩小问题的范围,但我相信问题出在我将在下面粘贴的函数中。我还会附上一个 GitHub 存储库的链接,其中发布了我项目的精简版本,供任何好奇或愿意提供帮助的人使用。我必须说明,我使用的代码最初是由名为 TheTiger 的 Stack Overflow 用户针对另一个问题编写的;我重构了其中的部分内容,并在征得其许可后在此发布。
我的 github 存储库在 link 此处编辑:GITHUB REPO
我的演示设置了 3 个不同的视频:一个从网上下载的 mp4(工作正常),一个从网上下载的 mov(工作正常),以及一个我在 iPhone 上录制的 mov(不能正常工作)。
我认为导致问题的代码在这里:
/// Builds an AVMutableComposition that layers `asset` (scaled down, centred)
/// on top of `blurVideo` (the blurred background), copies the original audio,
/// and exports the result.
///
/// - Parameters:
///   - asset: The original (foreground) video.
///   - blurVideo: The pre-rendered blurred background video.
///   - scale: Scale factor applied when aspect-fitting the foreground video.
///   - completion: Called with the output URL on success, or an error.
///
/// Fix vs. the original: `addMutableTrack` can return nil and `insertTimeRange`
/// can throw; the old code force-unwrapped the tracks (potential crash) and used
/// `try?` (silent failure → corrupt export). Failures now surface via `completion`.
fileprivate func addAllVideosAtCenterOfBlur(asset: AVURLAsset, blurVideo: AVURLAsset, scale: CGFloat, completion: @escaping BlurredBackgroundManagerCompletion) {
    let mixComposition = AVMutableComposition()
    var instructionLayers: [AVMutableVideoCompositionLayerInstruction] = []

    // Helper: report a composition-setup failure through the completion handler.
    func fail(_ message: String) {
        completion(nil, NSError(domain: "BlurredBackgroundManager",
                                code: -1,
                                userInfo: [NSLocalizedDescriptionKey: message]))
    }

    // 1. Blurred background track.
    guard let blurCompTrack = mixComposition.addMutableTrack(withMediaType: .video,
                                                             preferredTrackID: kCMPersistentTrackID_Invalid) else {
        return fail("Could not add the blurred background track to the composition.")
    }
    if let blurSourceTrack = blurVideo.tracks(withMediaType: .video).first {
        do {
            let blurRange = CMTimeRange(start: .zero, duration: blurVideo.duration)
            try blurCompTrack.insertTimeRange(blurRange, of: blurSourceTrack, at: .zero)
        } catch {
            return completion(nil, error)
        }
    }

    // 2. Foreground (original) video, transformed to sit scaled at the centre.
    let timeRange = CMTimeRange(start: .zero, duration: asset.duration)
    guard let centerCompTrack = mixComposition.addMutableTrack(withMediaType: .video,
                                                               preferredTrackID: kCMPersistentTrackID_Invalid) else {
        return fail("Could not add the centre video track to the composition.")
    }
    if let sourceTrack = asset.tracks(withMediaType: .video).first {
        do {
            try centerCompTrack.insertTimeRange(timeRange, of: sourceTrack, at: .zero)
        } catch {
            return completion(nil, error)
        }
        let properties = scaleAndPositionInAspectFitMode(forTrack: sourceTrack, inArea: size, scale: scale)
        let videoOrientation = sourceTrack.getVideoOrientation()
        let assetSize = sourceTrack.assetSize()
        let preferredTransform = getPreferredTransform(videoOrientation: videoOrientation,
                                                       assetSize: assetSize,
                                                       defaultTransform: asset.preferredTransform,
                                                       properties: properties)
        let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: centerCompTrack)
        layerInstruction.setTransform(preferredTransform, at: .zero)
        instructionLayers.append(layerInstruction)
    }

    // 3. Audio from the original asset. Kept best-effort (as before): a missing
    //    or un-insertable audio track should not abort the whole export.
    if let audioTrack = asset.tracks(withMediaType: .audio).first,
       let audioCompTrack = mixComposition.addMutableTrack(withMediaType: .audio,
                                                           preferredTrackID: kCMPersistentTrackID_Invalid) {
        try? audioCompTrack.insertTimeRange(timeRange, of: audioTrack, at: .zero)
    }

    // 4. Blur layer instruction appended after the foreground's, preserving the
    //    original z-order (earlier instructions in the array render in front).
    instructionLayers.append(AVMutableVideoCompositionLayerInstruction(assetTrack: blurCompTrack))

    let mainInstruction = AVMutableVideoCompositionInstruction()
    mainInstruction.timeRange = timeRange
    mainInstruction.layerInstructions = instructionLayers

    let videoComposition = AVMutableVideoComposition()
    videoComposition.instructions = [mainInstruction]
    videoComposition.frameDuration = CMTimeMake(value: 1, timescale: 30)   // 30 fps output
    videoComposition.renderSize = size

    let url = self.videoOutputUrl(filename: "finalBlurred")
    try? FileManager.default.removeItem(at: url)   // overwrite a previous export, if any

    performExport(composition: mixComposition, instructions: videoComposition, stage: 2, outputUrl: url) { error in
        if let error = error {
            completion(nil, error)
        } else {
            completion(url, nil)
        }
    }
}
getPreferredTransform() 函数也很相关:
/// Dispatches to the orientation-specific transform builder for the video track.
/// - Returns: The affine transform to apply in the layer instruction.
fileprivate func getPreferredTransform(videoOrientation: UIImage.Orientation, assetSize: CGSize, defaultTransform: CGAffineTransform, properties: Properties) -> CGAffineTransform {
    switch videoOrientation {
    case .down:
        return handleDownOrientation(assetSize: assetSize, defaultTransform: defaultTransform, properties: properties)
    case .left:
        return handleLeftOrientation(assetSize: assetSize, defaultTransform: defaultTransform, properties: properties)
    case .right:
        // `.right` needs no rotation handling, only scale + position.
        return handleRightOrientation(properties: properties)
    case .up:
        return handleUpOrientation(assetSize: assetSize, defaultTransform: defaultTransform, properties: properties)
    default:
        return handleOtherCases(assetSize: assetSize, defaultTransform: defaultTransform, properties: properties)
    }
}
/// Transform for `.down`-oriented footage: rotate -90°, translate into place,
/// then scale to the aspect-fit size.
fileprivate func handleDownOrientation(assetSize: CGSize, defaultTransform: CGAffineTransform, properties: Properties) -> CGAffineTransform {
    let rotation = CGAffineTransform(rotationAngle: -CGFloat(Double.pi / 2.0))
    let scaling = CGAffineTransform(scaleX: properties.scale.width, y: properties.scale.height)

    // Centre along whichever axis the aspect-fit left free
    // (position.y == 0 means horizontal letterboxing; otherwise vertical).
    let tx: CGFloat
    let ty: CGFloat
    if properties.position.y == 0 {
        tx = -(assetSize.width - ((size.width / size.height) * assetSize.height)) / 2.0
        ty = assetSize.height
    } else {
        tx = 0
        ty = assetSize.height - (assetSize.height - ((size.height / size.width) * assetSize.width)) / 2.0
    }
    let translation = CGAffineTransform(translationX: tx, y: ty)

    // Same concatenation order as elsewhere: rotate, translate, then scale.
    return defaultTransform
        .concatenating(rotation)
        .concatenating(translation)
        .concatenating(scaling)
}
/// Transform for `.left`-oriented footage: rotate 180°, translate into place,
/// then scale to the aspect-fit size.
fileprivate func handleLeftOrientation(assetSize: CGSize, defaultTransform: CGAffineTransform, properties: Properties) -> CGAffineTransform {
    let rotation = CGAffineTransform(rotationAngle: -CGFloat(Double.pi))
    let scaling = CGAffineTransform(scaleX: properties.scale.width, y: properties.scale.height)

    // Centre along whichever axis the aspect-fit left free.
    let tx: CGFloat
    let ty: CGFloat
    if properties.position.y == 0 {
        tx = assetSize.width - (assetSize.width - ((size.width / size.height) * assetSize.height)) / 2.0
        ty = assetSize.height
    } else {
        tx = assetSize.width
        ty = assetSize.height - (assetSize.height - ((size.height / size.width) * assetSize.width)) / 2.0
    }
    let translation = CGAffineTransform(translationX: tx, y: ty)

    return defaultTransform
        .concatenating(rotation)
        .concatenating(translation)
        .concatenating(scaling)
}
/// Transform for `.right`-oriented footage: no rotation needed — scale the
/// track and move it to the aspect-fit position.
fileprivate func handleRightOrientation(properties: Properties) -> CGAffineTransform {
    let scaling = CGAffineTransform(scaleX: properties.scale.width, y: properties.scale.height)
    let translation = CGAffineTransform(translationX: properties.position.x, y: properties.position.y)
    return scaling.concatenating(translation)
}
/// `.up`-oriented footage uses exactly the same math as the fallback path.
fileprivate func handleUpOrientation(assetSize: CGSize, defaultTransform: CGAffineTransform, properties: Properties) -> CGAffineTransform {
    return handleOtherCases(assetSize: assetSize,
                            defaultTransform: defaultTransform,
                            properties: properties)
}
/// Fallback transform (also used for `.up`): rotate +90°, translate into place,
/// then scale to the aspect-fit size.
fileprivate func handleOtherCases(assetSize: CGSize, defaultTransform: CGAffineTransform, properties: Properties) -> CGAffineTransform {
    let rotation = CGAffineTransform(rotationAngle: CGFloat(Double.pi / 2.0))
    let scaling = CGAffineTransform(scaleX: properties.scale.width, y: properties.scale.height)

    // Centre along whichever axis the aspect-fit left free.
    let tx: CGFloat
    let ty: CGFloat
    if properties.position.y == 0 {
        tx = assetSize.width - (assetSize.width - ((size.width / size.height) * assetSize.height)) / 2.0
        ty = 0
    } else {
        tx = assetSize.width
        ty = -(assetSize.height - ((size.height / size.width) * assetSize.width)) / 2.0
    }
    let translation = CGAffineTransform(translationX: tx, y: ty)

    return defaultTransform
        .concatenating(rotation)
        .concatenating(translation)
        .concatenating(scaling)
}
问题出在您编写的 handleOtherCases 函数,以及它返回并应用到视频帧的 CGAffineTransform 上。其中的缩放和旋转变换没有问题,但计算出的平移变换不正确。请尝试下面的代码片段,它会产生所需的结果,如附图所示。
/// Corrected fallback transform: scale and rotate as before, but compute the
/// translation from the *scaled* frame size so the video lands centred in the
/// render area (`self.size`).
fileprivate func handleOtherCases(assetSize: CGSize, defaultTransform: CGAffineTransform, properties: Properties) -> CGAffineTransform {
    let rotation = CGAffineTransform(rotationAngle: CGFloat(Double.pi / 2.0))
    let scaling = CGAffineTransform(scaleX: properties.scale.width, y: properties.scale.height)

    // Dimensions of the frame after scaling.
    let scaledWidth = assetSize.width * properties.scale.width
    let scaledHeight = assetSize.height * properties.scale.height

    // Offset compensates for the 90° rotation and centres the scaled frame.
    let tx = scaledWidth + (self.size.width - scaledWidth) / 2
    let ty = (self.size.height - scaledHeight) / 2
    let translation = CGAffineTransform(translationX: tx, y: ty)

    // Note the order differs from the broken version: scale BEFORE rotating.
    return defaultTransform
        .concatenating(scaling)
        .concatenating(rotation)
        .concatenating(translation)
}
我不太确定如何提出这个问题,如果有任何有助于改进这个问题的反馈,我将不胜感激。我正在尝试制作一个接受视频 URL 作为输入(本地视频)的函数,而这个函数又试图创建一个背景模糊的视频,原始视频位于其中心并按比例缩小。我的问题是我的代码工作正常,除了当我使用直接从 iPhone 相机录制的视频时。
我试图实现的一个例子如下(取自我的代码):
这里输入的视频是mp4。我已经能够使代码与我在线下载的 mov 文件一起工作。但是当我使用从 iOS 相机录制的 mov 文件时,我得到以下结果:
(顺便请教:在问题中发布拍摄的截图时,怎样才能少占一些版面空间?)
现在,我不确定如何提出这个问题的原因是:这个过程涉及相当多的代码,我还不能完全缩小问题的范围,但我相信问题出在我将在下面粘贴的函数中。我还会附上一个 GitHub 存储库的链接,其中发布了我项目的精简版本,供任何好奇或愿意提供帮助的人使用。我必须说明,我使用的代码最初是由名为 TheTiger 的 Stack Overflow 用户针对另一个问题编写的;我重构了其中的部分内容,并在征得其许可后在此发布。
我的 GitHub 存储库链接在此:GITHUB REPO。我的演示设置了 3 个不同的视频:一个从网上下载的 mp4(工作正常),一个从网上下载的 mov(工作正常),以及一个我在 iPhone 上录制的 mov(不能正常工作)。
我认为导致问题的代码在这里:
/// Builds an AVMutableComposition that layers `asset` (scaled down, centred)
/// on top of `blurVideo` (the blurred background), copies the original audio,
/// and exports the result.
///
/// - Parameters:
///   - asset: The original (foreground) video.
///   - blurVideo: The pre-rendered blurred background video.
///   - scale: Scale factor applied when aspect-fitting the foreground video.
///   - completion: Called with the output URL on success, or an error.
///
/// Fix vs. the original: `addMutableTrack` can return nil and `insertTimeRange`
/// can throw; the old code force-unwrapped the tracks (potential crash) and used
/// `try?` (silent failure → corrupt export). Failures now surface via `completion`.
fileprivate func addAllVideosAtCenterOfBlur(asset: AVURLAsset, blurVideo: AVURLAsset, scale: CGFloat, completion: @escaping BlurredBackgroundManagerCompletion) {
    let mixComposition = AVMutableComposition()
    var instructionLayers: [AVMutableVideoCompositionLayerInstruction] = []

    // Helper: report a composition-setup failure through the completion handler.
    func fail(_ message: String) {
        completion(nil, NSError(domain: "BlurredBackgroundManager",
                                code: -1,
                                userInfo: [NSLocalizedDescriptionKey: message]))
    }

    // 1. Blurred background track.
    guard let blurCompTrack = mixComposition.addMutableTrack(withMediaType: .video,
                                                             preferredTrackID: kCMPersistentTrackID_Invalid) else {
        return fail("Could not add the blurred background track to the composition.")
    }
    if let blurSourceTrack = blurVideo.tracks(withMediaType: .video).first {
        do {
            let blurRange = CMTimeRange(start: .zero, duration: blurVideo.duration)
            try blurCompTrack.insertTimeRange(blurRange, of: blurSourceTrack, at: .zero)
        } catch {
            return completion(nil, error)
        }
    }

    // 2. Foreground (original) video, transformed to sit scaled at the centre.
    let timeRange = CMTimeRange(start: .zero, duration: asset.duration)
    guard let centerCompTrack = mixComposition.addMutableTrack(withMediaType: .video,
                                                               preferredTrackID: kCMPersistentTrackID_Invalid) else {
        return fail("Could not add the centre video track to the composition.")
    }
    if let sourceTrack = asset.tracks(withMediaType: .video).first {
        do {
            try centerCompTrack.insertTimeRange(timeRange, of: sourceTrack, at: .zero)
        } catch {
            return completion(nil, error)
        }
        let properties = scaleAndPositionInAspectFitMode(forTrack: sourceTrack, inArea: size, scale: scale)
        let videoOrientation = sourceTrack.getVideoOrientation()
        let assetSize = sourceTrack.assetSize()
        let preferredTransform = getPreferredTransform(videoOrientation: videoOrientation,
                                                       assetSize: assetSize,
                                                       defaultTransform: asset.preferredTransform,
                                                       properties: properties)
        let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: centerCompTrack)
        layerInstruction.setTransform(preferredTransform, at: .zero)
        instructionLayers.append(layerInstruction)
    }

    // 3. Audio from the original asset. Kept best-effort (as before): a missing
    //    or un-insertable audio track should not abort the whole export.
    if let audioTrack = asset.tracks(withMediaType: .audio).first,
       let audioCompTrack = mixComposition.addMutableTrack(withMediaType: .audio,
                                                           preferredTrackID: kCMPersistentTrackID_Invalid) {
        try? audioCompTrack.insertTimeRange(timeRange, of: audioTrack, at: .zero)
    }

    // 4. Blur layer instruction appended after the foreground's, preserving the
    //    original z-order (earlier instructions in the array render in front).
    instructionLayers.append(AVMutableVideoCompositionLayerInstruction(assetTrack: blurCompTrack))

    let mainInstruction = AVMutableVideoCompositionInstruction()
    mainInstruction.timeRange = timeRange
    mainInstruction.layerInstructions = instructionLayers

    let videoComposition = AVMutableVideoComposition()
    videoComposition.instructions = [mainInstruction]
    videoComposition.frameDuration = CMTimeMake(value: 1, timescale: 30)   // 30 fps output
    videoComposition.renderSize = size

    let url = self.videoOutputUrl(filename: "finalBlurred")
    try? FileManager.default.removeItem(at: url)   // overwrite a previous export, if any

    performExport(composition: mixComposition, instructions: videoComposition, stage: 2, outputUrl: url) { error in
        if let error = error {
            completion(nil, error)
        } else {
            completion(url, nil)
        }
    }
}
getPreferredTransform() 函数也很相关:
/// Dispatches to the orientation-specific transform builder for the video track.
/// - Returns: The affine transform to apply in the layer instruction.
fileprivate func getPreferredTransform(videoOrientation: UIImage.Orientation, assetSize: CGSize, defaultTransform: CGAffineTransform, properties: Properties) -> CGAffineTransform {
    switch videoOrientation {
    case .down:
        return handleDownOrientation(assetSize: assetSize, defaultTransform: defaultTransform, properties: properties)
    case .left:
        return handleLeftOrientation(assetSize: assetSize, defaultTransform: defaultTransform, properties: properties)
    case .right:
        // `.right` needs no rotation handling, only scale + position.
        return handleRightOrientation(properties: properties)
    case .up:
        return handleUpOrientation(assetSize: assetSize, defaultTransform: defaultTransform, properties: properties)
    default:
        return handleOtherCases(assetSize: assetSize, defaultTransform: defaultTransform, properties: properties)
    }
}
/// Transform for `.down`-oriented footage: rotate -90°, translate into place,
/// then scale to the aspect-fit size.
fileprivate func handleDownOrientation(assetSize: CGSize, defaultTransform: CGAffineTransform, properties: Properties) -> CGAffineTransform {
    let rotation = CGAffineTransform(rotationAngle: -CGFloat(Double.pi / 2.0))
    let scaling = CGAffineTransform(scaleX: properties.scale.width, y: properties.scale.height)

    // Centre along whichever axis the aspect-fit left free
    // (position.y == 0 means horizontal letterboxing; otherwise vertical).
    let tx: CGFloat
    let ty: CGFloat
    if properties.position.y == 0 {
        tx = -(assetSize.width - ((size.width / size.height) * assetSize.height)) / 2.0
        ty = assetSize.height
    } else {
        tx = 0
        ty = assetSize.height - (assetSize.height - ((size.height / size.width) * assetSize.width)) / 2.0
    }
    let translation = CGAffineTransform(translationX: tx, y: ty)

    // Same concatenation order as elsewhere: rotate, translate, then scale.
    return defaultTransform
        .concatenating(rotation)
        .concatenating(translation)
        .concatenating(scaling)
}
/// Transform for `.left`-oriented footage: rotate 180°, translate into place,
/// then scale to the aspect-fit size.
fileprivate func handleLeftOrientation(assetSize: CGSize, defaultTransform: CGAffineTransform, properties: Properties) -> CGAffineTransform {
    let rotation = CGAffineTransform(rotationAngle: -CGFloat(Double.pi))
    let scaling = CGAffineTransform(scaleX: properties.scale.width, y: properties.scale.height)

    // Centre along whichever axis the aspect-fit left free.
    let tx: CGFloat
    let ty: CGFloat
    if properties.position.y == 0 {
        tx = assetSize.width - (assetSize.width - ((size.width / size.height) * assetSize.height)) / 2.0
        ty = assetSize.height
    } else {
        tx = assetSize.width
        ty = assetSize.height - (assetSize.height - ((size.height / size.width) * assetSize.width)) / 2.0
    }
    let translation = CGAffineTransform(translationX: tx, y: ty)

    return defaultTransform
        .concatenating(rotation)
        .concatenating(translation)
        .concatenating(scaling)
}
/// Transform for `.right`-oriented footage: no rotation needed — scale the
/// track and move it to the aspect-fit position.
fileprivate func handleRightOrientation(properties: Properties) -> CGAffineTransform {
    let scaling = CGAffineTransform(scaleX: properties.scale.width, y: properties.scale.height)
    let translation = CGAffineTransform(translationX: properties.position.x, y: properties.position.y)
    return scaling.concatenating(translation)
}
/// `.up`-oriented footage uses exactly the same math as the fallback path.
fileprivate func handleUpOrientation(assetSize: CGSize, defaultTransform: CGAffineTransform, properties: Properties) -> CGAffineTransform {
    return handleOtherCases(assetSize: assetSize,
                            defaultTransform: defaultTransform,
                            properties: properties)
}
/// Fallback transform (also used for `.up`): rotate +90°, translate into place,
/// then scale to the aspect-fit size.
fileprivate func handleOtherCases(assetSize: CGSize, defaultTransform: CGAffineTransform, properties: Properties) -> CGAffineTransform {
    let rotation = CGAffineTransform(rotationAngle: CGFloat(Double.pi / 2.0))
    let scaling = CGAffineTransform(scaleX: properties.scale.width, y: properties.scale.height)

    // Centre along whichever axis the aspect-fit left free.
    let tx: CGFloat
    let ty: CGFloat
    if properties.position.y == 0 {
        tx = assetSize.width - (assetSize.width - ((size.width / size.height) * assetSize.height)) / 2.0
        ty = 0
    } else {
        tx = assetSize.width
        ty = -(assetSize.height - ((size.height / size.width) * assetSize.width)) / 2.0
    }
    let translation = CGAffineTransform(translationX: tx, y: ty)

    return defaultTransform
        .concatenating(rotation)
        .concatenating(translation)
        .concatenating(scaling)
}
问题出在您编写的 handleOtherCases 函数,以及它返回并应用到视频帧的 CGAffineTransform 上。其中的缩放和旋转变换没有问题,但计算出的平移变换不正确。请尝试下面的代码片段,它会产生所需的结果,如附图所示。
/// Corrected fallback transform: scale and rotate as before, but compute the
/// translation from the *scaled* frame size so the video lands centred in the
/// render area (`self.size`).
fileprivate func handleOtherCases(assetSize: CGSize, defaultTransform: CGAffineTransform, properties: Properties) -> CGAffineTransform {
    let rotation = CGAffineTransform(rotationAngle: CGFloat(Double.pi / 2.0))
    let scaling = CGAffineTransform(scaleX: properties.scale.width, y: properties.scale.height)

    // Dimensions of the frame after scaling.
    let scaledWidth = assetSize.width * properties.scale.width
    let scaledHeight = assetSize.height * properties.scale.height

    // Offset compensates for the 90° rotation and centres the scaled frame.
    let tx = scaledWidth + (self.size.width - scaledWidth) / 2
    let ty = (self.size.height - scaledHeight) / 2
    let translation = CGAffineTransform(translationX: tx, y: ty)

    // Note the order differs from the broken version: scale BEFORE rotating.
    return defaultTransform
        .concatenating(scaling)
        .concatenating(rotation)
        .concatenating(translation)
}