How to add CIFilters to a video at runtime
I recorded a video with AVFoundation; tapping the video starts playing it in a new view controller. The problem is that I am not able to add a filter effect to the video while it is playing. Here is the code I have implemented.
import UIKit
import AVFoundation

class VideoPlayback: UIViewController {

    let avPlayer = AVPlayer()
    var avPlayerLayer: AVPlayerLayer!
    var videoURL: URL!
    @IBOutlet weak var videoView: UIView!

    override func viewDidLoad() {
        super.viewDidLoad()

        // Observe playback completion so we can unwind when the video ends
        NotificationCenter.default.addObserver(self, selector: #selector(finishVideo), name: .AVPlayerItemDidPlayToEndTime, object: nil)

        // Attach a player layer that fills the video view
        avPlayerLayer = AVPlayerLayer(player: avPlayer)
        avPlayerLayer.frame = view.bounds
        avPlayerLayer.videoGravity = .resizeAspectFill
        videoView.layer.insertSublayer(avPlayerLayer, at: 0)
        view.layoutIfNeeded()

        // Load the recorded video and start playback
        let playerItem = AVPlayerItem(url: videoURL)
        avPlayer.replaceCurrentItem(with: playerItem)
        avPlayer.play()
    }

    @objc func finishVideo() {
        print("Video Finished")
        self.performSegue(withIdentifier: "unwindToFifteenSeconds", sender: self)
    }

    // Remove observer
    deinit {
        NotificationCenter.default.removeObserver(self)
    }
}
The code above only plays back the camera video captured with AVFoundation. I just need to apply CIFilters to this video during playback.
You can apply a Core Image filter to a video at playback time by giving the AVPlayerItem an AVVideoComposition created with init(asset:applyingCIFiltersWithHandler:):
import AVFoundation
import CoreImage

let filter = CIFilter(name: "CIGaussianBlur")!
let asset = AVAsset(url: streamURL)
let item = AVPlayerItem(asset: asset)

item.videoComposition = AVVideoComposition(asset: asset, applyingCIFiltersWithHandler: { request in

    // Clamp to avoid blurring transparent pixels at the image edges
    let source = request.sourceImage.clampedToExtent()
    filter.setValue(source, forKey: kCIInputImageKey)

    // Vary filter parameters based on video timing
    let seconds = CMTimeGetSeconds(request.compositionTime)
    filter.setValue(seconds * 10.0, forKey: kCIInputRadiusKey)

    // Crop the blurred output to the bounds of the original image
    let output = filter.outputImage!.cropped(to: request.sourceImage.extent)

    // Provide the filter output to the composition
    request.finish(with: output, context: nil)
})
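To wire this into your VideoPlayback controller, build the filtered player item in viewDidLoad instead of the plain AVPlayerItem(url:); nothing else about the player or layer setup has to change. Below is a minimal sketch under that assumption, using a fixed-intensity CISepiaTone purely as an example (any CIFilter works the same way; add import CoreImage at the top of the file if CIFilter is not found):

// In viewDidLoad(), replace the plain player item with a filtered one
let asset = AVAsset(url: videoURL)
let playerItem = AVPlayerItem(asset: asset)

// Example filter (a hypothetical choice, not from the original answer)
let sepia = CIFilter(name: "CISepiaTone")!
sepia.setValue(0.9, forKey: kCIInputIntensityKey)

playerItem.videoComposition = AVVideoComposition(asset: asset, applyingCIFiltersWithHandler: { request in
    sepia.setValue(request.sourceImage, forKey: kCIInputImageKey)
    // Fall back to the unfiltered frame if the filter produces no output
    request.finish(with: sepia.outputImage ?? request.sourceImage, context: nil)
})

avPlayer.replaceCurrentItem(with: playerItem)
avPlayer.play()

The AVPlayerLayer already attached to avPlayer renders the filtered frames automatically, since the composition is applied by the player item itself.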