使用 Swift 将核心图像过滤器应用于 OS X 上的视频

Apply Core Image Filter to Video on OS X using Swift

我计划使用 Swift 为 OS X 应用构建一个 NSOpenGLView,用于将 Core Image Filter 和效果应用于视频。到目前为止,我已经完成了为视频控制器添加视频播放的代码,但还不确定应该如何将过滤器应用到视频上:

/// Plays a video in an AVPlayerView and drives an external scrubber slider
/// and time labels owned by `mainView`.
class VideoMediaViewController: NSViewController {

    // Hosting controller that owns the scrubber slider and the
    // time-remaining / time-passed labels updated from here.
    // Weak to avoid a retain cycle between the two controllers.
    weak var mainView : DTMainViewController?

    @IBOutlet weak var aVPlayerView: AVPlayerView!

    // URL of the media to load; the didSet observer is currently a stub.
    var  url:NSURL?{
        didSet{
            // this is the setter 


        }
    }

    // Token returned by addPeriodicTimeObserverForInterval; needed later
    // to unregister in unloadMedia().
    var observer:AnyObject?

    var player:AVPlayer?

    // Output used to pull video frames (32ARGB pixel buffers) from the
    // player item, e.g. for Core Image processing.
    var videoOutput:AVPlayerItemVideoOutput?

    var  ciContext:CIContext?

    var loadStatus:NSNumber?

    override func viewDidLoad() {
        // Fix: the override must give NSViewController a chance to run its
        // own setup before we configure the view.
        super.viewDidLoad()

        // Hide the built-in transport controls; playback is driven by the
        // custom scrubber and labels owned by mainView.
        aVPlayerView.controlsStyle = AVPlayerViewControlsStyle.None

    }

   /// Builds a fresh AVPlayer for `url`, wires up KVO on "status", the
   /// end-of-item loop notification, a periodic time observer, and a video
   /// output, then starts playback in `aVPlayerView`.
   ///
   /// NOTE(review): registration order matters here — unloadMedia() must be
   /// kept in sync with every observer added below.
   func loadMedia() {

    // Tear down observers from any previously loaded item first.
    unloadMedia()


    //Create AVPlayerItem

    // NOTE(review): `url` is optional and not validated here — a nil URL
    // would reach playerWithURL; confirm callers set `url` first.
    player = AVPlayer.playerWithURL(url) as? AVPlayer

    //Create VideoOutput

    // 32ARGB buffers so frames can be handed to Core Image later.
    videoOutput = AVPlayerItemVideoOutput(pixelBufferAttributes: [kCVPixelBufferPixelFormatTypeKey:kCVPixelFormatType_32ARGB])


    //Get notifications for status

    // Observed on the player; unloadMedia() removes this observer.
    player?.addObserver(self, forKeyPath: "status", options:.New, context: nil)


   //looping logic start 

    // Do nothing at item end; playerItemDidReachEnd rewinds instead.
    player?.actionAtItemEnd = AVPlayerActionAtItemEnd.None

    NSNotificationCenter.defaultCenter().addObserver(self,
        selector: "playerItemDidReachEnd:",
        name: AVPlayerItemDidPlayToEndTimeNotification,
        object: player?.currentItem)


    //looping logic end

    // Fire 4x per second (150/600) on the main queue to refresh the
    // scrubber and time labels.
    // NOTE(review): [unowned self] assumes the observer is always removed
    // before self deallocates (unloadMedia via viewWillDisappear) — verify.
    observer =  player?.addPeriodicTimeObserverForInterval(CMTimeMake(150, 600),
        queue: dispatch_get_main_queue(),
        usingBlock: {[unowned self](CMTime) in

            self.updateSliderProgress()
            self.updateStartAndEndTimes()
        })


    //Set videoOutput to player

    // NOTE(review): videoOutput is an Optional here — confirm this compiles
    // /behaves as intended; addOutput expects a non-nil output.
    player?.currentItem.addOutput(videoOutput)

    aVPlayerView.player = player

    aVPlayerView.player.play()

    }

    /// Loops playback: when the current item finishes, rewind it to zero.
    /// Registered for AVPlayerItemDidPlayToEndTimeNotification in loadMedia().
    func playerItemDidReachEnd(notification: NSNotification) {
        let finishedItem = notification.object as! AVPlayerItem
        finishedItem.seekToTime(kCMTimeZero)
    }


    /// Seeks the player to the position picked on the scrubber slider,
    /// interpreting the slider value as a percentage of total duration.
    func scrubberSliderChanged(sender: AnyObject){
        let totalSeconds = CMTimeGetSeconds(player!.currentItem.duration)
        let percent = Float64(mainView!.scrubberSlider!.floatValue)

        let target = CMTimeMakeWithSeconds(((percent * totalSeconds)/100.00), 1)

        // Completion is intentionally a no-op.
        player?.seekToTime(target, completionHandler: { _ in })
    }

    /// Splits a whole-second duration into (hours, minutes, seconds).
    func secondsToHoursMinutesSeconds (seconds : Int) -> (Int, Int, Int) {
        let hours = seconds / 3600
        let remainder = seconds % 3600
        return (hours, remainder / 60, remainder % 60)
    }


    /// Formats hour/minute/second components as a zero-padded "HH:MM:SS"
    /// string (assumes non-negative components, as produced by
    /// secondsToHoursMinutesSeconds).
    ///
    /// Fix: the original allocated an NSNumberFormatter on every call
    /// (formatter creation is expensive) and force-unwrapped each of its
    /// three results; a single String(format:) does the same padding.
    func getStringFromHoursMinutesSeconds(h:Int, m:Int, s:Int) -> (String){

        return String(format: "%02d:%02d:%02d", h, m, s)

    }



    /// Refreshes the "time remaining" and "time passed" labels on mainView,
    /// both rendered as HH:MM:SS.
    func updateStartAndEndTimes(){

        let durationSeconds = CMTimeGetSeconds(player!.currentItem.duration)
        let elapsedSeconds = CMTimeGetSeconds(player!.currentItem.currentTime())

        // Remaining = total - elapsed.
        let (rh, rm, rs) = secondsToHoursMinutesSeconds(Int(durationSeconds - elapsedSeconds))
        mainView!.videoTimeRemaing.stringValue = getStringFromHoursMinutesSeconds(rh, m: rm, s: rs)

        // Elapsed playback time.
        let (ph, pm, ps) = secondsToHoursMinutesSeconds(Int(elapsedSeconds))
        mainView?.videoTimePassed.stringValue = getStringFromHoursMinutesSeconds(ph, m: pm, s: ps)
    }

    /// Positions the scrubber slider to mirror playback progress (0–100).
    func updateSliderProgress(){

        let elapsed = CMTimeGetSeconds(player!.currentItem.currentTime())
        let total = CMTimeGetSeconds(player!.currentItem.duration)

        mainView!.scrubberSlider!.doubleValue = (elapsed / total) * 100
    }


   /// Unregisters everything loadMedia() set up: pending prerolls, the
   /// periodic time observer, the "status" KVO observation, and the
   /// end-of-item notification.
   ///
   /// Fix: the original wrote `player?.cancelPendingPrerolls` without
   /// parentheses, which only references the method and never calls it.
   func unloadMedia() {

    //Cancel Prerolls and Notifications

    player?.cancelPendingPrerolls()

    // Guard the token: passing nil to removeTimeObserver raises, and
    // loadMedia() may not have run yet on the first call.
    if let observer = observer {
        player?.removeTimeObserver(observer)
        self.observer = nil
    }

    player?.removeObserver(self, forKeyPath: "status")

    NSNotificationCenter.defaultCenter().removeObserver(self, name: AVPlayerItemDidPlayToEndTimeNotification, object: nil)

    //dispose of assets and players, etc

    //...

    }

    override func viewWillDisappear() {
        // Fix: lifecycle overrides should call super so NSViewController can
        // perform its own teardown.
        super.viewWillDisappear()

        // Drop all observers before the view goes away.
        unloadMedia()
    }


    //Get Status updates and log success or failure

    /// KVO callback for the "status" key observed on the player in
    /// loadMedia(); logs whether the media loaded, failed, or is unknown.
    ///
    /// Fix: the original force-cast `object as! NSObject`, which crashes if
    /// KVO ever delivers a non-NSObject (or nil) object; reference-identity
    /// comparison needs no cast at all.
    override func observeValueForKeyPath(keyPath: String, ofObject object: AnyObject, change: [NSObject : AnyObject], context: UnsafeMutablePointer<Void>) {


        if (object === player && keyPath == "status") {

            if (player?.status == .ReadyToPlay) {

                NSLog("Status - Loaded")

            } else if (player?.status == .Failed) {

                NSLog("Status - Failed")

            } else {

                NSLog("Status - Unknown")

            }

        }
        // NOTE(review): other key paths are silently ignored here; if a
        // superclass ever registers its own observations they should be
        // forwarded via super.observeValueForKeyPath(...) — confirm.
    }

但我不明白在何处以及如何对其应用 Core Image Filter。我在网上搜索过,但没能找到一个完整的例子来做到这一点。

任何帮助将不胜感激,愿上帝保佑,我很快就能搞定:)

实际上我想通了。下面是解决方案,需要 OS X El Capitan 或更高版本:

// Apply a Core Image filter to every video frame via AVVideoComposition
// (requires OS X 10.11 El Capitan or later).
//
// Fixes from the original paste: the typographic closing quotes in "mp4”"
// and "CIGaussianBlur”" break compilation, and the parenthesis opened on
// the imageByApplyingFilter line was never closed.
let url = NSBundle.mainBundle().URLForResource("skies", withExtension: "mp4")!


        let avAsset = AVAsset(URL: url)

        // The handler is invoked once per output frame with the source
        // frame as a CIImage.
        let vidComp = AVVideoComposition(asset: avAsset,
            applyingCIFiltersWithHandler: {

            request in

            // Clamp to an infinite extent so the blur does not darken the
            // frame edges.
            self.image = request.sourceImage.imageByClampingToExtent();

            // Blur radius is driven by the slider value.
            self.image = self.image!.imageByApplyingFilter("CIGaussianBlur",
                withInputParameters: [kCIInputRadiusKey: self.blurTestSlider.integerValue])

            // Crop back to the original frame rect.
            self.image = self.image!.imageByCroppingToRect(request.sourceImage.extent)

            request.finishWithImage(self.image!, context: self.context)

            })


        // Attach the filtering composition to a fresh player item.
        let playerItem = AVPlayerItem(asset: avAsset)
        playerItem.videoComposition = vidComp
        player = AVPlayer(playerItem: playerItem)


        dTMediaViewControllerA?.player = player
        dTMediaViewControllerA?.player.play()