Apply Core Image Filter to Video on OS X using Swift

I am planning to build an NSOpenGLView for an OS X app using SWIFT which can be used to apply Core Image Filter and effects to a video, so far I have worked on the code for the video Controller to add video playback, but I am not sure how to apply the filter to the video:

// View controller that plays a video in an AVPlayerView and exposes
// hooks (slider, time labels) to an owning DTMainViewController.
class VideoMediaViewController: NSViewController {

    // Owning controller that hosts the scrubber slider and time labels
    // this controller updates during playback.
    weak var mainView : DTMainViewController?

    @IBOutlet weak var aVPlayerView: AVPlayerView!

    // Media URL to load; the observer body is an empty placeholder —
    // actual loading is triggered via loadMedia().
    var  url:NSURL?{
        didSet{
            // this is the setter 


        }
    }


    // Token returned by addPeriodicTimeObserverForInterval; retained so
    // unloadMedia() can remove the observer again.
    var observer:AnyObject?

    var player:AVPlayer?

    // Output attached to the player item so pixel buffers can be pulled
    // out of the video stream (for Core Image processing).
    var videoOutput:AVPlayerItemVideoOutput?

    // NOTE(review): declared but never used in this chunk — presumably
    // intended for rendering filtered frames; confirm before removing.
    var  ciContext:CIContext?

    var loadStatus:NSNumber?


    /// Hides AVPlayerView's built-in transport controls; a custom
    /// scrubber (owned by mainView) drives playback instead.
    override func viewDidLoad() {
        aVPlayerView.controlsStyle = .None
    }

   /// Builds the playback pipeline for `url`: creates the player, wires up
   /// KVO/notification observers, attaches a video output, and starts playback.
   /// Must be balanced by unloadMedia(), which removes every observer added here.
   func loadMedia() {

    // Tear down any previously-built pipeline before rebuilding.
    unloadMedia()


    //Create AVPlayerItem

    // NOTE(review): `url` is optional here — presumably set before this is
    // called; confirm, otherwise the player is created with a nil URL.
    player = AVPlayer.playerWithURL(url) as? AVPlayer

    //Create VideoOutput

    // 32ARGB buffers so frames can later be handed to Core Image.
    videoOutput = AVPlayerItemVideoOutput(pixelBufferAttributes: [kCVPixelBufferPixelFormatTypeKey:kCVPixelFormatType_32ARGB])


    //Get notifications for status

    // KVO on "status"; handled in observeValueForKeyPath and removed in
    // unloadMedia().
    player?.addObserver(self, forKeyPath: "status", options:.New, context: nil)


   //looping logic start 

    // .None keeps the player from pausing at the end so the
    // end-of-item notification handler can rewind it (loop).
    player?.actionAtItemEnd = AVPlayerActionAtItemEnd.None

    NSNotificationCenter.defaultCenter().addObserver(self,
        selector: "playerItemDidReachEnd:",
        name: AVPlayerItemDidPlayToEndTimeNotification,
        object: player?.currentItem)


    //looping logic end

    // Periodic callback every 150/600s (0.25s) on the main queue to drive
    // the scrubber and time labels.
    // NOTE(review): [unowned self] assumes the observer is always removed
    // before self deallocates (see unloadMedia) — confirm.
    observer =  player?.addPeriodicTimeObserverForInterval(CMTimeMake(150, 600),
        queue: dispatch_get_main_queue(),
        usingBlock: {[unowned self](CMTime) in

            self.updateSliderProgress()
            self.updateStartAndEndTimes()
        })


    //Set videoOutput to player

    player?.currentItem.addOutput(videoOutput)

    aVPlayerView.player = player

    aVPlayerView.player.play()

    }

    /// End-of-item notification handler: rewinds the finished item to the
    /// start so playback loops (actionAtItemEnd is .None, so the player
    /// keeps going after the seek).
    ///
    /// Fix: the original force-cast `notification.object as! AVPlayerItem`
    /// would crash on an unexpected payload; a conditional cast simply
    /// ignores such notifications instead.
    func playerItemDidReachEnd(notification: NSNotification) {
        guard let finishedItem = notification.object as? AVPlayerItem else { return }
        finishedItem.seekToTime(kCMTimeZero)
    }


    /// Seeks the player to the position chosen on the scrubber slider.
    /// The slider value is interpreted as a percentage (0–100) of the
    /// item's total duration.
    func scrubberSliderChanged(sender: AnyObject){
        let totalSeconds = CMTimeGetSeconds(player!.currentItem.duration)
        let sliderPercent = Float64(mainView!.scrubberSlider!.floatValue)

        // Timescale of 1 truncates the target to whole seconds.
        let targetTime = CMTimeMakeWithSeconds((sliderPercent * totalSeconds)/100.00, 1)

        player?.seekToTime(targetTime, completionHandler: { _ in })
    }

    /// Decomposes a duration in whole seconds into an
    /// (hours, minutes, seconds) triple.
    func secondsToHoursMinutesSeconds (seconds : Int) -> (Int, Int, Int) {
        let hours = seconds / 3600
        let remainder = seconds % 3600
        return (hours, remainder / 60, remainder % 60)
    }


    /// Formats an (hours, minutes, seconds) triple as "HH:MM:SS", each
    /// component zero-padded to two digits — e.g. (0, 1, 10) -> "00:01:10".
    ///
    /// Fix: the original allocated an NSNumberFormatter on every call
    /// (formatter creation is expensive, and this runs 4x/second from the
    /// periodic time observer) and force-unwrapped its three optional
    /// results. A fixed-width format string produces the same output for
    /// the non-negative components this file passes in, with no optionals.
    func getStringFromHoursMinutesSeconds(h:Int, m:Int, s:Int) -> (String){
        return String(format: "%02d:%02d:%02d", h, m, s)
    }



    /// Refreshes the "time remaining" and "time passed" labels on mainView.
    /// Called 4x/second by the periodic time observer installed in loadMedia().
    func updateStartAndEndTimes(){

        // Remaining time = duration - current position, in seconds.
        // NOTE(review): duration can be indefinite (NaN) before the item is
        // ready — presumably the periodic observer only fires once playback
        // is set up; confirm Int(time) is always safe here.
        var time = CMTimeGetSeconds(player!.currentItem.duration) - CMTimeGetSeconds(player!.currentItem.currentTime())

        var (hours,minutes,seconds) = secondsToHoursMinutesSeconds(Int(time))

        let timeRemainingString = getStringFromHoursMinutesSeconds(hours, m: minutes, s: seconds)

         mainView!.videoTimeRemaing.stringValue = timeRemainingString

         // Reuse the same tuple variables for the elapsed-time label.
         time = CMTimeGetSeconds(player!.currentItem.currentTime())

         (hours,minutes,seconds) = secondsToHoursMinutesSeconds(Int(time))

        let timePassedString = getStringFromHoursMinutesSeconds(hours, m: minutes, s: seconds)

         mainView?.videoTimePassed.stringValue = timePassedString


//         mainView!.videoTimeRemaing.stringValue = "\(CMTimeGetSeconds(player!.currentItem.duration) - CMTimeGetSeconds(player!.currentItem.currentTime()))!"


//        mainView?.videoTimePassed.stringValue = "\(CMTimeGetSeconds(player!.currentItem.currentTime()))!"
    }

    /// Mirrors the current playback position onto the scrubber slider as a
    /// percentage (0–100) of the item's duration.
    func updateSliderProgress(){
        let elapsed = CMTimeGetSeconds(player!.currentItem.currentTime())
        let total = CMTimeGetSeconds(player!.currentItem.duration)

        mainView!.scrubberSlider!.doubleValue = elapsed / total * 100
    }


   /// Tears down everything loadMedia() set up: pending prerolls, the
   /// periodic time observer, the "status" KVO registration, and the
   /// end-of-item notification subscription.
   func unloadMedia() {

    //Cancel Prerolls and Notifications

    // Fix: the original wrote `player?.cancelPendingPrerolls` without
    // parentheses, which only references the method and never calls it.
    player?.cancelPendingPrerolls()

    // Only remove the time observer if one was actually installed;
    // also clear it so a second unloadMedia() call is harmless.
    if let observer = observer {
        player?.removeTimeObserver(observer)
        self.observer = nil
    }

    player?.removeObserver(self, forKeyPath: "status")

    NSNotificationCenter.defaultCenter().removeObserver(self, name: AVPlayerItemDidPlayToEndTimeNotification, object: nil)

    //dispose of assets and players, etc

    //...

    }

    /// Tears down the playback pipeline (observers, prerolls) when the
    /// view is about to go away — see unloadMedia().
    override func viewWillDisappear() {

        unloadMedia()
    }


    //Get Status updates and log success or failure

    // MARK: - KVO

    /// Receives the "status" observations registered in loadMedia() and
    /// logs whether the player loaded, failed, or is still unknown.
    /// NOTE(review): observations that don't match are silently dropped
    /// rather than forwarded to super — confirm that is intended.
    override func observeValueForKeyPath(keyPath: String, ofObject object: AnyObject, change: [NSObject : AnyObject], context: UnsafeMutablePointer<Void>) {

        // Only react to our player's own "status" key path.
        guard (object as! NSObject) == player && keyPath == "status" else { return }

        if player?.status == .ReadyToPlay {
            NSLog("Status - Loaded")
        } else if player?.status == .Failed {
            NSLog("Status - Failed")
        } else {
            NSLog("Status - Unknown")
        }
    }

but I can’t understand where and how to apply a Core Image Filter to it. I have searched online but haven’t been able to find a complete example to do that.

  • Sscanf Equivalent in Objective-C
  • Change selection color on view-based NSTableView
  • When should I use NSURL instead of NSString and vice versa?
  • Sandbox entitlement to script iTunes via NSAppleScript
  • viewDidLoad in NSViewController?
  • ARC forbids explicit message send of 'retain' issue
  • Any help will be greatly appreciated, God willing, I can figure it out soon 🙂

    Solution collected from the Internet about “Apply Core Image Filter to Video on OS X using Swift”

    Actually, I figured it out. This is a solution that requires OS X El Capitan or later.

    // Builds an AVPlayer whose frames are routed through Core Image before
    // display, using AVVideoComposition(asset:applyingCIFiltersWithHandler:)
    // (available on OS X 10.11 El Capitan and later).
    //
    // Fixes to the pasted snippet: mis-encoded "smart" closing quotes on the
    // "mp4" and "CIGaussianBlur" string literals, mojibake non-breaking
    // spaces, and an unbalanced opening parenthesis on the filter line —
    // all of which prevented compilation.
    let url = NSBundle.mainBundle().URLForResource("skies", withExtension: "mp4")!

    let avAsset = AVAsset(URL: url)

    // The handler runs once per video frame; it must hand a finished
    // CIImage (covering the source extent) back to the request.
    let vidComp = AVVideoComposition(asset: avAsset,
        applyingCIFiltersWithHandler: { request in

            // Clamp to an infinite extent so the blur can sample past the
            // frame edge without producing transparent borders.
            self.image = request.sourceImage.imageByClampingToExtent()

            // Blur radius is driven live by the slider.
            self.image = self.image!.imageByApplyingFilter("CIGaussianBlur",
                withInputParameters: [kCIInputRadiusKey: self.blurTestSlider.integerValue])

            // Crop back to the original frame rectangle.
            self.image = self.image!.imageByCroppingToRect(request.sourceImage.extent)

            request.finishWithImage(self.image!, context: self.context)
        })

    // Attach the filtering composition to the item, then play it.
    let playerItem = AVPlayerItem(asset: avAsset)
    playerItem.videoComposition = vidComp
    player = AVPlayer(playerItem: playerItem)

    dTMediaViewControllerA?.player = player
    dTMediaViewControllerA?.player.play()