Can Swift on iOS filter video in real time?

Date: 2016-11-06 16:38:34

Tags: ios swift avfoundation calayer core-image

I am trying to apply filters to video in real time, but when I tap a button to change the CALayer, nothing changes on screen. This is curious, because the filter is being applied to the frames; it just never becomes visible. This is the UIView subclass I use to present the AV output...

//AVCamPreview Class
import Foundation
import UIKit
import AVFoundation

// UIView subclass whose backing layer is an AVCaptureVideoPreviewLayer,
// so the capture session can render directly into the view.
class AVCamPreviewView: UIView {

    var session: AVCaptureSession? {
        get {
            return (self.layer as! AVCaptureVideoPreviewLayer).session
        }
        set (session) {
            (self.layer as! AVCaptureVideoPreviewLayer).session = session
        }
    }

    override class func layerClass() -> AnyClass {
        return AVCaptureVideoPreviewLayer.self
    }
}




//CameraViewController
//Properties

// Core Image context backed by an OpenGL ES 2 context; the working color
// space is disabled (NSNull) for speed.
lazy var context: CIContext = {
    let eaglContext = EAGLContext(API: EAGLRenderingAPI.OpenGLES2)
    let options = [kCIContextWorkingColorSpace : NSNull()]
    return CIContext(EAGLContext: eaglContext, options: options)
}()

// Index 0 means "no filter"; the others are Core Image photo-effect filter names.
lazy var filterNames: [String] = {
    return ["#nofilter", "CIPhotoEffectNoir", "CIPhotoEffectProcess"]
}()

//Apply Filter func
// The button's tag selects the filter; index 0 ("#nofilter") is not a registered
// filter name, so CIFilter(name:) returns nil and the delegate skips filtering.
@IBAction func applyFilter(sender: UIButton) {
    let filterName = filterNames[sender.tag]
    filter = CIFilter(name: filterName)
    // Slide the filter-button container off screen.
    self.filterButtonsContainer.center.x = self.filterButtonsContainer.center.x - self.view.frame.width
}

// MARK: - AVCaptureVideoDataOutputSampleBufferDelegate
func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
    autoreleasepool {
        let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)!

        let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer)!
        self.currentVideoDimensions = CMVideoFormatDescriptionGetDimensions(formatDescription)
        self.currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer)

        // Wrap the camera frame in a CIImage and run it through the selected filter, if any.
        var outputImage = CIImage(CVPixelBuffer: imageBuffer)

        if self.filter != nil {
            self.filter.setValue(outputImage, forKey: kCIInputImageKey)
            outputImage = self.filter.outputImage!
        }

        // While recording, render the filtered image into a fresh pixel buffer
        // and append it to the asset writer.
        if self.isWriting {
            if self.assetWriterPixelBufferInput?.assetWriterInput.readyForMoreMediaData == true {
                var newPixelBuffer: CVPixelBuffer? = nil

                CVPixelBufferPoolCreatePixelBuffer(nil, self.assetWriterPixelBufferInput!.pixelBufferPool!, &newPixelBuffer)

                self.context.render(outputImage, toCVPixelBuffer: newPixelBuffer!, bounds: outputImage.extent, colorSpace: nil)

                let success = self.assetWriterPixelBufferInput?.appendPixelBuffer(newPixelBuffer!, withPresentationTime: self.currentSampleTime!)

                if success == false {
                    print("Pixel Buffer failed")
                }
            }
        }

        // Rotate the image to match the device orientation.
        let orientation = UIDevice.currentDevice().orientation
        var t: CGAffineTransform!
        if orientation == UIDeviceOrientation.Portrait {
            t = CGAffineTransformMakeRotation(CGFloat(-M_PI / 2.0))
        } else if orientation == UIDeviceOrientation.PortraitUpsideDown {
            t = CGAffineTransformMakeRotation(CGFloat(M_PI / 2.0))
        } else if orientation == UIDeviceOrientation.LandscapeRight {
            t = CGAffineTransformMakeRotation(CGFloat(M_PI))
        } else {
            t = CGAffineTransformMakeRotation(0)
        }
        outputImage = outputImage.imageByApplyingTransform(t)

        let cgImage = self.context.createCGImage(outputImage, fromRect: outputImage.extent)
        self.ciImage = outputImage

        // Push the rendered CGImage to the preview layer on the main queue.
        dispatch_sync(dispatch_get_main_queue(), {
            (self.previewView.layer as! AVCaptureVideoPreviewLayer).contents = cgImage
        })
    }
}

1 Answer:

Answer 0 (score: 0):

https://github.com/jabson/MagicVideo

In that project, the filtered CIImage is drawn onto an OpenGL view with a CIContext, and it works fine. It also supports writing the result to the photo library.
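
Below is a minimal sketch of the approach the answer describes: instead of assigning a CGImage to the AVCaptureVideoPreviewLayer's contents, the filtered CIImage is drawn into a GLKView whose EAGLContext is shared with the CIContext. This is not code from the linked repository; the type and property names (FilteredPreview, glView, ciContext) are illustrative assumptions, written in the same Swift 2-era syntax as the question.

import UIKit
import GLKit
import CoreImage

// Illustrative helper: owns a GLKView and a CIContext that share one EAGLContext.
class FilteredPreview {

    let eaglContext: EAGLContext
    let glView: GLKView
    let ciContext: CIContext

    init(frame: CGRect) {
        let context = EAGLContext(API: .OpenGLES2)
        let view = GLKView(frame: frame, context: context)
        view.enableSetNeedsDisplay = false   // drawing is driven manually per frame
        self.eaglContext = context
        self.glView = view
        self.ciContext = CIContext(EAGLContext: context)
    }

    // Call with the already-filtered image for each frame.
    func drawImage(image: CIImage) {
        glView.bindDrawable()
        // drawableWidth/drawableHeight are in pixels, so no scale factor is needed.
        let bounds = CGRect(x: 0, y: 0, width: glView.drawableWidth, height: glView.drawableHeight)
        ciContext.drawImage(image, inRect: bounds, fromRect: image.extent)
        glView.display()
    }
}

With a setup like this, the block at the end of the question's captureOutput would call something like preview.drawImage(outputImage) rather than assigning cgImage to the preview layer's contents, which the preview layer ignores because it renders the capture session directly.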