Recording Video with Real-Time Filters in Swift

Asked: 2017-07-18 16:18:17

Tags: ios swift avfoundation core-image cifilter

I'm new to Swift and I'm trying to build a camera app that can apply filters in real time and save the video with the filter applied.

So far I can see a live preview with the filter applied, but when I save the video it comes out completely black.

import UIKit
import AVFoundation
import AssetsLibrary
import CoreMedia
import Photos

class ViewController: UIViewController , AVCaptureVideoDataOutputSampleBufferDelegate {

    var captureSession: AVCaptureSession!

    @IBOutlet weak var previewView: UIView!
    @IBOutlet weak var recordButtton: UIButton!
    @IBOutlet weak var imageView: UIImageView!

    var assetWriter: AVAssetWriter?
    var assetWriterPixelBufferInput: AVAssetWriterInputPixelBufferAdaptor?
    var isWriting = false
    var currentSampleTime: CMTime?
    var currentVideoDimensions: CMVideoDimensions?

    override func viewDidLoad() {
        super.viewDidLoad()
        FilterVendor.register()
        setupCaptureSession()
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
    }

    func setupCaptureSession() {
        let captureSession = AVCaptureSession()
        captureSession.sessionPreset = AVCaptureSessionPresetPhoto

        guard let captureDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo), let input = try? AVCaptureDeviceInput(device: captureDevice) else {
            print("Can't access the camera")
            return
        }

        if captureSession.canAddInput(input) {
            captureSession.addInput(input)
        }

        let videoOutput = AVCaptureVideoDataOutput()

        videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue.main)
        if captureSession.canAddOutput(videoOutput) {
            captureSession.addOutput(videoOutput)
        }

        let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        if((previewLayer) != nil) {
            view.layer.addSublayer(previewLayer!)
        }

        captureSession.startRunning()
    }

    @IBAction func record(_ sender: Any) {
        if isWriting {
            print("stop record")
            self.isWriting = false
            assetWriterPixelBufferInput = nil
            assetWriter?.finishWriting(completionHandler: {[unowned self] () -> Void in
                self.saveMovieToCameraRoll()
            })
        } else {
            print("start record")
            createWriter()
            assetWriter?.startWriting()
            assetWriter?.startSession(atSourceTime: currentSampleTime!)
            isWriting = true
        }
    }

    func saveMovieToCameraRoll() {
        PHPhotoLibrary.shared().performChanges({
            PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: self.movieURL() as URL)
        }) { saved, error in
            if saved {
                print("saved")
            }
        }
    }

    func movieURL() -> NSURL {
        let tempDir = NSTemporaryDirectory()
        let url = NSURL(fileURLWithPath: tempDir).appendingPathComponent("tmpMov.mov")
        return url! as NSURL
    }

    func checkForAndDeleteFile() {
        let fm = FileManager.default
        let url = movieURL()
        let exist = fm.fileExists(atPath: url.path!)

        if exist {
            do {
                try fm.removeItem(at: url as URL)
            } catch let error as NSError {
                print(error.localizedDescription)
            }
        }
    }

    func createWriter() {
        self.checkForAndDeleteFile()

        do {
            assetWriter = try AVAssetWriter(outputURL: movieURL() as URL, fileType: AVFileTypeQuickTimeMovie)
        } catch let error as NSError {
            print(error.localizedDescription)
            return
        }

        let outputSettings = [
            AVVideoCodecKey : AVVideoCodecH264,
            AVVideoWidthKey : Int(currentVideoDimensions!.width),
            AVVideoHeightKey : Int(currentVideoDimensions!.height)
        ] as [String : Any]

        let assetWriterVideoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: outputSettings as? [String : AnyObject])
        assetWriterVideoInput.expectsMediaDataInRealTime = true
        assetWriterVideoInput.transform = CGAffineTransform(rotationAngle: CGFloat(M_PI / 2.0))

        let sourcePixelBufferAttributesDictionary = [
            String(kCVPixelBufferPixelFormatTypeKey) : Int(kCVPixelFormatType_32BGRA),
            String(kCVPixelBufferWidthKey) : Int(currentVideoDimensions!.width),
            String(kCVPixelBufferHeightKey) : Int(currentVideoDimensions!.height),
            String(kCVPixelFormatOpenGLESCompatibility) : kCFBooleanTrue
        ] as [String : Any]

        assetWriterPixelBufferInput = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: assetWriterVideoInput,
                                                                           sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary)

        if assetWriter!.canAdd(assetWriterVideoInput) {
            assetWriter!.add(assetWriterVideoInput)
        } else {
            print("no way\(assetWriterVideoInput)")
        }
    }

    func captureOutput(_ captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection) {
        autoreleasepool {

            connection.videoOrientation = AVCaptureVideoOrientation.landscapeLeft;

            guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
            let cameraImage = CIImage(cvPixelBuffer: pixelBuffer)

            let filter = CIFilter(name: "Fİlter")!
            filter.setValue(cameraImage, forKey: kCIInputImageKey)


            let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer)!
            self.currentVideoDimensions = CMVideoFormatDescriptionGetDimensions(formatDescription)
            self.currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer)

            if self.isWriting {
                if self.assetWriterPixelBufferInput?.assetWriterInput.isReadyForMoreMediaData == true {
                    var newPixelBuffer: CVPixelBuffer? = nil

                    CVPixelBufferPoolCreatePixelBuffer(nil, self.assetWriterPixelBufferInput!.pixelBufferPool!, &newPixelBuffer)

                    let success = self.assetWriterPixelBufferInput?.append(newPixelBuffer!, withPresentationTime: self.currentSampleTime!)

                    if success == false {
                        print("Pixel Buffer failed")
                    }
                }
            }

            DispatchQueue.main.async {

                if let outputValue = filter.value(forKey: kCIOutputImageKey) as? CIImage {
                    let filteredImage = UIImage(ciImage: outputValue)
                    self.imageView.image = filteredImage
                }
            }
        }
    }
}

1 Answer:

Answer 0 (score: 6):

I've added some comments at the key points below:

func captureOutput(_ captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection) {
    autoreleasepool {

        connection.videoOrientation = AVCaptureVideoOrientation.landscapeLeft;

        // COMMENT: This line makes sense - this is your pixelbuffer from the camera.
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }

        // COMMENT: OK, so you turn pixelBuffer into a CIImage...
        let cameraImage = CIImage(cvPixelBuffer: pixelBuffer)

        // COMMENT: And now you've created a CIImage with a filter instruction...
        let filter = CIFilter(name: "Fİlter")!
        filter.setValue(cameraImage, forKey: kCIInputImageKey)


        let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer)!
        self.currentVideoDimensions = CMVideoFormatDescriptionGetDimensions(formatDescription)
        self.currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer)

        if self.isWriting {
            if self.assetWriterPixelBufferInput?.assetWriterInput.isReadyForMoreMediaData == true {
                // COMMENT: Here's where it gets weird. You've declared a new, empty pixelBuffer... but you already have one (pixelBuffer) that contains the image you want to write...
                var newPixelBuffer: CVPixelBuffer? = nil

                // COMMENT: And you grabbed memory from the pool.
                CVPixelBufferPoolCreatePixelBuffer(nil, self.assetWriterPixelBufferInput!.pixelBufferPool!, &newPixelBuffer)

                // COMMENT: And now you write the empty pixelBuffer back <-- this is what's causing the black frames.
                let success = self.assetWriterPixelBufferInput?.append(newPixelBuffer!, withPresentationTime: self.currentSampleTime!)

                if success == false {
                    print("Pixel Buffer failed")
                }
            }
        }

        // COMMENT: And now you're sending the filtered image back to the screen.
        DispatchQueue.main.async {

            if let outputValue = filter.value(forKey: kCIOutputImageKey) as? CIImage {
                let filteredImage = UIImage(ciImage: outputValue)
                self.imageView.image = filteredImage
            }
        }
    }
}

As I see it, you're basically grabbing the camera image, creating a filtered copy, and then creating a brand-new, empty pixel buffer and writing that out.

If you append the pixel buffer you already have instead of the new one you're creating, you should write the frames successfully.
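For example (a minimal sketch of that change; note it writes the raw camera frames, not the filtered ones), the writing branch would become:

if self.isWriting {
    if self.assetWriterPixelBufferInput?.assetWriterInput.isReadyForMoreMediaData == true {
        // Append the buffer that actually holds the camera frame,
        // instead of a freshly allocated (and therefore black) one.
        let success = self.assetWriterPixelBufferInput?.append(pixelBuffer, withPresentationTime: self.currentSampleTime!)

        if success == false {
            print("Pixel Buffer failed")
        }
    }
}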

What you need in order to write out the filtered video is a way to render the filtered CIImage into a new CVPixelBuffer. Solutions for that step already exist on Stack Overflow; I know because I needed it myself!
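A minimal sketch of that rendering step, assuming a CIContext kept as a property (the ciContext name is my own) and the same Swift 3 APIs as the code above:

// Assumed property on the view controller; created once, because
// building a CIContext per frame is expensive.
let ciContext = CIContext()

// Inside captureOutput, replacing the append of the empty buffer:
if self.isWriting {
    if self.assetWriterPixelBufferInput?.assetWriterInput.isReadyForMoreMediaData == true,
        let pool = self.assetWriterPixelBufferInput?.pixelBufferPool,
        let filteredImage = filter.outputImage {

        var newPixelBuffer: CVPixelBuffer? = nil
        CVPixelBufferPoolCreatePixelBuffer(nil, pool, &newPixelBuffer)

        if let buffer = newPixelBuffer {
            // Render the filtered CIImage into the pooled buffer so the
            // appended frame actually contains the filtered pixels.
            self.ciContext.render(filteredImage, to: buffer)

            let success = self.assetWriterPixelBufferInput?.append(buffer, withPresentationTime: self.currentSampleTime!)

            if success == false {
                print("Pixel Buffer failed")
            }
        }
    }
}

Because the pooled buffer keeps the 32BGRA format declared in sourcePixelBufferAttributesDictionary, the writer's output settings don't need to change.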