Recording video with AVCaptureSession, adding a CIFilter to it, and saving it to the photo album

Date: 2016-04-13 13:47:48

Tags: ios swift video swift2 video-capture

I want to build a custom video recorder in my app. I can currently record a video and save it, but I want to apply a filter to the video while recording and save the video with the new filter to the photo album. This is my code for recording a video and saving it.

let captureSession = AVCaptureSession()
let fileOutput = AVCaptureMovieFileOutput()

func initVideoRecording() {
    do {
        try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryRecord)
        try AVAudioSession.sharedInstance().setActive(true)
    } catch {
        print("error in audio")
    }

    let session = AVCaptureSession()

    session.beginConfiguration()

    session.sessionPreset = AVCaptureSessionPresetMedium

    let videoLayer = AVCaptureVideoPreviewLayer(session: session)
    videoLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
    videoLayer.frame = myImage.bounds
    myImage.layer.addSublayer(videoLayer)

    let backCamera = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
    let audio = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
    do
    {
        let input = try AVCaptureDeviceInput(device: backCamera)
        let audioInput = try AVCaptureDeviceInput(device: audio)

        session.addInput(input)
        session.addInput(audioInput)

    }
    catch
    {
        print("can't access camera")
        return
    }

    session.addOutput(fileOutput)

    session.commitConfiguration()

    session.startRunning()

}

@IBAction func recordFunc() {
    if fileOutput.recording {
        myButton.setTitle("record", forState: .Normal)
        fileOutput.stopRecording()
    } else {
        let fileUrl = NSURL(fileURLWithPath: NSTemporaryDirectory()).URLByAppendingPathComponent("\(getCurrentDate())-capturedvideo.mp4")
        fileOutput.startRecordingToOutputFileURL(fileUrl, recordingDelegate: self)

        myButton.setTitle("stop", forState: .Normal)
    }
}

func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
    // Save the recorded video to the photos album.
    UISaveVideoAtPathToSavedPhotosAlbum(outputFileURL.path!, self, "video:didFinishSavingWithError:contextInfo:", nil)
}

I tried using AVCaptureVideoDataOutput.

In its delegate I use this code:

func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
    connection.videoOrientation = AVCaptureVideoOrientation.Portrait

    let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
    let cameraImage = CIImage(CVPixelBuffer: pixelBuffer!)

    let comicEffect = CIFilter(name: "CIComicEffect")
    comicEffect!.setValue(cameraImage, forKey: kCIInputImageKey)

    let filteredImage = UIImage(CIImage: comicEffect!.valueForKey(kCIOutputImageKey) as! CIImage)

    // Only the preview image view is updated; nothing is written to disk.
    dispatch_async(dispatch_get_main_queue()) {
        self.myImage.image = filteredImage
    }
}

With this code, the filter is only displayed in the preview; nothing is recorded.
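The reason is that AVCaptureVideoDataOutput only delivers sample buffers for processing; it never writes anything to disk. To record the filtered frames they have to be appended to an AVAssetWriter. That is what the solution below does; here is a minimal sketch of the idea first, where writerInput, adapter, and renderToPixelBuffer(_:) are illustrative placeholders set up as in the full code that follows:

// Sketch only (inside the video data output delegate callback):
// display AND record each filtered frame.
// writerInput/adapter come from an AVAssetWriter configured elsewhere;
// renderToPixelBuffer(_:) is a hypothetical helper that draws a CIImage
// into a CVPixelBuffer (see the helpers at the end of this post).
let pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
let cameraImage = CIImage(CVPixelBuffer: CMSampleBufferGetImageBuffer(sampleBuffer)!)
let comicEffect = CIFilter(name: "CIComicEffect")!
comicEffect.setValue(cameraImage, forKey: kCIInputImageKey)
let filtered = comicEffect.outputImage!

if writerInput.readyForMoreMediaData {
    adapter.appendPixelBuffer(renderToPixelBuffer(filtered), withPresentationTime: pts)
}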

======================= This is the solution to my question =======================

Note: this code uses Swift 2 and Xcode 7.3.

    let captureSession = AVCaptureSession()
    let videoOutput = AVCaptureVideoDataOutput()
    let audioOutput = AVCaptureAudioDataOutput()

    var adapter: AVAssetWriterInputPixelBufferAdaptor!
    var record = false
    var videoWriter: AVAssetWriter!
    var writerInput: AVAssetWriterInput!
    var audioWriterInput: AVAssetWriterInput!
    var lastPath = ""
    var starTime = kCMTimeZero

    var outputSize = CGSizeMake(UIScreen.mainScreen().bounds.width, UIScreen.mainScreen().bounds.height)

    override func viewDidAppear(animated: Bool) {
        super.viewDidAppear(animated)

        video()
    }

    func video() {

        do {
            try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryRecord)
            try AVAudioSession.sharedInstance().setActive(true)
        }catch {
            print("error in audio")
        }

        captureSession.beginConfiguration()

        captureSession.sessionPreset = AVCaptureSessionPresetMedium

        let videoLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        videoLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
        //videoLayer.frame = myImage.bounds
        //myImage.layer.addSublayer(videoLayer)

        view.layer.addSublayer(videoLayer)

        let backCamera = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
        let audio = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
        do
        {
            let input = try AVCaptureDeviceInput(device: backCamera)
            let audioInput = try AVCaptureDeviceInput(device: audio)

            captureSession.addInput(input)
            captureSession.addInput(audioInput)

        }
        catch
        {
            print("can't access camera")
            return
        }

        let queue = dispatch_queue_create("sample buffer delegate", DISPATCH_QUEUE_SERIAL)

        videoOutput.setSampleBufferDelegate(self,queue: queue)
        audioOutput.setSampleBufferDelegate(self, queue: queue)

        captureSession.addOutput(videoOutput)
        captureSession.addOutput(audioOutput)
        captureSession.commitConfiguration()

        captureSession.startRunning()

    }


    @IBAction func recordFunc() {
        if record {
            // Stop recording: finish the writer inputs, then save the file.
            myButton.setTitle("record", forState: .Normal)
            record = false
            self.writerInput.markAsFinished()
            audioWriterInput.markAsFinished()
            self.videoWriter.finishWritingWithCompletionHandler { () -> Void in
                print("FINISHED!!!!!")
                UISaveVideoAtPathToSavedPhotosAlbum(self.lastPath, self, "video:didFinishSavingWithError:contextInfo:", nil)
            }
        } else {
            // Start recording: set up an AVAssetWriter with video and audio inputs.
            let fileUrl = NSURL(fileURLWithPath: NSTemporaryDirectory()).URLByAppendingPathComponent("\(getCurrentDate())-capturedvideo.MP4")

            lastPath = fileUrl.path!
            videoWriter = try? AVAssetWriter(URL: fileUrl, fileType: AVFileTypeMPEG4)

            let outputSettings = [AVVideoCodecKey : AVVideoCodecH264, AVVideoWidthKey : NSNumber(float: Float(outputSize.width)), AVVideoHeightKey : NSNumber(float: Float(outputSize.height))]

            writerInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: outputSettings)
            writerInput.expectsMediaDataInRealTime = true
            audioWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: DejalActivityView.getAudioDictionary() as? [String:AnyObject])

            videoWriter.addInput(writerInput)
            videoWriter.addInput(audioWriterInput)

            adapter = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: writerInput, sourcePixelBufferAttributes: DejalActivityView.getAdapterDictionary() as? [String:AnyObject])

            videoWriter.startWriting()
            videoWriter.startSessionAtSourceTime(starTime)

            record = true
            myButton.setTitle("stop", forState: .Normal)
        }
    }

    func getCurrentDate()->String{
        let format = NSDateFormatter()
        format.dateFormat = "dd-MM-yyyy hh:mm:ss"
        format.locale = NSLocale(localeIdentifier: "en")
        let date = format.stringFromDate(NSDate())
        return date
    }


extension newCustomCameraViewController: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {


    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
        starTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)

        if captureOutput == videoOutput {
            connection.videoOrientation = AVCaptureVideoOrientation.Portrait

            let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
            let cameraImage = CIImage(CVPixelBuffer: pixelBuffer!)

            let comicEffect = CIFilter(name: "CIHexagonalPixellate")
            comicEffect!.setValue(cameraImage, forKey: kCIInputImageKey)

            let filteredImage = UIImage(CIImage: comicEffect!.valueForKey(kCIOutputImageKey) as! CIImage)
            //let filteredImage = UIImage(CIImage: cameraImage)

            if self.record == true {
                // Append the filtered frame to the writer. (Note: this creates a new
                // serial queue on every frame; a single stored queue would be cheaper.)
                dispatch_sync(dispatch_queue_create("sample buffer append", DISPATCH_QUEUE_SERIAL), {
                    if self.record == true {
                        if self.writerInput.readyForMoreMediaData {
                            let bo = self.adapter.appendPixelBuffer(DejalActivityView.pixelBufferFromCGImage(self.convertCIImageToCGImage(comicEffect!.valueForKey(kCIOutputImageKey) as! CIImage)).takeRetainedValue() as CVPixelBufferRef, withPresentationTime: self.starTime)
                            print("video is \(bo)")
                        }
                    }
                })
            }
            // Show the filtered frame in the preview image view.
            dispatch_async(dispatch_get_main_queue()) {
                self.myImage.image = filteredImage
            }
        } else if captureOutput == audioOutput {
            if self.record == true {
                let bo = audioWriterInput.appendSampleBuffer(sampleBuffer)
                print("audio is \(bo)")
            }
        }
    }


    func convertCIImageToCGImage(inputImage: CIImage) -> CGImage! {
        // Note: a CIContext is expensive to create; reusing a single stored
        // context would be cheaper than creating one per frame.
        let context = CIContext(options: nil)
        return context.createCGImage(inputImage, fromRect: inputImage.extent)
    }

    func video(videoPath: NSString, didFinishSavingWithError error: NSError?, contextInfo info: AnyObject) {
        var title = "Success"
        var message = "Video was saved"

        if error != nil {
            title = "Error"
            message = "Video failed to save"
        }

        let alert = UIAlertController(title: title, message: message, preferredStyle: .Alert)
        alert.addAction(UIAlertAction(title: "OK", style: UIAlertActionStyle.Cancel, handler: nil))
        presentViewController(alert, animated: true, completion: nil)
    }
}

These methods exist in DejalActivityView in Objective-C, and I couldn't convert them to Swift, so if anyone can convert them, please edit my code with the conversion. (A possible Swift translation is sketched after the Objective-C code below.)

+ (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size
{
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef) options, &pxbuffer);
    // CVReturn status = CVPixelBufferPoolCreatePixelBuffer(NULL, adaptor.pixelBufferPool, &pxbuffer);

    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8, 4*size.width, rgbColorSpace, kCGImageAlphaPremultipliedFirst);
    NSParameterAssert(context);

    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);

    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}

+ (NSDictionary *)getAdapterDictionary{
    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:
                                                           [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];

    return sourcePixelBufferAttributesDictionary;
}

+ (NSDictionary *)getAudioDictionary{
    AudioChannelLayout acl;
    bzero( &acl, sizeof(acl));
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;

    NSDictionary* audioOutputSettings = nil;
    audioOutputSettings = [ NSDictionary dictionaryWithObjectsAndKeys:
                           [ NSNumber numberWithInt: kAudioFormatMPEG4AAC ], AVFormatIDKey,
                           //[ NSNumber numberWithInt: 16 ], AVEncoderBitDepthHintKey,
                           [ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
                           [ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,
                           [ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
                           nil ];
//    NSDictionary* audioOutputSettings = nil;
//        audioOutputSettings = [ NSDictionary dictionaryWithObjectsAndKeys:
//                               [ NSNumber numberWithInt: kAudioFormatMPEG4AAC_HE_V2 ], AVFormatIDKey,
//                               [ NSNumber numberWithFloat: 44100.0], AVSampleRateKey,
//                               [ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
//                               nil ];

    return audioOutputSettings;
}
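For reference, here is one possible Swift 2 translation of these helpers. It is an untested sketch, not a verified drop-in for DejalActivityView; note that the Swift version returns a managed CVPixelBuffer?, so the takeRetainedValue() call at the append site would no longer be needed.

import AVFoundation
import CoreVideo

// Possible Swift 2 equivalents of the Objective-C helpers above (untested sketch).
func pixelBufferFromCGImage(image: CGImage, size: CGSize) -> CVPixelBuffer? {
    let options = [kCVPixelBufferCGImageCompatibilityKey as String: true,
                   kCVPixelBufferCGBitmapContextCompatibilityKey as String: true]
    var pxbuffer: CVPixelBuffer? = nil
    let status = CVPixelBufferCreate(kCFAllocatorDefault, Int(size.width), Int(size.height),
                                     kCVPixelFormatType_32ARGB, options, &pxbuffer)
    guard let buffer = pxbuffer where status == kCVReturnSuccess else { return nil }

    CVPixelBufferLockBaseAddress(buffer, 0)
    let pxdata = CVPixelBufferGetBaseAddress(buffer)
    let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
    // Draw the CGImage into the pixel buffer's backing memory.
    let context = CGBitmapContextCreate(pxdata, Int(size.width), Int(size.height), 8,
                                        4 * Int(size.width), rgbColorSpace,
                                        CGImageAlphaInfo.PremultipliedFirst.rawValue)
    CGContextDrawImage(context, CGRectMake(0, 0, CGFloat(CGImageGetWidth(image)),
                                           CGFloat(CGImageGetHeight(image))), image)
    CVPixelBufferUnlockBaseAddress(buffer, 0)
    return buffer
}

func getAdapterDictionary() -> [String: AnyObject] {
    return [kCVPixelBufferPixelFormatTypeKey as String:
                NSNumber(unsignedInt: kCVPixelFormatType_32ARGB)]
}

func getAudioDictionary() -> [String: AnyObject] {
    var acl = AudioChannelLayout()          // zero-initialized in Swift
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono
    return [AVFormatIDKey: NSNumber(unsignedInt: kAudioFormatMPEG4AAC),
            AVSampleRateKey: NSNumber(float: 44100.0),
            AVNumberOfChannelsKey: NSNumber(int: 1),
            AVChannelLayoutKey: NSData(bytes: &acl, length: sizeof(AudioChannelLayout))]
}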

1 Answer:

Answer 0 (score: 3)

You need to add an AVAssetWriter:

var videoRecorder: AVAssetWriter?

Then in your delegate callback:

let timeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)

// .Unknown means the writer has not started yet, so start writing with
// the first buffer's timestamp.
if videoRecorder?.status == .Unknown {
    startRecordingTime = timeStamp
    videoRecorder?.startWriting()
    videoRecorder?.startSessionAtSourceTime(timeStamp)
}

You need to configure the writer for each new recording, and you also need to add your inputs to the writer.

You may start running into problems because you don't appear to have any queues set up, but you will need them. This GitHub project is a very good resource to reference:

https://github.com/waleedka/rosywriterswift

EDIT: Additional information

You need to init() the writer and then add AVAssetWriterInput inputs for video/audio.
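To make that concrete, a minimal Swift 2 sketch of the writer setup the answer describes might look like this (the file name and the settings values are illustrative, not from the question):

// Sketch: init the writer once per recording and attach video/audio inputs,
// then start it from the first sample buffer as in the delegate snippet above.
let fileUrl = NSURL(fileURLWithPath: NSTemporaryDirectory()).URLByAppendingPathComponent("out.mp4")
videoRecorder = try? AVAssetWriter(URL: fileUrl, fileType: AVFileTypeMPEG4)

// Video input: H.264 at an illustrative 720x1280.
let videoSettings: [String: AnyObject] = [
    AVVideoCodecKey: AVVideoCodecH264,
    AVVideoWidthKey: NSNumber(int: 720),
    AVVideoHeightKey: NSNumber(int: 1280)
]
let videoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
videoInput.expectsMediaDataInRealTime = true

// Audio input: AAC mono at 44.1 kHz.
let audioSettings: [String: AnyObject] = [
    AVFormatIDKey: NSNumber(unsignedInt: kAudioFormatMPEG4AAC),
    AVSampleRateKey: NSNumber(float: 44100.0),
    AVNumberOfChannelsKey: NSNumber(int: 1)
]
let audioInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: audioSettings)
audioInput.expectsMediaDataInRealTime = true

videoRecorder?.addInput(videoInput)
videoRecorder?.addInput(audioInput)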