Capturing a photo with AVFoundation in Swift

Date: 2016-05-01 05:58:00

Tags: ios swift camera avfoundation

I'm building a live-filter camera app in Swift.

I changed from AVCaptureVideoDataOutput() to AVCaptureStillImageOutput() to implement a capture-photo feature. After the change, there is no preview view when the app opens.

The photo capture itself works: I can hear the shutter sound ("ka") when I tap the mainGroup area. There is just no preview view.

Here is my complete code:

import Foundation
import UIKit
import AVFoundation
import CoreMedia

let CIHueAdjust = "CIHueAdjust"
let CIHueAdjustFilter = CIFilter(name: "CIHueAdjust", withInputParameters: ["inputAngle" : 1.24])

let Filters = [CIHueAdjust: CIHueAdjustFilter]

let FilterNames = [String](Filters.keys).sort()

class LiveCamViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate, UIImagePickerControllerDelegate, UINavigationControllerDelegate {
let mainGroup = UIStackView()
let imageView = UIImageView(frame: CGRectZero)
let filtersControl = UISegmentedControl(items: FilterNames)
var videoOutput = AVCaptureStillImageOutput()

override func viewDidLoad()
{
    super.viewDidLoad()

    view.addSubview(mainGroup)
    mainGroup.axis = UILayoutConstraintAxis.Vertical
    mainGroup.distribution = UIStackViewDistribution.Fill

    mainGroup.addArrangedSubview(imageView)
    mainGroup.addArrangedSubview(filtersControl)
    mainGroup.addGestureRecognizer(UITapGestureRecognizer(target: self, action:#selector(LiveCamViewController.saveToCamera(_:))))

    imageView.contentMode = UIViewContentMode.ScaleAspectFit

    filtersControl.selectedSegmentIndex = 0

    let captureSession = AVCaptureSession()
    captureSession.sessionPreset = AVCaptureSessionPresetPhoto

    let backCamera = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)

    do
    {
        let input = try AVCaptureDeviceInput(device: backCamera)

        captureSession.addInput(input)
    }
    catch
    {
        print("can't access camera")
        return
    }

    //get captureOutput invoked
    let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    view.layer.addSublayer(previewLayer)

    videoOutput.outputSettings = [AVVideoCodecKey:AVVideoCodecJPEG]

    if captureSession.canAddOutput(videoOutput)
    {
        captureSession.addOutput(videoOutput)
    }

    captureSession.startRunning()
}

func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!)
{
    guard let filter = Filters[FilterNames[filtersControl.selectedSegmentIndex]] else
    {
        return
    }

    let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
    let cameraImage = CIImage(CVPixelBuffer: pixelBuffer!)

    filter!.setValue(cameraImage, forKey: kCIInputImageKey)

    let filteredImage = UIImage(CIImage: filter!.valueForKey(kCIOutputImageKey) as! CIImage!)
    let fixedImage = correctlyOrientedImage(filteredImage)

    dispatch_async(dispatch_get_main_queue())
    {
        self.imageView.image = fixedImage
    }

}

func correctlyOrientedImage(image: UIImage) -> UIImage {

    UIGraphicsBeginImageContextWithOptions(image.size, false, image.scale)
    image.drawInRect(CGRectMake(0, 0, image.size.width, image.size.height))
    let normalizedImage:UIImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();

    let imageRef: CGImageRef = normalizedImage.CGImage!
    let rotatedImage: UIImage = UIImage(CGImage: imageRef, scale: 1.0, orientation: .Right)

    return rotatedImage
}

override func viewDidLayoutSubviews()
{
    mainGroup.frame = CGRect(x: 37, y: 115, width: 301, height: 481)
}

func saveToCamera(sender: UITapGestureRecognizer) {

    videoOutput.outputSettings = [AVVideoCodecKey:AVVideoCodecJPEG]

    if let videoConnection = videoOutput.connectionWithMediaType(AVMediaTypeVideo) {
        videoOutput.captureStillImageAsynchronouslyFromConnection(videoConnection) {
            (imageDataSampleBuffer, error) -> Void in
            let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(imageDataSampleBuffer)
            UIImageWriteToSavedPhotosAlbum(UIImage(data: imageData)!, nil, nil, nil)
        }
    }
}

}

Thanks.

2 answers:

Answer 0: (score: 0)

If you just want to capture a photo or video from the camera, you should use UIImagePickerController instead of AVFoundation. See the Apple documentation for more information about it.
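For reference, a minimal sketch of that approach, written in the same Swift 2 syntax as the question's code; the PickerViewController class and the presentCamera() method are only illustrative names, not part of the original code:

import UIKit

class PickerViewController: UIViewController, UIImagePickerControllerDelegate, UINavigationControllerDelegate {

    func presentCamera() {
        // Only present the picker if the device actually has a camera.
        guard UIImagePickerController.isSourceTypeAvailable(.Camera) else { return }

        let picker = UIImagePickerController()
        picker.sourceType = .Camera
        picker.delegate = self
        presentViewController(picker, animated: true, completion: nil)
    }

    // Called when the user takes a picture: save it to the photo album and dismiss the picker.
    func imagePickerController(picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [String : AnyObject]) {
        if let image = info[UIImagePickerControllerOriginalImage] as? UIImage {
            UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil)
        }
        picker.dismissViewControllerAnimated(true, completion: nil)
    }

    func imagePickerControllerDidCancel(picker: UIImagePickerController) {
        picker.dismissViewControllerAnimated(true, completion: nil)
    }
}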

Update:

Refer to this link; it has plenty of examples of customizing UIImagePickerController. You can also refer to this answer on Stack Overflow.

Hope this helps :)

Answer 1: (score: 0)

The problem is probably that you haven't given the previewLayer a frame.

Add:

previewLayer.frame = self.view.bounds

and explicitly set the video gravity (I think this is optional):

previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill // Or choose some other option if you prefer
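
Put together with the question's viewDidLoad, the preview setup could look roughly like this (same Swift 2 syntax as the question; self.view.bounds is just one reasonable choice of frame):

    let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    previewLayer.frame = self.view.bounds                           // a zero-sized layer renders nothing, so give it an explicit frame
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill // scale the video to fill the layer, cropping as needed
    view.layer.addSublayer(previewLayer)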