我的 AVCaptureVideoDataOutput 委托从未调用 didOutputSampleBuffer 函数。我不知道为什么会这样。这是代码:
import Foundation
import AVFoundation
import UIKit
// Video Delegate
// Standalone delegate object intended to receive camera frame callbacks.
// NOTE(review): AVCaptureVideoDataOutput does not retain its sample-buffer
// delegate; if this object is created as a local variable it is deallocated
// immediately and neither callback ever fires — presumably that is why the
// question's delegate was never called. TODO confirm against the caller.
class VideoDelegate : NSObject, AVCaptureVideoDataOutputSampleBufferDelegate
{
// Invoked for every captured video frame delivered by the data output.
func captureOutput(captureOutput: AVCaptureOutput!,
didOutputSampleBuffer sampleBuffer: CMSampleBuffer!,
fromConnection connection: AVCaptureConnection!){
print("hihi")
}
// Invoked when a frame is dropped (e.g. late frames being discarded).
func captureOutput(captureOutput: AVCaptureOutput!,
didDropSampleBuffer sampleBuffer: CMSampleBuffer!,
fromConnection connection: AVCaptureConnection!){
print("LOL")
}
}
这是我的VideoDelegate代码:
{{1}}
为什么我的委托没有被调用?如何修复它?我已经查看过类似的 Stack Overflow 问题,但是找不到解决这个问题的方法。请帮忙。
答案 0(得分:3)
我发现了问题所在!原因是:被回调的委托必须在同一个视图控制器中创建(数据输出不会强引用它的委托)。这是修改后的代码:
import UIKit
import AVFoundation
import Accelerate
var customPreviewLayer: AVCaptureVideoPreviewLayer?
/// Owns the capture session and acts as its sample-buffer delegate.
/// The view controller itself is the delegate (see setupCameraSession) so the
/// delegate object stays alive for the lifetime of the session — the fix the
/// answer describes.
class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {
    var captureSession: AVCaptureSession?
    var dataOutput: AVCaptureVideoDataOutput?
    //var customPreviewLayer: AVCaptureVideoPreviewLayer?
    @IBOutlet weak var camView: UIView!

    override func viewWillAppear(animated: Bool) {
        // BUG FIX: the original called super.viewDidAppear(animated) here,
        // skipping UIKit's viewWillAppear bookkeeping.
        super.viewWillAppear(animated)
        //setupCameraSession()
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        // Configure everything first, then start the session.
        //captureSession?.startRunning()
        setupCameraSession()
        self.captureSession?.startRunning()
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    /// Builds the session: camera input, preview layer on camView, and a
    /// video data output whose delegate is self (same view controller —
    /// this is what makes the callbacks fire).
    func setupCameraSession() {
        // Session
        self.captureSession = AVCaptureSession()
        self.captureSession!.sessionPreset = AVCaptureSessionPreset1920x1080

        // Capture device + input. BUG FIX: the original continued with an
        // empty AVCaptureDeviceInput() when creation threw; now we bail out.
        let inputDevice: AVCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
        let deviceInput: AVCaptureDeviceInput
        do {
            deviceInput = try AVCaptureDeviceInput(device: inputDevice)
        } catch let error as NSError {
            print(error)
            return
        }
        if self.captureSession!.canAddInput(deviceInput) {
            self.captureSession!.addInput(deviceInput)
        }

        // Preview layer covering camView.
        customPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        customPreviewLayer!.frame = camView.bounds
        customPreviewLayer?.videoGravity = AVLayerVideoGravityResizeAspect
        customPreviewLayer?.connection.videoOrientation = AVCaptureVideoOrientation.Portrait
        self.camView.layer.addSublayer(customPreviewLayer!)
        print("Cam layer added")

        // Data output: bi-planar 4:2:0 so plane 0 is the 8-bit luma plane
        // read in captureOutput below.
        self.dataOutput = AVCaptureVideoDataOutput()
        self.dataOutput!.videoSettings = [
            String(kCVPixelBufferPixelFormatTypeKey) : Int(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
        ]
        self.dataOutput!.alwaysDiscardsLateVideoFrames = true
        if self.captureSession!.canAddOutput(dataOutput) {
            self.captureSession!.addOutput(dataOutput)
        }
        self.captureSession!.commitConfiguration()

        // Deliver frames on a private serial queue; the delegate is self.
        let queue: dispatch_queue_t = dispatch_queue_create("VideoQueue", DISPATCH_QUEUE_SERIAL)
        //let delegate = VideoDelegate()
        self.dataOutput!.setSampleBufferDelegate(self, queue: queue)
    }

    /// Per-frame callback: wraps the luma plane in a grayscale CGImage and
    /// shows it as the preview layer's contents.
    func captureOutput(captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBufferRef, fromConnection connection: AVCaptureConnection) {
        print("buffered")
        let imageBuffer: CVImageBufferRef = CMSampleBufferGetImageBuffer(sampleBuffer)!
        CVPixelBufferLockBaseAddress(imageBuffer, 0)
        // BUG FIX: the original never unlocked the buffer; defer guarantees
        // the unlock on every exit path.
        defer { CVPixelBufferUnlockBaseAddress(imageBuffer, 0) }
        let width: size_t = CVPixelBufferGetWidthOfPlane(imageBuffer, 0)
        let height: size_t = CVPixelBufferGetHeightOfPlane(imageBuffer, 0)
        let bytesPerRow: size_t = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0)
        let lumaBuffer: UnsafeMutablePointer = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0)
        let grayColorSpace: CGColorSpaceRef = CGColorSpaceCreateDeviceGray()!
        // BUG FIX: DeviceGray + PremultipliedLast is an unsupported pixel
        // format combination, so CGBitmapContextCreate returned nil and the
        // force-unwrap crashed (the "problematic" line). An 8-bit gray
        // context takes alpha info .None.
        let context: CGContextRef = CGBitmapContextCreate(lumaBuffer, width, height, 8, bytesPerRow, grayColorSpace, CGImageAlphaInfo.None.rawValue)!
        let dstImageFilter: CGImageRef = CGBitmapContextCreateImage(context)!
        // Hop to the main queue for the layer update (UI work).
        dispatch_sync(dispatch_get_main_queue(), {() -> Void in
            customPreviewLayer!.contents = dstImageFilter as AnyObject
        })
    }
}