从AVCaptureDataOutputSynchronizer(Swift)获取音频数据时出现问题

时间:2019-02-23 19:28:09

标签: ios swift avfoundation

我正在尝试使用AVCaptureDataOutputSynchronizer从多个捕获输出捕获媒体。我在获取dataOutputSynchronizer中的音频数据时遇到问题,因为Guard语句中的audioData总是失败。抓住videoData没问题,我可以使用下面的代码显示框架。

我想知道AVCaptureSynchronizedSampleBufferData是否存在问题,但是AVCaptureSynchronizedSampleBufferData是用于使用同步捕获收集的视频或音频样本的容器。

我不确定问题是否出在我是否正确地正确配置了AVCaptureAudioDataOutput。这是我的ViewController类,用于初始化和检索视频和音频数据。

请注意,我正在iPhoneXS上运行它并运行iOS12。代码是快速编写的。尝试调试的任何帮助都将很棒!

//
//  ViewController.swift
//  AudioVideoSyncTest
//
//  Created by Andrew Mendez on 2/23/19.
//  Copyright © 2019 Andrew Mendez. All rights reserved.
//

import UIKit
import AVFoundation
class ViewController: UIViewController,AVCaptureDataOutputSynchronizerDelegate,AVCaptureVideoDataOutputSampleBufferDelegate,AVCaptureAudioDataOutputSampleBufferDelegate {

    var capSession:AVCaptureSession!
    var videoDataOutput = AVCaptureVideoDataOutput()
    var audioDataOutput = AVCaptureAudioDataOutput()
    var dataOutputSynchronizer:AVCaptureDataOutputSynchronizer!
    var dataOutputQueue = DispatchQueue(label: "com.amendez.dataOutputQueue")

    @IBOutlet var imageView: UIImageView!
    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.
        initSession()
        capSession.startRunning()
    }

    func initSession(){
        capSession = AVCaptureSession()
        capSession.sessionPreset = .photo

        let dualCameraDevice = AVCaptureDevice.default(.builtInDualCamera, for: .video, position: .unspecified)
        let audioDevice = AVCaptureDevice.default(for: .audio)

        let videoInput = try? AVCaptureDeviceInput(device: dualCameraDevice!)
        let audioInput = try? AVCaptureDeviceInput(device: audioDevice!)

        do{
            if capSession.canAddInput(videoInput!) == true{
                capSession.addInput(videoInput!)
            }else{print("Issue input camera")}

            if capSession.canAddInput(audioInput!) == true {
                capSession.addInput(audioInput!)
            } else {print("Issue Adding audio input")}

            // configuring outputs
            // video output
            videoDataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)]

            videoDataOutput.alwaysDiscardsLateVideoFrames = true
            videoDataOutput.setSampleBufferDelegate(self, queue: dataOutputQueue)
            guard capSession.canAddOutput(videoDataOutput) else { fatalError() }
            capSession.addOutput(videoDataOutput)
            let videoConnection = videoDataOutput.connection(with: .video)
            videoConnection!.videoOrientation = .portrait

            // audio
            guard capSession.canAddOutput(audioDataOutput) else {
                print("FAILED");fatalError()}
            audioDataOutput.setSampleBufferDelegate(self , queue: dataOutputQueue)
            capSession.addOutput(audioDataOutput)

            // synchronizer
            dataOutputSynchronizer = AVCaptureDataOutputSynchronizer(dataOutputs: [
                videoDataOutput,
                audioDataOutput])
            dataOutputSynchronizer.setDelegate(self, queue: dataOutputQueue)

        }catch{
            print("Error Config Input")
        }
    }
    @IBAction func startRecord(_ sender: Any) {
    }
    @IBAction func stopRecord(_ sender: Any) {
    }

    func dataOutputSynchronizer(_ synchronizer: AVCaptureDataOutputSynchronizer, didOutput synchronizedDataCollection: AVCaptureSynchronizedDataCollection) {

        //get video data
        guard let videoData = synchronizedDataCollection.synchronizedData(for: videoDataOutput) as? AVCaptureSynchronizedSampleBufferData else{
            return
        }
        guard !videoData.sampleBufferWasDropped else{
            print("Dropped video:\(videoData)")
            return
        }
        let pixBuffer = CMSampleBufferGetImageBuffer(videoData.sampleBuffer)
        DispatchQueue.main.async {
            self.imageView.image = UIImage(ciImage: CIImage(cvImageBuffer:pixBuffer! ))
        }

        //get video data
        guard let audioData = synchronizedDataCollection.synchronizedData(for: audioDataOutput) as? AVCaptureSynchronizedSampleBufferData else{
            print("Error getting Audio Buffer")
            return
        }
        guard !audioData.sampleBufferWasDropped else{
            print("Dropped audio:\(audioData)")
            return
        }
        print(audioData.sampleBuffer)


    }


}


0 个答案:

没有答案