iOS: AVAssetWriterInput does not store audio in the video recording

Date: 2016-03-31 09:11:00

Tags: ios avcapturesession avcapturedevice

I am developing an application that needs to record video. The video frames are stored after the faces in each captured image have been processed.

I can record the video successfully, but the audio is not being written to it.

My code is as follows.

- (void)setupAVCapture
{
    frameRate = 30;
    NSError *error = nil;

    AVCaptureSession *session = [AVCaptureSession new];
    if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPhone)
        [session setSessionPreset:AVCaptureSessionPreset640x480];
    else
        [session setSessionPreset:AVCaptureSessionPresetPhoto];

    // Select a video device, make an input
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
//    require( error == nil, bail );

    isUsingFrontFacingCamera = NO;
    if ( [session canAddInput:deviceInput] )
        [session addInput:deviceInput];

    // Make a still image output
    stillImageOutput = [AVCaptureStillImageOutput new];
    [stillImageOutput addObserver:self forKeyPath:@"capturingStillImage" options:NSKeyValueObservingOptionNew context:(__bridge void * _Nullable)(AVCaptureStillImageIsCapturingStillImageContext)];
    if ( [session canAddOutput:stillImageOutput] )
        [session addOutput:stillImageOutput];

    // Make a video data output
    videoDataOutput = [AVCaptureVideoDataOutput new];

    // we want BGRA, both CoreGraphics and OpenGL work well with 'BGRA'
    NSDictionary *rgbOutputSettings = [NSDictionary dictionaryWithObject:
                                       [NSNumber numberWithInt:kCMPixelFormat_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
    [videoDataOutput setVideoSettings:rgbOutputSettings];
    [videoDataOutput setAlwaysDiscardsLateVideoFrames:YES]; // discard if the data output queue is blocked (as we process the still image)

    // create a serial dispatch queue used for the sample buffer delegate as well as when a still image is captured
    // a serial dispatch queue must be used to guarantee that video frames will be delivered in order
    // see the header doc for setSampleBufferDelegate:queue: for more information
    videoDataOutputQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL);
    [videoDataOutput setSampleBufferDelegate:self queue:videoDataOutputQueue];

    if ( [session canAddOutput:videoDataOutput] )
        [session addOutput:videoDataOutput];
    [[videoDataOutput connectionWithMediaType:AVMediaTypeVideo] setEnabled:NO];

    effectiveScale = 1.0;
    previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
    [previewLayer setBackgroundColor:[[UIColor blackColor] CGColor]];
    [previewLayer setVideoGravity:AVLayerVideoGravityResizeAspect];
    CALayer *rootLayer = [previewView layer];
    [rootLayer setMasksToBounds:YES];
    [previewLayer setFrame:[rootLayer bounds]];
    [rootLayer addSublayer:previewLayer];

    /* GC Code : */
    // Setup the audio input
    AVCaptureDevice *audioDevice     = [AVCaptureDevice defaultDeviceWithMediaType: AVMediaTypeAudio];
    AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&error ];
    // Setup the audio output
    _audioOutput = [[AVCaptureAudioDataOutput alloc] init];
    if ([session canAddInput:audioInput])
        [session addInput:audioInput];
    [session addOutput:_audioOutput];
    [_audioOutput setSampleBufferDelegate:self queue:videoDataOutputQueue];

    /*** END ***/



    if (error) {
        UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:[NSString stringWithFormat:@"Failed with error %d", (int)[error code]]
                                                            message:[error localizedDescription]
                                                           delegate:nil
                                                  cancelButtonTitle:@"Dismiss"
                                                  otherButtonTitles:nil];
        [alertView show];
//        [alertView release];
        [self teardownAVCapture];
    }

    [self setUpVideoCapture];
    _capSession = session;
    [session startRunning];
}

The audio capture is set up in the following function.

#pragma mark - Video Capture
-(void)setUpVideoCapture{

    NSError *error = nil;
    pathVideoTempFile = [NSTemporaryDirectory() stringByAppendingPathComponent:@"test.mov"];
    BOOL isexit = [[NSFileManager defaultManager] fileExistsAtPath:pathVideoTempFile];
    if (isexit)
        [[NSFileManager defaultManager] removeItemAtPath:pathVideoTempFile error:nil];

    videoWriter = [[AVAssetWriter alloc] initWithURL:
                                  [NSURL fileURLWithPath:pathVideoTempFile] fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:640], AVVideoWidthKey,
                                   [NSNumber numberWithInt:480], AVVideoHeightKey,
                                   nil];
    writerInput = [AVAssetWriterInput
                                        assetWriterInputWithMediaType:AVMediaTypeVideo
                                        outputSettings:videoSettings]; //retain should be removed if ARC
    adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                     sourcePixelBufferAttributes:nil];

    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);
    [videoWriter addInput:writerInput];
    isVideoCaptureStart = false;

    // Add the audio input
    AudioChannelLayout acl;
    bzero( &acl, sizeof(acl));
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;


    NSDictionary* audioOutputSettings = nil;
    // Both types of audio settings cause the output video file to be corrupted.
    if( NO ) {
        // should work from iphone 3GS on and from ipod 3rd generation
        audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                               [ NSNumber numberWithInt: kAudioFormatMPEG4AAC ], AVFormatIDKey,
                               [ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,
                               [ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
                               [ NSNumber numberWithInt: 64000 ], AVEncoderBitRateKey,
                               [ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
                               nil];
    } else {
        // should work on any device requires more space
        audioOutputSettings = [ NSDictionary dictionaryWithObjectsAndKeys:
                               [ NSNumber numberWithInt: kAudioFormatAppleLossless ], AVFormatIDKey,
                               [ NSNumber numberWithInt: 16 ], AVEncoderBitDepthHintKey,
                               [ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
                               [ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,
                               [ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
                               nil ];
    }

    _audioWriterInput = [AVAssetWriterInput
                          assetWriterInputWithMediaType: AVMediaTypeAudio
                          outputSettings: audioOutputSettings ] ;

    _audioWriterInput.expectsMediaDataInRealTime = YES;

    //[_audioWriterInput addTrackAssociationWithTrackOfInput:writerInput type:AVTrackAssociationTypeTimecode];

    if ([videoWriter canAddInput:_audioWriterInput])
        [videoWriter addInput:_audioWriterInput];
}

I capture the image frames and audio using the following code.

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{

    if (captureOutput == _audioOutput) {
        [self newAudioSample:sampleBuffer];
    } else {
        if ([writerInput isReadyForMoreMediaData]) {
            UIImage *_myImage = [UIImage imageWithCIImage:mainImage];
            UIImage *rotatedImage = [self rotate:_myImage orientation:_myImage.imageOrientation];
            rotatedImage = [rotatedImage imageRotatedByDegrees:90.0];
            // _myImage = [_myImage imageRotatedByDegrees:90.0];
            CVPixelBufferRef pixelBuffer = [self newPixelBufferFromCGImage:rotatedImage.CGImage frameSize:rotatedImage.size];

            frameCount += 1;
            CMTime frameTime = CMTimeMake(frameCount, frameRate);

            if (![adaptor appendPixelBuffer:pixelBuffer withPresentationTime:frameTime]) {
                NSError *error = [videoWriter error];
                NSLog(@"failed to append sbuf: %@", error);
            }
            CFRelease(pixelBuffer);
        }
    }
}

- (void)newAudioSample:(CMSampleBufferRef)sampleBuffer
{
    if( isVideoCaptureStart )
    {
        if( videoWriter.status > AVAssetWriterStatusWriting )
        {
            NSLog(@"Warning: writer status is %ld", (long)videoWriter.status);
            if( videoWriter.status == AVAssetWriterStatusFailed )
                NSLog(@"Error: %@", videoWriter.error);
            return;
        }


        if( ![_audioWriterInput appendSampleBuffer:sampleBuffer] )
            NSLog(@"Unable to write to audio input");

    }
}

Recording gives me no error, but the audio is not stored in the video. Please help me!

1 Answer:

Answer 0 (score: 1)

Finally, I found out why the audio was not being stored in the video.

I changed the following method:

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer);

    if( captureOutput == _audioOutput) {
        if (_audioWriterInput == nil  && videoWriter.status == AVAssetWriterStatusUnknown){
           [self setupAssetWriterAudioInput:formatDescription];
        }else{
            [self newAudioSample:sampleBuffer];
        }
    }else{
        if (writerInput == nil &&  videoWriter.status == AVAssetWriterStatusUnknown){
            [self setupAssetWriterVideoInput:formatDescription];
        }else{
            if (isVideoCaptureStart == true && videoWriter.status == AVAssetWriterStatusUnknown) {

                if ([videoWriter startWriting]) {
                    timeVideoWriteStart = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
                   [videoWriter startSessionAtSourceTime:timeVideoWriteStart];
                }
                else {
                    NSLog(@"AVAssetWriter startWriting error:%@", videoWriter.error);
                }
            }

        }
    }
}

I start the writer session at the presentation time of the current sample buffer:

[videoWriter startSessionAtSourceTime:timeVideoWriteStart];

This solved my problem.
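The answer calls a setupAssetWriterAudioInput: helper that is not shown in the post. For readers following along, here is a minimal sketch of what such a helper could look like, loosely modeled on Apple's RosyWriter sample; the AAC settings and the 64 kbps bit rate are illustrative assumptions, not the original author's values, and it assumes the same videoWriter and _audioWriterInput instance variables used above. It reads the sample rate and channel count from the incoming CMFormatDescriptionRef so the writer input matches the buffers the microphone actually delivers.

// Sketch of a setupAssetWriterAudioInput: helper (assumption: modeled on
// Apple's RosyWriter sample; AAC settings and bit rate are illustrative).
- (BOOL)setupAssetWriterAudioInput:(CMFormatDescriptionRef)currentFormatDescription
{
    // Read the microphone's actual sample rate and channel count so the
    // encoder settings match the incoming buffers.
    const AudioStreamBasicDescription *asbd =
        CMAudioFormatDescriptionGetStreamBasicDescription(currentFormatDescription);
    if (asbd == NULL) {
        NSLog(@"audio format description has no stream basic description");
        return NO;
    }

    NSDictionary *audioSettings = @{ AVFormatIDKey         : @(kAudioFormatMPEG4AAC),
                                     AVSampleRateKey       : @(asbd->mSampleRate),
                                     AVNumberOfChannelsKey : @(asbd->mChannelsPerFrame),
                                     AVEncoderBitRateKey   : @64000 };

    _audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                                            outputSettings:audioSettings];
    _audioWriterInput.expectsMediaDataInRealTime = YES;

    if ([videoWriter canAddInput:_audioWriterInput]) {
        [videoWriter addInput:_audioWriterInput];
        return YES;
    }
    return NO;
}

With an input created this way, the audio buffers arriving in newAudioSample: can be appended once startWriting and startSessionAtSourceTime: have been called, so their presentation timestamps fall inside the writer's session.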