AVAssetWriter goes to AVAssetWriterStatusFailed after appendSampleBuffer:

Date: 2014-06-13 07:31:56

Tags: objective-c macos avfoundation avcapturesession avassetwriter

I am trying to do a screen recording with an AVAssetWriter that also accepts audio input. However, I keep getting stuck on the following error: after appendSampleBuffer: (inside encodeAudioFrame:), the AVAssetWriter sometimes transitions to AVAssetWriterStatusFailed:
Failed: Error Domain=AVFoundationErrorDomain Code=-11800 "The operation could not be completed" UserInfo=0x32b570 {NSLocalizedDescription=The operation could not be completed, NSUnderlyingError=0x70d710 "The operation couldn’t be completed. (OSStatus error -12737.)", NSLocalizedFailureReason=An unknown error occurred (-12737)}

A few observations:

  • Once it enters this state, subsequent recording attempts also return AVAssetWriterStatusFailed, even if I use a different recorder object.
  • The error does not appear when I comment out the audio recording block.
  • But the error still appears when I comment out the video recording block and do not modify any of the incoming CMSampleBufferRefs.

Any help would be greatly appreciated.
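
For diagnosis I dump the writer's error whenever an append fails; a minimal sketch of the check I use (assuming the videoWriter shown below):

if (videoWriter.status == AVAssetWriterStatusFailed)
{
    NSError* error = videoWriter.error;
    NSLog(@"Writer failed: %@ (underlying: %@)", error,
          [error.userInfo objectForKey:NSUnderlyingErrorKey]);
}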

Below is the code I am using, with several parts omitted for brevity. I am currently building against the OS X 10.9 SDK, with ARC turned off.

- (BOOL) startRecording
{
    if (!isRecording)
    {
        dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
            [self startCapture];

            [self setUpWriter];

            startedAt = [[NSDate date] retain]; //retained: non-ARC, and used after the enclosing pool drains
            isRecording = YES;

            while (isRecording)
            {
                NSAutoreleasePool* pool = [NSAutoreleasePool new];

                NSTimeInterval offset = [[NSDate date] timeIntervalSinceDate:startedAt];

                CMTime frameTime = CMTimeMakeWithSeconds(offset - pauseDelta, 1000);

                [self encodeFrameAtTime:frameTime];

                [pool drain];

                usleep(50000); //sleep() takes whole seconds; 0.05f truncates to 0 and spins
            }

            [self endCapture];

            [self completeRecordingSession];
        });
    }

    return YES;
}

- (void) stopRecording {
    isRecording = NO;
}

-(void) startCapture
{
    AVCaptureDevice* microphone = ...; //Device selection code omitted

    videoCaptureSession = [[AVCaptureSession alloc] init];

    videoCaptureSession.sessionPreset = AVCaptureSessionPresetHigh;

    //------------------------------------------

    NSError* err = nil;

    audioInput = [AVCaptureDeviceInput deviceInputWithDevice:microphone error:&err];

    [videoCaptureSession addInput:audioInput];

    //------------------------------------------

    audioOutput = [[AVCaptureAudioDataOutput alloc] init];

    queue = dispatch_queue_create("videoQueue", NULL);

    [audioOutput setSampleBufferDelegate:self queue:queue];

    [videoCaptureSession addOutput:audioOutput];

    audioDelta = -1;
    [videoCaptureSession startRunning];
}


-(void) endCapture
{
    [videoCaptureSession stopRunning];

    [videoCaptureSession removeInput:audioInput];
    [videoCaptureSession removeOutput:audioOutput];

    [audioOutput release];
    audioOutput = nil;

    audioInput = nil;

    [videoCaptureSession release];
    videoCaptureSession = nil;

    dispatch_release(queue);
}

-(BOOL) setUpWriter
{
    //delete the file.
    {
        NSFileManager* fileManager = [NSFileManager defaultManager];
        if ([fileManager fileExistsAtPath:self.moviePath]) {
            NSError* error;
            if ([fileManager removeItemAtPath:self.moviePath error:&error] == NO) {
                NSLog(@"Could not delete old recording file at path:  %@", self.moviePath);
            }
        }
    }

    mCaptureRect = NSRectToCGRect([screen frame]);

    int FWidth = mCaptureRect.size.width;
    int FHeight = mCaptureRect.size.height;

    int bitRate = FWidth * FHeight * 8;

    NSError* writerError = nil;
    videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:self.moviePath] fileType:AVFileTypeMPEG4 error:&writerError];
    NSParameterAssert(videoWriter);
    if (writerError) NSLog(@"Writer init error: %@", writerError);

    //Configure video
    NSDictionary *codecSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   [NSNumber numberWithInt:bitRate], AVVideoAverageBitRateKey,
                                   nil];

    NSDictionary* videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   codecSettings,AVVideoCompressionPropertiesKey,
                                   [NSNumber numberWithInt:FWidth], AVVideoWidthKey,
                                   [NSNumber numberWithInt:FHeight], AVVideoHeightKey,
                                   nil];

    videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];

    NSParameterAssert(videoWriterInput);
    videoWriterInput.expectsMediaDataInRealTime = YES;
    NSDictionary* bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
                                      [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey,
                                      [NSNumber numberWithInt:FWidth], kCVPixelBufferWidthKey,
                                      [NSNumber numberWithInt:FHeight], kCVPixelBufferHeightKey,
                                      nil];

    avAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput sourcePixelBufferAttributes:bufferAttributes];

    //*
    //Configure Audio
    AudioChannelLayout acl;
    bzero(&acl, sizeof(acl));
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;

    NSDictionary* audioSettings = [ NSDictionary dictionaryWithObjectsAndKeys:
                                   [ NSNumber numberWithInt: kAudioFormatMPEG4AAC], AVFormatIDKey,
                                   [ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
                                   [ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,
                                   [ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
                                   [NSNumber numberWithInt:64000], AVEncoderBitRateKey,
                                   nil ];

    audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioSettings];
    audioWriterInput.expectsMediaDataInRealTime = YES;

    //add input
    [videoWriter addInput:videoWriterInput];
    [videoWriter addInput:audioWriterInput];

    return YES;
}

- (void) cleanupWriter {
    [videoWriter release];
    videoWriter = nil;
    avAdaptor = nil;
    videoWriterInput = nil;
    [startedAt release];
    startedAt = nil;
    audioWriterInput = nil;
}

- (void) encodeFrameAtTime:(CMTime)timestamp
{
    if(!isRecording) return;

    if(videoWriter == nil) return;

    if(videoWriter.status == AVAssetWriterStatusFailed)
    {
        return;
    }

    if(videoWriter.status != AVAssetWriterStatusWriting)
    {
        if(videoWriter.status != AVAssetWriterStatusUnknown)
            return;

        [videoWriter startWriting];
        [videoWriter startSessionAtSourceTime:timestamp];

        startTime = CMTimeGetSeconds(timestamp);
    }

    timestamp = CMTimeMakeWithSeconds(startTime + CMTimeGetSeconds(timestamp), 1000);

    [self writeVideoFrameAtTime:timestamp];
}

-(void) writeVideoFrameAtTime:(CMTime)time {
    if (![videoWriterInput isReadyForMoreMediaData])
    {
    }
    else
    {
        /*
        CVPixelBufferRef manipulation omitted...
        */

        {
            BOOL success = [avAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:time];

            if(videoWriter.status == AVAssetWriterStatusFailed) NSLog(@"Failed: %@", videoWriter.error);
            if (!success) NSLog(@"Warning: Unable to write buffer to video");
        }

        CVPixelBufferRelease(pixelBuffer);

        CGImageRelease(cgImage);
    }
}

-(void) encodeAudioFrame:(CMSampleBufferRef)buffer
{
    if(!isRecording) return;

    CMTime timestamp = CMSampleBufferGetPresentationTimeStamp(buffer);

    if(videoWriter.status != AVAssetWriterStatusWriting)
    {
        //Wait for video thread to start the writer
        return;
    }

    if(![audioWriterInput isReadyForMoreMediaData])
        return;

    //*
    NSTimeInterval offset = [[NSDate date] timeIntervalSinceDate:startedAt];

    if(audioDelta == -1)
    {
        audioDelta =  offset - CMTimeGetSeconds(timestamp);
    }

    //Adjusts CMSampleBufferRef's timestamp to match the video stream's zero-based timestamp
    CMItemCount count;
    CMTime newTimestamp = CMTimeMakeWithSeconds(CMTimeGetSeconds(timestamp) + audioDelta - pauseDelta, 1000);

    CMSampleBufferGetSampleTimingInfoArray(buffer, 0, nil, &count);
    CMSampleTimingInfo* pInfo = malloc(sizeof(CMSampleTimingInfo) * count);
    CMSampleBufferGetSampleTimingInfoArray(buffer, count, pInfo, &count);

    for(CMItemCount i = 0; i < count; i++)
    {
        pInfo[i].decodeTimeStamp = newTimestamp;
        pInfo[i].presentationTimeStamp = newTimestamp;
    }

    CMSampleBufferRef newBuffer = NULL;
    OSStatus status = CMSampleBufferCreateCopyWithNewTiming(kCFAllocatorDefault, buffer, count, pInfo, &newBuffer);
    free(pInfo);

    if (status != noErr || newBuffer == NULL)
    {
        NSLog(@"CMSampleBufferCreateCopyWithNewTiming failed: %d", (int)status);
        return;
    }

    BOOL res = [audioWriterInput appendSampleBuffer:newBuffer];
    if (!res) NSLog(@"Warning: unable to append audio buffer");

    CFRelease(newBuffer); //the retimed copy is owned here (Create rule), so release it
}
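
As an aside, a simpler variant I have been considering (just a sketch, not what the code above does) would skip the retiming copy entirely: start the writer session at the first captured buffer's native timestamp, then append buffers untouched:

//Sketch: the session is started from the capture clock, so sample
//buffers can be appended with their original timing information.
if (videoWriter.status == AVAssetWriterStatusUnknown)
{
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(buffer)];
}

if ([audioWriterInput isReadyForMoreMediaData])
{
    if (![audioWriterInput appendSampleBuffer:buffer])
        NSLog(@"Audio append failed: %@", videoWriter.error);
}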

- (void) completeRecordingSession {
    @autoreleasepool {
        //Wait for the writer to leave AVAssetWriterStatusUnknown before finishing
        while (videoWriter.status == AVAssetWriterStatusUnknown)
        {
            NSLog(@"Waiting...");
            [NSThread sleepForTimeInterval:0.5f];
        }

        @synchronized(self)
        {
            [videoWriter finishWriting];
            [self cleanupWriter];
        }
    }
}
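
(On the 10.9 SDK, -finishWriting is marked deprecated; I may switch to the asynchronous variant, roughly like this:)

[videoWriter finishWritingWithCompletionHandler:^{
    if (videoWriter.status == AVAssetWriterStatusFailed)
        NSLog(@"Finishing failed: %@", videoWriter.error);
    [self cleanupWriter];
}];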

-(void) captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    if(!CMSampleBufferDataIsReady(sampleBuffer))
        return;

    @autoreleasepool {
        if(captureOutput == audioOutput)
        {
            if(isRecording && !isPaused)
            {
                [self encodeAudioFrame:sampleBuffer];
            }
        }
    }
}

1 Answer:

Answer 0 (score: 0):

I ran into exactly the same problem with my Swift code. It turned out my machine was simply running out of memory, so double-check that you have enough free RAM.
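
If it helps, here is a quick sanity check of reclaimable memory from inside the recording process (a sketch using the Mach host-statistics API; tweak as needed):

#include <mach/mach.h>

static void LogFreeMemory(void)
{
    vm_size_t pageSize = 0;
    host_page_size(mach_host_self(), &pageSize);

    vm_statistics64_data_t stats;
    mach_msg_type_number_t count = HOST_VM_INFO64_COUNT;

    if (host_statistics64(mach_host_self(), HOST_VM_INFO64,
                          (host_info64_t)&stats, &count) == KERN_SUCCESS)
    {
        //free + inactive pages approximate what the OS can reclaim
        uint64_t freeBytes = (uint64_t)(stats.free_count + stats.inactive_count) * pageSize;
        NSLog(@"Approx. free memory: %.1f MB", freeBytes / (1024.0 * 1024.0));
    }
}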