将音频添加到AVAsset

时间:2013-11-29 23:28:40

标签: ios objective-c video-capture

我一直在尝试使用AVAssetWriter在iOS应用中裁剪和导出视频。在尝试将音频添加到输出之前,一切都在向上游动。当前代码(见下文)返回以下错误

NSInvalidArgumentException',原因:' * - [AVAssetReader startReading]在读取完毕后无法再次调用'

根据我在下面的描述,这应该使用与初始化的读者不同的读者,因此我不理解这个问题。这可能是需要另外一双眼睛的简单问题,所以这是代码。

-(void)cropVideoAsset:(NSURL *)fileURL
{
    NSString *outputPath = [NSString stringWithFormat:@"%@%@", NSTemporaryDirectory(), @"croppedVideo.mov"];
    toUrl = [NSURL fileURLWithPath:outputPath];

    if([[NSFileManager defaultManager] fileExistsAtPath:outputPath])
    {[[NSFileManager defaultManager] removeItemAtPath:outputPath error:nil];}

    AVAsset *asset = [AVAsset assetWithURL:fileURL];
    NSLog(@"asset: %f", CMTimeGetSeconds(asset.duration));

    NSDictionary* compressionSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                         AVVideoProfileLevelH264Main31, AVVideoProfileLevelKey,
                                         [NSNumber numberWithInt:2500000], AVVideoAverageBitRateKey,
                                         [NSNumber numberWithInt: 5], AVVideoMaxKeyFrameIntervalKey,
                                         nil];

    NSDictionary* videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:500], AVVideoWidthKey,
                                   [NSNumber numberWithInt:500], AVVideoHeightKey,
                                   AVVideoScalingModeResizeAspectFill, AVVideoScalingModeKey,
                                   compressionSettings, AVVideoCompressionPropertiesKey,
                                   nil];

    AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
    writerInput.expectsMediaDataInRealTime = true;

    NSError *error;
    AVAssetWriter *assetWriter = [AVAssetWriter assetWriterWithURL:toUrl fileType:AVFileTypeMPEG4 error:&error];
    [assetWriter addInput:writerInput];

    AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:asset error:&error];
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    writerInput.transform = videoTrack.preferredTransform;

    NSDictionary *videoOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
    AVAssetReaderTrackOutput *asset_reader_output = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack outputSettings:videoOptions];
    [assetReader addOutput:asset_reader_output];

    //audio setup
    NSDictionary *audioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                                   [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                                   [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                                   [NSNumber numberWithInt:64000], AVEncoderBitRateKey,
                                   nil];

    AVAssetWriterInput *audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioSettings];
    AVAssetReader *audioReader = [AVAssetReader assetReaderWithAsset:asset error:&error];
    AVAssetTrack *audioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    AVAssetReaderOutput *readerOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];

    [audioReader addOutput:readerOutput];
    audioWriterInput.expectsMediaDataInRealTime = true;
    [assetWriter addInput:audioWriterInput];
    [assetWriter startWriting];
    [assetWriter startSessionAtSourceTime:kCMTimeZero];
    [assetReader startReading];

    dispatch_queue_t _processingQueue = dispatch_queue_create("assetAudioWriterQueue", NULL);
    [writerInput requestMediaDataWhenReadyOnQueue:_processingQueue usingBlock:
     ^{
         while(writerInput.readyForMoreMediaData)
         {
             switch(assetReader.status)
             {
                 case AVAssetReaderStatusReading:
                 {
                     CMSampleBufferRef sampleBuffer = [asset_reader_output copyNextSampleBuffer];

                     if (sampleBuffer)
                     {
                         BOOL result = [writerInput appendSampleBuffer:sampleBuffer];
                         CFRelease(sampleBuffer);

                         if (!result)
                         {
                             [SVProgressHUD dismiss];
                             [self enableView];
                             NSString *errorMessage = [NSString stringWithFormat:@"We're sorry, an error occurred during the video capture.\n\n%@\n\nPlease try again.", assetWriter.error.localizedDescription];
                             [UIAlertView simpleAlertWithTitle:@"Video Error" andMessage:errorMessage];
                             [assetReader cancelReading];
                             break;
                         }
                     }
                 }
                     break;

                 case AVAssetReaderStatusCompleted:
                 {
                     [self handleAudioWithReader:audioReader writer:assetWriter input:audioWriterInput andOutput:readerOutput];
                 }
                     break;

                 case AVAssetReaderStatusFailed:
                     [SVProgressHUD dismiss];
                     [self enableView];
                     [UIAlertView simpleAlertWithTitle:@"Video Error" andMessage:@"We're sorry, an error occurred during the video capture. Please try again."];
                     [assetWriter cancelWriting];

                     break;
             }
         }
     }];
}

和必要的电话

-(void)handleAudioWithReader:(AVAssetReader *)audioReader writer:(AVAssetWriter *)videoWriter input:(AVAssetWriterInput *)audioWriterInput andOutput:(AVAssetReaderOutput *)readerOutput
{
    [audioReader startReading];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];
    dispatch_queue_t mediaInputQueue = dispatch_queue_create("mediaInputQueue", NULL);
    [audioWriterInput requestMediaDataWhenReadyOnQueue:mediaInputQueue usingBlock:^
     {
         NSLog(@"Asset Writer ready :%d", audioWriterInput.readyForMoreMediaData);

         while (audioWriterInput.readyForMoreMediaData)
         {
             CMSampleBufferRef nextBuffer;

             if([audioReader status] == AVAssetReaderStatusReading && (nextBuffer = [readerOutput copyNextSampleBuffer]))
             {
                 if (nextBuffer)
                 {
                     NSLog(@"NextBuffer");
                     [audioWriterInput appendSampleBuffer:nextBuffer];
                 }
             }

             else
             {
                 [audioWriterInput markAsFinished];
                 switch ([audioReader status])
                 {
                     case AVAssetReaderStatusCompleted:
                         [videoWriter finishWriting];
                         [audioWriterInput markAsFinished];
                         dispatch_async(dispatch_get_main_queue(),^{[self saveAssetToLibraryAtURL:toUrl];});
                         break;
                 }
             }
         }
     }];
}

0 个答案:

没有答案