使用AVAssetWriter在iphone 4中跳帧来捕捉电影

时间:2013-03-24 20:18:16

标签: ios buffer avfoundation avassetwriter

我正在尝试使用AVAssetWriter录制视频,在iPhone 5上一切正常,录制和保存视频都非常顺利。

但是当我尝试在iphone 4中捕捉电影时,samplebuffer会跳过一些帧并且电影效果不佳。

所以,这是我的代码:

//  Builds the AVCaptureSession (device inputs, raw-data outputs, preview
//  layer) and starts it running. Sample buffers are delivered to this
//  object on a private serial queue.
- (void) initCaptureSession{
//  open session; AVCaptureSessionPresetHigh is 1280x720 on iPhone 4/4S/5
    session                             = [[AVCaptureSession alloc] init];
//  FIX: the original tested canSetSessionPreset:AVCaptureSessionPreset640x480
//  but then assigned AVCaptureSessionPresetHigh — the check and the
//  assignment must refer to the same preset.
    if([session canSetSessionPreset:AVCaptureSessionPresetHigh]) session.sessionPreset = AVCaptureSessionPresetHigh;

//  get default devices for audio and video capture
    deviceVideo                         = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    deviceAudio                         = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];

    NSError *error                      = nil;

//  create inputs for audio and video
    inputVideo                          = [AVCaptureDeviceInput deviceInputWithDevice:deviceVideo error:&error];
    if (!inputVideo)    NSLog(@"ERROR: trying to open camera: %@", error);

    inputAudio                          = [AVCaptureDeviceInput deviceInputWithDevice:deviceAudio error:&error];
    if (!inputAudio)    NSLog(@"ERROR: trying to open audio: %@", error);

//  create raw-data outputs for audio and video.
//  NOTE(review): alwaysDiscardsLateVideoFrames = NO lets late frames queue up;
//  on slow hardware (iPhone 4) this is a known cause of capture stalls —
//  consider YES if the delegate cannot keep up with the frame rate.
    outputVideo                         = [[AVCaptureVideoDataOutput alloc] init];
    outputVideo.alwaysDiscardsLateVideoFrames = NO;
    outputVideo.videoSettings           = [NSDictionary dictionaryWithObject: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey];

    outputAudio                         = [[AVCaptureAudioDataOutput alloc] init];

//  add inputs and outputs inside one configuration transaction
    [session beginConfiguration];
    if ([session canAddInput:inputVideo])[session addInput:inputVideo];
    if ([session canAddInput:inputAudio])[session addInput:inputAudio];
    if ([session canAddOutput:outputVideo]) [session addOutput:outputVideo];
    if ([session canAddOutput:outputAudio]) [session addOutput:outputAudio];
    [session commitConfiguration];

//  turn off the torch (the device must be locked for configuration changes)
    [deviceVideo lockForConfiguration:&error];
    if([deviceVideo hasTorch] && [deviceVideo isTorchModeSupported:AVCaptureTorchModeOff]) [deviceVideo setTorchMode:AVCaptureTorchModeOff];
    [deviceVideo unlockForConfiguration];

    [self configDevice];

//  create the preview layer that shows the live camera feed
    captureVideoPreviewLayer            = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
    [captureVideoPreviewLayer setBackgroundColor:[[UIColor blackColor] CGColor]];
    [captureVideoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];

//  FIX: the preview layer was previously added to viewPreview.layer twice
//  (once with viewPreview.bounds, then again with the layer bounds); add it
//  once, sized to the masked layer bounds.
    CALayer *viewLayer                  = viewPreview.layer;
    [viewLayer setMasksToBounds:YES];
    [captureVideoPreviewLayer setFrame:[viewLayer bounds]];
    [viewLayer addSublayer:captureVideoPreviewLayer];

//  deliver both outputs to this delegate on a single serial queue, so the
//  audio and video callbacks never run concurrently
    dispatch_queue_t queue = dispatch_queue_create("MyQueue", NULL);
    [outputVideo setSampleBufferDelegate:self queue:queue];
    [outputAudio setSampleBufferDelegate:self queue:queue];
//  (dispatch_release is not needed under ARC on iOS 6+)

    [session startRunning];
}

//  Creates the AVAssetWriter plus its real-time video and audio inputs for
//  live recording. Returns YES on success, NO if the writer could not be
//  created (the original always returned YES despite the BOOL signature).
-(BOOL) setupWriter{
    urlOutput           = [self tempFileURL];
    NSError *error      = nil;

    videoWriter         = [[AVAssetWriter alloc] initWithURL:urlOutput fileType:AVFileTypeMPEG4 error:&error];
    NSParameterAssert(videoWriter);
    // FIX: report writer-creation failure to the caller instead of
    // continuing with a nil writer.
    if (!videoWriter) {
        NSLog(@"ERROR: could not create asset writer: %@", error);
        return NO;
    }

//  add location and device-model metadata to the output file
    NSArray *existingMetadataArray      = videoWriter.metadata;
    NSMutableArray *newMetadataArray    = nil;
    if (existingMetadataArray) {
        newMetadataArray = [existingMetadataArray mutableCopy];
    } else {
        newMetadataArray = [[NSMutableArray alloc] init];
    }

    AVMutableMetadataItem *mutableItemLocation  = [[AVMutableMetadataItem alloc] init];
    mutableItemLocation.keySpace                = AVMetadataKeySpaceCommon;
    mutableItemLocation.key                     = AVMetadataCommonKeyLocation;
    // ISO 6709 "+DD.DDDD+DDD.DDDD/" location string
    mutableItemLocation.value                   = [NSString stringWithFormat:@"%+08.4lf%+09.4lf/", location.latitude, location.longitude];

    AVMutableMetadataItem *mutableItemModel     = [[AVMutableMetadataItem alloc] init];
    mutableItemModel.keySpace                   = AVMetadataKeySpaceCommon;
    mutableItemModel.key                        = AVMetadataCommonKeyModel;
    mutableItemModel.value                      = [[UIDevice currentDevice] model];

    [newMetadataArray addObject:mutableItemLocation];
    [newMetadataArray addObject:mutableItemModel];

    videoWriter.metadata = newMetadataArray;

//  video configuration: 640x360 H.264, ~1 Mbit/s, keyframe every 90 frames
    NSDictionary *videoCleanApertureSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                                [NSNumber numberWithInt:640], AVVideoCleanApertureWidthKey,
                                                [NSNumber numberWithInt:360], AVVideoCleanApertureHeightKey,
                                                [NSNumber numberWithInt:2], AVVideoCleanApertureHorizontalOffsetKey,
                                                [NSNumber numberWithInt:2], AVVideoCleanApertureVerticalOffsetKey,
                                                nil];

    NSDictionary *videoAspectRatioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                              [NSNumber numberWithInt:1], AVVideoPixelAspectRatioHorizontalSpacingKey,
                                              [NSNumber numberWithInt:1],AVVideoPixelAspectRatioVerticalSpacingKey,
                                              nil];

    NSDictionary *codecSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   [NSNumber numberWithInt:1024000], AVVideoAverageBitRateKey,
                                   [NSNumber numberWithInt:90],AVVideoMaxKeyFrameIntervalKey,
                                   videoCleanApertureSettings, AVVideoCleanApertureKey,
                                   videoAspectRatioSettings, AVVideoPixelAspectRatioKey,
                                   AVVideoProfileLevelH264Main30, AVVideoProfileLevelKey,
                                   nil];

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   codecSettings,AVVideoCompressionPropertiesKey,
                                   [NSNumber numberWithInt:640], AVVideoWidthKey,
                                   [NSNumber numberWithInt:360], AVVideoHeightKey,
                                   nil];

    videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];

    NSParameterAssert(videoWriterInput);
    // live capture: the input must consume buffers in real time
    videoWriterInput.expectsMediaDataInRealTime = YES;

//  audio configuration: AAC stereo, 44.1 kHz, 64 kbit/s
//  (works from iPhone 3GS / iPod touch 3G onward)
    AudioChannelLayout acl;
    bzero( &acl, sizeof(acl));
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;

    NSDictionary* audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                               [ NSNumber numberWithInt: kAudioFormatMPEG4AAC ], AVFormatIDKey,
                               [ NSNumber numberWithInt: 2 ], AVNumberOfChannelsKey,
                               [ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
                               [ NSNumber numberWithInt: 64000 ], AVEncoderBitRateKey,
                               [ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
                               nil];

    audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType: AVMediaTypeAudio outputSettings: audioOutputSettings];

    audioWriterInput.expectsMediaDataInRealTime = YES;

//  attach both inputs to the writer
    [videoWriter addInput:videoWriterInput];
    [videoWriter addInput:audioWriterInput];

    return YES;
}

// AVCapture{Video,Audio}DataOutput delegate callback: routes each incoming
// sample buffer to the video or audio writer path while recording.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{
    // Ignore buffers whose media data is not complete yet.
    if (!CMSampleBufferDataIsReady(sampleBuffer)) {
        NSLog( @"sample buffer is not ready. Skipping sample" );
        return;
    }

    if (!isRecording) return;

    lastSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);

    // Lazily start the writer, anchoring its session at the timestamp of the
    // first buffer seen after recording began.
    if (videoWriter.status != AVAssetWriterStatusWriting) {
        [videoWriter startWriting];
        [videoWriter startSessionAtSourceTime:lastSampleTime];
    }

    if (captureOutput == outputVideo) {
        [self newVideoSample:sampleBuffer];
    } else if (captureOutput == outputAudio) {
        [self newAudioSample:sampleBuffer];
    }
}

//  Appends one captured video sample buffer to the writer input, polling
//  until the input is ready (needed on slower devices where the encoder
//  falls behind the capture rate).
-(void) newVideoSample:(CMSampleBufferRef)sampleBuffer{
    if( isRecording ){
        if( videoWriter.status > AVAssetWriterStatusWriting ) {
            // FIX: AVAssetWriterStatus is NSInteger — log with %ld + cast,
            // not %d.
            NSLog(@"Warning: writer status is %ld", (long)videoWriter.status);
            if( videoWriter.status == AVAssetWriterStatusFailed )
                NSLog(@"Error: %@", videoWriter.error);
            return;
        }

        // NOTE(review): this runs on a dispatch-queue thread with no run-loop
        // sources attached, so runUntilDate: returns immediately — this is
        // effectively a 0.1 s polling wait for readyForMoreMediaData.
        while (!videoWriterInput.readyForMoreMediaData) {
            NSDate *maxDate = [NSDate dateWithTimeIntervalSinceNow:0.1];
            [[NSRunLoop currentRunLoop] runUntilDate:maxDate];
        }

        if( ![videoWriterInput appendSampleBuffer:sampleBuffer] )
            NSLog(@"Unable to write to video input");
    }
}

//  Appends one captured audio sample buffer to the writer input, polling
//  until the input is ready. Mirrors -newVideoSample:.
-(void) newAudioSample:(CMSampleBufferRef)sampleBuffer{
    if( isRecording ){
        if( videoWriter.status > AVAssetWriterStatusWriting ) {
            // FIX: AVAssetWriterStatus is NSInteger — log with %ld + cast,
            // not %d.
            NSLog(@"Warning: writer status is %ld", (long)videoWriter.status);
            if( videoWriter.status == AVAssetWriterStatusFailed )
                NSLog(@"Error: %@", videoWriter.error);
            return;
        }

        // 0.1 s polling wait until the input can accept more data (see the
        // note in -newVideoSample:).
        while (!audioWriterInput.readyForMoreMediaData) {
            NSDate *maxDate = [NSDate dateWithTimeIntervalSinceNow:0.1];
            [[NSRunLoop currentRunLoop] runUntilDate:maxDate];
        }

        if( ![audioWriterInput appendSampleBuffer:sampleBuffer] )
            NSLog(@"Unable to write to audio input");
    }
}

// Begins a recording session: sets up the writer and flips the state flags.
// No-op if a recording is already in progress.
-(void) startVideoRecording {
    if (isRecording) return;

    NSLog(@"start video recording...");

    if (![self setupWriter]) {
        NSLog(@"Setup Writer Failed") ;
        return;
    }

    isRecording = YES;
    recorded    = NO;
}

//  Stops recording, finalizes the asset writer asynchronously, and restores
//  the record/recording button state.
-(void) stopVideoRecording {
    if( isRecording ) {
        isRecording                 = NO;
        btRecord.hidden             = NO;
        btRecording.hidden          = YES;
        [timerToRecord invalidate];
        timerToRecord               = nil;

        [videoWriter finishWritingWithCompletionHandler:^{
            // FIX: "status != Failed && status == Completed" — the first test
            // is implied by the second; keep only the meaningful check.
            if (videoWriter.status == AVAssetWriterStatusCompleted) {
                videoWriterInput    = nil;
                audioWriterInput    = nil;
                videoWriter         = nil;

                NSLog(@"finishWriting returned succeful");

                recorded    = YES;
            } else {
                NSLog(@"finishWriting returned unsucceful") ;
            }
        }];

        NSLog(@"video recording stopped");

        // TODO(review): finishWriting is asynchronous — this fixed 0.5 s delay
        // can fire before the file is finalized; consider calling openPlayer
        // from the completion handler (on the main queue) instead.
        [self performSelector:@selector(openPlayer) withObject:nil afterDelay:0.5];
    }
}

当我删除下面这几行代码时:

        while (!audioWriterInput.readyForMoreMediaData) {
             NSDate *maxDate = [NSDate dateWithTimeIntervalSinceNow:0.1];
            [[NSRunLoop currentRunLoop] runUntilDate:maxDate];
        }

我收到了这个错误:

*** 由于未捕获的异常 'NSInternalInconsistencyException' 而终止应用程序,原因:'*** -[AVAssetWriterInput appendSampleBuffer:] 当 readyForMoreMediaData 为 NO 时,无法附加样本缓冲区。'

在iphone 5中,我没有使用这种循环。

我在这里阅读了一些例子,但我不明白如何在iphone 4中使电影更流畅。

如果有人有在 iPhone 3GS、iPhone 4、iPhone 4S 和 iPhone 5 上使用 AVAssetWriter 录制视频的提示或完整示例,我将非常感谢。

谢谢。

2 个答案:

答案 0 :(得分:1)

与AVFoundation战斗一周后,我得到了一个很好的解决方案。

在观看了wwdc2012 - 520会议后,我做了一个很好的解决方案。

首先我使用AVCaptureMovieFileOutput和会话预设AVCaptureSessionPreset640x480录制电影

因此,在记录用户选择是否要保存和共享之后,只需保存或删除电影。

如果用户想要保存/保存和分享,我会分别录制和压缩电影。

首先,我压缩音频后压缩音频,然后压缩音轨。

请参阅我的代码:

// Entry point of the offline export: records the request parameters, loads
// the asset's tracks asynchronously and, once they are available, starts
// with the video pass.
-(void)exportMediaWithURL:(NSURL *)url location:(CLLocationCoordinate2D)location mirror:(BOOL)mirror{
    urlMedia       = url;
    locationMedia  = location;

    videoRecorded  = NO;
    audioRecorded  = NO;

    asset          = [AVAsset assetWithURL:urlMedia];

    progressVideo  = 0.0;
    progressAudio  = 0.0;
    progressMarge  = 0.0;
    progressFactor = 3.0;

    mirrored       = mirror;

    // Cap every export at one minute.
    limitTime      = CMTimeMake(1000*60, 1000);

    [asset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"tracks"] completionHandler:^() {
        NSError *loadError;
        AVKeyValueStatus trackStatus = [asset statusOfValueForKey:@"tracks" error:&loadError];
        if (trackStatus != AVKeyValueStatusLoaded) return;

        NSArray *videoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
        NSArray *audioTracks = [asset tracksWithMediaType:AVMediaTypeAudio];
        if ([videoTracks count] > 0) video_track = [videoTracks objectAtIndex:0];
        if ([audioTracks count] > 0) audio_track = [audioTracks objectAtIndex:0];

        // No audio track: only one export pass contributes to progress.
        if (!audio_track) progressFactor = 1.0;
        if (!video_track) return;

        // Clamp the export duration to the one-minute limit.
        if (CMTimeCompare(asset.duration, limitTime) > 0) {
            totalTime = limitTime;
        } else {
            totalTime = asset.duration;
        }
        [self exportVideo];
    }];
}

//  First export pass: decodes the source video track sample-by-sample and
//  re-encodes it through assetVideoWriter, then chains -exportAudio (or
//  posts EXPORT_STATUS_DONE when there is no audio track).
-(void)exportVideo{
    NSError *error;
    AVAssetReader *assetReader          = [AVAssetReader assetReaderWithAsset:asset error:&error];

    NSDictionary* videoSettings         = [NSDictionary dictionaryWithObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA] forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];

    AVAssetReaderOutput *videoOutput    = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:video_track outputSettings:videoSettings];

    [assetReader addOutput:videoOutput];

    assetReader.timeRange               = CMTimeRangeMake(kCMTimeZero, totalTime);

//  lazily create the writer and open its session at t = 0
    if (assetVideoWriter.status == AVAssetWriterStatusUnknown) {
        if ([self setupWriterVideo]) {
            if ([assetVideoWriter startWriting]) {
                [assetVideoWriter startSessionAtSourceTime:kCMTimeZero];
            }
        }
    }

    if([assetReader startReading]){
        BOOL videoDone = NO;

        // FIX: the buffer must start out (and be reset to) NULL — the
        // original left it uninitialized, so a non-Reading reader status
        // would hand a garbage or already-released pointer to
        // newVideoSample:/CFRelease.
        CMSampleBufferRef bufferVideo = NULL;

        while (!videoDone) {
            bufferVideo = NULL;
            if ([assetReader status]== AVAssetReaderStatusReading ) bufferVideo = [videoOutput copyNextSampleBuffer];

            if(bufferVideo){
                [self newVideoSample:bufferVideo];
                CFRelease(bufferVideo);
            }else{
                videoDone = YES;
            }
        }

//      finish the video file
        [videoWriterInput markAsFinished];
        [assetVideoWriter finishWritingWithCompletionHandler:^{}];

        // Busy-wait until the writer reports completion, then chain the audio
        // pass. NOTE(review): polling burns CPU — doing this work inside the
        // completion handler would be cleaner.
        while (!videoRecorded) {
            if (assetVideoWriter.status == AVAssetWriterStatusCompleted) {
                videoWriterInput    = nil;
                assetVideoWriter    = nil;

                videoRecorded       = YES;

                if (audio_track) {
                    [self exportAudio];
                }else{
                    NSMutableDictionary *infoToSend = [NSMutableDictionary new];
                    [infoToSend setValue:urlOutputVideo forKey:@"url_media"];
                    [[NSNotificationCenter defaultCenter] postNotificationName:EXPORT_STATUS_DONE object:self userInfo:infoToSend];
                }
            }
        }
    }
}

//  Second export pass: decodes the source audio track to PCM and re-encodes
//  it through assetAudioWriter, then chains -margeFile.
-(void)exportAudio{
    NSError *error;
    AVAssetReader *assetReader          = [AVAssetReader assetReaderWithAsset:asset error:&error];

    NSDictionary* audioSettings         = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:kAudioFormatLinearPCM], AVFormatIDKey, nil];

    AVAssetReaderOutput *audioOutput    = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audio_track outputSettings:audioSettings];

    [assetReader addOutput:audioOutput];

    assetReader.timeRange               = CMTimeRangeMake(kCMTimeZero, totalTime);

//  lazily create the writer and open its session at t = 0
    if (assetAudioWriter.status == AVAssetWriterStatusUnknown) {
        if ([self setupWriterAudio]) {
            if ([assetAudioWriter startWriting]) {
                [assetAudioWriter startSessionAtSourceTime:kCMTimeZero];
            }
        }
    }

    if([assetReader startReading]){
        BOOL audioDone = NO;

        // FIX: initialize/reset the buffer to NULL — the original left it
        // uninitialized, risking a garbage or stale pointer when the reader
        // status is not Reading (see the same fix in -exportVideo).
        CMSampleBufferRef bufferAudio = NULL;

        while (!audioDone) {
            bufferAudio = NULL;
            if ([assetReader status]== AVAssetReaderStatusReading ) bufferAudio = [audioOutput copyNextSampleBuffer];

            if(bufferAudio){
                [self newAudioSample:bufferAudio];
                CFRelease(bufferAudio);
            }else{
                audioDone = YES;
            }
        }

//      finish the audio file
        [audioWriterInput markAsFinished];
        [assetAudioWriter finishWritingWithCompletionHandler:^{}];

        // Busy-wait for completion, then merge the two files.
        // NOTE(review): polling burns CPU — prefer the completion handler.
        while (!audioRecorded) {
            if (assetAudioWriter.status == AVAssetWriterStatusCompleted) {
                audioWriterInput    = nil;
                assetAudioWriter    = nil;

                audioRecorded       = YES;

                [self margeFile];
            }
        }
    }
}

//  Final pass: merges the separately exported video and audio files into one
//  composition and exports it with a passthrough preset.
//  (Name kept as "margeFile" — sic — because external callers use it.)
-(void)margeFile{
    AVURLAsset *assetVideo                              = [AVURLAsset assetWithURL:urlOutputVideo];
    AVAssetTrack *video_track_marge                     = [[assetVideo tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

    AVURLAsset *assetAudio                              = [AVURLAsset assetWithURL:urlOutputAudio];
    AVAssetTrack *audio_track_marge                     = [[assetAudio tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];

    // Both tracks are inserted starting at t = 1 s.
    CMTime startTime                                    = CMTimeMake(1, 1);
    CMTimeRange timeRangeVideo                          = CMTimeRangeMake(kCMTimeZero, assetVideo.duration);
    CMTimeRange timeRangeAudio                          = CMTimeRangeMake(kCMTimeZero, assetAudio.duration);

    AVMutableComposition * composition                  = [AVMutableComposition composition];
    AVMutableCompositionTrack *compositionVideoTrack    = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    // A pi rotation flips the frame for front-camera ("mirrored") footage.
    if(mirrored) compositionVideoTrack.preferredTransform = CGAffineTransformMakeRotation(M_PI);
    AVMutableCompositionTrack *compositionAudioTrack    = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

    NSError *error                                      = nil;

    // FIX: the insert results were silently ignored — check and log failures.
    if (![compositionVideoTrack insertTimeRange:timeRangeVideo ofTrack:video_track_marge atTime:startTime error:&error])
        NSLog(@"Error inserting video track: %@", error);
    if (![compositionAudioTrack insertTimeRange:timeRangeAudio ofTrack:audio_track_marge atTime:startTime error:&error])
        NSLog(@"Error inserting audio track: %@", error);

    AVAssetExportSession *exportSession                 = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetPassthrough];
    exportSession.outputFileType                        = AVFileTypeAppleM4V;
    exportSession.outputURL                             = [self tempFileURL:media_mixed];
    exportSession.shouldOptimizeForNetworkUse           = YES;
    exportSession.metadata                              = newMetadataArray;

    // Skip the first second (matches the 1 s insert offset above).
    exportSession.timeRange                             = CMTimeRangeMake(CMTimeMakeWithSeconds(1.0, 600), totalTime);

    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        NSMutableDictionary *infoToSend = [NSMutableDictionary new];

        switch (exportSession.status) {
            case AVAssetExportSessionStatusCompleted:
                [infoToSend setValue:exportSession.outputURL forKey:@"url_media"];
                [[NSNotificationCenter defaultCenter] postNotificationName:EXPORT_STATUS_DONE object:self userInfo:infoToSend];
                break;

            case AVAssetExportSessionStatusExporting:
                [[NSNotificationCenter defaultCenter] postNotificationName:EXPORT_STATUS_EXPORTING object:self];
                break;

            case AVAssetExportSessionStatusFailed:
                NSLog(@"failed");
                break;
        }

    }];

    // NOTE(review): this polling loop may exit immediately (the session can
    // still be in Waiting state here) — progress reporting via KVO on
    // exportSession.progress would be more reliable.
    while (exportSession.status == AVAssetExportSessionStatusExporting) {
        progressMarge = exportSession.progress;

        [self postProgress];
    }
}

//  Creates the AVAssetWriter + input for the offline video pass and stores
//  location/model metadata on it. Returns YES on success, NO when the writer
//  could not be created (the original always returned YES).
-(BOOL) setupWriterVideo{
    urlOutputVideo                              = [self tempFileURL:media_video];
    NSError *error                              = nil;

    assetVideoWriter                            = [[AVAssetWriter alloc] initWithURL:urlOutputVideo fileType:AVFileTypeMPEG4 error:&error];
    NSParameterAssert(assetVideoWriter);
    // FIX: report creation failure to the caller instead of continuing.
    if (!assetVideoWriter) {
        NSLog(@"ERROR: could not create video writer: %@", error);
        return NO;
    }

    //  add location and device-model metadata (kept in the newMetadataArray
    //  ivar so -margeFile can reuse it for the final export)
    NSArray *existingMetadataArray              = assetVideoWriter.metadata;
    if (existingMetadataArray) {
        newMetadataArray = [existingMetadataArray mutableCopy];
    } else {
        newMetadataArray = [[NSMutableArray alloc] init];
    }

    AVMutableMetadataItem *mutableItemLocation  = [[AVMutableMetadataItem alloc] init];
    mutableItemLocation.keySpace                = AVMetadataKeySpaceCommon;
    mutableItemLocation.key                     = AVMetadataCommonKeyLocation;
    // ISO 6709 "+DD.DDDD+DDD.DDDD/" location string
    mutableItemLocation.value                   = [NSString stringWithFormat:@"%+08.4lf%+09.4lf/", locationMedia.latitude, locationMedia.longitude];

    AVMutableMetadataItem *mutableItemModel     = [[AVMutableMetadataItem alloc] init];
    mutableItemModel.keySpace                   = AVMetadataKeySpaceCommon;
    mutableItemModel.key                        = AVMetadataCommonKeyModel;
    mutableItemModel.value                      = [[UIDevice currentDevice] model];

    [newMetadataArray addObject:mutableItemLocation];
    [newMetadataArray addObject:mutableItemModel];

    assetVideoWriter.metadata                    = newMetadataArray;
    assetVideoWriter.shouldOptimizeForNetworkUse = YES;

    videoWriterInput                            = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:[self videoConfiguration]];
    NSParameterAssert(videoWriterInput);
    // offline export: the reader drives the pace, not real time
    videoWriterInput.expectsMediaDataInRealTime = NO;

//  add input
    [assetVideoWriter addInput:videoWriterInput];

    return YES;
}

//  Creates the AVAssetWriter + input for the offline audio pass.
//  Returns YES on success, NO when the writer could not be created.
-(BOOL) setupWriterAudio{
    urlOutputAudio                              = [self tempFileURL:media_audio];
    NSError *error                              = nil;

    assetAudioWriter                            = [[AVAssetWriter alloc] initWithURL:urlOutputAudio fileType:AVFileTypeAppleM4A error:&error];
    NSParameterAssert(assetAudioWriter);
    // FIX: report creation failure instead of always returning YES.
    if (!assetAudioWriter) {
        NSLog(@"ERROR: could not create audio writer: %@", error);
        return NO;
    }

    audioWriterInput                            = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:[self audioConfiguration]];
    // offline export: the reader drives the pace, not real time
    audioWriterInput.expectsMediaDataInRealTime = NO;

    // add input
    [assetAudioWriter addInput:audioWriterInput];

    return YES;
}

// Output settings for the exported 640x360 H.264 track: ~1 Mbit/s average
// bitrate, keyframe every 90 frames, Main 3.0 profile, square pixels.
// (A dynamic bitrate — e.g. ~11.4 bits/pixel for SD and above, matching
// AVCaptureSessionPresetHigh quality — could be computed here instead of
// the fixed constant.)
- (NSDictionary *)videoConfiguration{
    NSDictionary *cleanAperture = [NSDictionary dictionaryWithObjectsAndKeys:
                                   [NSNumber numberWithInt:640], AVVideoCleanApertureWidthKey,
                                   [NSNumber numberWithInt:360], AVVideoCleanApertureHeightKey,
                                   [NSNumber numberWithInt:2], AVVideoCleanApertureHorizontalOffsetKey,
                                   [NSNumber numberWithInt:2], AVVideoCleanApertureVerticalOffsetKey,
                                   nil];

    NSDictionary *pixelAspect = [NSDictionary dictionaryWithObjectsAndKeys:
                                 [NSNumber numberWithInt:1], AVVideoPixelAspectRatioHorizontalSpacingKey,
                                 [NSNumber numberWithInt:1], AVVideoPixelAspectRatioVerticalSpacingKey,
                                 nil];

    NSDictionary *compression = [NSDictionary dictionaryWithObjectsAndKeys:
                                 [NSNumber numberWithInt:1024000], AVVideoAverageBitRateKey,
                                 [NSNumber numberWithInt:90], AVVideoMaxKeyFrameIntervalKey,
                                 cleanAperture, AVVideoCleanApertureKey,
                                 pixelAspect, AVVideoPixelAspectRatioKey,
                                 AVVideoProfileLevelH264Main30, AVVideoProfileLevelKey,
                                 nil];

    return [NSDictionary dictionaryWithObjectsAndKeys:
            AVVideoCodecH264, AVVideoCodecKey,
            AVVideoScalingModeResizeAspectFill, AVVideoScalingModeKey,
            compression, AVVideoCompressionPropertiesKey,
            [NSNumber numberWithInt:640], AVVideoWidthKey,
            [NSNumber numberWithInt:360], AVVideoHeightKey,
            nil];
}

// Output settings for the exported audio track: AAC, stereo, 44.1 kHz,
// 128 kbit/s. Works from iPhone 3GS / iPod touch 3G onward. (An Apple
// Lossless variant was considered but needs far more space.)
-(NSDictionary *)audioConfiguration{
    AudioChannelLayout stereoLayout;
    bzero(&stereoLayout, sizeof(stereoLayout));
    stereoLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;

    return [NSDictionary dictionaryWithObjectsAndKeys:
            [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
            [NSNumber numberWithInt:2], AVNumberOfChannelsKey,
            [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
            [NSNumber numberWithInt:128000], AVEncoderBitRateKey,
            [NSData dataWithBytes:&stereoLayout length:sizeof(stereoLayout)], AVChannelLayoutKey,
            nil];
}

//  Writes one decoded video sample to the writer input and publishes
//  export progress.
-(void) newVideoSample:(CMSampleBufferRef)sampleBuffer{
    if( assetVideoWriter.status > AVAssetWriterStatusWriting ) {
        if( assetVideoWriter.status == AVAssetWriterStatusFailed )
            NSLog(@"Error: %@", assetVideoWriter.error);
        return;
    }

    if (assetVideoWriter.status == AVAssetWriterStatusWriting ) {
        // Busy-wait until the input can take more data (offline export, so
        // readyForMoreMediaData flips back quickly).
        while (!videoWriterInput.readyForMoreMediaData) NSLog(@"waitting video");

        if (videoWriterInput.readyForMoreMediaData) {
            CMTime presTime     = CMSampleBufferGetPresentationTimeStamp( sampleBuffer );
            // FIX: the original divided the CMTime fields with integer
            // division, so progress only advanced in whole-second steps.
            // CMTimeGetSeconds does the conversion correctly.
            float valueLoading  = (float)CMTimeGetSeconds(presTime);
            float valueTotal    = (float)CMTimeGetSeconds(totalTime);

            progressVideo       =  valueLoading / valueTotal;

            [self postProgress];

            if (![videoWriterInput appendSampleBuffer:sampleBuffer]) NSLog(@"Unable to write to video input");
        }
    }
}

//  Writes one decoded audio sample to the writer input and publishes
//  export progress. Mirrors the exporter's -newVideoSample:.
-(void) newAudioSample:(CMSampleBufferRef)sampleBuffer{
    if( assetAudioWriter.status > AVAssetWriterStatusWriting ) {
        if( assetAudioWriter.status == AVAssetWriterStatusFailed )
            NSLog(@"Error: %@", assetAudioWriter.error);
        return;
    }

    if (assetAudioWriter.status == AVAssetWriterStatusWriting ) {
        // Busy-wait until the input can take more data.
        while (!audioWriterInput.readyForMoreMediaData) NSLog(@"waitting audio");

        if (audioWriterInput.readyForMoreMediaData) {
            CMTime presTime     = CMSampleBufferGetPresentationTimeStamp( sampleBuffer );
            // FIX: replace truncating integer division of CMTime fields with
            // CMTimeGetSeconds (see -newVideoSample:).
            float valueLoading  = (float)CMTimeGetSeconds(presTime);
            float valueTotal    = (float)CMTimeGetSeconds(totalTime);

            progressAudio       =  valueLoading / valueTotal;

            [self postProgress];

            if (![audioWriterInput appendSampleBuffer:sampleBuffer]) {
                NSLog(@"Unable to write to audio input");
            }
        }
    }
}

// Publishes the combined progress of the three export passes (video, audio,
// merge) as a single 0..1 value via notification.
- (void)postProgress{
    float overall = (progressVideo + progressAudio + progressMarge) / progressFactor;

    NSMutableDictionary *userInfo = [NSMutableDictionary new];
    [userInfo setValue:[NSNumber numberWithFloat:overall] forKey:@"progress"];

    [[NSNotificationCenter defaultCenter] postNotificationName:EXPORT_STATUS_EXPORTING object:self userInfo:userInfo];
}


//  Builds a file URL in NSTemporaryDirectory() for the given media kind,
//  deleting any stale file at that path first. Returns nil for an unknown
//  media kind.
- (NSURL *)tempFileURL:(int)typeMedia {
    NSString *outputPath;

    switch (typeMedia) {
        case media_video:
            outputPath          = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"output_export.mp4"];
            break;

        case media_audio:
            outputPath          = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"output_export.m4a"];
            break;

        case media_mixed:
            outputPath          = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"mixed.mp4"];
            break;

        default:
            // FIX: an unknown type previously fell through with a nil path
            // and crashed in -initFileURLWithPath:.
            NSLog(@"Unknown media type: %d", typeMedia);
            return nil;
    }

    NSURL *outputURL            = [[NSURL alloc] initFileURLWithPath:outputPath];
    NSFileManager *fileManager  = [NSFileManager defaultManager];
    if ([fileManager fileExistsAtPath:outputPath]) [[NSFileManager defaultManager] removeItemAtPath:outputPath error:nil];
    return outputURL;
}

// Exporter teardown: stop observing notifications and drop all strong
// references. (Under ARC the nil assignments are mostly redundant in
// dealloc, but are harmless and make the released state explicit.)
- (void) dealloc {
    NSLog(@"dealloc video exporter");
    // Balance every addObserver: registered during the export lifecycle.
    [[NSNotificationCenter defaultCenter] removeObserver:self];

    assetVideoWriter        = nil;
    assetAudioWriter        = nil;

    videoWriterInput        = nil;
    audioWriterInput        = nil;

    urlMedia                = nil;
    urlOutputVideo          = nil;
    urlOutputAudio          = nil;
    urlOutputFinal          = nil;
}

@end

如果有人要添加内容,请在此处发帖!

答案 1 :(得分:0)

将 AVAssetWriterInput 的 outputSettings 中 AVVideoCompressionPropertiesKey 字典里的 AVVideoAllowFrameReorderingKey 设为 @(NO)。(原回答附有设置截图)