Using AVAssetWriter

Date: 2016-02-25 23:59:21

Tags: ios avfoundation avassetwriter

The code I wrote successfully loads an AVAsset from the Photos library using AVAssetReader and then saves it out using AVAssetWriter. Aside from a few necessary corrections (for example, finishWriting is deprecated, so I replaced it with finishWritingWithCompletionHandler and its completion block), it is identical to the code provided in the AVFoundation Programming Guide. Nothing added or modified.
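For reference, here is a minimal sketch of that correction as it appears in the code below: the synchronous call is swapped for the asynchronous completion-handler form.

// Deprecated (since iOS 6): blocks the calling thread until writing finishes.
// BOOL ok = [_writer finishWriting];

// Replacement: returns immediately and invokes the block once writing completes.
[_writer finishWritingWithCompletionHandler:^{
    // Check [_writer status] and [_writer error] here.
}];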

Although it runs without errors, I can't figure out how to write back to the Photos library. The video comes from the Assets Library (Photos); however, the only place I can successfully write the output to is my app's Documents directory. I'm not necessarily opposed to writing it there, but then how do I access the output video file? And why can't I write back using the same URL I used to read the asset? (Or rather, how would I go about doing that?)

//
//  ExportVideo.m
//  ----
//
//  Created by James Alan Bush on 2/25/16.
//  Copyright © 2016 Apple. All rights reserved.
//

#import "ExportVideo.h"
#import "GLKitView.h"

@implementation ExportVideo

@synthesize url = _url;
@synthesize renderer = _renderer;

- (id)initWithURL:(NSURL *)url usingRenderer:(GLKitView *)renderer {
    NSLog(@"ExportVideo");
    if (!(self = [super init])) {
        return nil;
    }

    self.url = url;
    self.renderer = renderer;

    NSString *serializationQueueDescription = [NSString stringWithFormat:@"%@ serialization queue", self];
    _mainSerializationQueue = dispatch_queue_create([serializationQueueDescription UTF8String], NULL);

    NSString *rwAudioSerializationQueueDescription = [NSString stringWithFormat:@"%@ rw audio serialization queue", self];
    _rwAudioSerializationQueue = dispatch_queue_create([rwAudioSerializationQueueDescription UTF8String], NULL);

    NSString *rwVideoSerializationQueueDescription = [NSString stringWithFormat:@"%@ rw video serialization queue", self];
    _rwVideoSerializationQueue = dispatch_queue_create([rwVideoSerializationQueueDescription UTF8String], NULL);

    return self;
}

- (void)startProcessing {
    NSDictionary *inputOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES] forKey:AVURLAssetPreferPreciseDurationAndTimingKey];
    _asset = [[AVURLAsset alloc] initWithURL:self.url options:inputOptions];
    NSLog(@"URL: %@", self.url);
    _cancelled = NO;
    [_asset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"tracks"] completionHandler: ^{
        dispatch_async(_mainSerializationQueue, ^{
            if (_cancelled)
                return;
            BOOL success = YES;
            NSError *localError = nil;
            success = ([_asset statusOfValueForKey:@"tracks" error:&localError] == AVKeyValueStatusLoaded);
            if (success)
            {
                NSFileManager *fm = [NSFileManager defaultManager];
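                // Note: in Apple's sample this path was the destination file; here it is the
                // source URL, so for an assets-library URL this check is effectively a no-op.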
                NSString *localOutputPath = [self.url path];
                if ([fm fileExistsAtPath:localOutputPath])
                    success = [fm removeItemAtPath:localOutputPath error:&localError];
            }
            if (success)
                success = [self setupAssetReaderAndAssetWriter:&localError];
            if (success)
                success = [self startAssetReaderAndWriter:&localError];
            if (!success)
                [self readingAndWritingDidFinishSuccessfully:success withError:localError];
        });
    }];
}


- (BOOL)setupAssetReaderAndAssetWriter:(NSError **)outError
{
    // Create and initialize the asset reader.
    _reader = [[AVAssetReader alloc] initWithAsset:_asset error:outError];
    BOOL success = (_reader != nil);
    if (success)
    {
        // If the asset reader was successfully initialized, do the same for the asset writer.
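        // AVAssetWriter requires a writable file URL for its output; it cannot write
        // directly to an assets-library:// URL, hence the app's Documents directory here.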
        NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
        NSString *outputURL = paths[0];
        NSFileManager *manager = [NSFileManager defaultManager];
        [manager createDirectoryAtPath:outputURL withIntermediateDirectories:YES attributes:nil error:nil];
        outputURL = [outputURL stringByAppendingPathComponent:@"output.mov"];
        [manager removeItemAtPath:outputURL error:nil];
        NSURL *outURL = [NSURL fileURLWithPath:outputURL];
        _writer = [[AVAssetWriter alloc] initWithURL:outURL fileType:AVFileTypeQuickTimeMovie error:outError];
        success = (_writer != nil);
    }

    if (success)
    {
        // If the reader and writer were successfully initialized, grab the audio and video asset tracks that will be used.
        AVAssetTrack *assetAudioTrack = nil, *assetVideoTrack = nil;
        NSArray *audioTracks = [_asset tracksWithMediaType:AVMediaTypeAudio];
        if ([audioTracks count] > 0)
            assetAudioTrack = [audioTracks objectAtIndex:0];
        NSArray *videoTracks = [_asset tracksWithMediaType:AVMediaTypeVideo];
        if ([videoTracks count] > 0)
            assetVideoTrack = [videoTracks objectAtIndex:0];

        if (assetAudioTrack)
        {
            // If there is an audio track to read, set the decompression settings to Linear PCM and create the asset reader output.
            NSDictionary *decompressionAudioSettings = @{ AVFormatIDKey : [NSNumber numberWithUnsignedInt:kAudioFormatLinearPCM] };
            _readerAudioOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:assetAudioTrack outputSettings:decompressionAudioSettings];
            [_reader addOutput:_readerAudioOutput];
            // Then, set the compression settings to 128kbps AAC and create the asset writer input.
            AudioChannelLayout stereoChannelLayout = {
                .mChannelLayoutTag = kAudioChannelLayoutTag_Stereo,
                .mChannelBitmap = 0,
                .mNumberChannelDescriptions = 0
            };
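            // offsetof(...) sizes the NSData to the struct's fixed-size header, excluding the
            // variable-length mChannelDescriptions array (unused for a standard stereo layout).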
            NSData *channelLayoutAsData = [NSData dataWithBytes:&stereoChannelLayout length:offsetof(AudioChannelLayout, mChannelDescriptions)];
            NSDictionary *compressionAudioSettings = @{
                                                       AVFormatIDKey         : [NSNumber numberWithUnsignedInt:kAudioFormatMPEG4AAC],
                                                       AVEncoderBitRateKey   : [NSNumber numberWithInteger:128000],
                                                       AVSampleRateKey       : [NSNumber numberWithInteger:44100],
                                                       AVChannelLayoutKey    : channelLayoutAsData,
                                                       AVNumberOfChannelsKey : [NSNumber numberWithUnsignedInteger:2]
                                                       };
            _writerAudioInput = [AVAssetWriterInput assetWriterInputWithMediaType:[assetAudioTrack mediaType] outputSettings:compressionAudioSettings];
            [_writer addInput:_writerAudioInput];
        }

        if (assetVideoTrack)
        {
            // If there is a video track to read, set the decompression settings for YUV and create the asset reader output.
            NSDictionary *decompressionVideoSettings = @{
                                                         (id)kCVPixelBufferPixelFormatTypeKey     : [NSNumber numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange],
                                                         (id)kCVPixelBufferIOSurfacePropertiesKey : [NSDictionary dictionary]
                                                         };
            _readerVideoOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:assetVideoTrack outputSettings:decompressionVideoSettings];
            [_reader addOutput:_readerVideoOutput];
            CMFormatDescriptionRef formatDescription = NULL;
            // Grab the video format descriptions from the video track and grab the first one if it exists.
            NSArray *formatDescriptions = [assetVideoTrack formatDescriptions];
            if ([formatDescriptions count] > 0)
                formatDescription = (__bridge CMFormatDescriptionRef)[formatDescriptions objectAtIndex:0];
            CGSize trackDimensions = {
                .width = 0.0,
                .height = 0.0,
            };
            // If the video track had a format description, grab the track dimensions from there. Otherwise, grab them directly from the track itself.
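            // CMVideoFormatDescriptionGetPresentationDimensions with false for both
            // usePixelAspectRatio and useCleanAperture returns the encoded pixel dimensions.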
            if (formatDescription)
                trackDimensions = CMVideoFormatDescriptionGetPresentationDimensions(formatDescription, false, false);
            else
                trackDimensions = [assetVideoTrack naturalSize];
            NSDictionary *compressionSettings = nil;
            // If the video track had a format description, attempt to grab the clean aperture settings and pixel aspect ratio used by the video.
            if (formatDescription)
            {
                NSDictionary *cleanAperture = nil;
                NSDictionary *pixelAspectRatio = nil;
                CFDictionaryRef cleanApertureFromCMFormatDescription = CMFormatDescriptionGetExtension(formatDescription, kCMFormatDescriptionExtension_CleanAperture);
                if (cleanApertureFromCMFormatDescription)
                {
                    cleanAperture = @{
                                      AVVideoCleanApertureWidthKey            : (id)CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureWidth),
                                      AVVideoCleanApertureHeightKey           : (id)CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureHeight),
                                      AVVideoCleanApertureHorizontalOffsetKey : (id)CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureHorizontalOffset),
                                      AVVideoCleanApertureVerticalOffsetKey   : (id)CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureVerticalOffset)
                                      };
                }
                CFDictionaryRef pixelAspectRatioFromCMFormatDescription = CMFormatDescriptionGetExtension(formatDescription, kCMFormatDescriptionExtension_PixelAspectRatio);
                if (pixelAspectRatioFromCMFormatDescription)
                {
                    pixelAspectRatio = @{
                                         AVVideoPixelAspectRatioHorizontalSpacingKey : (id)CFDictionaryGetValue(pixelAspectRatioFromCMFormatDescription, kCMFormatDescriptionKey_PixelAspectRatioHorizontalSpacing),
                                         AVVideoPixelAspectRatioVerticalSpacingKey   : (id)CFDictionaryGetValue(pixelAspectRatioFromCMFormatDescription, kCMFormatDescriptionKey_PixelAspectRatioVerticalSpacing)
                                         };
                }
                // Add whichever settings we could grab from the format description to the compression settings dictionary.
                if (cleanAperture || pixelAspectRatio)
                {
                    NSMutableDictionary *mutableCompressionSettings = [NSMutableDictionary dictionary];
                    if (cleanAperture)
                        [mutableCompressionSettings setObject:cleanAperture forKey:AVVideoCleanApertureKey];
                    if (pixelAspectRatio)
                        [mutableCompressionSettings setObject:pixelAspectRatio forKey:AVVideoPixelAspectRatioKey];
                    compressionSettings = mutableCompressionSettings;
                }
            }
            // Create the video settings dictionary for H.264. (A true mutable dictionary is
            // required here; casting an immutable literal to NSMutableDictionary would crash
            // on the setObject:forKey: call below.)
            NSMutableDictionary *videoSettings = [NSMutableDictionary dictionaryWithDictionary:@{
                AVVideoCodecKey  : AVVideoCodecH264,
                AVVideoWidthKey  : [NSNumber numberWithDouble:trackDimensions.width],
                AVVideoHeightKey : [NSNumber numberWithDouble:trackDimensions.height]
            }];
            // Put the compression settings into the video settings dictionary if we were able to grab them.
            if (compressionSettings)
                [videoSettings setObject:compressionSettings forKey:AVVideoCompressionPropertiesKey];
            // Create the asset writer input and add it to the asset writer.
            _writerVideoInput = [AVAssetWriterInput assetWriterInputWithMediaType:[assetVideoTrack mediaType] outputSettings:videoSettings];
            [_writer addInput:_writerVideoInput];
        }
    }
    return success;
}

- (BOOL)startAssetReaderAndWriter:(NSError **)outError
{
    BOOL success = YES;
    // Attempt to start the asset reader.
    success = [_reader startReading];
    if (!success) {
        if (outError)
            *outError = [_reader error];
        NSLog(@"Reader error: %@", [_reader error]);
    }
    if (success)
    {
        // If the reader started successfully, attempt to start the asset writer.
        success = [_writer startWriting];
        if (!success) {
            if (outError)
                *outError = [_writer error];
            NSLog(@"Writer error: %@", [_writer error]);
        }
    }

    if (success)
    {
        // If the asset reader and writer both started successfully, create the dispatch group where the reencoding will take place and start a sample-writing session.
        _dispatchGroup = dispatch_group_create();
        [_writer startSessionAtSourceTime:kCMTimeZero];
        _audioFinished = NO;
        _videoFinished = NO;

        if (_writerAudioInput)
        {
            // If there is audio to reencode, enter the dispatch group before beginning the work.
            dispatch_group_enter(_dispatchGroup);
            // Specify the block to execute when the asset writer is ready for audio media data, and specify the queue to call it on.
            [_writerAudioInput requestMediaDataWhenReadyOnQueue:_rwAudioSerializationQueue usingBlock:^{
                // Because the block is called asynchronously, check to see whether its task is complete.
                if (_audioFinished)
                    return;
                BOOL completedOrFailed = NO;
                // If the task isn't complete yet, make sure that the input is actually ready for more media data.
                while ([_writerAudioInput isReadyForMoreMediaData] && !completedOrFailed)
                {
                    // Get the next audio sample buffer, and append it to the output file.
                    CMSampleBufferRef sampleBuffer = [_readerAudioOutput copyNextSampleBuffer];
                    if (sampleBuffer != NULL)
                    {
                        BOOL success = [_writerAudioInput appendSampleBuffer:sampleBuffer];
                        CFRelease(sampleBuffer);
                        sampleBuffer = NULL;
                        completedOrFailed = !success;
                    }
                    else
                    {
                        completedOrFailed = YES;
                    }
                }
                if (completedOrFailed)
                {
                    // Mark the input as finished, but only if we haven't already done so, and then leave the dispatch group (since the audio work has finished).
                    BOOL oldFinished = _audioFinished;
                    _audioFinished = YES;
                    if (oldFinished == NO)
                    {
                        [_writerAudioInput markAsFinished];
                    }
                    dispatch_group_leave(_dispatchGroup);
                }
            }];
        }

        if (_writerVideoInput)
        {
            // If we had video to reencode, enter the dispatch group before beginning the work.
            dispatch_group_enter(_dispatchGroup);
            // Specify the block to execute when the asset writer is ready for video media data, and specify the queue to call it on.
            [_writerVideoInput requestMediaDataWhenReadyOnQueue:_rwVideoSerializationQueue usingBlock:^{
                // Because the block is called asynchronously, check to see whether its task is complete.
                if (_videoFinished)
                    return;
                BOOL completedOrFailed = NO;
                // If the task isn't complete yet, make sure that the input is actually ready for more media data.
                while ([_writerVideoInput isReadyForMoreMediaData] && !completedOrFailed)
                {
                    // Get the next video sample buffer, and append it to the output file.
                    CMSampleBufferRef sampleBuffer = [_readerVideoOutput copyNextSampleBuffer];
                    if (sampleBuffer != NULL)
                    {
                        BOOL success = [_writerVideoInput appendSampleBuffer:sampleBuffer];
                        CFRelease(sampleBuffer);
                        sampleBuffer = NULL;
                        completedOrFailed = !success;
                    }
                    else
                    {
                        completedOrFailed = YES;
                    }
                }
                if (completedOrFailed)
                {
                    // Mark the input as finished, but only if we haven't already done so, and then leave the dispatch group (since the video work has finished).
                    BOOL oldFinished = _videoFinished;
                    _videoFinished = YES;
                    if (oldFinished == NO)
                    {
                        [_writerVideoInput markAsFinished];
                    }
                    dispatch_group_leave(_dispatchGroup);
                }
            }];
        }
        // Set up the notification that the dispatch group will send when the audio and video work have both finished. (This is one part I corrected)
        dispatch_group_notify(_dispatchGroup, _mainSerializationQueue, ^{
            BOOL finalSuccess = YES;
            NSError *finalError = nil;
            // Check to see if the work has finished due to cancellation.
            if (_cancelled)
            {
                // If so, cancel the reader and writer.
                [_reader cancelReading];
                [_writer cancelWriting];
            }
            else
            {
                // If cancellation didn't occur, first make sure that the asset reader didn't fail.
                if ([_reader status] == AVAssetReaderStatusFailed)
                {
                    finalSuccess = NO;
                    finalError = [_reader error];
                    NSLog(@"_reader finalError: %@", finalError);
                }
                // Whether or not the reader failed, finish the asset writer, then call the
                // completion method; the reader's error (if any) takes precedence.
                [_writer finishWritingWithCompletionHandler:^{
                    BOOL writerSucceeded = ([_writer status] == AVAssetWriterStatusCompleted);
                    [self readingAndWritingDidFinishSuccessfully:(finalSuccess && writerSucceeded)
                                                       withError:(finalError != nil ? finalError : [_writer error])];
                }];
            }

        });
    }
    // Return success here to indicate whether the asset reader and writer were started successfully.
    return success;
}

- (void)readingAndWritingDidFinishSuccessfully:(BOOL)success withError:(NSError *)error
{
    if (!success)
    {
        // If the reencoding process failed, we need to cancel the asset reader and writer.
        [_reader cancelReading];
        [_writer cancelWriting];
        dispatch_async(dispatch_get_main_queue(), ^{
            // Handle any UI tasks here related to failure.
        });
    }
    else
    {
        // Reencoding was successful, reset booleans.
        _cancelled = NO;
        _videoFinished = NO;
        _audioFinished = NO;
        dispatch_async(dispatch_get_main_queue(), ^{
            // Handle any UI tasks here related to success.
        });
    }
    NSLog(@"readingAndWritingDidFinishSuccessfully success = %@ : Error = %@", (success == 0) ? @"NO" : @"YES", error);
}

FYI: If you take a crack at this, please don't just suggest using different objects or recommend a completely different approach altogether. I need this to work as advertised, somehow.

Thanks!

1 Answer:

Answer 0 (score: 0)

Answering my own question:

The solution was to add this line inside the finishWritingWithCompletionHandler completion block, to move the video from my app's Documents directory to the Photos library:

UISaveVideoAtPathToSavedPhotosAlbum(_outputURL, nil, nil, nil);
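
For completeness, here is a sketch of how that call could be wired up with a compatibility check and a completion callback. This assumes _outputURL holds the Documents-directory path string used to create the AVAssetWriter; note that UISaveVideoAtPathToSavedPhotosAlbum takes an NSString path, not an NSURL.

// In finishWritingWithCompletionHandler's block, after a successful write.
// _outputURL is assumed to be the NSString path passed to the asset writer.
if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(_outputURL)) {
    UISaveVideoAtPathToSavedPhotosAlbum(_outputURL,
                                        self,
                                        @selector(video:didFinishSavingWithError:contextInfo:),
                                        NULL);
}

// The completion selector must have exactly this signature:
- (void)video:(NSString *)videoPath didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo
{
    NSLog(@"Saved %@ to the Photos library, error: %@", videoPath, error);
}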