Splitting an AVAsset video file into chunks

Date: 2014-04-08 15:52:16

Tags: ios objective-c iphone ios7

I want to split an AVAsset, a video file, into chunks. Basically, I want to apply a filter, a title, and audio to each chunk and then save the final video. Any guidance would be a great help.

1 Answer:

Answer 0 (score: 0):

Try this code. Read the comments to understand the variables I use.

/**
 * Here `object` is an NSDictionary holding the asset URL, the start time of the chunk, the duration of the chunk, and any other related details.
 * The return value is the path of the split (chunk) video.
 */
- (NSString *)chunkyGrab:(id)object
{
    NSLog(@"%s - %d", __PRETTY_FUNCTION__, __LINE__);

    AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
    AVMutableAudioMix *mutableAudioMix = [AVMutableAudioMix audioMix];
    NSMutableArray *audioParameters = [[NSMutableArray alloc] init];

    NSDictionary *options = nil;

    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:[object objectForKey:@"url"] options:options]; // asset URL
    AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

    CMTime chunkStart = CMTimeMakeWithSeconds([[object objectForKey:@"tss"] intValue], 1);           // Start time of the chunk
    CMTime chunkDuration = CMTimeMakeWithSeconds([[object objectForKey:@"duration"] intValue], 1);      // Duration of the chunk

    @try
    {
        NSError *errorOne;

        [videoTrack insertTimeRange:CMTimeRangeMake(chunkStart, chunkDuration)
                            ofTrack:[[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                             atTime:kCMTimeZero
                              error:&errorOne];

        if (errorOne)
        {
            NSLog(@"%s - %d # errorOne.description = %@", __PRETTY_FUNCTION__, __LINE__, errorOne.description);
        }
    }
    @catch (NSException *exception)
    {
        NSLog(@"%s - %d # exception.description = %@", __PRETTY_FUNCTION__, __LINE__, exception.description);
    }

    @try
    {
        if ([[asset tracksWithMediaType:AVMediaTypeAudio] count] > 0)
        {
            [audioTrack insertTimeRange:CMTimeRangeMake(chunkStart, chunkDuration)
                                ofTrack:[[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                                 atTime:kCMTimeZero
                                  error:nil];
        }
        else
        {
            NSLog(@"%s - %d # No audio track", __PRETTY_FUNCTION__, __LINE__);
        }
    }
    @catch (NSException *exception)
    {
        NSLog(@"%s - %d # exception.description = %@", __PRETTY_FUNCTION__, __LINE__, exception.description);
    }

    AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];

    CGSize naturalSizeFirst;
    naturalSizeFirst = mixComposition.naturalSize;
    float renderWidth = 1280;
    float renderHeight = 720;

    mutableAudioMix.inputParameters = audioParameters;

    MainCompositionInst.frameDuration = CMTimeMake(1, 30);
    MainCompositionInst.renderSize = CGSizeMake(renderWidth, renderHeight);

    NSString *path = [self exportChunkAsset:MainCompositionInst :mixComposition :mutableAudioMix];
    return path;
}


- (NSString *)exportChunkAsset:(AVMutableVideoComposition *)MainCompositionInst :(AVMutableComposition *)mixComposition :(AVMutableAudioMix *)mutableAudioMix
{
    NSLog(@"%s - %d", __PRETTY_FUNCTION__, __LINE__);

    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];
    int r = rand() % 74;
    __block NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent: [NSString stringWithFormat:@"overlapVideo-%d.mov",r]];

    if([[NSFileManager defaultManager] fileExistsAtPath:myPathDocs])
    {
        [[NSFileManager defaultManager] removeItemAtPath:myPathDocs error:nil];
    }

    NSURL *url = [NSURL fileURLWithPath:myPathDocs];

    // _exporter is an instance of AVAssetExportSession; exportError and _exportProgressBarTimer are instance variables declared elsewhere
    _exporter = [AVCommon getAssetExportSession: mixComposition];
    _exporter.outputURL = url;
    _exporter.outputFileType = AVFileTypeQuickTimeMovie;
    _exporter.shouldOptimizeForNetworkUse = YES;
    _exporter.audioMix = mutableAudioMix;

    dispatch_semaphore_t semaphore = dispatch_semaphore_create(0);

    [_exporter exportAsynchronouslyWithCompletionHandler:^
     {
         if(!_exporter.error)
         {
             exportError = nil;
             if( _exporter.status == AVAssetExportSessionStatusCancelled)
             {
                 if([[NSFileManager defaultManager] fileExistsAtPath:myPathDocs])
                 {
                     [[NSFileManager defaultManager] removeItemAtPath:myPathDocs error:nil];
                 }
             }
             dispatch_semaphore_signal(semaphore);
         }
         else
         {
             NSError *error = _exporter.error;
             NSLog(@"%s - %d # error.description = %@", __PRETTY_FUNCTION__, __LINE__, error.description);

             if([_exporter.error.domain isEqualToString:@"AVFoundationErrorDomain"])
             {
                 if([[NSFileManager defaultManager] fileExistsAtPath:myPathDocs])
                 {
                     [[NSFileManager defaultManager] removeItemAtPath:myPathDocs error:nil];
                 }
             }
             exportError = _exporter.error;

             dispatch_semaphore_signal(semaphore);
         }
     }];

    dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER);

    if(exportError != nil)
    {
        return @"error";
    }

    [_exportProgressBarTimer invalidate];
    _exportProgressBarTimer = nil;

    return myPathDocs;
}