Convert an array of images to mp4

Asked: 2014-08-19 18:31:28

Tags: ios video avfoundation avassetwriter

I am trying to convert an array of images into a single mp4 file. It appears to work, but when the video player starts playing, it only shows a black view. I followed some other posts here on converting images, and this is what I ended up with. When I test it, all the images get converted and the video is created, but on playback it only shows a black view. Any help or clarification would be appreciated.

- (void) createVideoPlayer:(NSArray *)imagesArray
{
    NSError *error = nil;
    NSFileManager *fileMgr = [NSFileManager defaultManager];
    NSString *documentsDirectory = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
    NSString *videoOutputPath = [documentsDirectory stringByAppendingPathComponent:@"test_output.mp4"];
    //NSLog(@"-->videoOutputPath= %@", videoOutputPath);
    // get rid of existing mp4 if exists...
    if ([fileMgr removeItemAtPath:videoOutputPath error:&error] != YES)
         NSLog(@"Unable to delete file: %@", [error localizedDescription]);

    CGSize imageSize = CGSizeMake(213, 320);
    NSUInteger fps = 3;

    //////////////     end setup    ///////////////////////////////////

    NSLog(@"Start building video from defined frames.");

    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
                              [NSURL fileURLWithPath:videoOutputPath] fileType:AVFileTypeQuickTimeMovie
                                                          error:&error];
    NSParameterAssert(videoWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                               AVVideoCodecH264, AVVideoCodecKey,
                               [NSNumber numberWithInt:320], AVVideoWidthKey,
                               [NSNumber numberWithInt:imageSize.height], AVVideoHeightKey,
                               nil];

    AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
                                        assetWriterInputWithMediaType:AVMediaTypeVideo
                                        outputSettings:videoSettings];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                 assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                 sourcePixelBufferAttributes:nil];

    NSParameterAssert(videoWriterInput);
    NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
    videoWriterInput.expectsMediaDataInRealTime = YES;
    [videoWriter addInput:videoWriterInput];

    //Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    CVPixelBufferRef buffer = NULL;

    //convert uiimage to CGImage.
    int frameCount = 0;
    double numberOfSecondsPerFrame = 6;
    double frameDuration = fps * numberOfSecondsPerFrame;

    //for(VideoFrame * frm in imageArray)
    NSLog(@"**************************************************");
    for(UIImage * img in imagesArray)
    {
        //UIImage * img = frm._imageFrame;
        buffer = [self pixelBufferFromCGImage:[img CGImage]];

        BOOL append_ok = NO;
        int j = 0;
        while (!append_ok && j < 30)
        {
            if (adaptor.assetWriterInput.readyForMoreMediaData) 
            {
                //print out status:
                NSLog(@"Processing video frame (%d,%lu)",frameCount,(unsigned long)[imagesArray count]);

                CMTime frameTime = CMTimeMake(frameCount*frameDuration,(int32_t) fps);
                append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
                if(!append_ok)
                {
                    NSError *error = videoWriter.error;
                    if(error!=nil)
                       NSLog(@"Unresolved error %@,%@.", error, [error userInfo]);
                }
            }
            else 
            {
                printf("adaptor not ready %d, %d\n", frameCount, j);
                [NSThread sleepForTimeInterval:0.1];
            }
            j++;
        }
        if (!append_ok)
            printf("error appending image %d times %d\n, with error.", frameCount, j);
        frameCount++;
    }
    NSLog(@"**************************************************");

    //Finish the session:
    [videoWriterInput markAsFinished];
    [videoWriter finishWritingWithCompletionHandler:^{
    }];
    NSLog(@"Write Ended");

    AVMutableComposition* mixComposition = [AVMutableComposition composition];

    // this is the video file that was just written above, full path to file is in --> videoOutputPath
    NSURL    *video_inputFileUrl = [NSURL fileURLWithPath:videoOutputPath];

    // create the final video output file as MOV file - may need to be MP4, but this works so far...
    NSString *outputFilePath = [documentsDirectory stringByAppendingPathComponent:@"final_video.mp4"];
    NSURL    *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];

    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
        [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];

    CMTime nextClipStartTime = kCMTimeZero;

    AVURLAsset* videoAsset = [[AVURLAsset alloc] initWithURL:video_inputFileUrl options:nil];
    CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
    AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];

    AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    _assetExport.outputFileType = @"public.mpeg-4";
    //NSLog(@"support file types= %@", [_assetExport supportedFileTypes]);
    _assetExport.outputURL = outputFileUrl;

    [_assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
        // [self SaveVideo:outputFilePath];
        moviePlayer = [[MPMoviePlayerController alloc] initWithContentURL:[NSURL fileURLWithPath:outputFilePath]];
        [moviePlayer.view setFrame:CGRectMake(0, 100, 320, 320)];
        [moviePlayer prepareToPlay];
        // And other options you can look through the documentation.
        [self.view addSubview:moviePlayer.view];
        [moviePlayer play];
    }];
    NSLog(@"DONE.....outputFilePath--->%@", outputFilePath);
}

- (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image {

    CGSize size = CGSizeMake(300, 300);

    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                         [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                         [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                         nil];
    CVPixelBufferRef pxbuffer = NULL;

    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
                                      size.width,
                                      size.height,
                                      kCVPixelFormatType_32ARGB,
                                      (__bridge CFDictionaryRef) options,
                                      &pxbuffer);
    if (status != kCVReturnSuccess){
        NSLog(@"Failed to create pixel buffer");
    }

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width,
                                             size.height, 8, 4*size.width, rgbColorSpace,
                                             kCGImageAlphaPremultipliedLast);
    CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
                                       CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}

1 Answer:

Answer 0 (score: 0):

All UI updates must happen on the main thread. Double-check that the completion handler is actually executed on the main thread by logging:

NSLog(@"%d", [NSThread isMainThread]);

To execute on the main thread, use GCD:

dispatch_async(dispatch_get_main_queue(), ^{
  // your UI update
});

Also, whenever you capture self in a block, you should create a weak reference via a local variable to avoid a strong reference cycle:

__weak typeof(self) weakSelf = self; // now use weakSelf.doSomething
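
Putting the two suggestions together, the export callback from the question could be rewritten along these lines. This is a minimal sketch, not the definitive fix for the black view; it assumes `moviePlayer` is exposed as a property on the view controller so it can be reached through `weakSelf`:

__weak typeof(self) weakSelf = self;
[_assetExport exportAsynchronouslyWithCompletionHandler:^{
    // The completion handler may run on a background queue, so hop to
    // the main queue before touching any UIKit objects.
    dispatch_async(dispatch_get_main_queue(), ^{
        NSLog(@"%d", [NSThread isMainThread]); // should now log 1
        weakSelf.moviePlayer = [[MPMoviePlayerController alloc] initWithContentURL:[NSURL fileURLWithPath:outputFilePath]];
        [weakSelf.moviePlayer.view setFrame:CGRectMake(0, 100, 320, 320)];
        [weakSelf.moviePlayer prepareToPlay];
        [weakSelf.view addSubview:weakSelf.moviePlayer.view];
        [weakSelf.moviePlayer play];
    });
}];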