Video from an array of images in Xcode

Date: 2014-08-14 12:45:51

Tags: ios7 xcode5

I took screenshots of images and stored their Documents-directory paths in an array. I want to create a video by passing in this array. Here is my code for creating the video:

-(void)writeImagesAsMovie:(NSArray *)array toPath:(NSString *)path
{

    NSError *error1 = nil;
    NSFileManager *fileMgr = [NSFileManager defaultManager];

    if ([fileMgr removeItemAtURL:[NSURL fileURLWithPath:path] error:&error1]!=YES)
    {
        NSLog(@"Unable to delete file: %@", [error1 localizedDescription]);
    }

    UIImage *first = [UIImage imageWithContentsOfFile:[array objectAtIndex:0]];


    CGSize frameSize = first.size;

    NSError *error = nil;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
                                  [NSURL fileURLWithPath:path] fileType:AVFileTypeMPEG4
                                                              error:&error];

    if(error)
    {
        NSLog(@"error creating AssetWriter: %@",[error description]);
    }
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:frameSize.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:frameSize.height], AVVideoHeightKey,
                                   nil];



    AVAssetWriterInput* writerInput = [AVAssetWriterInput
                                       assetWriterInputWithMediaType:AVMediaTypeVideo
                                       outputSettings:videoSettings];


    NSMutableDictionary *attributes = [[NSMutableDictionary alloc] init];
    [attributes setObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32RGBA] forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
    [attributes setObject:[NSNumber numberWithUnsignedInt:frameSize.width] forKey:(NSString*)kCVPixelBufferWidthKey];
    [attributes setObject:[NSNumber numberWithUnsignedInt:frameSize.height] forKey:(NSString*)kCVPixelBufferHeightKey];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                     sourcePixelBufferAttributes:attributes];

    [videoWriter addInput:writerInput];

    // fixes all errors
    writerInput.expectsMediaDataInRealTime = YES;

    //Start a session:
    [videoWriter startWriting];

    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    CVPixelBufferRef buffer = NULL;

    int fps = 25;


    int cnt = 0;
    for (NSString *filename in array)
    {
        if (adaptor.assetWriterInput.readyForMoreMediaData)
        {

            cnt++;
            CMTime frameTime = CMTimeMake(1, fps);
            CMTime lastTime = CMTimeMake(cnt, fps);
            CMTime presentTime = CMTimeAdd(lastTime, frameTime);
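            // presentTime = (cnt + 1) / 25 s, so each frame is shown 1/25 s
            // after the previous one.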

            UIImage *imgFrame=[UIImage imageWithContentsOfFile:filename];

            buffer = [self pixelBufferFromCGImage:[imgFrame CGImage]];
            BOOL result = [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];

            if (result == NO) {
                NSLog(@"failed to append buffer");
                NSLog(@"The error is %@", [videoWriter error]);
            }

            if(buffer) {
                CVBufferRelease(buffer);
            }
        }

        else {
            NSLog(@"error");
            cnt--;
        }
    }

    // finish the session
    [writerInput markAsFinished];
    [videoWriter finishWritingWithCompletionHandler:^{

    }];

    CVPixelBufferPoolRelease(adaptor.pixelBufferPool);

    UIAlertView *saveAlert = [[UIAlertView alloc] initWithTitle:@"Complete" message:@"Finished making movie" delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil, nil];
    [saveAlert show];
}

- (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) cgiImage {



    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;

    CVPixelBufferCreate(kCFAllocatorDefault, CGImageGetWidth(cgiImage),
                        CGImageGetHeight(cgiImage), kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef) options,
                        &pxbuffer);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, CGImageGetWidth(cgiImage),
                                                 CGImageGetHeight(cgiImage), 8, 4*CGImageGetWidth(cgiImage), rgbColorSpace,
                                                 kCGImageAlphaNoneSkipFirst);



    CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));

    CGAffineTransform flipVertical = CGAffineTransformMake(
                                                           1, 0, 0, -1, 0, CGImageGetHeight(cgiImage)
                                                           );
    CGContextConcatCTM(context, flipVertical);



    CGAffineTransform flipHorizontal = CGAffineTransformMake(
                                                             -1.0, 0.0, 0.0, 1.0, CGImageGetWidth(cgiImage), 0.0
                                                             );

    CGContextConcatCTM(context, flipHorizontal);


    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(cgiImage),
                                           CGImageGetHeight(cgiImage)), cgiImage);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}

This code does create a video for me, but the video is not correct and all the pixels come out blurred/garbled...
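For reference, a call site for the method above might look like the following minimal sketch; the screenshotPaths array and the output file name are assumptions and are not taken from the question:

NSString *docsDir = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) firstObject];
NSString *moviePath = [docsDir stringByAppendingPathComponent:@"screenshots.mp4"];
// screenshotPaths is assumed to hold the Documents-directory paths of the screenshots.
[self writeImagesAsMovie:screenshotPaths toPath:moviePath];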

1 Answer:

Answer 0 (score: 0):

In the .m file:

#import "ViewController.h"

@interface ViewController()

@end

@implementation ViewController

- (void)viewDidLoad
{
    [super viewDidLoad];
    // Do any additional setup after loading the view, typically from a nib.
}

- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

- (IBAction)createVideo:(id)sender

{

NSError *error = nil;

NSFileManager *fileMgr = [NSFileManager defaultManager];

NSString *documentsDirectory = [NSHomeDirectory()
                                stringByAppendingPathComponent:@"Documents"];
NSString *videoOutputPath = [documentsDirectory stringByAppendingPathComponent:@"test_output.mp4"];

if ([fileMgr removeItemAtPath:videoOutputPath error:&error] != YES)

    NSLog(@"Unable to delete file: %@", [error localizedDescription]);


CGSize imageSize = CGSizeMake(400, 200);
NSUInteger fps = 30;



NSMutableArray *imageArray;
imageArray = [[NSMutableArray alloc] initWithObjects:@"photo1.png",@"photo2.png",@"photo3.png",@"photo4.png",@"photo5.png", nil];
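// Note: the hard-coded file names above are never used; imageArray is
// re-created below from every .png found in the main bundle.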

NSArray* imagePaths = [[NSBundle mainBundle] pathsForResourcesOfType:@"png" inDirectory:nil];
imageArray = [[NSMutableArray alloc] initWithCapacity:imagePaths.count];
NSLog(@"-->imageArray.count= %lu", (unsigned long)imageArray.count);
for (NSString* path in imagePaths)
{
    [imageArray addObject:[UIImage imageWithContentsOfFile:path]];

}

NSLog(@"Start building video from defined frames.");

AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
                              [NSURL fileURLWithPath:videoOutputPath] fileType:AVFileTypeQuickTimeMovie
                                                          error:&error];
NSParameterAssert(videoWriter);

NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                               AVVideoCodecH264, AVVideoCodecKey,
                               [NSNumber numberWithInt:imageSize.width], AVVideoWidthKey,
                               [NSNumber numberWithInt:imageSize.height], AVVideoHeightKey,
                               nil];

AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
                                        assetWriterInputWithMediaType:AVMediaTypeVideo
                                        outputSettings:videoSettings];


AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                 assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                 sourcePixelBufferAttributes:nil];

NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
videoWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:videoWriterInput];


[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];

CVPixelBufferRef buffer = NULL;


int frameCount = 0;
double numberOfSecondsPerFrame = 6;
double frameDuration = fps * numberOfSecondsPerFrame;
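// With fps = 30 and numberOfSecondsPerFrame = 6, frameDuration is 180 ticks on
// the 1/30 s timescale, i.e. each image is displayed for 6 seconds.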


NSLog(@"**************************************************");
for(UIImage * img in imageArray)
{

    buffer = [self pixelBufferFromCGImage:[img CGImage]];

    BOOL append_ok = NO;
    int j = 0;
    while (!append_ok && j < 30) {
        if (adaptor.assetWriterInput.readyForMoreMediaData)  {

            NSLog(@"Processing video frame (%d,%d)",frameCount,[imageArray count]);

            CMTime frameTime = CMTimeMake(frameCount*frameDuration,(int32_t) fps);
            append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
            if(!append_ok){
                NSError *error = videoWriter.error;
                if(error!=nil) {
                    NSLog(@"Unresolved error %@,%@.", error, [error userInfo]);
                }
            }
        }
        else {
            printf("adaptor not ready %d, %d\n", frameCount, j);
            [NSThread sleepForTimeInterval:0.1];
        }
        j++;
    }
    if (!append_ok) {
        printf("error appending image %d times %d\n, with error.", frameCount, j);
    }
    frameCount++;
}
NSLog(@"**************************************************");

[videoWriterInput markAsFinished];
[videoWriter finishWriting];


AVMutableComposition* mixComposition = [AVMutableComposition composition];

NSString *bundleDirectory = [[NSBundle mainBundle] bundlePath];

NSString *audio_inputFilePath = [bundleDirectory stringByAppendingPathComponent:@"30secs.mp3"];
NSURL    *audio_inputFileUrl = [NSURL fileURLWithPath:audio_inputFilePath];


NSURL    *video_inputFileUrl = [NSURL fileURLWithPath:videoOutputPath];


NSString *outputFilePath = [documentsDirectory stringByAppendingPathComponent:@"final_video.mp4"];
NSURL    *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];

if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
    [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];

CMTime nextClipStartTime = kCMTimeZero;

AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];




AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];
CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];



AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];

_assetExport.outputFileType = @"public.mpeg-4";

_assetExport.outputURL = outputFileUrl;


[_assetExport exportAsynchronouslyWithCompletionHandler:
 ^(void ) {
    //[self saveVideoToAlbum:outputFilePath];
 }
 ];
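// Note: exportAsynchronouslyWithCompletionHandler: returns immediately, so the
// log below fires before the export has actually finished writing the file.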

NSLog(@"DONE.....outputFilePath--->%@", outputFilePath);

}

- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image

{

CGSize size = CGSizeMake(400, 200);

NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                         [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                         [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                         nil];
CVPixelBufferRef pxbuffer = NULL;

CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
                                      size.width,
                                      size.height,
                                      kCVPixelFormatType_32ARGB,
                                      (__bridge CFDictionaryRef) options,
                                      &pxbuffer);
if (status != kCVReturnSuccess){
    NSLog(@"Failed to create pixel buffer");
}

CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);

CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata, size.width,
                                             size.height, 8, 4*size.width, rgbColorSpace,(CGBitmapInfo)
                                             kCGImageAlphaNoneSkipLast);

CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
                                       CGImageGetHeight(image)), image);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);

CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

return pxbuffer;

}

@end

In the .h file...

#import <UIKit/UIKit.h>
#import <Foundation/Foundation.h>
#import <CoreMedia/CoreMedia.h>
#import <CoreVideo/CoreVideo.h>
#import <CoreGraphics/CoreGraphics.h>
#import <AVFoundation/AVFoundation.h>
#import <QuartzCore/QuartzCore.h>

@interface ViewController : UIViewController

- (IBAction)createVideo:(id)sender;

@end

And add the following frameworks:

CoreMedia.framework

CoreVideo.framework

CoreGraphics.framework

AVFoundation.framework

QuartzCore.framework
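
The export completion handler above leaves saveVideoToAlbum: commented out. As a minimal sketch of what such a helper could look like (the method name and completion selector here are assumptions, not part of the original answer), the final file can be written to the Saved Photos album with UIKit's UISaveVideoAtPathToSavedPhotosAlbum:

// Hypothetical helper, not from the original answer: saves the exported movie
// to the Saved Photos album once the export session has completed.
- (void)saveVideoToAlbum:(NSString *)videoPath
{
    if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(videoPath)) {
        // The selector below is invoked when the save finishes (or fails).
        UISaveVideoAtPathToSavedPhotosAlbum(videoPath, self,
            @selector(video:didFinishSavingWithError:contextInfo:), NULL);
    }
}

- (void)video:(NSString *)videoPath didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo
{
    NSLog(@"Saved %@, error: %@", videoPath, error);
}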