如何显示图像中的视频

时间:2011-12-26 10:51:51

标签: iphone objective-c ios xcode4.2

    #import "ViewController.h"
    #import "AVFoundation/AVAssetWriter.h"
    #import "AVFoundation/AVAssetWriterInput.h"
    #import "AVFoundation/AVMediaFormat.h"
    #import "AVFoundation/AVVideoSettings.h"


    @implementation ViewController

    - (void)viewDidLoad
    {
        [super viewDidLoad];

         NSArray *documentDirectories = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
        NSString *documentDirectory = [documentDirectories objectAtIndex: 0];

       // NSLog(@"Where it is %@ \n",documentDirectory);
        image1.image = [UIImage imageNamed:@"images1.jpg"];
         CGSize sizeOfImage = image1.image.size;
        // printf("Size of Image width = %f  height = %f\n", sizeOfImage.width,sizeOfImage.height);
        [self writeImageAsMovie:(UIImage*)image1.image toPath:(NSString*)documentDirectory size:(CGSize)sizeOfImage duration:6];

    }


    - (void)writeImageAsMovie:(UIImage*)image toPath:(NSString*)path size:(CGSize)size duration:(int)duration 
    {
        NSError *error = nil;

        AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
                                      [NSURL fileURLWithPath:path] fileType:AVFileTypeQuickTimeMovie
                                                                  error:&error];
        NSParameterAssert(videoWriter);


        NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                       AVVideoCodecH264, AVVideoCodecKey,
                                       [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                       [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                                       nil];


        AVAssetWriterInput *writerInput = [AVAssetWriterInput
                                           assetWriterInputWithMediaType:AVMediaTypeVideo
                                           outputSettings:videoSettings];




        AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                         assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                         sourcePixelBufferAttributes:nil];


        NSParameterAssert(writerInput);
        NSParameterAssert([videoWriter canAddInput:writerInput]);
        [videoWriter addInput:writerInput];

        //Start a session:
        [videoWriter startWriting];

        [videoWriter startSessionAtSourceTime:kCMTimeZero];

        //Write samples:
        //CVPixelBufferRef Utils;
        CVPixelBufferRef buffer = [self newPixelBufferFromCGImage:image.CGImage size:size];
        [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];



        [adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(duration-1, 2)];


        //Finish the session:
        [writerInput markAsFinished];
        [videoWriter endSessionAtSourceTime:CMTimeMake(duration, 2)];



       [videoWriter finishWriting]; 


    } 

-(CVPixelBufferRef) newPixelBufferFromCGImage:(CGImageRef)image size:(CGSize )frameSize
{
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];


    CVPixelBufferRef pxbuffer = NULL;


    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, frameSize.width, frameSize.height, kCVPixelFormatType_32ARGB,(__bridge CFDictionaryRef)options,&pxbuffer);

    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    status = CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, frameSize.width,
                                                 frameSize.height, 8, 4*frameSize.width, rgbColorSpace, 
                                                 kCGImageAlphaNoneSkipFirst);
    NSParameterAssert(context);
    CGAffineTransform frameTransform;
    CGContextConcatCTM(context, frameTransform);
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), 
                                           CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);


    return pxbuffer;
} 


@end

我是iOS上的新手,我看过一个代码,它在堆栈溢出中显示来自图像的视频。我运行代码,它没有显示任何错误,但我没有得到所需的输出。我认为错误是这个函数的最后一行

-(void)writeImageAsMovie:(UIImage*)image toPath:(NSString*)path size:(CGSize)size duration:(int)duration) that is [videoWriter finishWriting]

请帮帮我。

1 个答案:

答案 0 :(得分:0)

如果您有一组图像并且只是将这些图像作为视频播放,您可以使用UIImageView并使用for循环将阵列中的图像更新到图像视图。

如果阵列中有图像。 像,

for (int i = 0; i < [array count]; i++)
{    
    imageview.image = (UIImage *)[array objectAtIndex:i];
}