How to programmatically save a video on the iPad (iOS SDK)

Date: 2012-05-01 06:52:15

Tags: ipad video photolibrary

I have a series of images that I have converted into a movie, and now I want to know how to save the converted video to the iPad.

How can I save the converted video to the iPad Photo Library? This is what I have so far:

NSError *error = nil;

AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path] fileType:AVFileTypeQuickTimeMovie error:&error];

NSParameterAssert(videoWriter);

NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                               AVVideoCodecH264, AVVideoCodecKey,
                               [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                               [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                               nil];
AVAssetWriterInput* writerInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings] retain];


// NSDictionary *bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];

AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput sourcePixelBufferAttributes:nil];


NSParameterAssert(writerInput);
NSParameterAssert([videoWriter canAddInput:writerInput]);
[videoWriter addInput:writerInput];

//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];

CVPixelBufferRef buffer = NULL;

//convert uiimage to CGImage.

CFDataRef imgData = (CFDataRef)[array objectAtIndex:0];
CGDataProviderRef imgDataProvider = CGDataProviderCreateWithCFData (imgData);
CGImageRef image1 = CGImageCreateWithPNGDataProvider(imgDataProvider, NULL, true, kCGRenderingIntentDefault);

buffer = [self pixelBufferFromCGImage:image1];
[adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];

//Write samples:......

//Finish the session:
[writerInput markAsFinished];
[videoWriter finishWriting];
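
The code above calls a `pixelBufferFromCGImage:` helper that is not shown in the question. Below is a minimal sketch of what such a helper typically looks like, assuming a `kCVPixelFormatType_32ARGB` buffer created with `CVPixelBufferCreate` and filled by drawing the image with Core Graphics; only the method name is taken from the snippet above, everything else is an illustrative assumption rather than the asker's actual implementation:

- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image
{
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];

    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
                                          CGImageGetWidth(image),
                                          CGImageGetHeight(image),
                                          kCVPixelFormatType_32ARGB,
                                          (CFDictionaryRef)options,
                                          &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata,
                                                 CGImageGetWidth(image),
                                                 CGImageGetHeight(image),
                                                 8,
                                                 CVPixelBufferGetBytesPerRow(pxbuffer),
                                                 rgbColorSpace,
                                                 kCGImageAlphaNoneSkipFirst);
    NSParameterAssert(context);

    // Draw the CGImage into the pixel buffer's backing memory
    CGContextDrawImage(context,
                       CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)),
                       image);

    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    // The caller owns the returned buffer and should release it
    // with CVPixelBufferRelease once it has been appended.
    return pxbuffer;
}

Note that the bytes-per-row value is taken from the pixel buffer itself rather than computed as width × 4, since Core Video may pad rows.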

2 Answers:

Answer 0 (score: 1)

Try this open-source component:

https://www.cocoacontrols.com/controls/iqprojectvideo

It may help you; adapt it to your requirements.

Answer 1 (score: 0)

Try the following code:

 [_operationQueue addOperationWithBlock:^{

    NSInteger i = 0;

    NSString *path = [NSTemporaryDirectory() stringByAppendingFormat:@"%d.png",i];
    UIImage *image;

    NSDate *startDate;

    while ((image = [UIImage imageWithContentsOfFile:path]))
    {
        while (1)
        {
            if (writerInput.readyForMoreMediaData == NO)
            {
                [NSThread sleepForTimeInterval:0.01]; // sleep() takes whole seconds, so pause ~10 ms this way instead
                continue;
            }
            else
            {
                //First time only: remember the timestamp of the first frame
                if (buffer == NULL)
                {
                    startDate = [_dates objectAtIndex:i];
                }

                buffer = [IQProjectVideo pixelBufferFromCGImage:image.CGImage];

                if (buffer)
                {
                    if (i < _dates.count)
                    {
                        NSDate *currentDate = [_dates objectAtIndex:i];
                        Float64 interval = [currentDate timeIntervalSinceDate:startDate];

                        int32_t timeScale;

                        if (i == 0)
                        {
                            timeScale = 1.0/([[_dates objectAtIndex:i+1] timeIntervalSinceDate:currentDate]);
                        }
                        else
                        {
                            timeScale = 1.0/([currentDate timeIntervalSinceDate:[_dates objectAtIndex:i-1]]);
                        }

                        CMTime presentTime = CMTimeMakeWithSeconds(interval, MAX(33, timeScale));
                        //NSLog(@"presentTime:%@",(__bridge NSString *)CMTimeCopyDescription(kCFAllocatorDefault, presentTime));

                        if (_progressBlock != NULL)
                        {
                            dispatch_sync(dispatch_get_main_queue(), ^{
                                _progressBlock((CGFloat)i/(CGFloat)currentIndex);
                            });
                        }

                        // append buffer
                        [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
                        CVPixelBufferRelease(buffer);
                    }
                }
                break;
            }
        }

        [[NSFileManager defaultManager] removeItemAtPath:path error:nil];

        path = [NSTemporaryDirectory() stringByAppendingFormat:@"%d.png",++i];
    }

    //Finish the session:
    [writerInput markAsFinished];

    if ([videoWriter respondsToSelector:@selector(finishWritingWithCompletionHandler:)])
    {
        [videoWriter finishWritingWithCompletionHandler:^{
            CVPixelBufferPoolRelease(adaptor.pixelBufferPool);

        }];
    }
    else
    {
        [videoWriter finishWriting];
        CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
    }

    NSDictionary *fileAttributes = [[NSFileManager defaultManager] attributesOfItemAtPath:_path error:nil];
    NSDictionary *dictInfo = [NSDictionary dictionaryWithObjectsAndKeys:
                              _path, IQFilePathKey,
                              [fileAttributes objectForKey:NSFileSize], IQFileSizeKey,
                              [fileAttributes objectForKey:NSFileCreationDate], IQFileCreateDateKey,
                              nil];

    if (_completionBlock != NULL)
    {
        dispatch_sync(dispatch_get_main_queue(), ^{
            _completionBlock(dictInfo,videoWriter.error);
        });
    }

    // Debug convenience for the Simulator: reveals the temporary directory in Finder; it has no effect on a device.
    NSString *openCommand = [NSString stringWithFormat:@"/usr/bin/open \"%@\"", NSTemporaryDirectory()];
    system([openCommand fileSystemRepresentation]);
}];
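
Neither snippet actually copies the finished movie into the iPad's Photo Library, which is what the question asks. One minimal sketch using UIKit's save functions, assuming the movie has already been written to the `_path` used above (the compatibility check and logging are illustrative, not part of the original answers):

// Save the finished movie file into the device's Camera Roll.
if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(_path))
{
    // Passing nil/NULL for the completion target, selector and context means "fire and forget";
    // pass a target and a selector of the form video:didFinishSavingWithError:contextInfo:
    // to be notified when the copy finishes.
    UISaveVideoAtPathToSavedPhotosAlbum(_path, nil, NULL, NULL);
}
else
{
    NSLog(@"Movie at %@ is not compatible with the saved photos album", _path);
}

An alternative on iOS 4 and later is ALAssetsLibrary's writeVideoAtPathToSavedPhotosAlbum:completionBlock:, which reports success or failure through a completion block.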