How to add sound to a video created from images

Date: 2012-06-04 14:35:16

Tags: iphone, xcode

This is the code I use to create a video from a set of images, but I still need to add an audio track to that video. Any help is appreciated.

- (IBAction)saveMovieToLibrary
{
    ImageVideoPath = [NSHomeDirectory() stringByAppendingPathComponent:[NSString stringWithFormat:@"Documents/movie.mp4"]];

    NSLog(@"<><><><>=%@",ImageVideoPath);


    testImageArray = [[NSArray alloc] initWithObjects:
                      [UIImage imageNamed:@"image1.png"], 
                      [UIImage imageNamed:@"image2.png"], 
                      [UIImage imageNamed:@"image3.png"],
                      [UIImage imageNamed:@"image4.png"],
                      [UIImage imageNamed:@"image5.png"], 
                      [UIImage imageNamed:@"image6.png"], 
                      [UIImage imageNamed:@"image7.png"],
                      [UIImage imageNamed:@"image9.png"], 
                      [UIImage imageNamed:@"image10.png"], 
                      [UIImage imageNamed:@"image11.png"], 
                      [UIImage imageNamed:@"image12.png"], nil];


    //  NSLog(@"testImageArray/=%@",testImageArray);


    [self writeImageAsMovie:testImageArray toPath:ImageVideoPath size:CGSizeMake(480, 320) duration:1];
}  

-(void)writeImageAsMovie:(NSArray *)array toPath:(NSString*)path size:(CGSize)size duration:(int)duration 
{
    NSError *error = nil;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path] 
                                                           fileType:AVFileTypeMPEG4 
                                                              error:&error];
    NSParameterAssert(videoWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                                   nil];


    AVAssetWriterInput* writerInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                          outputSettings:videoSettings] retain];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                                                                                     sourcePixelBufferAttributes:nil];

    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);

    [videoWriter addInput:writerInput];


    //Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    // Render the first image into a pixel buffer, append it at time zero, then release it.
    CVPixelBufferRef buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:0] CGImage] size:CGSizeMake(480, 320)];
    [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
    CVPixelBufferRelease(buffer);


    int i = 1;
    while (1) 
    {
        if(writerInput.readyForMoreMediaData){

            // Frame i is presented at i/2 seconds, i.e. two frames per second.
            CMTime frameTime = CMTimeMake(0, 12);
            CMTime lastTime = CMTimeMake(i, 2);
            CMTime presentTime = CMTimeAdd(lastTime, frameTime);

            if (i >= [array count]) 
            {
                buffer = NULL;
            }   
            else 
            {
                buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:i] CGImage] size:CGSizeMake(480, 320)];
            }          


            if (buffer) 
            {
                // Append the frame, then release the buffer so each frame is not leaked.
                [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
                CVPixelBufferRelease(buffer);
                i++;
            } 
            else 
            {

                //Finish the session:
                [writerInput markAsFinished];
                [videoWriter finishWriting];                

                CVPixelBufferPoolRelease(adaptor.pixelBufferPool);


                [videoWriter release];
                [writerInput release];
                NSLog (@"Done");
                break;
            }
        }
    }

    audioFilePath1 = [[NSBundle mainBundle] pathForResource:@"Video" ofType:@"mp3"];

    NSLog(@"%@",audioFilePath1);
    NSLog(@"Video File Path=%@",ImageVideoPath);

    [self addAudioToFileAtPath:ImageVideoPath andAudioPath:audioFilePath1];
}


- (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image size:(CGSize) size{

    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;

    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width,
                                          size.height, kCVPixelFormatType_32ARGB, (CFDictionaryRef) options, 
                                          &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width,
                                                 size.height, 8, 4*size.width, rgbColorSpace, 
                                                 kCGImageAlphaNoneSkipFirst);
    NSParameterAssert(context);
    CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), 
                                           CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}
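
For reference, the addAudioToFileAtPath:andAudioPath: helper called at the end of writeImageAsMovie: is not shown in the question. Below is a minimal sketch of what it could look like, built on AVMutableComposition with a passthrough export; the output file name movieWithAudio.mov and the method body are assumptions, not the asker's actual code.

- (void)addAudioToFileAtPath:(NSString *)videoPath andAudioPath:(NSString *)audioPath
{
    // Load the silent movie written above and the bundled audio file.
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:videoPath] options:nil];
    AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:audioPath] options:nil];

    AVMutableComposition *mixComposition = [AVMutableComposition composition];

    // Copy the video track into the composition.
    AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                        preferredTrackID:kCMPersistentTrackID_Invalid];
    [videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                        ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                         atTime:kCMTimeZero error:nil];

    // Lay the audio alongside it, trimmed to the video's duration.
    AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                        preferredTrackID:kCMPersistentTrackID_Invalid];
    [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                        ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                         atTime:kCMTimeZero error:nil];

    // Export the combined composition (output path is an assumption).
    NSString *outputPath = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/movieWithAudio.mov"];
    [[NSFileManager defaultManager] removeItemAtPath:outputPath error:nil];

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                      presetName:AVAssetExportPresetPassthrough];
    exporter.outputURL = [NSURL fileURLWithPath:outputPath];
    exporter.outputFileType = AVFileTypeQuickTimeMovie;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        NSLog(@"Audio merge finished with status %d", (int)exporter.status);
        [exporter release];
    }];

    [videoAsset release];
    [audioAsset release];
}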

2 Answers:

Answer 0 (score: 0):

Use this code:

AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audioUrl options:nil];
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:videoUrl options:nil];

AVMutableComposition* mixComposition = [AVMutableComposition composition];

AVMutableCompositionTrack *compositionCommentaryTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio 
                                                                             preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionCommentaryTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAsset.duration) 
                                ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] 
                                 atTime:kCMTimeZero error:nil];

AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo 
                                                                                preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) 
                           ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] 
                            atTime:kCMTimeZero error:nil];

AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition 
                                                                presetName:AVAssetExportPresetPassthrough];   

NSString* videoName = @"export.mov";

NSString *exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:videoName];
NSURL    *exportUrl = [NSURL fileURLWithPath:exportPath]; // output URL for the merged (video + audio) movie

if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath]) 
{
   [[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
}

_assetExport.outputFileType = @"com.apple.quicktime-movie";
DLog(@"file type %@",_assetExport.outputFileType);
_assetExport.outputURL = exportUrl;
_assetExport.shouldOptimizeForNetworkUse = YES;

[_assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
    // your completion code here
}];
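
For context, here is a hedged sketch of how this snippet could be wired up against the question's code. The audioUrl/videoUrl inputs it expects are not defined in the answer, so the values below are assumptions:

// Assumed inputs for the snippet above.
NSURL *audioUrl = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"Video" ofType:@"mp3"]];
NSURL *videoUrl = [NSURL fileURLWithPath:ImageVideoPath]; // the movie.mp4 written by writeImageAsMovie:

// Once the completion handler fires, the merged movie sits at exportPath
// (NSTemporaryDirectory()/export.mov) and can be played or saved from there.
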
Hope it helps.

Answer 1 (score: 0):

Here is the code...

AVURLAsset* videoAsset = [[AVURLAsset alloc] initWithURL:video_inputFileUrl options:nil];
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);

// Video track of the composition.
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                                  preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:video_timeRange
                                 ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                                  atTime:nextClipStartTime error:nil];

// Audio track of the composition.
AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                                  preferredTrackID:kCMPersistentTrackID_Invalid];
[b_compositionAudioTrack insertTimeRange:audio_timeRange
                                 ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                                  atTime:nextClipStartTime error:nil];

// Export the composition asynchronously.
__block AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:[mixComposition copy]
                                                                              presetName:AVAssetExportPresetMediumQuality];
_assetExport.videoComposition = videoComposition;
_assetExport.outputFileType = AVFileTypeQuickTimeMovie;
_assetExport.outputURL = outputFileUrl;

[_assetExport exportAsynchronouslyWithCompletionHandler:^{
    switch (_assetExport.status) {
        case AVAssetExportSessionStatusCompleted:
            // Custom method to import the exported video
            NSLog(@"completed video creation!!!");
            [self removeAudioFileFromDocumentsdirectory:outputFilePath1];
            [self removeAudioFileFromDocumentsdirectory:videoOutputPath];
            break;
        case AVAssetExportSessionStatusFailed:
            // NSLog(@"Failed video creation:%@",_assetExport.error);
            break;
        case AVAssetExportSessionStatusCancelled:
            // NSLog(@"Canceled:%@",_assetExport.error);
            break;
        default:
            break;
    }
}];
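
Note that this snippet relies on variables declared elsewhere in the answerer's project: mixComposition, audioAsset, audio_timeRange, nextClipStartTime, videoComposition, outputFileUrl, and the removeAudioFileFromDocumentsdirectory: cleanup helper. A hedged setup sketch with placeholder values (assumptions only, not part of the answer):

// Assumed setup; names match the snippet above, values are placeholders.
AVMutableComposition *mixComposition = [AVMutableComposition composition];
AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audio_inputFileUrl options:nil]; // audio_inputFileUrl: URL of your audio file
CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
CMTime nextClipStartTime = kCMTimeZero;      // both tracks start at time zero
NSString *outputFilePath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"merged.mov"];
NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
// videoComposition is assumed to be an AVMutableVideoComposition built separately;
// if no video-level instructions are needed, the videoComposition property can simply be left unset.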