How do I mix audio and video in iOS?

Posted: 2014-05-12 10:48:40

Tags: ios movie avasset audio-video-sync

I am trying to mix an audio file with a video file, but I get an error: "Export failed: The operation could not be completed".

If there is a mistake in this code, please point it out and correct it.

My code:

    -(void)CompileFilesToMakeMovie
    {
        AVMutableComposition *mixComposition = [AVMutableComposition composition];

        // Background audio bundled with the app
        NSString *str = [[[NSBundle mainBundle] resourcePath] stringByAppendingPathComponent:@"bgsong.mp3"];
        NSString *audio_inputFilePath = str;
        NSURL *audio_inputFileUrl = [NSURL fileURLWithPath:audio_inputFilePath];

        // Video file stored in the Documents directory
        NSString *video_inputFileName = @"movie.mp4";
        NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
        NSString *documentsDirectory = [paths objectAtIndex:0];
        NSString *video_inputFilePath = [documentsDirectory stringByAppendingPathComponent:video_inputFileName];
        NSURL *video_inputFileUrl = [NSURL fileURLWithPath:video_inputFilePath];

        // Output file, also in Documents; delete any previous copy first
        NSString *outputFileName = @"outputFile.mp4";
        NSString *outputFilePath = [documentsDirectory stringByAppendingPathComponent:outputFileName];
        NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];

        if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
            [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];

        CMTime nextClipStartTime = kCMTimeZero;

        // Add the video track to the composition
        AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:video_inputFileUrl options:nil];
        CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
        AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        [a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];

        //nextClipStartTime = CMTimeAdd(nextClipStartTime, a_timeRange.duration);

        // Add the background audio track to the composition
        AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audio_inputFileUrl options:nil];
        CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
        AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        [b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];

        // Export the composition
        AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
        _assetExport.outputFileType = @"com.apple.quicktime-movie";
        _assetExport.outputURL = outputFileUrl;

        [_assetExport exportAsynchronouslyWithCompletionHandler:^(void) {

            BOOL _success = false;

            switch ([_assetExport status]) {
                case AVAssetExportSessionStatusCompleted:
                    _success = true;
                    NSLog(@"Export Completed");
                    break;
                case AVAssetExportSessionStatusWaiting:
                    NSLog(@"Export Waiting");
                    break;
                case AVAssetExportSessionStatusExporting:
                    NSLog(@"Export Exporting");
                    break;
                case AVAssetExportSessionStatusFailed:
                {
                    NSError *error = [_assetExport error];
                    NSLog(@"Export failed: %@", [error localizedDescription]);
                    break;
                }
                case AVAssetExportSessionStatusCancelled:
                    NSLog(@"Export canceled");
                    break;
                default:
                    break;
            }

            if (_success == true) {
                // Save the exported movie to the photo library, then remove the temporary file
                ALAssetsLibrary *assetLibrary = [[ALAssetsLibrary alloc] init];
                [assetLibrary writeVideoAtPathToSavedPhotosAlbum:outputFileUrl completionBlock:^(NSURL *assetURL, NSError *error){
                    NSError *removeError = nil;
                    [[NSFileManager defaultManager] removeItemAtURL:outputFileUrl error:&removeError];
                }];
            }
        }];
    }

Thanks in advance.
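
One detail worth noting about the code above: every insertTimeRange:ofTrack:atTime:error: call passes error:nil, so a wrong path or a missing track stays silent until the export fails with the generic "operation could not be completed" message. Below is a minimal sketch of the same two-track composition with those errors surfaced; the URLs are whatever you pass in, and nothing here is specific to the original project.

    #import <AVFoundation/AVFoundation.h>

    // Minimal sketch: build the same two-track composition, but report
    // missing tracks and insertion errors instead of passing error:nil.
    static AVMutableComposition *MakeMixComposition(NSURL *videoURL, NSURL *audioURL)
    {
        AVURLAsset *videoAsset = [AVURLAsset URLAssetWithURL:videoURL options:nil];
        AVURLAsset *audioAsset = [AVURLAsset URLAssetWithURL:audioURL options:nil];

        // A wrong path or an unreadable file shows up here as an empty track array,
        // long before the export session reports its generic failure message.
        if ([videoAsset tracksWithMediaType:AVMediaTypeVideo].count == 0) {
            NSLog(@"No video track found at %@", videoURL);
            return nil;
        }
        if ([audioAsset tracksWithMediaType:AVMediaTypeAudio].count == 0) {
            NSLog(@"No audio track found at %@", audioURL);
            return nil;
        }

        AVMutableComposition *mix = [AVMutableComposition composition];
        NSError *error = nil;

        AVMutableCompositionTrack *videoTrack =
            [mix addMutableTrackWithMediaType:AVMediaTypeVideo
                             preferredTrackID:kCMPersistentTrackID_Invalid];
        [videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                            ofTrack:[videoAsset tracksWithMediaType:AVMediaTypeVideo][0]
                             atTime:kCMTimeZero
                              error:&error];
        if (error) { NSLog(@"Video insert failed: %@", error); return nil; }

        AVMutableCompositionTrack *audioTrack =
            [mix addMutableTrackWithMediaType:AVMediaTypeAudio
                             preferredTrackID:kCMPersistentTrackID_Invalid];
        [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAsset.duration)
                            ofTrack:[audioAsset tracksWithMediaType:AVMediaTypeAudio][0]
                             atTime:kCMTimeZero
                              error:&error];
        if (error) { NSLog(@"Audio insert failed: %@", error); return nil; }

        return mix;
    }

If either log fires, the problem is in the input files rather than in the export settings.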

2 Answers:

Answer 0 (score: 0)

Please try this...

    AVMutableComposition *mixComposition = [AVMutableComposition composition];

    // Audio files
    NSMutableArray *loTempArr = [[[Database sharedDBDetails] getAllUserDetails:kaudioTable] mutableCopy];
    TempFile *lotemp1 = [[TempFile alloc] init];
    TempFile *loTemp2 = [[TempFile alloc] init];
    loTemp2 = [mallVideoArray objectAtIndex:self.slectedVideoIndex];

    for (int i = 0; i < [loTempArr count]; i++)
    {
        lotemp1 = [loTempArr objectAtIndex:i];
        if (loTemp2.mTemp_Key == [lotemp1.mTemp_videorefID intValue])
        {
            //NSLog(@"%@",lotemp1.mTemp_AudioName);
            NSString *filepath = [kDocument_Path stringByAppendingString:[NSString stringWithFormat:@"/audioFolder/%@", lotemp1.mTemp_AudioName]];
            NSURL *SongURL = [NSURL fileURLWithPath:filepath];
            self.audioAsset = [[AVURLAsset alloc] initWithURL:SongURL options:nil];

            CMTime time2 = CMTimeMake([lotemp1.mTemp_timeinvideo doubleValue] * 600, 600);
            AVMutableCompositionTrack *compositionCommentaryTrack2 = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
            [compositionCommentaryTrack2 insertTimeRange:CMTimeRangeMake(kCMTimeZero, CMTimeSubtract(self.videoAsset.duration, time2))
                                                 ofTrack:[[self.audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:time2 error:nil];
        }
    }

    AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, self.videoAsset.duration)
                                   ofTrack:[[self.videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];

    // 3.1 - Create AVMutableVideoCompositionInstruction
    AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, self.videoAsset.duration);

    // 3.2 - Create an AVMutableVideoCompositionLayerInstruction for the video track and fix the orientation.
    AVMutableVideoCompositionLayerInstruction *videolayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];
    AVAssetTrack *videoAssetTrack = [[self.videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

    CGAffineTransform transform = CGAffineTransformIdentity;
    transform = videoAssetTrack.preferredTransform;
    [videolayerInstruction setTransform:transform atTime:kCMTimeZero];
    [videolayerInstruction setOpacity:0.0 atTime:self.videoAsset.duration];

    mainInstruction.layerInstructions = [NSArray arrayWithObjects:videolayerInstruction, nil];

    // 3.3 - Add instructions
    AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
    float renderWidth, renderHeight;
    renderWidth = self.movieController.view.frame.size.width;
    renderHeight = self.movieController.view.frame.size.height;
    CGSize size;
    if (flipActionFlag == 4 || flipActionFlag == 5)
    {
        size = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width);
    }
    else
    {
        size = videoAssetTrack.naturalSize;
    }
    //NSLog(@"%@",NSStringFromCGSize(size));

    mainCompositionInst.renderSize = size; // or CGSizeMake(renderWidth, renderHeight)
    mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
    mainCompositionInst.frameDuration = CMTimeMake(1, 30);

    // 4 - Get path
    TempFile *mnewtemp = [[TempFile alloc] init];
    mnewtemp.mTemp_videoName = [NSString stringWithFormat:@"Video_%d.m4v", loTemp.mTemp_Key + 1];
    [[Database sharedDBDetails] insertNewRowWithData:mnewtemp forTable:kvideoTable];
    NSString *myPathDocs = [kDocument_Path stringByAppendingPathComponent:
                            [NSString stringWithFormat:@"Video/Video_%d.m4v", loTemp.mTemp_Key + 1]];
    NSURL *url = [NSURL fileURLWithPath:myPathDocs];

    // 5 - Create exporter
    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                      presetName:AVAssetExportPresetHighestQuality];
    exporter.outputURL = url;
    exporter.outputFileType = AVFileTypeQuickTimeMovie;
    exporter.shouldOptimizeForNetworkUse = YES;
    exporter.videoComposition = mainCompositionInst;

    [exporter exportAsynchronouslyWithCompletionHandler:^
     {
         int exportStatus = exporter.status;
         NSLog(@"exportStatus = %d", exportStatus);
         switch (exportStatus)
         {
             case AVAssetExportSessionStatusFailed: { NSError *exportError = exporter.error; NSLog(@"AVAssetExportSessionStatusFailed: %@", exportError); break; }
             case AVAssetExportSessionStatusCompleted: { NSLog(@"AVAssetExportSessionStatusCompleted--"); break; }
             case AVAssetExportSessionStatusUnknown: { NSLog(@"AVAssetExportSessionStatusUnknown"); break; }
             case AVAssetExportSessionStatusExporting: { NSLog(@"AVAssetExportSessionStatusExporting"); break; }
             case AVAssetExportSessionStatusCancelled: { NSLog(@"AVAssetExportSessionStatusCancelled"); break; }
             case AVAssetExportSessionStatusWaiting: { NSLog(@"AVAssetExportSessionStatusWaiting"); break; }
             default: { NSLog(@"didn't get export status"); break; }
         }
         dispatch_async(dispatch_get_main_queue(), ^
                        {
                            [self exportDidFinish:exporter];
                        });
     }];
    }
    }

    - (void)exportDidFinish:(AVAssetExportSession *)session
    {
        [losaveView removeFromSuperview];
        if (session.status == AVAssetExportSessionStatusCompleted)
        {
            NSURL *outputURL = session.outputURL;
            ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
            if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputURL]) {
                [library writeVideoAtPathToSavedPhotosAlbum:outputURL completionBlock:^(NSURL *assetURL, NSError *error){
                    dispatch_async(dispatch_get_main_queue(), ^{
                        if (error)
                        {
                            UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Error" message:@"Video Saving Failed"
                                                                           delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil];
                            [alert show];
                        }
                        else
                        {
                            self.mallVideoArray = [[[Database sharedDBDetails] getAllUserDetails:kvideoTable] mutableCopy];
                            UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Video Saved" message:@"Saved To Photo Album"
                                                                           delegate:self cancelButtonTitle:@"OK" otherButtonTitles:nil];
                            [alert show];
                        }
                    });
                }];
            }
        }
    }
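
The code above depends on project-specific helpers (Database, TempFile, kDocument_Path, mallVideoArray, flipActionFlag, self.videoAsset), so it will not compile as posted. The technique it adds over the question's code is inserting each audio clip at an offset into the timeline and trimming it to the remaining video length; here is a stripped-down sketch of just that step, with illustrative names that are not from the original answer.

    // Sketch: add audioAsset to the composition starting offsetSeconds into the video,
    // trimmed so it stops when the video ends. Returns NO if the insert fails.
    static BOOL AddAudioAtOffset(AVMutableComposition *composition,
                                 AVAsset *audioAsset,
                                 CMTime videoDuration,
                                 Float64 offsetSeconds)
    {
        CMTime offset = CMTimeMakeWithSeconds(offsetSeconds, 600);
        CMTime remaining = CMTimeSubtract(videoDuration, offset);

        NSError *error = nil;
        AVMutableCompositionTrack *audioTrack =
            [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                     preferredTrackID:kCMPersistentTrackID_Invalid];
        [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, remaining)
                            ofTrack:[audioAsset tracksWithMediaType:AVMediaTypeAudio][0]
                             atTime:offset
                              error:&error];
        if (error) NSLog(@"Audio insert failed: %@", error);
        return error == nil;
    }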

Answer 1 (score: 0)

I found the solution to my problem.

    - (IBAction)MergeAndSave:(id)sender
    {
        NSString *audio_inputFilePath = [[[NSBundle mainBundle] resourcePath] stringByAppendingPathComponent:@"bgsong.mp3"];
        NSURL *SongURL = [NSURL fileURLWithPath:audio_inputFilePath];
        audioAsset = [AVAsset assetWithURL:SongURL];

        NSString *video_inputFileName = @"movie.mp4";
        NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
        NSString *documentsDirectory = [paths objectAtIndex:0];
        NSString *video_inputFilePath = [documentsDirectory stringByAppendingPathComponent:video_inputFileName];
        NSURL *video_inputFileUrl = [NSURL fileURLWithPath:video_inputFilePath];
        firstAsset = [AVAsset assetWithURL:video_inputFileUrl];
        secondAsset = [AVAsset assetWithURL:video_inputFileUrl];

        if (firstAsset != nil && secondAsset != nil) {

            // Create AVMutableComposition object. This object will hold our multiple AVMutableCompositionTracks.
            AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];

            // VIDEO TRACK
            AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
            [firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];

            AVMutableCompositionTrack *secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
            [secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, secondAsset.duration) ofTrack:[[secondAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:firstAsset.duration error:nil];

            // AUDIO TRACK
            if (audioAsset != nil) {
                AVMutableCompositionTrack *AudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
                [AudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, CMTimeAdd(firstAsset.duration, secondAsset.duration)) ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];
            }

            AVMutableVideoCompositionInstruction *MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
            MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeAdd(firstAsset.duration, secondAsset.duration));

            // FIXING ORIENTATION
            AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
            [FirstlayerInstruction setOpacity:0.0 atTime:firstAsset.duration];
            MainInstruction.layerInstructions = [NSArray arrayWithObjects:FirstlayerInstruction, nil];

            AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
            MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
            MainCompositionInst.frameDuration = CMTimeMake(1, 30);
            MainCompositionInst.renderSize = CGSizeMake(self.view.frame.size.width, self.view.frame.size.height);

            NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
            NSString *documentsDirectory = [paths objectAtIndex:0];
            NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"mergeVideo.mov"]];
            NSURL *url = [NSURL fileURLWithPath:myPathDocs];

            AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
            exporter.outputURL = url;
            exporter.outputFileType = AVFileTypeQuickTimeMovie;
            exporter.videoComposition = MainCompositionInst;
            exporter.shouldOptimizeForNetworkUse = YES;
            [exporter exportAsynchronouslyWithCompletionHandler:^
             {
                 dispatch_async(dispatch_get_main_queue(), ^{
                     [self playVideo];
                     //[self performSelector:@selector(playVideo) withObject:nil afterDelay:2.0];
                     //[self exportDidFinish:exporter];
                 });
             }];
        }
    }

    - (void)exportDidFinish:(AVAssetExportSession *)session
    {
        if (session.status == AVAssetExportSessionStatusCompleted) {
            NSURL *outputURL = session.outputURL;
            ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
            if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputURL]) {
                [library writeVideoAtPathToSavedPhotosAlbum:outputURL
                                            completionBlock:^(NSURL *assetURL, NSError *error){
                                                dispatch_async(dispatch_get_main_queue(), ^{
                                                    if (error) {
                                                        UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Error" message:@"Video Saving Failed" delegate:nil cancelButtonTitle:@"Ok" otherButtonTitles:nil];
                                                        [alert show];
                                                    } else {
                                                        UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Video Saved" message:@"Saved To Photo Album" delegate:self cancelButtonTitle:@"Ok" otherButtonTitles:nil];
                                                        [alert show];
                                                    }
                                                });
                                            }];
            }
        }

        audioAsset = nil;
        firstAsset = nil;
        secondAsset = nil;
    }
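
One visible change from the failing code in the question is the output configuration: this exporter writes to a .mov path with the AVFileTypeQuickTimeMovie constant, instead of pairing the raw @"com.apple.quicktime-movie" type with an .mp4 file name. If you are unsure which containers a preset can produce, AVAssetExportSession exposes supportedFileTypes; a short sketch, reusing the mixComposition built above:

    // Sketch: only pick QuickTime output if the chosen preset supports it.
    AVAssetExportSession *exporter =
        [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                         presetName:AVAssetExportPresetHighestQuality];
    if ([exporter.supportedFileTypes containsObject:AVFileTypeQuickTimeMovie]) {
        exporter.outputFileType = AVFileTypeQuickTimeMovie;   // matches the .mov extension
    } else {
        NSLog(@"Preset cannot write QuickTime: %@", exporter.supportedFileTypes);
    }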