Videos merge correctly, but the merged video is silent

Date: 2015-09-28 08:48:01

Tags: objective-c

I am merging videos. The code below merges them correctly, but the merged video has no sound (every source video I merge has audio, yet after merging the result is silent). Can anyone help me?

NSValue *timeDur;
NSMutableArray *arrInstructions = [[NSMutableArray alloc] init];
NSMutableArray *arrDuration = [[NSMutableArray alloc] init];
AVAsset *fAsset;
CMTime eachDuration = kCMTimeZero;
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];

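// Note: each pass adds only a *video* track for the clip; no audio track is ever inserted.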
for (NSInteger counter = 0; counter < self.arrVideoUrls.count; counter++) {
    AVMutableCompositionTrack *track = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    if (counter>0)
        fAsset = [AVAsset assetWithURL:[NSURL fileURLWithPath:self.arrVideoUrls[counter-1][@"VideoUrl"]]];
    AVAsset *firstAsset = [AVAsset assetWithURL:[NSURL fileURLWithPath:self.arrVideoUrls[counter][@"VideoUrl"]]];
    [track insertTimeRange:CMTimeRangeMake(kCMTimeZero,firstAsset.duration) ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:(counter == 0)?kCMTimeZero:eachDuration error:nil];
    eachDuration = CMTimeAdd(eachDuration, firstAsset.duration);
    timeDur = [NSValue valueWithCMTime:firstAsset.duration];
    [arrDuration addObject:timeDur];
    AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:track];
    AVAssetTrack *FirstAssetTrack = [[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    UIImageOrientation FirstAssetOrientation_  = UIImageOrientationUp;
    BOOL  isFirstAssetPortrait_  = NO;
    CGAffineTransform firstTransform = FirstAssetTrack.preferredTransform;
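    // Infer the clip's orientation from its preferred transform.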
    if(firstTransform.a == 0 && firstTransform.b == 1.0 && firstTransform.c == -1.0 && firstTransform.d == 0)  {FirstAssetOrientation_= UIImageOrientationRight; isFirstAssetPortrait_ = YES;}
    if(firstTransform.a == 0 && firstTransform.b == -1.0 && firstTransform.c == 1.0 && firstTransform.d == 0)  {FirstAssetOrientation_ =  UIImageOrientationLeft; isFirstAssetPortrait_ = YES;}
    if(firstTransform.a == 1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == 1.0)   {FirstAssetOrientation_ =  UIImageOrientationUp;}
    if(firstTransform.a == -1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == -1.0) {FirstAssetOrientation_ = UIImageOrientationDown;}
    CGFloat FirstAssetScaleToFitRatio = 640.0/640.0;

    //CGFloat FirstAssetScaleToFitRatio = 320.0/FirstAssetTrack.naturalSize.width;
    if (counter == 0) {
        if(isFirstAssetPortrait_){
            FirstAssetScaleToFitRatio = SYSTEM_SCREEN_SIZE.width/FirstAssetTrack.naturalSize.height;
            CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
            [FirstlayerInstruction setTransform:CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:kCMTimeZero];
        }else{
            CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
            [FirstlayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor),CGAffineTransformMakeTranslation(0, 0)) atTime:kCMTimeZero];
        }

    }else{
        if(isFirstAssetPortrait_){
            FirstAssetScaleToFitRatio = SYSTEM_SCREEN_SIZE.width/FirstAssetTrack.naturalSize.height;
            CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
            [FirstlayerInstruction setTransform:CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:firstAsset.duration];
        }else{
            CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
            [FirstlayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor),CGAffineTransformMakeTranslation(0, 0)) atTime:firstAsset.duration];
        }
    }
    if (counter <self.arrVideoUrls.count - 1 ) {
        [FirstlayerInstruction setOpacity:0.0 atTime:eachDuration];
    }

    [arrInstructions addObject:FirstlayerInstruction];
}
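// Sum the clip durations to get the total timeline length (this equals eachDuration from the loop above).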
AVMutableVideoCompositionInstruction * MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
CMTime final;
for (NSInteger counter = 0; counter < arrDuration.count; counter++) {
    NSValue *value = arrDuration[counter];
    CMTime timing  = kCMTimeZero ;
    [value getValue:&timing];
    if (counter == 0) {
        final = timing ;
    }else
        final = CMTimeAdd(final, timing);
}
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero,final);
MainInstruction.layerInstructions = [arrInstructions copy] ;
AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
MainCompositionInst.frameDuration = CMTimeMake(1, 10);
NSLog(@"%f",CMTimeGetSeconds(MainCompositionInst.frameDuration));
MainCompositionInst.renderSize = CGSizeMake(SYSTEM_SCREEN_SIZE.width, SYSTEM_SCREEN_SIZE.height);

NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs =  [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"mergeVideo-%d.mov",arc4random() % 1000]];

NSURL *url = [NSURL fileURLWithPath:myPathDocs];
// exporter is presumably an instance variable of type AVAssetExportSession *.
exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPreset1920x1080];
// Alternative preset: AVAssetExportPreset960x540
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.videoComposition = MainCompositionInst;
//exporter.shouldOptimizeForNetworkUse = YES;
//exporter.audioMix = audioZeroMix ;
[exporter exportAsynchronouslyWithCompletionHandler:^
 {
     dispatch_async(dispatch_get_main_queue(), ^{
         NSLog(@"%@",exporter.error);
         Hide_Indicator ;
         [self exportDidFinish:exporter];
     });
 }];

1 Answer:

Answer 0 (score: 1)

You edit audio exactly the same way you edit video. Go back to each movie, fetch its audio track, and insert that into the mutable composition as well.

In this example, I grab the first five seconds of video and the last five seconds of video from a movie and place them one after the other in a new video:

// oldAsset is the source movie, an AVAsset loaded elsewhere.
NSString *type = AVMediaTypeVideo;
NSArray *arr = [oldAsset tracksWithMediaType:type];
AVAssetTrack *track = [arr lastObject];
CMTime duration = track.timeRange.duration;
AVMutableComposition *comp = [AVMutableComposition composition];
AVMutableCompositionTrack *comptrack = [comp addMutableTrackWithMediaType:type preferredTrackID:kCMPersistentTrackID_Invalid];
// First five seconds of the clip...
[comptrack insertTimeRange:CMTimeRangeMake(CMTimeMakeWithSeconds(0,600), CMTimeMakeWithSeconds(5,600)) ofTrack:track atTime:CMTimeMakeWithSeconds(0,600) error:nil];
// ...followed immediately by its last five seconds.
[comptrack insertTimeRange:CMTimeRangeMake(CMTimeSubtract(duration, CMTimeMakeWithSeconds(5,600)), CMTimeMakeWithSeconds(5,600)) ofTrack:track atTime:CMTimeMakeWithSeconds(5,600) error:nil];

But the resulting video would be silent. So I go back and fetch the corresponding audio as well:

// Same two ranges again, this time from the audio track, inserted into a second composition track.
type = AVMediaTypeAudio;
arr = [oldAsset tracksWithMediaType:type];
track = [arr lastObject];
comptrack = [comp addMutableTrackWithMediaType:type preferredTrackID:kCMPersistentTrackID_Invalid];
[comptrack insertTimeRange:CMTimeRangeMake(CMTimeMakeWithSeconds(0,600), CMTimeMakeWithSeconds(5,600)) ofTrack:track atTime:CMTimeMakeWithSeconds(0,600) error:nil];
[comptrack insertTimeRange:CMTimeRangeMake(CMTimeSubtract(duration, CMTimeMakeWithSeconds(5,600)), CMTimeMakeWithSeconds(5,600)) ofTrack:track atTime:CMTimeMakeWithSeconds(5,600) error:nil];
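
Applied to the merging loop in the question, this means adding one AVMutableCompositionTrack for audio next to the video track, and inserting each clip's audio at the same offset as its video. A minimal sketch, assuming every clip actually carries an audio track (arrVideoUrls and mixComposition are the variables from the question):

AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
CMTime cursor = kCMTimeZero;
for (NSDictionary *item in self.arrVideoUrls) {
    AVAsset *asset = [AVAsset assetWithURL:[NSURL fileURLWithPath:item[@"VideoUrl"]]];
    NSArray *audioTracks = [asset tracksWithMediaType:AVMediaTypeAudio];
    if (audioTracks.count > 0) { // skip clips that have no sound instead of crashing
        [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                            ofTrack:audioTracks.firstObject
                             atTime:cursor
                              error:nil];
    }
    cursor = CMTimeAdd(cursor, asset.duration);
}

Audio needs no layer instruction: the exporter's videoComposition affects only the video tracks, so the audio plays straight from the composition tracks without any audioMix.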