Overlaying video in AVFoundation

Date: 2016-05-09 03:20:43

Tags: objective-c avfoundation overlay calayer video-processing

AVFoundation is not adding my overlay to the video, and I'm not sure what I'm doing wrong. I have tried making the overlay completely white, but it still never appears on top of the video. When the video plays back, it seems to play the plain AVMutableComposition track rather than the exporter.videoComposition I set. I don't have enough AVFoundation experience to see what is going wrong.

AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];

// 3 - Video track
AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                    preferredTrackID:kCMPersistentTrackID_Invalid];
//    [videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, self.videoAsset.duration)
//                        ofTrack:[[self.videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
//                         atTime:kCMTimeZero error:nil];
CMTime insertTime = kCMTimeZero;
for (AVURLAsset *videoAsset in self.videoArray) {
    [videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                        ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                         atTime:insertTime error:nil];

    // Updating the insertTime for the next insert
    insertTime = CMTimeAdd(insertTime, videoAsset.duration);
}

// 3.1 - Create AVMutableVideoCompositionInstruction
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = videoTrack.timeRange;

// 3.2 - Create an AVMutableVideoCompositionLayerInstruction for the video track and fix the orientation.
AVMutableVideoCompositionLayerInstruction *videolayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
AVAssetTrack *videoAssetTrack = [[videoTrack.asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation videoAssetOrientation_  = UIImageOrientationUp;
BOOL isVideoAssetPortrait_  = NO;
CGAffineTransform videoTransform = videoAssetTrack.preferredTransform;
if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
    videoAssetOrientation_ = UIImageOrientationRight;
    isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
    videoAssetOrientation_ =  UIImageOrientationLeft;
    isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) {
    videoAssetOrientation_ =  UIImageOrientationUp;
}
if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) {
    videoAssetOrientation_ = UIImageOrientationDown;
}
[videolayerInstruction setTransform:videoAssetTrack.preferredTransform atTime:kCMTimeZero];
[videolayerInstruction setOpacity:0.0 atTime:videoTrack.timeRange.duration];

// 3.3 - Add instructions
mainInstruction.layerInstructions = [NSArray arrayWithObjects:videolayerInstruction,nil];

AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];

CGSize naturalSize;
if(isVideoAssetPortrait_){
    naturalSize = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width);
} else {
    naturalSize = videoAssetTrack.naturalSize;
}

float renderWidth, renderHeight;
renderWidth = naturalSize.width;
renderHeight = naturalSize.height;
mainCompositionInst.renderSize = CGSizeMake(renderWidth, renderHeight);
mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30);

// [self applyVideoEffectsToComposition:mainCompositionInst size:naturalSize];
// 1 - set up the overlay
CALayer *overlayLayer = [CALayer layer];
UIImage *overlayImage = [UIImage imageNamed:@"overlayImage.png"];
//overlayLayer.backgroundColor = [UIColor whiteColor].CGColor;

[overlayLayer setContents:(id)[overlayImage CGImage]];
overlayLayer.frame = CGRectMake(0, 0, videoTrack.naturalSize.width, videoTrack.naturalSize.height);
[overlayLayer setMasksToBounds:YES];

// 2 - set up the parent layer
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, videoTrack.naturalSize.width, videoTrack.naturalSize.height);
videoLayer.frame = CGRectMake(0, 0, videoTrack.naturalSize.width, videoTrack.naturalSize.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:overlayLayer];

// 3 - apply magic
mainCompositionInst.animationTool = [AVVideoCompositionCoreAnimationTool
                             videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];

// 4 - Get path
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs =  [documentsDirectory stringByAppendingPathComponent:
                         [NSString stringWithFormat:@"FinalVideo-%d.mov",arc4random() % 1000]];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];

// 5 - Create exporter
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                  presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL = url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = YES;
exporter.videoComposition = mainCompositionInst;
[exporter exportAsynchronouslyWithCompletionHandler:^{
    dispatch_async(dispatch_get_main_queue(), ^{
        //[self exportDidFinish:exporter];
        //do stuff

    });
}];

1 Answer:

Answer 0 (score: 8)

I'm not an expert in Objective-C. I'm currently working on an iOS project (in Swift 3) that needs to overlay text on a video for a specific time range. The code below works perfectly for my project, so you can try it out. The video is saved into the documents directory, so you must delete any existing temporary video file before exporting. After the export, the code saves the video into the photo library, so you need to import the Photos framework.
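For reference, the snippet reads as the body of a view controller method and assumes roughly the setup sketched below; the urlVideo name and the file name are placeholders for your own source video:

    import AVFoundation
    import Photos
    import UIKit

    // Hypothetical source URL for the video to overlay; replace with your own asset.
    let urlVideo: URL? = Bundle.main.url(forResource: "myVideo", withExtension: "mp4")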

    let composition = AVMutableComposition()
    let asset = AVURLAsset(url: urlVideo!, options: nil)

    let track =  asset.tracks(withMediaType: AVMediaTypeVideo)
    let videoTrack:AVAssetTrack = track[0] as AVAssetTrack
    let timerange = CMTimeRangeMake(kCMTimeZero, asset.duration)

    let compositionVideoTrack:AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)

    do {
        try compositionVideoTrack.insertTimeRange(timerange, of: videoTrack, at: kCMTimeZero)
        compositionVideoTrack.preferredTransform = videoTrack.preferredTransform
    } catch {
        print(error)
    }

    let compositionAudioTrack:AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)

    for audioTrack in asset.tracks(withMediaType: AVMediaTypeAudio) {
        do {
            try compositionAudioTrack.insertTimeRange(audioTrack.timeRange, of: audioTrack, at: kCMTimeZero)
        } catch {
            print(error)
        }

    }

    let size = videoTrack.naturalSize

    //print("Size of video: \(size)")

    let textLayer = CATextLayer()
    textLayer.string = "Text Overlay Test"
    textLayer.fontSize = 35
    textLayer.font = "Baskerville-BoldItalic" as CFTypeRef
    textLayer.foregroundColor = UIColor.red.cgColor

    // start invisible; visibility is driven by the animation below
    textLayer.opacity = 0
    textLayer.alignmentMode = kCAAlignmentCenter
    textLayer.frame = CGRect(x: 0, y: 100, width: size.width, height: size.height / 6)

    // making a time range to show text: hold opacity at 1.0 from t = 3 s for 10 s;
    // outside that window the layer's model opacity of 0 applies, hiding the text
    let fadeInAnimation = CABasicAnimation(keyPath: "opacity")
    fadeInAnimation.duration = 10
    fadeInAnimation.fromValue = 1.0
    fadeInAnimation.toValue = 1.0
    fadeInAnimation.beginTime = 3
    fadeInAnimation.isRemovedOnCompletion = false

    textLayer.add(fadeInAnimation, forKey: "opacity")


    // set up the video layer and the parent layer hosting the overlay

    let videolayer = CALayer()
    videolayer.frame = CGRect(x: 0, y: 0, width: size.width, height: size.height)

    let parentlayer = CALayer()
    parentlayer.frame = CGRect(x: 0, y: 0, width: size.width, height: size.height)
    parentlayer.addSublayer(videolayer)
    parentlayer.addSublayer(textLayer)

    // adding the magic
    let layercomposition = AVMutableVideoComposition()
    layercomposition.frameDuration = CMTimeMake(1, 30)
    layercomposition.renderSize = size
    layercomposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videolayer, in: parentlayer)

    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration)
    let videotrack = composition.tracks(withMediaType: AVMediaTypeVideo)[0] as AVAssetTrack
    let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videotrack)
    instruction.layerInstructions = [layerinstruction]
    layercomposition.instructions = [instruction]

    // create the output URL for the merged video
    let mergedVideoURL = NSURL(fileURLWithPath: NSHomeDirectory() + "/Documents/mergedVideo.mp4")

    // must delete any existing temporary file at the output URL before exporting
    do {
        try FileManager.default.removeItem(at: mergedVideoURL as URL)
        print("Removed existing temporary file")
    } catch let error as NSError {
        print("No existing file to remove: \(error)")
    }

    // use AVAssetExportSession to export video
    guard let assetExport = AVAssetExportSession(asset: composition, presetName:AVAssetExportPresetMediumQuality) else {return}
    assetExport.videoComposition = layercomposition
    assetExport.outputFileType = AVFileTypeMPEG4
    assetExport.outputURL = mergedVideoURL as URL
    assetExport.exportAsynchronously(completionHandler: {
        switch assetExport.status {
        case AVAssetExportSessionStatus.failed:
            print("failed: \(String(describing: assetExport.error))")
        case AVAssetExportSessionStatus.cancelled:
            print("cancelled")
        default:
            print("Exported")
            // save the exported video to the photo library (this is why Photos is imported)
            PHPhotoLibrary.shared().performChanges({
                PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: mergedVideoURL as URL)
            }) { saved, error in
                if saved {
                    // UIKit calls must run on the main queue
                    DispatchQueue.main.async {
                        let alertController = UIAlertController(title: "Your video was successfully saved", message: nil, preferredStyle: .alert)
                        let defaultAction = UIAlertAction(title: "OK", style: .default, handler: nil)
                        alertController.addAction(defaultAction)
                        self.present(alertController, animated: true, completion: nil)
                    }
                }
            }
        }
    })
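
One closing note that also touches on the original question: AVVideoCompositionCoreAnimationTool only renders during offline export. If the video composition attached to an AVPlayerItem still carries the animation tool, AVFoundation raises an exception at playback time, so the overlay can never show up in a live preview that way. Below is a minimal, untested sketch of a playback preview under that assumption; makeTextLayer() is a hypothetical helper that builds the same CATextLayer as above:

    // Copy the video composition and strip the export-only animation tool.
    let playbackComposition = layercomposition.mutableCopy() as! AVMutableVideoComposition
    playbackComposition.animationTool = nil

    let playerItem = AVPlayerItem(asset: composition)
    playerItem.videoComposition = playbackComposition
    let player = AVPlayer(playerItem: playerItem)
    let playerLayer = AVPlayerLayer(player: player)

    // AVSynchronizedLayer keeps CALayer animations in step with the item's timeline.
    // A CALayer can only have one superlayer, so build a fresh overlay layer here.
    let syncLayer = AVSynchronizedLayer(playerItem: playerItem)
    syncLayer.frame = CGRect(x: 0, y: 0, width: size.width, height: size.height)
    syncLayer.addSublayer(makeTextLayer()) // hypothetical helper, see note above
    // add playerLayer and syncLayer to your view's layer hierarchy to preview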