Watermark image size changes with the video size in iOS

Date: 2017-01-18 08:34:21

Tags: ios objective-c swift avfoundation

I am using the following code to add an image to a video with a CALayer. The problem I'm facing is that the watermark's size and position change when I pick videos of different resolutions. I'm building a Snapchat-like app where the user picks a sticker, adjusts its position and size, and places it on top of the video. Can anyone tell me what I'm doing wrong here, or what I should do to make this work? Thanks in advance!

- (void) createWatermark:(UIImage*)image video:(NSURL*)videoURL
{
    if (videoURL == nil)
        return;
    AppDelegate* appDelegate = [[UIApplication sharedApplication] delegate];
    dispatch_async(dispatch_get_main_queue(), ^{
        [appDelegate showLoadingView:YES];
    });

    AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:videoURL options:nil];
    AVMutableComposition* mixComposition = [AVMutableComposition composition];

    AVMutableCompositionTrack* compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo  preferredTrackID:kCMPersistentTrackID_Invalid];

    AVAssetTrack* clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                   ofTrack:clipVideoTrack
                                    atTime:kCMTimeZero error:nil];

    [compositionVideoTrack setPreferredTransform:[[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] preferredTransform]];

    //  create the layer with the watermark image
    CALayer* aLayer = [CALayer layer];
    aLayer.contents = (id)image.CGImage;
    aLayer.frame = CGRectMake(0, 100, 500, 200);
    aLayer.opacity = 0.9;

    //  stack the layers in the proper order

    AVAssetTrack* videoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    CGSize videoSize = [videoTrack naturalSize];
    CALayer *parentLayer = [CALayer layer];
    CALayer *videoLayer = [CALayer layer];
    parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
    videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
    [parentLayer addSublayer:videoLayer];
    [parentLayer addSublayer:aLayer];

    printf("Video Size %f %f",videoSize.width,videoSize.height);
    // create text Layer
    CATextLayer* titleLayer = [CATextLayer layer];
    titleLayer.backgroundColor = [UIColor clearColor].CGColor;
    titleLayer.string = @"Dummy text";
    titleLayer.foregroundColor = [UIColor redColor].CGColor;
    titleLayer.font = (__bridge CFTypeRef)@"Helvetica"; // CFBridgingRetain would leak here
    titleLayer.fontSize = 28;
    titleLayer.shadowOpacity = 0.5;
    titleLayer.alignmentMode = kCAAlignmentCenter;
    titleLayer.frame = CGRectMake(videoSize.width/2.0, videoSize.height / 2.0, videoSize.width,100 );
    [parentLayer addSublayer:titleLayer];

    //create the composition and add the instructions to insert the layer:

    AVMutableVideoComposition* videoComp = [AVMutableVideoComposition videoComposition];
    videoComp.renderSize = videoSize;
    videoComp.frameDuration = CMTimeMake(1, 30);
    videoComp.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];

    /// instruction
    AVMutableVideoCompositionInstruction* instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];

    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
    AVAssetTrack* mixVideoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:mixVideoTrack];
    instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
    videoComp.instructions = [NSArray arrayWithObject: instruction];

    // export video

    _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
    _assetExport.videoComposition = videoComp;

    NSLog (@"created exporter. supportedFileTypes: %@", _assetExport.supportedFileTypes);

    NSString* videoName = @"NewWatermarkedVideo.mov";

    NSString* exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:videoName];
    NSURL* exportUrl = [NSURL fileURLWithPath:exportPath];

    if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath])
        [[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];

    _assetExport.outputFileType = AVFileTypeQuickTimeMovie;
    _assetExport.outputURL = exportUrl;
    _assetExport.shouldOptimizeForNetworkUse = YES;

    [_assetExport exportAsynchronouslyWithCompletionHandler:
     ^(void ) {

         // The completion handler runs on a background queue; hop to the main queue for UI work.
         dispatch_async(dispatch_get_main_queue(), ^{
             [appDelegate showLoadingView:NO];
         });

         //Final code here

         switch (_assetExport.status)
         {
             case AVAssetExportSessionStatusUnknown:
                 NSLog(@"Unknown");
                 break;
             case AVAssetExportSessionStatusWaiting:
                 NSLog(@"Waiting");
                 break;
             case AVAssetExportSessionStatusExporting:
                 NSLog(@"Exporting");
                 break;
             case AVAssetExportSessionStatusCompleted:
                 NSLog(@"Created the new watermarked video");
                 dispatch_async(dispatch_get_main_queue(), ^{
                     _playButton.hidden = NO;
                 });
                 break;
             case AVAssetExportSessionStatusFailed:
                 NSLog(@"Failed- %@", _assetExport.error);
                 break;
             case AVAssetExportSessionStatusCancelled:
                 NSLog(@"Cancelled");
                 break;
            }
     }
     ];   
}

1 Answer:

Answer 0 (score: 0):

I finally found a solution to this problem, and I'm posting the code here. I used an Objective-C bridging header to call it from Swift.

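The post uses VideoMergeCompletionBlock (and, further down, VideoConvertCompletionBlock) without showing their declarations. A minimal sketch of the utility class's header, assuming the class is called AVUtils as in the Swift usage at the end; the typedefs are my reconstruction, not the author's exact code:

#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>

// Hypothetical reconstruction of the completion block types used below.
typedef void (^VideoMergeCompletionBlock)(AVAssetExportSession *session);
typedef void (^VideoConvertCompletionBlock)(NSURL *outputURL, BOOL success);

@interface AVUtils : NSObject

+ (void)createWatermarkForVideo:(NSURL *)videoURL
                      watermark:(UIImage *)watermarkImage
           stickerContainerView:(UIView *)containerView
               completionAction:(VideoMergeCompletionBlock)completion;

+ (void)convertVideoToMP4:(NSURL *)videoURL
         completionAction:(VideoConvertCompletionBlock)completion;

@end

Adding this header (plus the helper methods shown below) to the target's bridging header is what makes the class callable from Swift.
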
//This method adds a watermark image to a video
+(void)createWatermarkForVideo:(NSURL*)videoURL watermark:(UIImage*)watermarkImage stickerContainerView:(UIView*)containerView completionAction:(VideoMergeCompletionBlock)completion{


    AVURLAsset *videoAsset =  [[AVURLAsset alloc]initWithURL:videoURL options:nil];

    AVMutableComposition* mixComposition = [AVMutableComposition composition];

    AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo  preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

    AVAssetTrack *aVideoAssetTrack = [videoAsset tracksWithMediaType:AVMediaTypeVideo][0];

    // A plain count check is enough here; @try/@catch only catches NSExceptions,
    // not NSErrors, so the original try/catch blocks never caught anything.
    AVAssetTrack *aAudioAssetTrack = nil;
    if ([[videoAsset tracksWithMediaType:AVMediaTypeAudio] count] > 0) {
        aAudioAssetTrack = [videoAsset tracksWithMediaType:AVMediaTypeAudio][0];
    }
    CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,aVideoAssetTrack.timeRange.duration);

    NSError *insertError = nil;
    [compositionVideoTrack insertTimeRange:video_timeRange ofTrack:aVideoAssetTrack atTime:kCMTimeZero error:&insertError];
    if (insertError) {
        NSLog(@"Failed to insert video track: %@", insertError);
    }

    // Only insert the audio track if the source actually has one; inserting a
    // nil track (as the original else branch did) fails.
    if (aAudioAssetTrack != nil) {
        insertError = nil;
        [compositionAudioTrack insertTimeRange:video_timeRange ofTrack:aAudioAssetTrack atTime:kCMTimeZero error:&insertError];
        if (insertError) {
            NSLog(@"Failed to insert audio track: %@", insertError);
        }
    }
    AVMutableVideoComposition *videoComp;
    // Build the video composition according to the video's orientation.
    if ([self isVideoPortait:aVideoAssetTrack]) {
        videoComp = [self addWatermarkForPortaitVideoAsset:videoAsset WatermakImage:watermarkImage Composition:mixComposition stickerContainerView:containerView];
    }
    else {
        videoComp = [self addWatermarkToLandscapeVideoForAssetTrack:aVideoAssetTrack WatermarkImage:watermarkImage ContainerView:containerView Composition:mixComposition];
    }


    //Exporting File
    NSString *fullMoviePath = [NSTemporaryDirectory() stringByAppendingPathComponent:[@"WaterMarkedMovie" stringByAppendingPathExtension:@"mp4"]];
    NSURL *finalVideoFileURL = [NSURL fileURLWithPath:fullMoviePath];


    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    [exportSession setOutputFileType:AVFileTypeMPEG4];
    [exportSession setOutputURL:finalVideoFileURL];
    [exportSession setVideoComposition:videoComp];

    if ([[NSFileManager defaultManager] fileExistsAtPath:fullMoviePath]) {
        NSError *error = nil;
        [[NSFileManager defaultManager] removeItemAtPath:fullMoviePath error:&error];
        if (error) {
            NSLog(@"%@", error);
        }
    }
    [exportSession exportAsynchronouslyWithCompletionHandler:
     ^(void ) {

         dispatch_async(dispatch_get_main_queue(), ^{
             completion(exportSession);

         });
     }
     ];

}

You also need the following methods to detect whether a video is portrait or landscape and to compose it accordingly.

+(AVMutableVideoComposition*)addWatermarkForPortaitVideoAsset:(AVURLAsset*)videoAsset WatermakImage:(UIImage*)watermarkImage Composition:(AVMutableComposition*)mixComposition stickerContainerView:(UIView*)containerView{
    //WaterMark
    AVMutableVideoComposition *videoComp = [AVMutableVideoComposition videoComposition];
    AVAssetTrack *assetVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];


    CALayer *waterMarkLayer = [CALayer layer];
    CGSize videoSize = [assetVideoTrack naturalSize];

    // Scale the watermark to the portrait render size (height x width of the
    // natural size) so it matches the layer frame below.
    UIImage *newwatermarkImage = [AVUtils scaleImage:watermarkImage toSize:CGRectMake(0, 0, videoSize.height, videoSize.width)];
    //[waterMarkLayer setContents:(id)[AVUtils scaleImage:watermarkImage toSize:[AVUtils getResizedVideoFrame:CGRectMake(0, 0, videoSize.width, videoSize.height) andPlayerViewSize:containerView.frame]].CGImage];
    [waterMarkLayer setContents:(id)newwatermarkImage.CGImage];

    waterMarkLayer.frame = CGRectMake(0, 0, videoSize.height, videoSize.width);
    [waterMarkLayer setOpacity:1];


    CALayer *parentLayer = [CALayer layer];
    CALayer *videoLayer = [CALayer layer];
    [parentLayer setFrame:CGRectMake(0, 0, videoSize.height, videoSize.width)];
    [videoLayer setFrame:CGRectMake(0, 0, videoSize.height, videoSize.width)];
    [parentLayer addSublayer:videoLayer];
    [parentLayer addSublayer:waterMarkLayer];

    //Instruction
    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    [instruction setTimeRange:CMTimeRangeMake(kCMTimeZero, [mixComposition duration])];
    AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

    AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];

    CGAffineTransform t1 = CGAffineTransformMakeTranslation(videoSize.height, 0);
    CGAffineTransform t2 = CGAffineTransformRotate(t1, degreesToRadians(90.0));
    [layerInstruction setTransform:t2 atTime:kCMTimeZero];


    [instruction setLayerInstructions:[NSArray arrayWithObject:layerInstruction]];


    [videoComp setRenderSize:CGSizeMake(videoSize.height, videoSize.width)];
    [videoComp setFrameDuration:CMTimeMake(1, 30)];
    [videoComp setAnimationTool:[AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer]];
    [videoComp setInstructions:[NSArray arrayWithObject:instruction]];
    return videoComp;
}
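
To see what the layer-instruction transform above does, it helps to push the corners of a concrete buffer through it. A quick standalone check, with hypothetical numbers for a video whose naturalSize is 1920x1080: the 90-degree rotation maps (x, y) to (-y, x), and the translation by the video's height shifts everything back into the portrait render box:

CGAffineTransform t1 = CGAffineTransformMakeTranslation(1080, 0);
CGAffineTransform t2 = CGAffineTransformRotate(t1, degreesToRadians(90.0));
// (0, 0) -> (1080, 0) and (1920, 1080) -> (0, 1920):
CGPoint p0 = CGPointApplyAffineTransform(CGPointMake(0, 0), t2);
CGPoint p1 = CGPointApplyAffineTransform(CGPointMake(1920, 1080), t2);
NSLog(@"%@ %@", NSStringFromCGPoint(p0), NSStringFromCGPoint(p1));
// Every point of the 1920x1080 buffer lands inside the 1080x1920
// render size that the method sets via setRenderSize:.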



+(AVMutableVideoComposition*)addWatermarkToLandscapeVideoForAssetTrack:(AVAssetTrack*)aVideoAssetTrack WatermarkImage:(UIImage*)watermarkImage ContainerView:(UIView*)containerView Composition:(AVMutableComposition*)mixComposition{
    //adding image layer
    AVMutableVideoComposition *videoComp = [AVMutableVideoComposition videoComposition];
    CGSize videoSize = [aVideoAssetTrack naturalSize];
    CALayer *aLayer = [CALayer layer];
    //aLayer.contents = (id)[AVUtils scaleImage:watermarkImage toSize:[AVUtils getResizedVideoFrame:CGRectMake(0, 0, videoSize.width, videoSize.height) andPlayerViewSize:containerView.frame]].CGImage;
    UIImage *newWaterMarkImage = [AVUtils scaleImage:watermarkImage toSize:CGRectMake(0, 0, videoSize.width, videoSize.height)];
    aLayer.contents = (id)newWaterMarkImage.CGImage;
    //CGFloat videoScale = videoSize.width/containerView.frame.size.width;
    aLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);

    aLayer.opacity = 1; //Feel free to alter the alpha here



    CALayer *parentLayer = [CALayer layer];
    CALayer *videoLayer = [CALayer layer];
    parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
    videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
    [parentLayer addSublayer:videoLayer];
    [parentLayer addSublayer:aLayer];

    videoComp.renderSize = videoSize;
    videoComp.frameDuration = CMTimeMake(1, 30);
    videoComp.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];


    /// instruction
    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
    AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
    instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
    videoComp.instructions = [NSArray arrayWithObject: instruction];

    return videoComp;
}
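
The commented-out lines in the two methods above hint at the key idea for keeping a user-placed sticker the same relative size at every resolution: map the sticker's frame from container-view points into video pixels. A minimal sketch of that mapping (the method name and the stickerFrame parameter are hypothetical, not from the original post):

+(CGRect)videoFrameForStickerFrame:(CGRect)stickerFrame
                     containerView:(UIView *)containerView
                         videoSize:(CGSize)videoSize {
    // Ratio between the video's pixel size and the on-screen player size.
    CGFloat scaleX = videoSize.width / containerView.bounds.size.width;
    CGFloat scaleY = videoSize.height / containerView.bounds.size.height;
    // Core Animation's video coordinate space has its origin at the
    // bottom-left, while UIKit's is at the top-left, so flip y as well.
    CGFloat flippedY = containerView.bounds.size.height - CGRectGetMaxY(stickerFrame);
    return CGRectMake(stickerFrame.origin.x * scaleX,
                      flippedY * scaleY,
                      stickerFrame.size.width * scaleX,
                      stickerFrame.size.height * scaleY);
}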




static CGFloat degreesToRadians(CGFloat deg) { // declare this above its first use (or forward-declare it)
    return deg * (M_PI / 180.0f);
}



+(BOOL)isVideoPortait:(AVAssetTrack*)videoAssetTrack{
    UIImageOrientation videoAssetOrientation_  = UIImageOrientationUp;
    BOOL isVideoAssetPortrait_  = NO;
    CGAffineTransform videoTransform = videoAssetTrack.preferredTransform;
    if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
        videoAssetOrientation_ = UIImageOrientationRight;
        isVideoAssetPortrait_ = YES;
    }
    if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
        videoAssetOrientation_ =  UIImageOrientationLeft;
        isVideoAssetPortrait_ = YES;
    }
    if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) {
        videoAssetOrientation_ =  UIImageOrientationUp;
    }
    if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) {
        videoAssetOrientation_ = UIImageOrientationDown;
    }
    return isVideoAssetPortrait_;
}


+(CGRect)getResizedVideoFrame:(CGRect)naturalSize andPlayerViewSize:(CGRect)playerSize{
    CGFloat resVi = naturalSize.size.width / naturalSize.size.height;
    CGFloat resPl = playerSize.size.width / playerSize.size.height;

    // Aspect-fit the video inside the player view.
    if (resPl > resVi) {
        return CGRectMake(0, 0, naturalSize.size.width * playerSize.size.height / naturalSize.size.height, playerSize.size.height);
    }
    return CGRectMake(0, 0, playerSize.size.width, naturalSize.size.height * playerSize.size.width / naturalSize.size.width);
}

/*class func getResizedVideoFrame(_ naturalSize: CGRect, andPlayerViewSize playerSize: CGRect) -> CGRect {
    let resVi: CGFloat = CGFloat(naturalSize.size.width)/CGFloat(naturalSize.size.height)
    let resPl: CGFloat = CGFloat(playerSize.size.width)/CGFloat(playerSize.size.height)
    return (resPl > resVi ? CGRect(x: CGFloat(0), y: CGFloat(0), width: CGFloat(naturalSize.size.width * playerSize.size.height / naturalSize.size.height), height: CGFloat(playerSize.size.height)) : CGRect(x: CGFloat(0), y: CGFloat(0), width: CGFloat(playerSize.size.width), height: CGFloat(naturalSize.size.height * playerSize.size.width / naturalSize.size.width)))
}*/




+(UIImage *)scaleImage:(UIImage *)originalImage toSize:(CGRect)size
{
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(NULL, size.size.width, size.size.height, 8, 0, colorSpace, kCGImageAlphaPremultipliedLast);
    CGContextClearRect(context, CGRectMake(0, 0, size.size.width, size.size.height));

    if (originalImage.imageOrientation == UIImageOrientationRight) {
        CGContextRotateCTM(context, -M_PI_2);
        CGContextTranslateCTM(context, -size.size.height, 0.0f);
        CGContextDrawImage(context, CGRectMake(0, 0, size.size.height, size.size.width), originalImage.CGImage);
    } else {
        CGContextDrawImage(context, CGRectMake(0, 0, size.size.width, size.size.height), originalImage.CGImage);
    }

    CGImageRef scaledImage = CGBitmapContextCreateImage(context);
    CGColorSpaceRelease(colorSpace);
    CGContextRelease(context);

    UIImage *image = [UIImage imageWithCGImage:scaledImage];
    CGImageRelease(scaledImage);

    return image;
}


+(void)convertVideoToMP4:(NSURL*)videoURL completionAction:(VideoConvertCompletionBlock)completion {

    AVURLAsset *avAsset = [AVURLAsset URLAssetWithURL:videoURL options:nil];

    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc]initWithAsset:avAsset presetName:AVAssetExportPresetMediumQuality];


    NSString *fullMoviePath = [NSTemporaryDirectory() stringByAppendingPathComponent:[@"temp" stringByAppendingPathExtension:@"mp4"]];
    if ([[NSFileManager defaultManager] fileExistsAtPath:fullMoviePath]) {
        NSError *error = nil;
        [[NSFileManager defaultManager] removeItemAtPath:fullMoviePath error:&error];
        if (error) {
            NSLog(@"%@", error);
        }
    }
    exportSession.outputURL = [NSURL fileURLWithPath:fullMoviePath];
    // Change the output file type if you want another container format (e.g. .3gp).
    exportSession.outputFileType = AVFileTypeMPEG4;
    exportSession.shouldOptimizeForNetworkUse = NO;


    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        switch ([exportSession status])
        {
            case AVAssetExportSessionStatusFailed:{
                NSLog(@"Export session failed");
                dispatch_async(dispatch_get_main_queue(), ^{
                    completion(nil, false);
                });
            }
                break;
            case AVAssetExportSessionStatusCancelled:{
                NSLog(@"Export canceled");
                dispatch_async(dispatch_get_main_queue(), ^{
                    completion(nil, false);
                });
            }
                break;
            case AVAssetExportSessionStatusCompleted:
            {
                //Video conversion finished
                NSLog(@"Successful!");
                dispatch_async(dispatch_get_main_queue(), ^{
                    completion(exportSession.outputURL, true);
                });

            }
                break;
            default:
                break;
        }
    }];
}

Usage

I called this method from my Swift code as follows:

AVUtils.createWatermark(forVideo: videoURL, watermark: LIMITUtils.getScreenShot(containerView: self.stickerContainer), stickerContainerView: self.stickerContainer) { (assetExport) in
    // Guard instead of force-unwrapping the session.
    guard let exportSession = assetExport else { return }
    switch exportSession.status {
    case .completed:
        DispatchQueue.main.async {
            self.saveVideo(url: exportSession.outputURL!)
        }
    case .failed:
        // An error occurred during the merge.
        break
    case .exporting:
        break
    default:
        break
    }
    DispatchQueue.main.async {
        ProgressUtils.hideHUD()
    }
}
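
saveVideo(url:) above is the poster's own helper and isn't shown. For completeness, a minimal sketch of an equivalent helper using the Photos framework; this is my assumption of what it does, not the author's code, and photo-library permission handling is elided:

#import <Photos/Photos.h>

+(void)saveVideo:(NSURL *)url {
    [[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
        // Write the exported movie file into the user's photo library.
        [PHAssetChangeRequest creationRequestForAssetFromVideoAtFileURL:url];
    } completionHandler:^(BOOL success, NSError *error) {
        if (!success) {
            NSLog(@"Failed to save video: %@", error);
        }
    }];
}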