Recorded video rotates after adding a watermark image in iOS

Asked: 2017-01-24 11:52:07

Tags: ios image watermark

I use the method below to add a watermark image to a video, but when I record a video and add the watermark to it, the exported video comes out rotated by 90 degrees.

+ (void)createWatermarkForVideo:(NSURL *)videoURL watermark:(UIImage *)watermarkImage stickerContainerView:(UIView *)containerView completionAction:(VideoMergeCompletionBlock)completion {

AVURLAsset *audioAsset, *videoAsset;
// Create the AVMutableComposition that will hold our audio and video composition tracks.
AVMutableComposition* mixComposition = [AVMutableComposition composition];

// Load the recording as an audio asset using AVURLAsset (the same URL is used for both assets).
audioAsset = [[AVURLAsset alloc]initWithURL:videoURL options:nil];

// Create the composition track that will hold the audio and add it to the composition.
AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

// Load the recording again as a video asset.
videoAsset = [[AVURLAsset alloc]initWithURL:videoURL options:nil];

// Create the composition track that will hold the video and add it to the composition.
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];



AVAssetTrack *aVideoAssetTrack = [videoAsset tracksWithMediaType:AVMediaTypeVideo][0];
AVAssetTrack *aAudioAssetTrack = [audioAsset tracksWithMediaType:AVMediaTypeAudio][0];
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,aVideoAssetTrack.timeRange.duration);

[a_compositionVideoTrack setPreferredTransform:aVideoAssetTrack.preferredTransform];
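// Note: once an AVMutableVideoComposition is attached to the export session, frames are
// rendered through its layer instructions, so a preferredTransform set on the composition
// track alone does not rotate the rendered output.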
NSError *insertError = nil;

// Insert the whole video track into the composition.
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:aVideoAssetTrack atTime:kCMTimeZero error:&insertError];

// Insert the audio over the same time range as the video.
[b_compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration) ofTrack:aAudioAssetTrack atTime:kCMTimeZero error:&insertError];

if (insertError) {
    NSLog(@"Track insertion failed: %@", insertError);
}

// Create the layer that holds the watermark image.
CALayer* aLayer = [CALayer layer];
aLayer.contents = (id)watermarkImage.CGImage;


CGSize videoSize = [aVideoAssetTrack naturalSize];
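// naturalSize does not take preferredTransform into account, so for a portrait recording
// the width and height here are effectively swapped.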
CGFloat videoScale = videoSize.width/containerView.frame.size.width;
aLayer.frame = CGRectMake(0, 0, containerView.frame.size.width * videoScale, containerView.frame.size.height * videoScale);

aLayer.opacity = 0.9;

// Stack the layers in the proper order: video below, watermark on top.

CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:aLayer];

NSLog(@"Video size: %f x %f", videoSize.width, videoSize.height);


//create the composition and add the instructions to insert the layer:

AVMutableVideoComposition* videoComp = [AVMutableVideoComposition videoComposition];
videoComp.renderSize = videoSize;
videoComp.frameDuration = CMTimeMake(1, 30);
videoComp.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
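// The animation tool renders the composed video frames into videoLayer and then renders
// parentLayer (video plus watermark) into the exported frames.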



// Create a composition instruction that spans the full duration.
AVMutableVideoCompositionInstruction* instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];

instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
AVAssetTrack* mixVideoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:mixVideoTrack];
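// No transform is set on this layer instruction, so the frames are rendered with the
// identity transform regardless of how the source video was recorded.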

instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
videoComp.instructions = [NSArray arrayWithObject: instruction];

AVAssetTrack *assetVideoTrack = [videoAsset tracksWithMediaType:AVMediaTypeVideo].lastObject;
// Apply the original transform.
if (assetVideoTrack && a_compositionVideoTrack) {
    [a_compositionVideoTrack setPreferredTransform:assetVideoTrack.preferredTransform];
}
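// As above, this preferredTransform is only honoured when the composition is played or
// exported without a video composition; it is ignored once videoComp drives the rendering.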

AVAssetExportSession *_assetExport;

// export video

_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
_assetExport.videoComposition = videoComp;

NSLog (@"created exporter. supportedFileTypes: %@", _assetExport.supportedFileTypes);

NSString* videoName = @"NewWatermarkedVideo.mov";

NSString* exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:videoName];
NSURL* exportUrl = [NSURL fileURLWithPath:exportPath];

if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath])
    [[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];

_assetExport.outputFileType = AVFileTypeQuickTimeMovie;
_assetExport.outputURL = exportUrl;
_assetExport.shouldOptimizeForNetworkUse = YES;


[_assetExport exportAsynchronouslyWithCompletionHandler:^{
    // Hand the finished export session back on the main queue.
    dispatch_async(dispatch_get_main_queue(), ^{
        completion(_assetExport);
    });
}];

}
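For reference, the usual suggestion for keeping the recorded orientation when an AVMutableVideoComposition drives the export is to apply the source track's preferredTransform through the layer instruction and to derive the render size from the transformed natural size. The lines below are only a sketch reusing the videoAsset, videoComp and layerInstruction variables from the method above; they are untested against this exact setup.

AVAssetTrack *sourceVideoTrack = [videoAsset tracksWithMediaType:AVMediaTypeVideo].firstObject;
CGAffineTransform preferredTransform = sourceVideoTrack.preferredTransform;

// naturalSize ignores preferredTransform, so compute the displayed size by transforming it.
CGRect transformedRect = CGRectApplyAffineTransform(CGRectMake(0, 0, sourceVideoTrack.naturalSize.width, sourceVideoTrack.naturalSize.height), preferredTransform);
videoComp.renderSize = CGSizeMake(fabs(transformedRect.size.width), fabs(transformedRect.size.height));

// Apply the rotation through the layer instruction, shifting the frame back to the origin
// in case the transform moves it off screen.
CGAffineTransform moveToOrigin = CGAffineTransformMakeTranslation(-transformedRect.origin.x, -transformedRect.origin.y);
[layerInstruction setTransform:CGAffineTransformConcat(preferredTransform, moveToOrigin) atTime:kCMTimeZero];

The parentLayer and videoLayer frames (and the watermark scale) would then also need to be sized from this transformed size rather than from naturalSize, otherwise the watermark is positioned against the un-rotated dimensions.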

0 Answers:

No answers yet