CGAffine转换AVAsset以适应设备屏幕

时间:2016-10-03 10:26:00

标签: ios video rotation cgaffinetransform avasset

我有AVAsset AVAssetTrack,其大小(例如 - (width = 1920,height = 1080))。我需要的东西 - 将这个资产适合给定的屏幕尺寸(例如(宽度= 320,高度= 568)),如果是横向资产 - 旋转它(90度),如果是正方形 - 在顶部和底部添加黑色条纹。我试过这个:

// Fits the first video track of `asset` into `toSize` (aspect-fill, rotated
// 90° for landscape material) and exports the result as an MPEG-4 file.
//
// asset            – source asset; must contain at least one video track.
// toURL            – destination file URL; when nil the export goes to a
//                    temp file which is then moved to [GlobalConst fullMoviePath].
// toSize           – target render size in pixels.
// offsetRatioPoint – optional crop offset expressed as a ratio of the scaled
//                    frame; when NULL the overflow is centered.
// handler          – completion block invoked on the exporter's callback queue
//                    with the output URL on success or an NSError on failure.
//                    May be nil.
- (void)changeAsset:(AVAsset*)asset savetoURL:(NSURL*)toURL withSize:(CGSize)toSize offsetRatioPoint:(CGPoint*)offsetRatioPoint completion:(void (^)(NSURL* in_url, NSError* error))handler
{
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    if (!videoTrack)
    {
        if (handler)
            handler(nil, [NSError errorWithDomain:@"com.myapp.error"
                                             code:-1
                                         userInfo:@{@"message" : @"there are no video tracks in asset",
                                                    NSLocalizedDescriptionKey : @"there are no video tracks in asset"}]);
        return;
    }

    // BUG FIX: the original inflated the natural size by 4/3
    // (naturalSize/3*4) before computing `scale`. The aspect ratio cancels
    // out, but `scale` ended up 3/4 of the correct value — exactly the
    // "result asset is slightly scaled" symptom. Use the natural size as-is.
    const CGFloat naturalWidth  = [videoTrack naturalSize].width;
    const CGFloat naturalHeight = [videoTrack naturalSize].height;

    const CGFloat videoAspectRatio  = naturalWidth / naturalHeight;
    const CGFloat toSizeAspectRatio = toSize.width / toSize.height;

    // Aspect-fill: scale along the dimension that fully covers the target.
    CGFloat scale;
    if (videoAspectRatio > toSizeAspectRatio)
        scale = toSize.height / naturalHeight;
    else
        scale = toSize.width / naturalWidth;

    CGAffineTransform scaleTrans = CGAffineTransformMakeScale(scale, scale);

    // Shift the scaled frame so the overflowing dimension is either centered
    // or offset by the caller-supplied ratio.
    CGAffineTransform translateTrans = CGAffineTransformIdentity;
    if (videoAspectRatio > toSizeAspectRatio)
    {
        const CGFloat dx = offsetRatioPoint
            ? offsetRatioPoint->x * naturalWidth * scale
            : 0.5f * (naturalWidth * scale - toSize.width);
        translateTrans = CGAffineTransformMakeTranslation(-dx, 0.f);
    }
    else
    {
        const CGFloat dy = offsetRatioPoint
            ? offsetRatioPoint->y * naturalHeight * scale
            : 0.5f * (naturalHeight * scale - toSize.height);
        translateTrans = CGAffineTransformMakeTranslation(0.f, -dy);
    }

    // Rotate 90° and translate so the rotated frame lands inside the render
    // rect. NOTE(review): these constants reproduce the original pipeline;
    // a more robust approach would derive the rotation from
    // videoTrack.preferredTransform instead of always rotating M_PI_2.
    CGAffineTransform t1 = CGAffineTransformTranslate(translateTrans, toSize.height, -scale*toSize.width);
    CGAffineTransform t2 = CGAffineTransformRotate(t1, M_PI_2);

    CGAffineTransform finalTrans = CGAffineTransformConcat(scaleTrans, t2);

    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.renderSize = toSize;

    const int32_t frameRate = 30;                    // fixed 30 fps output
    videoComposition.frameDuration = CMTimeMake(1, frameRate);

    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
    AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
    [layerInstruction setTransform:finalTrans atTime:kCMTimeZero];
    instruction.layerInstructions = @[layerInstruction];
    videoComposition.instructions = @[instruction];

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPreset640x480];
    exporter.videoComposition = videoComposition;
    exporter.shouldOptimizeForNetworkUse = YES;

    NSString *exportPath = toURL
        ? toURL.path
        : [NSTemporaryDirectory() stringByAppendingPathComponent:kCroppedFileName];

    // AVAssetExportSession fails if the destination already exists.
    if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath])
        [[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];

    __block NSURL *outURL = [NSURL fileURLWithPath:exportPath];

    exporter.outputURL = outURL;
    exporter.outputFileType = AVFileTypeMPEG4;
    exporter.timeRange = instruction.timeRange;

    [exporter exportAsynchronouslyWithCompletionHandler:^{
        // BUG FIX: the original never checked the export status, so failed
        // or cancelled exports were reported to the caller as success.
        if (exporter.status != AVAssetExportSessionStatusCompleted)
        {
            if (handler)
                handler(nil, exporter.error);
            return;
        }

        if (!toURL)
        {
            // No explicit destination: relocate the temp file to the app's
            // canonical movie path.
            if ([[NSFileManager defaultManager] fileExistsAtPath:[GlobalConst fullMoviePath]])
                [[NSFileManager defaultManager] removeItemAtPath:[GlobalConst fullMoviePath] error:nil];

            NSError *moveError = nil;
            if (![[NSFileManager defaultManager] moveItemAtPath:exportPath toPath:[GlobalConst fullMoviePath] error:&moveError])
            {
                // BUG FIX: the original logged the failure but still reported
                // success with a URL that did not point at a valid file.
                NSLog(@"Error %@", moveError);
                if (handler)
                    handler(nil, moveError);
                return;
            }
            outURL = [NSURL fileURLWithPath:[GlobalConst fullMoviePath]];
        }

        // BUG FIX: the original called handler() unconditionally here and
        // crashed when the caller passed a nil completion block.
        if (handler)
            handler(outURL, nil);
    }];
}

这几乎是第一步 - 旋转横向资产,但结果资产有点缩放。提前感谢所有建议。

1 个答案:

答案 0（得分：0）

我用这段代码解决了这个问题。在此代码中，我将横向视频旋转为竖向（肖像）视频，也可以使其成为正方形，并添加一张图像作为水印。

// NOTE(review): free-standing snippet, not a complete method — it relies on
// `asset`, `fileURL`, the user-default keys (SIZE, FPS, UP_PID,
// VIDEO_WIDTH_OUTPUT, VIDEO_HEIGHT_OUTPUT) and VideoEditVC, all defined
// elsewhere. It rotates a portrait-recorded track upright, optionally crops
// to a square, overlays a watermark image, and exports asynchronously.
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^(){

            // First video track of the input clip.
            // NOTE(review): objectAtIndex:0 throws if the asset has no video track.
            AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
            int videoDimention; // 0 = portrait, 1 = landscape, 2 = square; forwarded to VideoEditVC below
            // Decide output geometry from the track's recorded orientation.
            UIInterfaceOrientation orientation = [self orientationForTrack:asset];
            BOOL isPortrait = (orientation == UIInterfaceOrientationPortrait || orientation == UIInterfaceOrientationPortraitUpsideDown) ? YES: NO;

            CGSize videoSize;
            NSUserDefaults *userDefault=[NSUserDefaults standardUserDefaults];
            if(isPortrait) {
                //videoSize = CGSizeMake(complimentSize*.7,clipVideoTrack.naturalSize.height );
                // Portrait clips are stored with swapped natural dimensions;
                // the 0.7 factor narrows the output — presumably a layout
                // choice, TODO confirm against the app's design.
                videoSize = CGSizeMake(clipVideoTrack.naturalSize.height,clipVideoTrack.naturalSize.width*.7 );

                // Persist the chosen output size for later screens.
                [userDefault setDouble:videoSize.width forKey:VIDEO_WIDTH_OUTPUT];
                [userDefault setDouble:videoSize.height forKey:VIDEO_HEIGHT_OUTPUT];


                videoDimention=0;// for Portrait
            } else {
                // Landscape: keep the track's natural size unchanged.
                videoSize = CGSizeMake(clipVideoTrack.naturalSize.width, clipVideoTrack.naturalSize.height);
                videoDimention=1;// for Landscape
                [userDefault setDouble:videoSize.width forKey:VIDEO_WIDTH_OUTPUT];
                [userDefault setDouble:videoSize.height forKey:VIDEO_HEIGHT_OUTPUT];
            }
            AVMutableVideoComposition* videoComposition = [AVMutableVideoComposition videoComposition];
            // SIZE preference 0 means "square output": override the render
            // size with a height×height square.
            if([[NSUserDefaults standardUserDefaults] integerForKey:SIZE]==0){
                videoComposition.renderSize = CGSizeMake(clipVideoTrack.naturalSize.height , clipVideoTrack.naturalSize.height);
                videoDimention=2; // for square
                double height=clipVideoTrack.naturalSize.height;
                [userDefault setDouble:height forKey:VIDEO_WIDTH_OUTPUT];
                [userDefault setDouble:height forKey:VIDEO_HEIGHT_OUTPUT];
            }
            else{
                videoComposition.renderSize =videoSize;

            }
            // videoComposition.renderScale=.5;
            // Map the FPS preference (0..3) onto 15/20/25/30 fps.
            if([[NSUserDefaults standardUserDefaults] integerForKey:FPS]==0){
                videoComposition.frameDuration = CMTimeMake(1, 15);
            }
            else if ([[NSUserDefaults standardUserDefaults] integerForKey:FPS]==1){
                videoComposition.frameDuration = CMTimeMake(1, 20);
            }
            else if ([[NSUserDefaults standardUserDefaults] integerForKey:FPS]==2){
                videoComposition.frameDuration = CMTimeMake(1, 25);
            }
            else{
                videoComposition.frameDuration = CMTimeMake(1, 30);
            }
            AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
            instruction.timeRange =  CMTimeRangeMake(kCMTimeZero, [asset duration] );;
            AVMutableVideoCompositionLayerInstruction* transformer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:clipVideoTrack];
            // Portrait clips are stored rotated; translate then rotate 90° so
            // the frame renders upright and vertically centered.
            if([self orientationForTrack:asset]==UIInterfaceOrientationPortrait){

                CGAffineTransform t1 = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.height, -(clipVideoTrack.naturalSize.width - clipVideoTrack.naturalSize.height) /2 );
                CGAffineTransform t2 = CGAffineTransformRotate(t1, M_PI_2);

                CGAffineTransform finalTransform = t2;
                [transformer setTransform:finalTransform atTime:kCMTimeZero];

            }
            // Watermark overlay.
            // NOTE(review): sizeOfVideo is unused below; layer frames are
            // computed from videoSize instead.
            CGSize sizeOfVideo=[asset naturalSize];
            // Watermark image placed in the top-right corner, 1/6 of the
            // frame wide with a 4:1 aspect.
            UIImage *myImage=[UIImage imageNamed:@"watermark"];
            CALayer *layerCa = [CALayer layer];
            layerCa.contents = (id)myImage.CGImage;
            if([[NSUserDefaults standardUserDefaults] integerForKey:SIZE]==0){
                layerCa.frame = CGRectMake(videoSize.height-(videoSize.height/6), 0, videoSize.height/6, (videoSize.height/6)/4);
            }
            else{
                layerCa.frame = CGRectMake(videoSize.width-(videoSize.width/6), 0, videoSize.width/6, (videoSize.width/6)/4);
            }
            // layerCa.frame = CGRectMake(videoSize.width-200, 0, 200, 60);
            layerCa.opacity = 1.0;


            // Layer tree for the Core Animation post-processing tool:
            // parentLayer contains the rendered video plus the watermark.
            CALayer *parentLayer=[CALayer layer];
            CALayer *videoLayer=[CALayer layer];
            parentLayer.frame=CGRectMake(0, 0, videoSize.width, videoSize.height);
            videoLayer.frame=CGRectMake(0, 0, videoSize.width, videoSize.height);
            [parentLayer addSublayer:videoLayer];
            [parentLayer addSublayer:layerCa];
            instruction.layerInstructions = [NSArray arrayWithObject:transformer];
            videoComposition.instructions = [NSArray arrayWithObject: instruction];
            // Watermark is only attached when the UP_PID flag is off —
            // presumably a "paid / remove watermark" switch, TODO confirm.
            if([[NSUserDefaults standardUserDefaults] boolForKey:UP_PID]==NO){
                videoComposition.animationTool=[AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
            }
            AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetHighestQuality];


        //    AVMutableComposition *composition = [AVMutableComposition composition];
       //     [composition  addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
            // e.g .mov type
            exportSession.outputURL = fileURL;
            exportSession.videoComposition = videoComposition;
          //  [exportSession addObserver: forKeyPath:@"progress" options:NSKeyValueObservingOptionNew context:NULL];
            exportSession.outputFileType = AVFileTypeQuickTimeMovie;
            // NOTE(review): `status` is captured here but never used; the
            // completion handler below also never checks for export failure.
            AVAssetExportSessionStatus status = [exportSession status];
            [exportSession exportAsynchronouslyWithCompletionHandler:^{
                // Hop back to the main queue for UI work.
                dispatch_async(dispatch_get_main_queue(), ^{
                    VideoEditVC *controller=[[VideoEditVC alloc] init];
                    controller.isFirst=YES;
                    controller.videoSize=videoDimention;
                    [self.navigationController pushViewController:controller animated:YES];
                    self.delegate=controller;
                });

            }];
        });

你还需要实现这个:

// Infers the UI orientation a video track was recorded in by inspecting the
// translation components (tx/ty) of its preferredTransform.
//
// asset – the asset to inspect; returns UIInterfaceOrientationPortrait when
//         it contains no video track (the original crashed in that case).
//
// NOTE(review): keying off tx/ty alone is fragile — rotation is more reliably
// derived from the transform's a/b/c/d components; the tx/ty heuristic is
// kept here for behavioral compatibility with existing callers.
- (UIInterfaceOrientation)orientationForTrack:(AVAsset *)asset
{
    // BUG FIX: objectAtIndex:0 raised NSRangeException for assets without a
    // video track; firstObject is nil-safe.
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    if (!videoTrack)
        return UIInterfaceOrientationPortrait;

    CGSize size = [videoTrack naturalSize];
    CGAffineTransform txf = [videoTrack preferredTransform];

    if (size.width == txf.tx && size.height == txf.ty)
        return UIInterfaceOrientationLandscapeRight;
    else if (txf.tx == 0 && txf.ty == 0)
        return UIInterfaceOrientationLandscapeLeft;
    else if (txf.tx == 0 && txf.ty == size.width)
        return UIInterfaceOrientationPortraitUpsideDown;
    else
        return UIInterfaceOrientationPortrait;
}