iOS: How do I crop a video to a specific size taken from the center of the video?

Time: 2018-12-21 10:22:33

Tags: ios swift avmutablecomposition avvideocomposition

I have a video and I want to keep only a region of a specific size taken from its center. For example, my video is 1920x960 and I would like to take a 1420x560 region from the center of the video.

I have tried many things but cannot find a good approach. By changing the value of videoComposition.renderSize I tried to take a square from the middle of the video (a square just for convenience, as an example), but the cropped region is not centered... Here is my code:

    - (CGFloat)getComplimentSize:(CGFloat)size {
        CGRect screenRect = [[UIScreen mainScreen] bounds];
        CGFloat ratio = screenRect.size.height / screenRect.size.width;

        // we have to adjust the ratio for 16:9 screens
        if (ratio == 1.775) ratio = 1.77777777777778;

        return size * ratio;
    }

    - (UIInterfaceOrientation)orientationForTrack:(AVAsset *)asset {
        UIInterfaceOrientation orientation = UIInterfaceOrientationPortrait;
        NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];

        if([tracks count] > 0) {
            AVAssetTrack *videoTrack = [tracks objectAtIndex:0];
            CGAffineTransform t = videoTrack.preferredTransform;

            // Portrait
            if(t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0) {
                orientation = UIInterfaceOrientationPortrait;
            }
            // PortraitUpsideDown
            if(t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0) {
                orientation = UIInterfaceOrientationPortraitUpsideDown;
            }
            // LandscapeRight
            if(t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0) {
                orientation = UIInterfaceOrientationLandscapeRight;
            }
            // LandscapeLeft
            if(t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0) {
                orientation = UIInterfaceOrientationLandscapeLeft;
            }
        }
        return orientation;
    }

        // output file
        NSString* docFolder = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject];
        NSString* outputPath = [docFolder stringByAppendingPathComponent:@"output2.mov"];
        if ([[NSFileManager defaultManager] fileExistsAtPath:outputPath])
            [[NSFileManager defaultManager] removeItemAtPath:outputPath error:nil];

        // input file
        AVMutableComposition *composition = [AVMutableComposition composition];
        [composition  addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
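        // (I am not sure I even need this composition: nothing is ever inserted into it and the exporter below is created from the asset directly)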

        // input clip
        NSString *filePath = [[self documentsDirectory] stringByAppendingPathComponent:@"testVideo.MP4"];
        AVAsset* asset = [AVAsset assetWithURL:[NSURL fileURLWithPath:filePath]];
        AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

        // crop clip to screen ratio
        UIInterfaceOrientation orientation = [self orientationForTrack:asset];
        BOOL isPortrait = (orientation == UIInterfaceOrientationPortrait || orientation == UIInterfaceOrientationPortraitUpsideDown) ? YES: NO;
        CGFloat complimentSize = [self getComplimentSize:videoTrack.naturalSize.height];
        CGSize videoSize;

        if(isPortrait) {
            videoSize = CGSizeMake(videoTrack.naturalSize.height, complimentSize);
        } else {
            videoSize = CGSizeMake(complimentSize, videoTrack.naturalSize.height);
        }

        // set the render size to the desired crop size (1420x560 in my case)
        AVMutableVideoComposition* videoComposition = [AVMutableVideoComposition videoComposition];
        videoComposition.renderSize = CGSizeMake(1420,560);
        videoComposition.frameDuration = CMTimeMake(1, 30);

        AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
        instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(60, 30) );

        // rotate and position video
        AVMutableVideoCompositionLayerInstruction* transformer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];

        CGFloat tx = (videoTrack.naturalSize.width-complimentSize)/2;
        if (orientation == UIInterfaceOrientationPortrait || orientation == UIInterfaceOrientationLandscapeRight) {
            // invert translation
            tx *= -1;
        }

        // t1: rotate and position video since it may have been cropped to screen ratio
        CGAffineTransform t1 = CGAffineTransformTranslate(videoTrack.preferredTransform, tx, 0);
        // t2/t3: mirror video horizontally
    //    CGAffineTransform t2 = CGAffineTransformTranslate(t1, isPortrait?0:videoTrack.naturalSize.width, isPortrait?videoTrack.naturalSize.height:0);
    //    CGAffineTransform t3 = CGAffineTransformScale(t2, isPortrait?1:-1, isPortrait?-1:1);

        [transformer setTransform:t1 atTime:kCMTimeZero];
        instruction.layerInstructions = [NSArray arrayWithObject: transformer];
        videoComposition.instructions = [NSArray arrayWithObject: instruction];

        // export
        AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetHighestQuality];
        exporter.videoComposition = videoComposition;
        exporter.outputURL=[NSURL fileURLWithPath:outputPath];
        exporter.outputFileType=AVFileTypeQuickTimeMovie;

        [exporter exportAsynchronouslyWithCompletionHandler:^(void){
            // log the status and any error instead of assuming success
            NSLog(@"Export finished with status %ld, error: %@", (long)exporter.status, exporter.error);
        }];
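
From reading the documentation, I think the centering should come from translating the track by half of the difference between its natural size and the crop size, with renderSize set to the crop size itself. Below is a rough sketch of what I mean, assuming a landscape track whose preferredTransform is the identity (which is my case: 1920x960); centerCropVideoCompositionForAsset:cropSize: is just a helper name I made up, and I am not sure this is correct:

    // Sketch: center-crop a landscape track (identity preferredTransform) to cropSize.
    - (AVMutableVideoComposition *)centerCropVideoCompositionForAsset:(AVAsset *)asset
                                                              cropSize:(CGSize)cropSize {
        AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
        CGSize naturalSize = videoTrack.naturalSize; // 1920x960 for my video

        // offset of the centered crop window inside the source frame:
        // (1920 - 1420) / 2 = 250 and (960 - 560) / 2 = 200
        CGFloat tx = (naturalSize.width  - cropSize.width)  / 2.0;
        CGFloat ty = (naturalSize.height - cropSize.height) / 2.0;

        AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
        videoComposition.renderSize = cropSize; // the output is exactly the crop size
        videoComposition.frameDuration = CMTimeMake(1, 30);

        AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
        instruction.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);

        AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];

        // shift the frame left and up by the offset so the centered 1420x560
        // window of the source lands at (0,0) of the render rectangle
        CGAffineTransform transform = CGAffineTransformTranslate(videoTrack.preferredTransform, -tx, -ty);
        [layerInstruction setTransform:transform atTime:kCMTimeZero];

        instruction.layerInstructions = @[layerInstruction];
        videoComposition.instructions = @[instruction];
        return videoComposition;
    }

I would then set the result on the exporter as in my code above (exporter.videoComposition = [self centerCropVideoCompositionForAsset:asset cropSize:CGSizeMake(1420, 560)];). Is this translation approach the right way to center the crop, or does the layer-instruction transform interact with preferredTransform in a way I am missing?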

Do you have any ideas? Thank you very much.

0 Answers:

No answers yet.