Video orientation issue

Date: 2018-07-28 04:29:28

Tags: ios objective-c avfoundation composition avassetexportsession

I am currently working with the AVFoundation framework. I am trying to play two videos at the same time: one with a filter applied and the other unmodified.

I have completed the whole task. The only remaining step is to set the correct orientation when saving the final video to the photo gallery. I have tried a lot of things, but nothing has worked. Please help me set the orientation.

// Save action
@IBAction func saveAction(_ sender: UIButton) {
    // Kick off the filter/merge pipeline; the completion block receives the exported file URL.
    HelperClass.shared().applyFilter(self.firstAsset, andSecondAsset: self.secondAsset, onviewController: self, andcompos: self.composition, completion: { (value, error, url) in
        if url != nil {
            print("url", url!)
        }
    })

    // Show a progress HUD while the export runs; it is hidden again in exportDidFinish.
    MBProgressHUD.showAdded(to: self.view, animated: true)
}

#pragma mark - Overlay task happens here; the filtered video returned from the applyFilter method is overlaid on the foreground video
-(void)saveVideosToPhotoAlbum:(AVURLAsset*)filteredasset andOriginalAsset:(AVURLAsset*)origAsset onViewController:(UIViewController*)vc{
    AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init];
    //----first track---//
    AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, origAsset.duration) ofTrack:[[origAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
    //----second track---//
    AVMutableCompositionTrack *secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, filteredasset.duration) ofTrack:[[filteredasset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
    AVMutableVideoCompositionInstruction * MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, origAsset.duration);
    //----FirstlayerInstruction---//
    AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];

    CGAffineTransform Scale = CGAffineTransformMakeScale(0.8f,1.0f);
    CGAffineTransform Move = CGAffineTransformMakeTranslation(40,0);
    [FirstlayerInstruction setTransform:CGAffineTransformConcat(Scale,Move) atTime:kCMTimeZero];
    //----SecondlayerInstruction---//
    AVMutableVideoCompositionLayerInstruction *SecondlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:secondTrack];
    //
    CGAffineTransform SecondScale = CGAffineTransformMakeScale(1.5f,1.5f);
    CGAffineTransform SecondMove = CGAffineTransformMakeTranslation(0,0);
    [SecondlayerInstruction setTransform:CGAffineTransformConcat(SecondScale,SecondMove) atTime:kCMTimeZero];

    MainInstruction.layerInstructions = [NSArray arrayWithObjects:FirstlayerInstruction,SecondlayerInstruction,nil];


    AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
    MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
    MainCompositionInst.frameDuration = CMTimeMake(1, 50);
    MainCompositionInst.renderSize = CGSizeMake(firstTrack.naturalSize.width, firstTrack.naturalSize.height);
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];
    NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:@"overlapVideotest.mov"];

    if([[NSFileManager defaultManager] fileExistsAtPath:myPathDocs])
    {
        [[NSFileManager defaultManager] removeItemAtPath:myPathDocs error:nil];
    }

    NSURL *url = [NSURL fileURLWithPath:myPathDocs];

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPreset1280x720];
    exporter.outputURL = url;
    [exporter setVideoComposition:MainCompositionInst];
    exporter.outputFileType = AVFileTypeQuickTimeMovie;

    [exporter exportAsynchronouslyWithCompletionHandler:^
     {
         dispatch_async(dispatch_get_main_queue(), ^{
             [self exportDidFinish:exporter onViewController:vc];
         });
     }];
}

#pragma mark - Saving the final video to photo gallery here
- (void)exportDidFinish:(AVAssetExportSession*)session onViewController:(UIViewController*)vc {
    if(session.status == AVAssetExportSessionStatusCompleted) {
        NSURL *outputURL = session.outputURL;
        ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
        if([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputURL]) {
            [library writeVideoAtPathToSavedPhotosAlbum:outputURL
                                        completionBlock:^(NSURL *assetURL, NSError *error) {
                                            dispatch_async(dispatch_get_main_queue(), ^{
                                                [MBProgressHUD hideHUDForView:vc.view animated:true];
                                                if (error) {
                                                    UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Error"  message:@"Video Saving Failed"  delegate:nil cancelButtonTitle:@"Ok" otherButtonTitles: nil, nil];
                                                    [alert show];
                                                } else {
                                                    UIAlertView *alert = [[UIAlertView alloc]
                                                                          initWithTitle:@"Video Saved" message:@"Saved To Photo Album"      delegate:self cancelButtonTitle:@"Ok" otherButtonTitles: nil];
                                                    [alert show];
                                                    // [self loadMoviePlayer:outputURL];
                                                }
                                            });
                                        }];
        }
    }else{
        [MBProgressHUD hideHUDForView:vc.view animated:true];
        NSLog(@"found an issue %@",session.error);
    }

}

1 Answer:

Answer 0 (score: 0)

You will have to handle the transforms of the input video tracks. First, get the orientation of each input video track and set a corresponding transform on its AVMutableVideoCompositionLayerInstruction. That is the correct approach. I have reworked your saveVideosToPhotoAlbum: method against my own code base. I also changed the render size to 720 x 720 so that the output looks good for videos of all resolutions.

Declare the orientation and zoomOrientation variables in your .h file so they are available throughout the .m file, i.e. AVCaptureVideoOrientation orientation; and AVCaptureVideoOrientation zoomOrientation; as shown in the sketch below.
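A minimal sketch of those declarations (the class name MyViewController is hypothetical; put the ivars in whatever class actually hosts this method):

#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>

@interface MyViewController : UIViewController {
    AVCaptureVideoOrientation orientation;      // orientation of the original (background) asset
    AVCaptureVideoOrientation zoomOrientation;  // orientation of the filtered (overlay) asset
}
@end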

-(void)saveVideosToPhotoAlbum:(AVURLAsset*)filteredasset andOriginalAsset:(AVURLAsset*)origAsset onViewController:(UIViewController*)vc{
    AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init];
    //----first track---//
    AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, origAsset.duration) ofTrack:[[origAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
    //----second track---//
    AVMutableCompositionTrack *secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, filteredasset.duration) ofTrack:[[filteredasset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];

    AVMutableVideoCompositionInstruction * MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, origAsset.duration);


    //----FirstlayerInstruction---//
    AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];


    orientation = [self videoOrientation:origAsset];
    zoomOrientation = [self videoOrientation:filteredasset];

    BOOL isPortrait = NO;
    switch (orientation) {
        case AVCaptureVideoOrientationLandscapeRight:
            isPortrait = NO;
            break;
        case AVCaptureVideoOrientationLandscapeLeft:
            isPortrait = NO;
            break;
        case AVCaptureVideoOrientationPortrait:
            isPortrait = YES;
            break;
        case AVCaptureVideoOrientationPortraitUpsideDown:
            isPortrait = YES;
            break;
        default:
            break;
    }

    CGSize naturalSize = firstTrack.naturalSize;
    if(isPortrait){
        naturalSize = CGSizeMake(naturalSize.height,naturalSize.width);
    }

    BOOL isZoomPortrait = NO;
    switch (zoomOrientation) {
        case AVCaptureVideoOrientationLandscapeRight:
            isZoomPortrait = NO;
            break;
        case AVCaptureVideoOrientationLandscapeLeft:
            isZoomPortrait = NO;
            break;
        case AVCaptureVideoOrientationPortrait:
            isZoomPortrait = YES;
            break;
        case AVCaptureVideoOrientationPortraitUpsideDown:
            isZoomPortrait = YES;
            break;
        default:
            break;
    }

    CGSize zoomNaturalSize = secondTrack.naturalSize;
    if(isZoomPortrait){
        zoomNaturalSize = CGSizeMake(zoomNaturalSize.height,zoomNaturalSize.width);
    }

    CGFloat aspectWidth = 720/naturalSize.width;
    CGFloat aspectheight = 720/naturalSize.height;

    CGFloat zoomAspectWidth = 720/zoomNaturalSize.width;
    CGFloat zoomAspectheight = 720/zoomNaturalSize.height;

    CGFloat scale = MIN(aspectWidth, aspectheight);             // aspect-fit the original track inside 720 x 720
    CGFloat zoomScale = MAX(zoomAspectWidth, zoomAspectheight); // aspect-fill the filtered track over 720 x 720

    CGAffineTransform transform = [self transformFromOrientationWithVideoSizeWithAspect:naturalSize scale1:scale];
    CGAffineTransform zoomTransform = [self zoomTransformFromOrientationWithVideoSizeWithAspect:zoomNaturalSize scale1:zoomScale];

    [FirstlayerInstruction setTransform:transform atTime:kCMTimeZero];


    //----SecondlayerInstruction---//


    AVMutableVideoCompositionLayerInstruction *SecondlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:secondTrack];

    [SecondlayerInstruction setTransform:zoomTransform atTime:kCMTimeZero];

    MainInstruction.layerInstructions = [NSArray arrayWithObjects:FirstlayerInstruction,SecondlayerInstruction,nil];


    AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
    MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
    MainCompositionInst.frameDuration = CMTimeMake(1, 30);
    MainCompositionInst.renderSize = CGSizeMake(720, 720);

    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];
    NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:@"overlapVideotest.mov"];

    if([[NSFileManager defaultManager] fileExistsAtPath:myPathDocs])
    {
        [[NSFileManager defaultManager] removeItemAtPath:myPathDocs error:nil];
    }

    NSURL *url = [NSURL fileURLWithPath:myPathDocs];

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    exporter.outputURL = url;
    [exporter setVideoComposition:MainCompositionInst];
    exporter.outputFileType = AVFileTypeQuickTimeMovie;

    [exporter exportAsynchronouslyWithCompletionHandler:^
     {
         dispatch_async(dispatch_get_main_queue(), ^{
             [self exportDidFinish:exporter onViewController:vc];
         });
     }];
}

- (CGAffineTransform)transformFromOrientationWithVideoSizeWithAspect:(CGSize)naturalSize scale1:(float)scale
{

    CGAffineTransform transform = CGAffineTransformIdentity;
    transform = CGAffineTransformScale(transform,scale,scale);
    CGSize size = CGSizeMake(720.0, 720.0);

    switch (orientation) {
        case AVCaptureVideoOrientationLandscapeRight:

            transform = CGAffineTransformTranslate(transform, (size.width/scale) / 2.0 - naturalSize.width/2.0, (size.height/scale) / 2.0 - naturalSize.height/2.0);

            break;
        case AVCaptureVideoOrientationLandscapeLeft:
            transform = CGAffineTransformTranslate(transform,
                                                   naturalSize.width,
                                                   naturalSize.height);
            transform = CGAffineTransformRotate(transform, M_PI);

            if (naturalSize.width > naturalSize.height){
                transform =  CGAffineTransformTranslate(transform, 0, -((size.height/scale) / 2.0 - naturalSize.height/2.0));
            }else{
                //transform = transform.translatedBy(x: -((size.width/scale) / 2.0 - naturalSize.width/2.0), y: 0)
                transform =  CGAffineTransformTranslate(transform, -((size.width/scale) / 2.0 - naturalSize.width/2.0), 0);
            }

            break;
        case AVCaptureVideoOrientationPortrait:
            transform = CGAffineTransformTranslate(transform, naturalSize.width, 0);

            transform = CGAffineTransformRotate(transform, M_PI_2);
            //transform = transform.translatedBy(x: 0, y: -((size.width/scale) / 2.0 - naturalSize.width/2.0))

            transform = CGAffineTransformTranslate(transform, 0, -((size.width/scale) / 2.0 - naturalSize.width/2.0));
            break;
        case AVCaptureVideoOrientationPortraitUpsideDown:
            transform = CGAffineTransformTranslate(transform, 0, naturalSize.height);
            //  transform = CGAffineTransformTranslate(transform, 0, (naturalSize.width - naturalSize.height)/2);
            transform = CGAffineTransformRotate(transform, -M_PI_2);
            break;
        default:
            break;
    }
    return transform;
}
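To make the translation math concrete, here is a worked example (assuming a hypothetical 1920 x 1080 landscape-right original): scale = MIN(720/1920, 720/1080) = 0.375, so the track renders at 720 x 405. The translation of (720/0.375)/2 - 1920/2 = 0 horizontally and (720/0.375)/2 - 1080/2 = 420 vertically is applied in the pre-scale coordinate space, i.e. 420 * 0.375 = 157.5 rendered points, which matches (720 - 405)/2 and centers the video vertically in the 720 x 720 square.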

- (CGAffineTransform)zoomTransformFromOrientationWithVideoSizeWithAspect:(CGSize)naturalSize scale1:(float)scale
{

    CGAffineTransform transform = CGAffineTransformIdentity;
    transform = CGAffineTransformScale(transform,scale,scale);
    CGSize size = CGSizeMake(720.0, 720.0);

    switch (zoomOrientation) {
        case AVCaptureVideoOrientationLandscapeRight:

            transform = CGAffineTransformTranslate(transform, (size.width/scale) / 2.0 - naturalSize.width/2.0, (size.height/scale) / 2.0 - naturalSize.height/2.0);

            break;
        case AVCaptureVideoOrientationLandscapeLeft:
            transform = CGAffineTransformTranslate(transform,
                                                   naturalSize.width,
                                                   naturalSize.height);
            transform = CGAffineTransformRotate(transform, M_PI);

            if (naturalSize.width > naturalSize.height){
                transform =  CGAffineTransformTranslate(transform, 0, -((size.height/scale) / 2.0 - naturalSize.height/2.0));
            }else{
                //transform = transform.translatedBy(x: -((size.width/scale) / 2.0 - naturalSize.width/2.0), y: 0)
                transform =  CGAffineTransformTranslate(transform, -((size.width/scale) / 2.0 - naturalSize.width/2.0), 0);
            }

            break;
        case AVCaptureVideoOrientationPortrait:
            transform = CGAffineTransformTranslate(transform, naturalSize.width, 0);

            transform = CGAffineTransformRotate(transform, M_PI_2);
            //transform = transform.translatedBy(x: 0, y: -((size.width/scale) / 2.0 - naturalSize.width/2.0))

            transform = CGAffineTransformTranslate(transform, 0, -((size.width/scale) / 2.0 - naturalSize.width/2.0));
            break;
        case AVCaptureVideoOrientationPortraitUpsideDown:
            transform = CGAffineTransformTranslate(transform, 0, naturalSize.height);
            //  transform = CGAffineTransformTranslate(transform, 0, (naturalSize.width - naturalSize.height)/2);
            transform = CGAffineTransformRotate(transform, -M_PI_2);
            break;
        default:
            break;
    }
    return transform;
}

-(AVCaptureVideoOrientation)videoOrientation:(AVAsset *)asset
{
    AVCaptureVideoOrientation result = 0; // 0 = undetermined; callers fall into the switch's default case
    NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
    if ([tracks count] > 0) {
        AVAssetTrack *videoTrack = [tracks objectAtIndex:0];

        CGAffineTransform t = videoTrack.preferredTransform;
        // Portrait
        if(t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0)
        {
            result = AVCaptureVideoOrientationPortrait;
        }
        // PortraitUpsideDown
        if(t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0)  {

            result = AVCaptureVideoOrientationPortraitUpsideDown;
        }
        // LandscapeRight
        if(t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0)
        {
            result = AVCaptureVideoOrientationLandscapeRight;
        }
        // LandscapeLeft
        if(t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0)
        {
            result = AVCaptureVideoOrientationLandscapeLeft;
        }
    }
    return result;
}
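If the detected orientation looks wrong for a particular asset, a quick sanity check is to log the track's preferredTransform and compare it against the branches above (asset here stands for whichever AVAsset you pass in):

AVAssetTrack *track = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
CGAffineTransform t = track.preferredTransform;
// a/b/c/d encode the rotation; tx/ty carry the translation, which
// videoOrientation: above ignores.
NSLog(@"preferredTransform: a=%.1f b=%.1f c=%.1f d=%.1f tx=%.1f ty=%.1f",
      t.a, t.b, t.c, t.d, t.tx, t.ty);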