Video recorded from an EAGLView gets flipped when uploaded to YouTube

Date: 2013-09-18 12:16:00

Tags: iphone ios opengl-es-2.0 avmutablecomposition google-api-objc-client

In my iOS app I am recording a video of an EAGLView's contents (no camera involved). Recording works fine. After recording I have to add a few audio tracks to the video and then share it to YouTube and Facebook. The problem is that the video plays correctly on the iPhone and on a Mac, but when I upload it to YouTube (using the YouTube Data API v3) it comes out vertically inverted, i.e. upside down.

I think I need to rotate the frames of the video before uploading, but I don't know how to do that.

Any help would be highly appreciated.
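For reference, one quick way to see what is going on is to log the recorded track's preferredTransform (a minimal diagnostic sketch; videoPath is assumed to be the path of the recorded movie and is not named in the question). If the transform is not the identity, local players are honoring it while YouTube's transcoder is presumably discarding it, which would explain why the flip only shows up after uploading:

AVURLAsset *recorded = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:videoPath] options:nil];
AVAssetTrack *recordedVideoTrack = [[recorded tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
CGAffineTransform t = recordedVideoTrack.preferredTransform;
NSLog(@"preferredTransform a=%f b=%f c=%f d=%f tx=%f ty=%f (identity: %d)",
      t.a, t.b, t.c, t.d, t.tx, t.ty, CGAffineTransformIsIdentity(t));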

The code I use to add the audio tracks to the video is as follows:

-(void)prepareVideoForPath:(NSString *)videoPath usingAudio:(NSArray *)audioArray andOutputPath:(NSString *)exportPath{

    NSDictionary *optionsDictionary = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES] forKey:AVURLAssetPreferPreciseDurationAndTimingKey];

    NSURL *videoUrl = [NSURL fileURLWithPath:videoPath];
    AVURLAsset *videoAsset = [AVURLAsset URLAssetWithURL:videoUrl options:optionsDictionary];

    AVAssetTrack *FirstAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    CGAffineTransform firstTransform = FirstAssetTrack.preferredTransform;

    AVMutableComposition *mixComposition = [AVMutableComposition composition];

    // Video track: copy the recorded video into the composition and keep its preferred transform
    AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:FirstAssetTrack atTime:kCMTimeZero error:nil];
    [compositionVideoTrack setPreferredTransform:firstTransform];

    // Audio track: append the audio files back to back by advancing the insertion time
    AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

    CMTime audioStartTime = kCMTimeZero;
    for (NSURL *audioURL in audioArray) {
        AVURLAsset *audioAsset = [AVURLAsset URLAssetWithURL:audioURL options:optionsDictionary];
        [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAsset.duration) ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:audioStartTime error:nil];
        audioStartTime = CMTimeAdd(audioStartTime, audioAsset.duration);
    }

    // Export with the passthrough preset so the samples are copied without re-encoding
    AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetPassthrough];

    NSURL *exportUrl = [NSURL fileURLWithPath:exportPath];

    _assetExport.outputFileType = AVFileTypeQuickTimeMovie;
    NSLog(@"file type %@", _assetExport.outputFileType);
    _assetExport.outputURL = exportUrl;
    _assetExport.shouldOptimizeForNetworkUse = YES;

    [_assetExport exportAsynchronouslyWithCompletionHandler:^(void){
        // your completion code here
        dispatch_async(dispatch_get_main_queue(), ^{
            NSLog(@"Mixing complete");
        });
    }];
}
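For context, a call to this method might look like the following (a sketch; the file names and the bundled audio resource are placeholders, not taken from the question):

NSString *recordedPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"eaglRecording.mov"];
NSString *mixedPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"mixed.mov"];
NSArray *audioURLs = @[[[NSBundle mainBundle] URLForResource:@"backgroundTrack" withExtension:@"m4a"]];
[self prepareVideoForPath:recordedPath usingAudio:audioURLs andOutputPath:mixedPath];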

1 Answer:

Answer 0 (score: 2):

OK, I found out how to fix the video's orientation by flipping it vertically.

Here is the code:

-(void)fixVideoOrientationForURL:(NSURL *)videoURL andOutputPath:(NSString *)exportPath{

    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:videoURL options:@{ AVURLAssetPreferPreciseDurationAndTimingKey : @YES }];

    AVMutableVideoCompositionInstruction *instruction = nil;
    AVMutableVideoCompositionLayerInstruction *layerInstruction = nil;
    CGAffineTransform transform;

    AVAssetTrack *assetVideoTrack = nil;
    AVAssetTrack *assetAudioTrack = nil;

    // Check if the asset contains video and audio tracks
    if ([[asset tracksWithMediaType:AVMediaTypeVideo] count] != 0) {
        assetVideoTrack = [asset tracksWithMediaType:AVMediaTypeVideo][0];
    }
    if ([[asset tracksWithMediaType:AVMediaTypeAudio] count] != 0) {
        assetAudioTrack = [asset tracksWithMediaType:AVMediaTypeAudio][0];
    }

    CMTime insertionPoint = kCMTimeZero;
    NSError *error = nil;

    // Step 1
    // Create a composition and insert the video and audio tracks from the asset into it
    AVMutableComposition *mutableComposition = [AVMutableComposition composition];

    if (assetVideoTrack != nil) {
        AVMutableCompositionTrack *compositionVideoTrack = [mutableComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [asset duration]) ofTrack:assetVideoTrack atTime:insertionPoint error:&error];
    }
    if (assetAudioTrack != nil) {
        AVMutableCompositionTrack *compositionAudioTrack = [mutableComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [asset duration]) ofTrack:assetAudioTrack atTime:insertionPoint error:&error];
    }

    // Step 2
    // Build a vertical-flip transform: (x, y) maps to (x, height - y), which mirrors the frame
    // about its horizontal axis and translates it back into the render rectangle
    transform = CGAffineTransformMake(1, 0, 0, -1, 0, assetVideoTrack.naturalSize.height);

    // Step 3
    // Set the render size, frame duration and the flip transform on a video composition
    AVMutableVideoComposition *mutableVideoComposition = [AVMutableVideoComposition videoComposition];
    mutableVideoComposition.renderSize = CGSizeMake(assetVideoTrack.naturalSize.width, assetVideoTrack.naturalSize.height);
    mutableVideoComposition.frameDuration = CMTimeMake(1, 30);

    // The flip transform is set on a layer instruction
    instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mutableComposition duration]);
    layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:(mutableComposition.tracks)[0]];
    [layerInstruction setTransform:transform atTime:kCMTimeZero];

    // Step 4
    // Add the transform instructions to the video composition
    instruction.layerInstructions = @[layerInstruction];
    mutableVideoComposition.instructions = @[instruction];

    // Export with the video composition applied so the flip is baked into the pixels
    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:[mutableComposition copy] presetName:AVAssetExportPresetHighestQuality];

    exportSession.videoComposition = mutableVideoComposition;
    exportSession.outputURL = [NSURL fileURLWithPath:exportPath];
    exportSession.outputFileType = AVFileTypeQuickTimeMovie;
    exportSession.shouldOptimizeForNetworkUse = YES;

    [exportSession exportAsynchronouslyWithCompletionHandler:^(void){
        dispatch_async(dispatch_get_main_queue(), ^{
            switch (exportSession.status) {
                case AVAssetExportSessionStatusCompleted:
                    NSLog(@"writing complete");
                    break;
                case AVAssetExportSessionStatusFailed:
                    NSLog(@"Failed:%@", exportSession.error);
                    break;
                case AVAssetExportSessionStatusCancelled:
                    NSLog(@"Canceled:%@", exportSession.error);
                    break;
                default:
                    break;
            }
        });
    }];
}
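For completeness, a hypothetical call site (the file names are placeholders; the idea is to run the mixed movie produced above through this method before uploading):

NSString *tmpDir = NSTemporaryDirectory();
NSURL *mixedURL = [NSURL fileURLWithPath:[tmpDir stringByAppendingPathComponent:@"mixed.mov"]];
NSString *uploadPath = [tmpDir stringByAppendingPathComponent:@"upload.mov"];
[self fixVideoOrientationForURL:mixedURL andOutputPath:uploadPath];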