I am working on a video application in which I have to add an overlay to a video and then export it. My app worked fine last month on iOS 7.0, but when I checked it today after updating, I found that the AVAssetExportSession exportAsynchronouslyWithCompletionHandler block is never executed. Control simply returns from the [exporter exportAsynchronouslyWithCompletionHandler:^{ line.
Code:
- (void)saving_Edited_Video
{
    AVMutableComposition *mixComposition = [AVMutableComposition composition];
    NSMutableArray *loTempArr = [[[Database sharedDBDetails] getAllUserDetails:kaudioTable] mutableCopy];
    TempFile *loTemp2 = [mallVideoArray objectAtIndex:self.slectedVideoIndex];
    for (int i = 0; i < [loTempArr count]; i++)
    {
        TempFile *lotemp1 = [loTempArr objectAtIndex:i];
        if (loTemp2.mTemp_Key == [lotemp1.mTemp_videorefID intValue])
        {
            //NSLog(@"%@",lotemp1.mTemp_AudioName);
            NSString *filepath = [kDocument_Path stringByAppendingString:[NSString stringWithFormat:@"/audioFolder/%@", lotemp1.mTemp_AudioName]];
            NSURL *SongURL = [NSURL fileURLWithPath:filepath];
            self.audioAsset = [[AVURLAsset alloc] initWithURL:SongURL options:nil];
            CMTime time2 = CMTimeMake([lotemp1.mTemp_timeinvideo doubleValue] * 600, 600);
            AVMutableCompositionTrack *compositionCommentaryTrack2 = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
            NSError *audioError = nil;
            [compositionCommentaryTrack2 insertTimeRange:CMTimeRangeMake(kCMTimeZero, CMTimeSubtract(self.videoAsset.duration, time2))
                                                 ofTrack:[[self.audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                                                  atTime:time2
                                                   error:&audioError];
            if (audioError)
            {
                NSLog(@"audio insert failed: %@", audioError);
            }
        }
    }
    AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    NSError *videoError = nil;
    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, self.videoAsset.duration)
                                   ofTrack:[[self.videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                                    atTime:kCMTimeZero
                                     error:&videoError];
    if (videoError)
    {
        NSLog(@"video insert failed: %@", videoError);
    }
    // 3.1 - Create AVMutableVideoCompositionInstruction
    AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, self.videoAsset.duration);
    // 3.2 - Create an AVMutableVideoCompositionLayerInstruction for the video track and fix the orientation.
    AVMutableVideoCompositionLayerInstruction *videolayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];
    AVAssetTrack *videoAssetTrack = [[self.videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    CGSize sizeflip;
    if (flipActionFlag == 4 || flipActionFlag == 5)
    {
        sizeflip = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width);
    }
    else
    {
        sizeflip = videoAssetTrack.naturalSize;
    }
    CGAffineTransform transform = CGAffineTransformIdentity;
    if (flipActionFlag == 2)
    {
        // rotate up
        transform = videoAssetTrack.preferredTransform;
    }
    else if (flipActionFlag == 0)
    {
        // vertical flip
        transform = CGAffineTransformTranslate(transform, 0.0f, sizeflip.height);
        transform = CGAffineTransformScale(transform, 1.0f, -1.0f);
    }
    else if (flipActionFlag == 1)
    {
        // horizontal flip
        transform = CGAffineTransformTranslate(transform, sizeflip.width, 0.0f);
        transform = CGAffineTransformScale(transform, -1.0f, 1.0f);
    }
    else if (flipActionFlag == 3)
    {
        // rotate down
        transform = CGAffineTransformTranslate(transform, sizeflip.width, sizeflip.height);
        transform = CGAffineTransformRotate(transform, M_PI);
    }
    else if (flipActionFlag == 4)
    {
        // rotate right
        transform = CGAffineTransformTranslate(transform, sizeflip.width, 0.0f);
        transform = CGAffineTransformRotate(transform, M_PI_2);
    }
    else if (flipActionFlag == 5)
    {
        // rotate left
        transform = CGAffineTransformTranslate(transform, 0.0f, sizeflip.height);
        transform = CGAffineTransformRotate(transform, -M_PI_2);
    }
    [videolayerInstruction setTransform:transform atTime:kCMTimeZero];
    [videolayerInstruction setOpacity:0.0 atTime:self.videoAsset.duration];
    // 3.3 - Add instructions
    AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
    mainInstruction.layerInstructions = [NSArray arrayWithObjects:videolayerInstruction, nil];
    float renderWidth = self.movieController.view.frame.size.width;
    float renderHeight = self.movieController.view.frame.size.height;
    CGSize size;
    if (flipActionFlag == 4 || flipActionFlag == 5)
    {
        size = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width);
    }
    else
    {
        size = videoAssetTrack.naturalSize;
    }
    NSLog(@"%@", NSStringFromCGSize(size));
    mainCompositionInst.renderSize = size; // also tried CGSizeMake(renderWidth, renderHeight)
    mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
    mainCompositionInst.frameDuration = CMTimeMake(1, 30);
    if ([self.drawImage.lines count] > 0)
    {
        [self applyVideoEffectsToComposition:mainCompositionInst size:size overlayerSize:size];
    }
    // Getting the video name.
    TempFile *loTemp = [mallVideoArray lastObject];
    // 4 - Get path
    TempFile *mnewtemp = [[TempFile alloc] init];
    mnewtemp.mTemp_videoName = [NSString stringWithFormat:@"Video_%d.m4v", loTemp.mTemp_Key + 1];
    [[Database sharedDBDetails] insertNewRowWithData:mnewtemp forTable:kvideoTable];
    NSString *myPathDocs = [kDocument_Path stringByAppendingPathComponent:
                            [NSString stringWithFormat:@"Video/Video_%d.m4v", loTemp.mTemp_Key + 1]];
    NSURL *url = [NSURL fileURLWithPath:myPathDocs];
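    // Added suggestion, not part of the original flow: the export fails if the
    // destination folder is missing or a file already exists at the output URL,
    // so creating the folder and clearing any stale file rules both out.
    [[NSFileManager defaultManager] createDirectoryAtPath:[myPathDocs stringByDeletingLastPathComponent]
                              withIntermediateDirectories:YES
                                               attributes:nil
                                                    error:nil];
    [[NSFileManager defaultManager] removeItemAtPath:myPathDocs error:nil];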
    // 5 - Create exporter
    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                      presetName:AVAssetExportPresetHighestQuality];
    exporter.outputURL = url;
    exporter.outputFileType = AVFileTypeQuickTimeMovie;
    exporter.shouldOptimizeForNetworkUse = YES;
    exporter.videoComposition = mainCompositionInst;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        AVAssetExportSessionStatus exportStatus = exporter.status;
        switch (exportStatus)
        {
            case AVAssetExportSessionStatusFailed:
            {
                NSError *exportError = exporter.error;
                NSLog(@"AVAssetExportSessionStatusFailed: %@", exportError);
                break;
            }
            case AVAssetExportSessionStatusCompleted:
                NSLog(@"AVAssetExportSessionStatusCompleted--");
                break;
            case AVAssetExportSessionStatusUnknown:
                NSLog(@"AVAssetExportSessionStatusUnknown");
                break;
            case AVAssetExportSessionStatusExporting:
                NSLog(@"AVAssetExportSessionStatusExporting");
                break;
            case AVAssetExportSessionStatusCancelled:
                NSLog(@"AVAssetExportSessionStatusCancelled");
                break;
            case AVAssetExportSessionStatusWaiting:
                NSLog(@"AVAssetExportSessionStatusWaiting");
                break;
            default:
                NSLog(@"didn't get export status");
                break;
        }
        dispatch_async(dispatch_get_main_queue(), ^{
            [self exportDidFinish:exporter];
        });
    }];
}
- (void)exportDidFinish:(AVAssetExportSession *)session
{
    [losaveView removeFromSuperview];
    if (session.status == AVAssetExportSessionStatusCompleted)
    {
        NSURL *outputURL = session.outputURL;
        ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
        if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputURL])
        {
            [library writeVideoAtPathToSavedPhotosAlbum:outputURL completionBlock:^(NSURL *assetURL, NSError *error) {
                dispatch_async(dispatch_get_main_queue(), ^{
                    if (error)
                    {
                        UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Error"
                                                                        message:@"Video Saving Failed"
                                                                       delegate:nil
                                                              cancelButtonTitle:@"OK"
                                                              otherButtonTitles:nil];
                        [alert show];
                    }
                    else
                    {
                        self.mallVideoArray = [[[Database sharedDBDetails] getAllUserDetails:kvideoTable] mutableCopy];
                        UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Video Saved"
                                                                        message:@"Saved To Photo Album"
                                                                       delegate:self
                                                              cancelButtonTitle:@"OK"
                                                              otherButtonTitles:nil];
                        [alert show];
                    }
                });
            }];
        }
    }
}
When I read the documentation, it says that the exportAsynchronouslyWithCompletionHandler block "is invoked when writing is complete or in the event of writing failure". So even if the export fails, the handler should still be called. What is wrong with the code, or am I missing something here?
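For reference, a minimal export with no composition at all can help isolate the problem (the testExport method name, the passthrough preset, and the temporary output path below are placeholders I chose, not code from the app):

// Minimal sketch, assuming self.videoAsset is a playable AVURLAsset: export it
// untouched and check whether the completion handler fires at all.
- (void)testExport
{
    NSString *outPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"diagnostic.mov"];
    // AVAssetExportSession does not overwrite, so clear any stale file first.
    [[NSFileManager defaultManager] removeItemAtPath:outPath error:nil];

    AVAssetExportSession *session =
        [[AVAssetExportSession alloc] initWithAsset:self.videoAsset
                                         presetName:AVAssetExportPresetPassthrough];
    session.outputURL = [NSURL fileURLWithPath:outPath];
    session.outputFileType = AVFileTypeQuickTimeMovie;
    [session exportAsynchronouslyWithCompletionHandler:^{
        // Documented to run on completion *or* failure, so this should always log.
        NSLog(@"test export status: %ld, error: %@", (long)session.status, session.error);
    }];
}

If this minimal export completes but the full one stays silent, the next suspects would be the audio insertTimeRange calls and the videoComposition built above.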