任务:将传单图片合并到传单视频中。
例:
案例1
情况2
FAIL = Error Domain=AVFoundationErrorDomain Code=-11800 "The operation could not be completed" UserInfo=0x17266d40 {NSLocalizedDescription=The operation could not be completed, NSUnderlyingError=0x172b3920 "The operation couldn't be completed. (OSStatus error -16980.)", NSLocalizedFailureReason=An unknown error occurred (-16980)}
代码:
/// Crops/scales a source video, optionally composites an overlay image on top,
/// and exports the result to `dest` as a QuickTime movie.
///
/// @param src      Source video file URL.
/// @param dest     Destination file URL for the exported movie.
/// @param crop     Render rect: its size becomes the composition/render size;
///                 its origin offsets the video, but only when `scale != 1.0`
///                 (NOTE(review): origin is ignored at scale 1.0 — confirm
///                 that is intended).
/// @param scale    Uniform scale factor applied to the video track (1.0 = none).
/// @param image    Optional image composited above the video via Core Animation.
/// @param callback Invoked asynchronously with the export session status and
///                 error (nil on success) when the export finishes or fails.
///
/// NOTE(review): when a video composition (and especially the Core Animation
/// tool) is attached, the export uses the GPU and will fail with
/// -11800/-16980 if the app is in the background — see the linked answers.
- (void)modifyVideo:(NSURL *)src destination:(NSURL *)dest crop:(CGRect)crop
              scale:(CGFloat)scale overlay:(UIImage *)image
         completion:(void (^)(NSInteger, NSError *))callback {
    // Get a pointer to the asset.
    AVURLAsset *firstAsset = [AVURLAsset URLAssetWithURL:src options:nil];
    // Mutable composition we assemble the output in.
    AVMutableComposition *mixComposition = [AVMutableComposition composition];
    // Video track.
    AVMutableCompositionTrack *videoTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    // Audio track.
    AVMutableCompositionTrack *audioTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                    preferredTrackID:kCMPersistentTrackID_Invalid];

    // Cap the output at MAX_VIDEO_LENGTH seconds, or the source duration if
    // the source is shorter.
    CMTime inTime = CMTimeMake( MAX_VIDEO_LENGTH * VIDEOFRAME, VIDEOFRAME );
    if ( CMTimeCompare( firstAsset.duration, inTime ) < 0 ) {
        inTime = firstAsset.duration;
    }

    // FIX: `transform` was previously uninitialized when the asset had no
    // video track, then read below — undefined behavior. Default to identity.
    CGAffineTransform transform = CGAffineTransformIdentity;

    // Add the video track.
    NSArray *videos = [firstAsset tracksWithMediaType:AVMediaTypeVideo];
    if ( videos.count > 0 ) {
        AVAssetTrack *track = videos.firstObject;
        // FIX: surface insertion failures instead of passing error:nil.
        NSError *insertError = nil;
        if ( ![videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, inTime)
                                  ofTrack:track
                                   atTime:kCMTimeZero
                                    error:&insertError] ) {
            if ( callback ) {
                callback( AVAssetExportSessionStatusFailed, insertError );
            }
            return;
        }
        // Preserve the recorded orientation of the source track.
        transform = track.preferredTransform;
        videoTrack.preferredTransform = transform;
    }

    // Add the audio track, if the source has one (best-effort).
    NSArray *audios = [firstAsset tracksWithMediaType:AVMediaTypeAudio];
    if ( audios.count > 0 ) {
        [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, inTime)
                            ofTrack:audios.firstObject
                             atTime:kCMTimeZero
                              error:nil];
    }
    NSLog(@"Natural size: %.2f x %.2f", videoTrack.naturalSize.width, videoTrack.naturalSize.height);

    // Set the composition / render geometry.
    mixComposition.naturalSize = crop.size;
    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.frameDuration = CMTimeMake(1, VIDEOFRAME );
    videoComposition.renderSize = crop.size;
    videoComposition.renderScale = 1.0;

    // Pass-through instruction covering the whole clip.
    AVMutableVideoCompositionInstruction *passThroughInstruction =
        [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    passThroughInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, inTime);

    // Layer instruction carrying the orientation/crop/scale transform.
    AVMutableVideoCompositionLayerInstruction *passThroughLayer =
        [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
    if ( scale != 1.0 ) {
        CGAffineTransform scaleTransform = CGAffineTransformMakeScale( scale, scale );
        CGAffineTransform translateTransform = CGAffineTransformTranslate( CGAffineTransformIdentity,
                                                                           -crop.origin.x,
                                                                           -crop.origin.y );
        transform = CGAffineTransformConcat( transform, scaleTransform );
        transform = CGAffineTransformConcat( transform, translateTransform );
    }
    [passThroughLayer setTransform:transform atTime:kCMTimeZero];
    passThroughInstruction.layerInstructions = @[ passThroughLayer ];
    videoComposition.instructions = @[ passThroughInstruction ];

    // If an overlay image is given, composite it above the video with the
    // Core Animation post-processing tool.
    if ( image != nil ) {
        // Parent layer merges video and image.
        CALayer *parentLayer = [CALayer layer];
        parentLayer.frame = CGRectMake( 0, 0, crop.size.width, crop.size.height );
        // Layer that renders the video.
        CALayer *videoLayer = [CALayer layer];
        videoLayer.frame = CGRectMake( 0, 0, crop.size.width, crop.size.height );
        [parentLayer addSublayer:videoLayer];
        // Layer that renders the overlay image.
        CALayer *imageLayer = [CALayer layer];
        imageLayer.frame = CGRectMake( 0, 0, crop.size.width, crop.size.height );
        imageLayer.contents = (id)image.CGImage;
        [imageLayer setMasksToBounds:YES];
        [parentLayer addSublayer:imageLayer];
        videoComposition.animationTool =
            [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer
                                                                                                         inLayer:parentLayer];
    }

    // Export the movie asynchronously.
    AVAssetExportSession *exportSession =
        [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                         presetName:AVAssetExportPresetHighestQuality];
    exportSession.videoComposition = videoComposition;
    exportSession.outputURL = dest;
    exportSession.outputFileType = AVFileTypeQuickTimeMovie;
    exportSession.shouldOptimizeForNetworkUse = YES;
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        // FIX: nil-check the optional completion block before invoking it.
        if ( callback ) {
            callback( exportSession.status, exportSession.error );
        }
    }];
}
我从 AppDelegate.m 中调用此函数:
/// Requests extra background execution time so in-flight work (the video
/// export kickoff) can finish after the app is backgrounded.
/// NOTE(review): `bgTask` is an ivar declared elsewhere in this file —
/// presumably a `UIBackgroundTaskIdentifier`; confirm.
- (void)applicationDidEnterBackground:(UIApplication *)application
{
    bgTask = [application beginBackgroundTaskWithName:@"MyTask" expirationHandler:^{
        // The system is reclaiming our extra time: end the task promptly so
        // the app is not killed.
        [application endBackgroundTask:bgTask];
        bgTask = UIBackgroundTaskInvalid;
    }];
    // Start the long-running task off the main thread and return immediately.
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        [self goingToBg];
        // FIX: guard so the task is ended only once — the expiration handler
        // above may already have ended it and reset `bgTask`.
        if (bgTask != UIBackgroundTaskInvalid) {
            [application endBackgroundTask:bgTask];
            bgTask = UIBackgroundTaskInvalid;
        }
    });
    // NOTE(review): debug logging — consider removing or gating for release.
    NSLog(@"backgroundTimeRemaining: %f", [[UIApplication sharedApplication] backgroundTimeRemaining]);
}
答案 0 :(得分:8)
我在这个问题上做了大量调研(R&D),但没有找到真正的解决方案。
想要分享一些链接,希望它能帮助堆栈社区,如果他们遇到同样的问题[要求]。
Link1:AVExportSession to run in background
与问题相关的引文[从Link1上方复制]
可惜的是,由于 AVAssetExportSession 的部分工作要依赖 GPU,一旦使用了 AVVideoComposition,它就无法在后台运行。
Link2:Starting AVAssetExportSession in the Background
与问题相关的引文[从Link2上方复制]
您可以在后台启动 AVAssetExportSession。AVFoundation 在后台执行工作的唯一限制,是不能使用 AVVideoComposition 或 AVMutableVideoComposition,因为 AVVideoComposition 依赖 GPU,而 GPU 不能在后台使用。
后台任务的网址:
答案 1 :(得分:0)
虽然回答得有些晚:如果您在项目 Capabilities 的"Background Modes"设置中勾选 Audio,就可以允许导出继续进行。
这是为了在后台播放音乐。
它对我有用。