I have two portrait videos. The first was recorded with the default iPhone Camera app; the second was recorded from within my app using UIImagePickerController.
When I apply a CIFilter to the first video, the filter is applied perfectly. But when I apply the same filter to the second video, the video is zoomed in, half of the frame is blurred and stretched, and when I export it, the result comes out rotated.
My code:
AVAssetTrack *FirstAssetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
CIFilter *filter = [CIFilter filterWithName:@"CIPhotoEffectInstant"];
player.currentItem.videoComposition = [AVVideoComposition videoCompositionWithAsset:asset applyingCIFiltersWithHandler:^(AVAsynchronousCIImageFilteringRequest *request) {
    // Clamp to avoid artifacts from filtering transparent pixels at the image edges
    CIImage *source = [request.sourceImage imageByClampingToExtent];
    source = [source imageByApplyingTransform:FirstAssetTrack.preferredTransform];
    [filter setValue:source forKey:kCIInputImageKey];
    // Crop the filtered output to the bounds of the original image
    CIImage *output = [filter.outputImage imageByCroppingToRect:request.sourceImage.extent];
    // Provide the filter output to the composition
    [request finishWithImage:output context:nil];
}];
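To see why the two clips behave differently, it can help to log each track's geometry before filtering, since the Camera app and UIImagePickerController usually record with different naturalSize/preferredTransform combinations. A minimal diagnostic sketch, assuming asset is the already-loaded AVAsset (the log format is illustrative):

AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
CGAffineTransform t = videoTrack.preferredTransform;
// naturalSize is the stored buffer size; preferredTransform describes how to rotate it for display
NSLog(@"naturalSize: %.0f x %.0f", videoTrack.naturalSize.width, videoTrack.naturalSize.height);
NSLog(@"preferredTransform: a=%.1f b=%.1f c=%.1f d=%.1f tx=%.1f ty=%.1f", t.a, t.b, t.c, t.d, t.tx, t.ty);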
This code doesn't work for the second video, so I made some changes for it. This is not the correct code, but I wanted to inspect the video's size and orientation. After changing the orientation it plays fine in AVPlayer, but when I export it, the result is rotated:
AVPlayer plays video composition result incorrectly
I checked this link; we are facing the same problem, so I changed my code based on it, but it still doesn't work:
AVAssetTrack *FirstAssetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
CIFilter *filter = [CIFilter filterWithName:@"CIPhotoEffectInstant"];
UIImageOrientation FirstAssetOrientation_ = UIImageOrientationUp;
BOOL isFirstAssetPortrait_ = NO;
CGAffineTransform firstTransform = FirstAssetTrack.preferredTransform;
if (firstTransform.a == 0 && firstTransform.b == 1.0 && firstTransform.c == -1.0 && firstTransform.d == 0) {
    FirstAssetOrientation_ = UIImageOrientationRight;
    isFirstAssetPortrait_ = YES;
}
if (firstTransform.a == 0 && firstTransform.b == -1.0 && firstTransform.c == 1.0 && firstTransform.d == 0) {
    FirstAssetOrientation_ = UIImageOrientationLeft;
    isFirstAssetPortrait_ = YES;
}
if (firstTransform.a == 1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == 1.0) {
    FirstAssetOrientation_ = UIImageOrientationUp;
}
if (firstTransform.a == -1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == -1.0) {
    FirstAssetOrientation_ = UIImageOrientationDown;
}
player.currentItem.videoComposition = [AVVideoComposition videoCompositionWithAsset:asset applyingCIFiltersWithHandler:^(AVAsynchronousCIImageFilteringRequest * _Nonnull request) {
    // Step 1: get the input frame image (screenshot 1)
    CIImage *sourceImage = request.sourceImage;
    // Step 2: rotate the frame
    CIFilter *transformFilter = [CIFilter filterWithName:@"CIAffineTransform"];
    [transformFilter setValue:sourceImage forKey:kCIInputImageKey];
    [transformFilter setValue:[NSValue valueWithCGAffineTransform:firstTransform] forKey:kCIInputTransformKey];
    sourceImage = transformFilter.outputImage;
    CGRect extent = sourceImage.extent;
    CGAffineTransform translation = CGAffineTransformMakeTranslation(-extent.origin.x, -extent.origin.y);
    [transformFilter setValue:sourceImage forKey:kCIInputImageKey];
    [transformFilter setValue:[NSValue valueWithCGAffineTransform:translation] forKey:kCIInputTransformKey];
    sourceImage = transformFilter.outputImage;
    // Step 3: apply the custom filter chosen by the user
    extent = sourceImage.extent;
    sourceImage = [sourceImage imageByClampingToExtent];
    [filter setValue:sourceImage forKey:kCIInputImageKey];
    sourceImage = filter.outputImage;
    sourceImage = [sourceImage imageByCroppingToRect:extent];
    // make the frame the same aspect ratio as the original input frame
    // by adding empty spaces at the top and the bottom of the extent rectangle
    CGFloat newHeight = 1920 * 1920 / extent.size.height;
    CGFloat inset = (extent.size.height - newHeight) / 2;
    extent = CGRectInset(extent, 0, inset);
    sourceImage = [sourceImage imageByCroppingToRect:extent];
    // scale down to the original frame size
    CGFloat scale = 1920 / newHeight;
    CGAffineTransform scaleTransform = CGAffineTransformMakeScale(scale, scale * 3.2);
    [transformFilter setValue:sourceImage forKey:kCIInputImageKey];
    [transformFilter setValue:[NSValue valueWithCGAffineTransform:scaleTransform] forKey:kCIInputTransformKey];
    sourceImage = transformFilter.outputImage;
    // translate the frame so that its origin starts at (0, 0)
    // (note: this is currently the identity transform, so this step is a no-op)
    CGAffineTransform translation1 = CGAffineTransformMake(1, 0, 0, 1, 0, 0);
    [transformFilter setValue:sourceImage forKey:kCIInputImageKey];
    [transformFilter setValue:[NSValue valueWithCGAffineTransform:translation1] forKey:kCIInputTransformKey];
    sourceImage = transformFilter.outputImage;
    // Step 4: finish processing the frame (screenshot 2)
    [request finishWithImage:sourceImage context:nil];
}];
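On the export side, note that a videoComposition set on the player item only affects playback; an export needs the same composition attached to its AVAssetExportSession, otherwise the filtered and oriented frames are not used at all. A minimal sketch, assuming outputURL is a hypothetical writable file URL and the playback composition from above is reused:

AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetHighestQuality];
exporter.videoComposition = player.currentItem.videoComposition; // reuse the playback composition
exporter.outputURL = outputURL;                                  // hypothetical destination URL
exporter.outputFileType = AVFileTypeQuickTimeMovie;
[exporter exportAsynchronouslyWithCompletionHandler:^{
    if (exporter.status == AVAssetExportSessionStatusCompleted) {
        NSLog(@"export finished");
    } else {
        NSLog(@"export failed: %@", exporter.error);
    }
}];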
Answer 0 (score: 4)
Just remove the transform line that is causing the problem. Only this one:

source = [source imageByApplyingTransform:FirstAssetTrack.preferredTransform];

and check. :) With videoCompositionWithAsset:applyingCIFiltersWithHandler:, the frames delivered to the handler already have the track's preferredTransform applied, so applying it again rotates (and distorts) the frame a second time.
AVAssetTrack *FirstAssetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
CIFilter *filter = [CIFilter filterWithName:@"CIPhotoEffectInstant"];
player.currentItem.videoComposition = [AVVideoComposition videoCompositionWithAsset:asset applyingCIFiltersWithHandler:^(AVAsynchronousCIImageFilteringRequest *request) {
    // Clamp to avoid artifacts from filtering transparent pixels at the image edges
    CIImage *source = [request.sourceImage imageByClampingToExtent];
    [filter setValue:source forKey:kCIInputImageKey];
    // Crop the filtered output to the bounds of the original image
    CIImage *output = [filter.outputImage imageByCroppingToRect:request.sourceImage.extent];
    // Provide the filter output to the composition
    [request finishWithImage:output context:nil];
}];
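If you want to confirm that the handler already receives upright frames, you can log the request geometry inside it; for a portrait clip the source extent should already be taller than it is wide. A small verification sketch (the log lines are illustrative):

player.currentItem.videoComposition = [AVVideoComposition videoCompositionWithAsset:asset applyingCIFiltersWithHandler:^(AVAsynchronousCIImageFilteringRequest *request) {
    // For a portrait recording, the extent here is already height > width,
    // which is why applying preferredTransform again rotates the frame a second time.
    NSLog(@"source extent: %@, renderSize: %@",
          NSStringFromCGRect(request.sourceImage.extent),
          NSStringFromCGSize(request.renderSize));
    [request finishWithImage:request.sourceImage context:nil];
}];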