Please find my code below.
- (void)writeImagesAsMovie:(NSArray *)array toPath:(NSString *)path numPhoto:(NSInteger)totPics {
    ALAsset *asset = [assets objectAtIndex:0];
    ALAssetRepresentation *assetRepresentation = [asset defaultRepresentation];
    UIImage *getImage = [UIImage imageWithCGImage:[assetRepresentation fullScreenImage]
                                            scale:[assetRepresentation scale]
                                      orientation:(UIImageOrientation)[assetRepresentation orientation]];
    UIImage *first = [getImage imageByScalingAndCroppingForSize:CGSizeMake(720.0, 960.0)];
    CGSize frameSize = CGSizeMake(first.size.width, first.size.height);
    NSLog(@"frameSize = %@", NSStringFromCGSize(frameSize));

    NSError *error = nil;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    if (error) {
        NSLog(@"error creating AssetWriter: %@", [error description]);
    }

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:frameSize.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:frameSize.height], AVVideoHeightKey,
                                   AVVideoScalingModeResizeAspect, AVVideoScalingModeKey,
                                   nil];

    AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                         outputSettings:videoSettings];
    CGAffineTransform transform = CGAffineTransformIdentity;
    UIImageOrientation orient = first.imageOrientation;
    CGSize imageSize = first.size;
    switch (orient) {
        case UIImageOrientationUp: //EXIF = 1
            transform = CGAffineTransformIdentity;
            break;
        case UIImageOrientationUpMirrored: //EXIF = 2
            transform = CGAffineTransformMakeTranslation(imageSize.width, 0.0);
            transform = CGAffineTransformScale(transform, -1.0, 1.0);
            break;
        case UIImageOrientationDown: //EXIF = 3
            transform = CGAffineTransformMakeTranslation(imageSize.width, imageSize.height);
            transform = CGAffineTransformRotate(transform, M_PI);
            break;
        case UIImageOrientationDownMirrored: //EXIF = 4
            transform = CGAffineTransformMakeTranslation(0.0, imageSize.height);
            transform = CGAffineTransformScale(transform, 1.0, -1.0);
            break;
        case UIImageOrientationLeftMirrored: //EXIF = 5
            transform = CGAffineTransformMakeTranslation(imageSize.height, imageSize.width);
            transform = CGAffineTransformScale(transform, -1.0, 1.0);
            transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
            break;
        case UIImageOrientationLeft: //EXIF = 6
            transform = CGAffineTransformMakeTranslation(0.0, imageSize.width);
            transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
            break;
        case UIImageOrientationRightMirrored: //EXIF = 7
            transform = CGAffineTransformMakeScale(-1.0, 1.0);
            transform = CGAffineTransformRotate(transform, M_PI / 2.0);
            break;
        case UIImageOrientationRight: //EXIF = 8
            transform = CGAffineTransformMakeTranslation(imageSize.height, 0.0);
            transform = CGAffineTransformRotate(transform, M_PI / 2.0);
            break;
        default:
            [NSException raise:NSInternalInconsistencyException format:@"Invalid image orientation"];
    }
    writerInput.transform = transform;

    NSMutableDictionary *attributes = [[NSMutableDictionary alloc] init];
    [attributes setObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB] forKey:(NSString *)kCVPixelBufferPixelFormatTypeKey];
    [attributes setObject:[NSNumber numberWithUnsignedInt:frameSize.width] forKey:(NSString *)kCVPixelBufferWidthKey];
    [attributes setObject:[NSNumber numberWithUnsignedInt:frameSize.height] forKey:(NSString *)kCVPixelBufferHeightKey];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                     sourcePixelBufferAttributes:attributes];
    [videoWriter addInput:writerInput];

    // fixes all errors
    writerInput.expectsMediaDataInRealTime = YES;

    //Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    CVPixelBufferRef buffer = NULL;
    buffer = [self pixelBufferFromCGImage:[first CGImage]];
    BOOL result = [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
    if (result == NO)
        NSLog(@"failed to append buffer");
    if (buffer) {
        CVBufferRelease(buffer);
    }
    int fps = 2;
    for (int i = 0; i < totPics; i++)
    {
        if (adaptor.assetWriterInput.readyForMoreMediaData) {
            CMTime frameTime = CMTimeMake(1, fps);
            CMTime lastTime = CMTimeMake(i, fps);
            CMTime presentTime = CMTimeAdd(lastTime, frameTime);
            NSLog(@"presentTime = %f", CMTimeGetSeconds(presentTime));

            ALAsset *asset = [assets objectAtIndex:i];
            ALAssetRepresentation *assetRepresentation = [asset defaultRepresentation];
            UIImage *imgGetFrame = [UIImage imageWithCGImage:[assetRepresentation fullScreenImage]
                                                       scale:[assetRepresentation scale]
                                                 orientation:(UIImageOrientation)[assetRepresentation orientation]];
            UIImage *imgFrame = [imgGetFrame imageByScalingAndCroppingForSize:CGSizeMake(720.0, 960.0)];
            buffer = [self pixelBufferFromCGImage:[imgFrame CGImage]];
            BOOL result = [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
            if (result == NO) // fails on 3GS, but works on iPhone 4
            {
                NSLog(@"failed to append buffer");
                NSLog(@"The error is %@", [videoWriter error]);
            }
            if (buffer) {
                CVBufferRelease(buffer);
            }
        } else {
            NSLog(@"error");
        }
    }
    //Finish the session:
    [writerInput markAsFinished];
    [videoWriter finishWritingWithCompletionHandler:^{
        NSLog(@"Complete");
    }];
    CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
}
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image {
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVPixelBufferCreate(kCFAllocatorDefault, CGImageGetWidth(image),
                        CGImageGetHeight(image), kCVPixelFormatType_32ARGB,
                        (__bridge CFDictionaryRef)options, &pxbuffer);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, CGImageGetWidth(image),
                                                 CGImageGetHeight(image), 8, 4 * CGImageGetWidth(image),
                                                 rgbColorSpace, kCGImageAlphaNoneSkipFirst);
    CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
                                           CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}
The image cropping method (a UIImage category):
- (UIImage *)imageByScalingAndCroppingForSize:(CGSize)targetSize {
    UIImage *sourceImage = self;
    UIImage *newImage = nil;
    CGSize imageSize = sourceImage.size;
    CGFloat width = imageSize.width;
    CGFloat height = imageSize.height;
    CGFloat targetWidth = targetSize.width;
    CGFloat targetHeight = targetSize.height;
    CGFloat scaleFactor = 0.0;
    CGFloat scaledWidth = targetWidth;
    CGFloat scaledHeight = targetHeight;
    CGPoint thumbnailPoint = CGPointMake(0.0, 0.0);

    if (CGSizeEqualToSize(imageSize, targetSize) == NO)
    {
        CGFloat widthFactor = targetWidth / width;
        CGFloat heightFactor = targetHeight / height;

        if (widthFactor > heightFactor)
        {
            scaleFactor = widthFactor; // scale to fit height
        }
        else
        {
            scaleFactor = heightFactor; // scale to fit width
        }

        scaledWidth = width * scaleFactor;
        scaledHeight = height * scaleFactor;

        // center the image
        if (widthFactor > heightFactor)
        {
            thumbnailPoint.y = (targetHeight - scaledHeight) * 0.5;
        }
        else
        {
            if (widthFactor < heightFactor)
            {
                thumbnailPoint.x = (targetWidth - scaledWidth) * 0.5;
            }
        }
    }

    UIGraphicsBeginImageContext(targetSize); // this will crop

    CGRect thumbnailRect = CGRectZero;
    thumbnailRect.origin = thumbnailPoint;
    thumbnailRect.size.width = scaledWidth;
    thumbnailRect.size.height = scaledHeight;

    [sourceImage drawInRect:thumbnailRect];
    newImage = UIGraphicsGetImageFromCurrentImageContext();
    if (newImage == nil) {
        NSLog(@"could not scale image");
    }

    //pop the context to get back to the default
    UIGraphicsEndImageContext();
    return newImage;
}
I am passing images in from the user's photo library and appending each one after cropping it to 720 x 960.
With 100 images I get memory warnings, and when I profile the app in Instruments it uses roughly 400 MB. If anyone can see what I am doing wrong, please help.
Answer 0 (score: 1)
The problem is that you are exhausting all of the app's memory inside your video processing loop. You cannot keep hundreds of images allocated in memory at the same time; the code will crash when run on an iOS device. Read the blog post on this topic at video_and_memory_usage_on_ios_devices, then change your for loop so that an autorelease pool is created for each iteration of the loop, which fixes the runaway memory use. Also note that kCVPixelFormatType_32ARGB will be very slow; you should use kCVPixelFormatType_32BGRA instead.
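Purely as an illustrative sketch (not part of the original answer), here is how the for loop inside writeImagesAsMovie:toPath:numPhoto: could be wrapped in a per-iteration @autoreleasepool. It reuses the question's own adaptor, assets, totPics, videoWriter and pixelBufferFromCGImage: names unchanged, and it assumes pixelBufferFromCGImage: has been switched to produce kCVPixelFormatType_32BGRA buffers as suggested above:

    int fps = 2;
    for (int i = 0; i < totPics; i++) {
        @autoreleasepool {
            if (adaptor.assetWriterInput.readyForMoreMediaData) {
                // Same timing as before: frame i is presented at (i + 1) / fps seconds.
                CMTime presentTime = CMTimeMake(i + 1, fps);

                ALAsset *asset = [assets objectAtIndex:i];
                ALAssetRepresentation *rep = [asset defaultRepresentation];
                UIImage *fullImage = [UIImage imageWithCGImage:[rep fullScreenImage]
                                                         scale:[rep scale]
                                                   orientation:(UIImageOrientation)[rep orientation]];
                UIImage *frameImage = [fullImage imageByScalingAndCroppingForSize:CGSizeMake(720.0, 960.0)];

                // Assumes pixelBufferFromCGImage: now creates kCVPixelFormatType_32BGRA buffers.
                CVPixelBufferRef buffer = [self pixelBufferFromCGImage:[frameImage CGImage]];
                if (![adaptor appendPixelBuffer:buffer withPresentationTime:presentTime]) {
                    NSLog(@"failed to append buffer: %@", [videoWriter error]);
                }
                if (buffer) {
                    CVPixelBufferRelease(buffer);
                }
                // The autoreleased asset, representation and intermediate UIImages are
                // drained here, at the end of each iteration, instead of piling up until
                // the whole method returns.
            } else {
                NSLog(@"adaptor not ready for more media data at frame %d", i);
            }
        }
    }

Because the pool drains at the end of every iteration, only one full-screen image, one cropped image and one pixel buffer are alive at any moment, so peak memory stays roughly flat regardless of how many photos are processed.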