OpenCV iOS - results differ between the simulator and the iPhone

Posted: 2012-05-01 15:36:22

Tags: iphone ios opencv simulator perspective

I'm running into a problem when cropping an image with the OpenCV library on a real iPhone.

I have an image with a selected region, and I want to apply a perspective transform to that region of the image. This works fine on the simulator, but on the iPhone the new image is not mapped to the rectangle, and the new image also comes out blue.

Here is my code:

+ (IplImage *)CreateIplImageFromUIImage:(UIImage *)image {
    // Getting CGImage from UIImage
    CGImageRef imageRef = image.CGImage;

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    // Creating temporal IplImage for drawing
    IplImage *iplimage = cvCreateImage(
                                       cvSize(image.size.width,image.size.height), IPL_DEPTH_8U, 4
                                       );
    // Creating CGContext for temporal IplImage
    CGContextRef contextRef = CGBitmapContextCreate(
                                                    iplimage->imageData, iplimage->width, iplimage->height,
                                                    iplimage->depth, iplimage->widthStep,
                                                    colorSpace, kCGImageAlphaPremultipliedLast|kCGBitmapByteOrderDefault
                                                    );
    // Drawing CGImage to CGContext
    CGContextDrawImage(
                       contextRef,
                       CGRectMake(0, 0, image.size.width, image.size.height),
                       imageRef
                       );
    CGContextRelease(contextRef);
    CGColorSpaceRelease(colorSpace);

    // Creating result IplImage
    IplImage *ret = cvCreateImage(cvGetSize(iplimage), IPL_DEPTH_8U, 3);
    cvCvtColor(iplimage, ret, CV_RGBA2BGR);
    cvReleaseImage(&iplimage);

    return ret;
}
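
Since the problem only shows up on the device, I can log what each environment actually passes into this converter; on a Retina iPhone `image.size` is in points while the CGImage behind it is in pixels, so the two runs may not even start from the same buffer size or pixel layout. A quick diagnostic I could drop at the top of CreateIplImageFromUIImage (the log text is just illustrative):

// Diagnostic only: compare what the simulator and the device feed into the converter.
NSLog(@"UIImage: %.0f x %.0f points, scale %.1f",
      image.size.width, image.size.height, image.scale);
NSLog(@"CGImage: %zu x %zu pixels, %zu bits/pixel, alpha info %u",
      CGImageGetWidth(imageRef), CGImageGetHeight(imageRef),
      CGImageGetBitsPerPixel(imageRef), (unsigned)CGImageGetAlphaInfo(imageRef));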

+ (UIImage *)UIImageFromIplImage:(IplImage *)img {
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

    // Creating result IplImage
    IplImage *image = cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, 4);
    cvCvtColor(img, image, CV_BGR2RGBA);

    // Allocating the buffer for CGImage
    NSData *data =
    [NSData dataWithBytes:image->imageData length:image->imageSize];
    CGDataProviderRef provider =
    CGDataProviderCreateWithCFData((CFDataRef)data);
    // Creating CGImage from chunk of IplImage
    CGImageRef imageRef = CGImageCreate(
                                        image->width, image->height,
                                        image->depth, image->depth * image->nChannels, image->widthStep,
                                        colorSpace, kCGImageAlphaNone|kCGBitmapByteOrderDefault,
                                        provider, NULL, false, kCGRenderingIntentDefault
                                        );

    // Getting UIImage from CGImage
    UIImage *ret = [UIImage imageWithCGImage:imageRef];
    CGImageRelease(imageRef);
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(colorSpace);

    return ret;
}
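
Two details in UIImageFromIplImage that may or may not be related: the buffer handed to CGImageCreate holds 4-channel RGBA, but the bitmap info says kCGImageAlphaNone, and the intermediate IplImage is never released even though NSData has already copied the bytes. A blue cast usually means red and blue ended up swapped somewhere in the RGBA/BGR round trip. Below is a variant with an explicit "RGBA, ignore the alpha byte" layout and the leak removed; this is just a sketch of what I would try, not a confirmed fix:

+ (UIImage *)UIImageFromIplImage:(IplImage *)img {
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

    // Convert the 3-channel BGR result back to RGBA for CoreGraphics
    IplImage *rgba = cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, 4);
    cvCvtColor(img, rgba, CV_BGR2RGBA);

    NSData *data = [NSData dataWithBytes:rgba->imageData length:rgba->imageSize];
    size_t width = rgba->width, height = rgba->height, bytesPerRow = rgba->widthStep;
    cvReleaseImage(&rgba);   // NSData copied the pixels, so the temporary can go now

    CGDataProviderRef provider = CGDataProviderCreateWithCFData((CFDataRef)data);
    // Declare the layout explicitly: 8 bits/component, 32 bits/pixel,
    // RGBA order with the alpha byte present but ignored
    CGImageRef imageRef = CGImageCreate(width, height, 8, 32, bytesPerRow,
                                        colorSpace,
                                        kCGImageAlphaNoneSkipLast | kCGBitmapByteOrderDefault,
                                        provider, NULL, false, kCGRenderingIntentDefault);

    UIImage *ret = [UIImage imageWithCGImage:imageRef];
    CGImageRelease(imageRef);
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(colorSpace);

    return ret;
}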

+ (UIImage *)perspectiveTransform: (UIImage*) originalImage :(CGFloat) h :(CGFloat) w :(CGPoint) point1 :(CGPoint) point2 :(CGPoint) point3 :(CGPoint) point4
{

    CvPoint2D32f srcQuad[4];

    srcQuad[0].x = point1.x;
    srcQuad[0].y = point1.y;
    srcQuad[1].x = point2.x;
    srcQuad[1].y = point2.y;
    srcQuad[2].x = point3.x;
    srcQuad[2].y = point3.y;
    srcQuad[3].x = point4.x;
    srcQuad[3].y = point4.y;

    IplImage *src = [self CreateIplImageFromUIImage:originalImage];

    IplImage *dst = cvCreateImage(cvGetSize(src),
                                  src->depth,
                                  src->nChannels);

    cvZero(dst);

    CGFloat width = src->width;
    CGFloat height = src->height;

    // 3x3 perspective transform matrix
    CvMat* mmat = cvCreateMat(3, 3, CV_32FC1);

    // c1: the selected corners rescaled into image coordinates,
    // c2: the full image rectangle they are mapped onto
    CvPoint2D32f *c1 = (CvPoint2D32f *)malloc(4 * sizeof(CvPoint2D32f));
    CvPoint2D32f *c2 = (CvPoint2D32f *)malloc(4 * sizeof(CvPoint2D32f));

    c1[0].x = round((width/w)*srcQuad[0].x);   c1[0].y = round((height/h)*srcQuad[0].y);
    c1[1].x = round((width/w)*srcQuad[1].x);   c1[1].y = round((height/h)*srcQuad[1].y);
    c1[2].x = round((width/w)*srcQuad[2].x);   c1[2].y = round((height/h)*srcQuad[2].y);
    c1[3].x = round((width/w)*srcQuad[3].x);   c1[3].y = round((height/h)*srcQuad[3].y);

    c2[0].x = 0;            c2[0].y = 0;
    c2[1].x = width - 1;    c2[1].y = 0;
    c2[2].x = 0;            c2[2].y = height - 1;
    c2[3].x = width - 1;    c2[3].y = height - 1;

    mmat = cvGetPerspectiveTransform(c1, c2, mmat);
    free(c1);
    free(c2);

    cvWarpPerspective(src, dst, mmat, CV_INTER_LINEAR+CV_WARP_FILL_OUTLIERS, cvScalarAll(0));

    cvReleaseImage(&src);
    cvReleaseMat(&mmat);

    UIImage *newImage = [self UIImageFromIplImage:dst];
    cvReleaseImage(&dst);

    return newImage;
}
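
For completeness, this is roughly how I call it (the class, view, and point names below are just placeholders, not my real code); the corner order has to match the destination quad built inside the method, i.e. top-left, top-right, bottom-left, bottom-right:

// Hypothetical call site: the four corners were picked on an image view,
// so its bounds provide the h/w used to rescale them into image coordinates.
UIImage *warped = [ImageUtils perspectiveTransform:imageView.image
                                                  :imageView.bounds.size.height
                                                  :imageView.bounds.size.width
                                                  :topLeft :topRight
                                                  :bottomLeft :bottomRight];
resultView.image = warped;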

Thanks for your help!

0 Answers:

No answers yet.