iOS OpenCV callback "processImage": resolution does not match the ImageView

Date: 2016-04-07 06:45:35

Tags: c++ ios opencv

I am using OpenCV 3 on iOS, with the following code to capture video and process the frames:

videoCamera = [[CvVideoCamera alloc] initWithParentView:_imageView];
videoCamera.defaultAVCaptureDevicePosition = AVCaptureDevicePositionBack;
videoCamera.delegate = self;
videoCamera.defaultAVCaptureSessionPreset = AVCaptureSessionPreset640x480;
videoCamera.defaultAVCaptureVideoOrientation = AVCaptureVideoOrientationPortrait;
videoCamera.defaultFPS = 30;

and this callback:

- (void)processImage:(cv::Mat &)image {}

But the Mat I get has 640 rows and 480 columns, which seems strange: if I put the image into an ImageView that is 640 wide by 480 high, it fits well, so the Mat should be 480 rows by 640 columns, since OpenCV Mats are row-major. I need to process it as a 480x640 Mat. Any solutions?

I also tried transposing it, but then it looks odd when displayed in the ImageView. Does OpenCV perhaps rotate it implicitly somewhere internally?
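
One possible way to get the 480x640 layout for processing (a minimal sketch, not taken from the question or the answer below; the flip direction is an assumption and may need to be swapped on a real device): rotate the Mat 90 degrees with cv::transpose and cv::flip at the top of processImage, run the processing on the rotated copy, then rotate back before returning so the preview drawn into the parent ImageView is unaffected.

- (void)processImage:(cv::Mat &)image {
    // The camera delivers a 640-row x 480-column Mat here. Rotate it 90 degrees
    // (transpose + vertical flip) to get a 480 x 640 working copy.
    cv::Mat landscape;
    cv::transpose(image, landscape);
    cv::flip(landscape, landscape, 0);

    // ... process `landscape` (480 rows x 640 columns) here ...

    // Undo the rotation so the framework still renders the original frame layout.
    cv::flip(landscape, landscape, 0);
    cv::transpose(landscape, image);
}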

1 Answer:

Answer 0 (score: 0)

Although it looks like you have fixed this, I think you will run into similar problems later. Here is how to avoid all of them (I am just pasting in my existing code, which also covers this situation):

"您并不具体说明相关窗口是视图还是图层,还是实时视频或保存到文件中。您也没有指定视频是通过OpenCV录制还是通过其他方式录制。

So I have listed code snippets for each contingency; if you are familiar with OpenCV and the basics of iOS view programming, it should be clear which parts you need (in my case, incidentally, I use all of them):

- (void)viewDidLayoutSubviews {
    [super viewDidLayoutSubviews];

    // Keep the camera's video orientation in sync with the device orientation.
    switch ([UIDevice currentDevice].orientation) {
        case UIDeviceOrientationPortraitUpsideDown:
            self.videoCamera.defaultAVCaptureVideoOrientation = AVCaptureVideoOrientationPortraitUpsideDown;
            break;
        case UIDeviceOrientationLandscapeLeft:
            self.videoCamera.defaultAVCaptureVideoOrientation = AVCaptureVideoOrientationLandscapeLeft;
            break;
        case UIDeviceOrientationLandscapeRight:
            self.videoCamera.defaultAVCaptureVideoOrientation = AVCaptureVideoOrientationLandscapeRight;
            break;
        default:
            self.videoCamera.defaultAVCaptureVideoOrientation = AVCaptureVideoOrientationPortrait;
            break;
    }

    [self refresh];
}
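
// -refresh is called above but never defined in this answer. A minimal sketch,
// assuming it simply restarts the camera so the new orientation setting takes
// effect (the method name appears in the answer, but this body is an assumption):
- (void)refresh {
    if (self.videoCamera.running) {
        [self.videoCamera stop];
        [self.videoCamera start];
    }
}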

- (void)processImage:(cv::Mat &)mat {

    if (self.videoCamera.running) {
        switch (self.videoCamera.defaultAVCaptureVideoOrientation) {
            case AVCaptureVideoOrientationLandscapeLeft:
            case AVCaptureVideoOrientationLandscapeRight:
                // The landscape video is captured upside-down.
                // Rotate it by 180 degrees.
                cv::flip(mat, mat, -1);
                break;
            default:
                break;
        }
    }

    // ... the rest of the per-frame processing continues here ...
}


// Crop the blend source to the destination aspect ratio, then resize it.
// (originalBlendSrcMat and convertedBlendSrcMat are cv::Mat instance variables
// that are not shown in this answer.)
- (void)convertBlendSrcMatToWidth:(int)dstW height:(int)dstH {

    double dstAspectRatio = dstW / (double)dstH;

    int srcW = originalBlendSrcMat.cols;
    int srcH = originalBlendSrcMat.rows;
    double srcAspectRatio = srcW / (double)srcH;
    cv::Mat subMat;
    if (srcAspectRatio < dstAspectRatio) {
        int subMatH = (int)(srcW / dstAspectRatio);
        int startRow = (srcH - subMatH) / 2;
        int endRow = startRow + subMatH;
        subMat = originalBlendSrcMat.rowRange(startRow, endRow);
    } else {
        int subMatW = (int)(srcH * dstAspectRatio);
        int startCol = (srcW - subMatW) / 2;
        int endCol = startCol + subMatW;
        subMat = originalBlendSrcMat.colRange(startCol, endCol);
    }
    cv::resize(subMat, convertedBlendSrcMat, cv::Size(dstW, dstH), 0.0, 0.0, cv::INTER_LANCZOS4);
}



- (int)imageWidth {
    AVCaptureVideoDataOutput *output = [self.captureSession.outputs lastObject];
    NSDictionary *videoSettings = [output videoSettings];
    int videoWidth = [[videoSettings objectForKey:@"Width"] intValue];
    return videoWidth;
}

- (int)imageHeight {
    AVCaptureVideoDataOutput *output = [self.captureSession.outputs lastObject];
    NSDictionary *videoSettings = [output videoSettings];
    int videoHeight = [[videoSettings objectForKey:@"Height"] intValue];
    return videoHeight;
}

- (void)updateSize {
    // Do nothing.
}

- (void)layoutPreviewLayer {
    if (self.parentView != nil) {

        // Center the video preview.
        self.customPreviewLayer.position = CGPointMake(0.5 * self.parentView.frame.size.width, 0.5 * self.parentView.frame.size.height);

        // Find the video's aspect ratio.
        CGFloat videoAspectRatio = self.imageWidth / (CGFloat)self.imageHeight;

        // Scale the video preview while maintaining its aspect ratio.
        CGFloat boundsW;
        CGFloat boundsH;
        if (self.imageHeight > self.imageWidth) {
            if (self.letterboxPreview) {
                boundsH = self.parentView.frame.size.height;
                boundsW = boundsH * videoAspectRatio;
            } else {
                boundsW = self.parentView.frame.size.width;
                boundsH = boundsW / videoAspectRatio;
            }
        } else {
            if (self.letterboxPreview) {
                boundsW = self.parentView.frame.size.width;
                boundsH = boundsW / videoAspectRatio;
            } else {
                boundsH = self.parentView.frame.size.height;
                boundsW = boundsH * videoAspectRatio;
            }
        }
        self.customPreviewLayer.bounds = CGRectMake(0.0, 0.0, boundsW, boundsH);
    }
}
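
The last four methods above (imageWidth, imageHeight, updateSize, and layoutPreviewLayer) read like overrides of OpenCV's camera classes (CvVideoCamera / CvAbstractCamera), since they reach into captureSession, parentView, and customPreviewLayer rather than into the view controller. A minimal sketch of the camera subclass they could live in; the class name VideoCamera and the letterboxPreview declaration are assumptions (layoutPreviewLayer reads self.letterboxPreview, but the answer never declares it):

#import <opencv2/videoio/cap_ios.h>

@interface VideoCamera : CvVideoCamera

// When YES, layoutPreviewLayer fits the whole video inside the parent view
// (letterbox/pillarbox); when NO, it fills the view and crops the overflow.
@property (nonatomic, assign) BOOL letterboxPreview;

@end

@implementation VideoCamera

// imageWidth, imageHeight, updateSize, and layoutPreviewLayer from the listing above go here.

@end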


There is a lot here, and you have to know where to put it all. If you cannot figure it out, let me know.
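
As for where the rest of it goes (again, this split is my reading of the snippets, not something the answer spells out): viewDidLayoutSubviews, processImage:, and convertBlendSrcMatToWidth:height: reference self.videoCamera or the blend Mats, so they would live in the view controller that owns the camera. A rough sketch of that controller's interface, with the class name and property name as assumptions:

@interface ViewController : UIViewController <CvVideoCameraDelegate> {
    // cv::Mat instance variables used by convertBlendSrcMatToWidth:height:.
    cv::Mat originalBlendSrcMat;
    cv::Mat convertedBlendSrcMat;
}

// The camera subclass sketched above.
@property (nonatomic, strong) VideoCamera *videoCamera;

@end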