This program is from the OpenCV ColorBlobDetectionActivity.java sample. I tried to modify it so that it detects yellow objects when the screen is touched, but it always detects only black objects, even though I specify the color scalar as yellow. I have added //NOTICE comments at the places I believe are relevant.
package com.example.road_guiding;
import java.util.List;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import android.app.Activity;
import android.os.Bundle;
import android.util.Log;
import android.view.MotionEvent;
import android.view.View;
public class ColorBlobDetectionActivity extends Activity implements View.OnTouchListener, CameraBridgeViewBase.CvCameraViewListener2 {
// private static final String TAG = "OCVSample::Activity";
private Scalar CONTOUR_COLOR;
private Scalar mBlobColorHsv;
private Scalar mBlobColorRgba;
//NOTICE
private Scalar temp;
private ColorBlobDetector mDetector;
private boolean mIsColorSelected = false;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
public void onManagerConnected(int paramAnonymousInt) {
switch (paramAnonymousInt) {
default:
super.onManagerConnected(paramAnonymousInt);
// Log.i("OCVSample::Activity", "OpenCV loaded successfully");
ColorBlobDetectionActivity.this.mOpenCvCameraView.enableView();
ColorBlobDetectionActivity.this.mOpenCvCameraView.setOnTouchListener(ColorBlobDetectionActivity.this);
return;
}
}
};
private CameraBridgeViewBase mOpenCvCameraView;
private Mat mRgba;
// private Size SPECTRUM_SIZE;
// private Mat mSpectrum;
public ColorBlobDetectionActivity() {
Log.i("OCVSample::Activity", "Instantiated new " + getClass());
}
private Scalar converScalarHsv2Rgba(Scalar paramScalar) {
Mat localMat = new Mat();
Imgproc.cvtColor(new Mat(1, 1, CvType.CV_8UC3, paramScalar), localMat, Imgproc.COLOR_HSV2RGB_FULL, 4); // 71 == COLOR_HSV2RGB_FULL
return new Scalar(localMat.get(0, 0));
}
public Mat onCameraFrame( CameraBridgeViewBase.CvCameraViewFrame paramCvCameraViewFrame) {
this.mRgba = paramCvCameraViewFrame.rgba(); // mRbga = input frame with color
if (this.mIsColorSelected) {
this.mDetector.process(this.mRgba);
//contour info is ready in detector
List<MatOfPoint> colorContour = this.mDetector.getContours();
// Log.e("OCVSample::Activity", "Contours count: " + colorContour.size());
Imgproc.drawContours(this.mRgba, colorContour, -1, this.CONTOUR_COLOR); // draw contours around the detected blobs
this.mRgba.submat(4, 68, 4, 68).setTo(this.mBlobColorRgba); // fill a small square in the corner with the selected color
// Producing spectrum
// Mat localMat = this.mRgba.submat(4, 4 + this.mSpectrum.rows(), 70, 70 + this.mSpectrum.cols());
// this.mSpectrum.copyTo(localMat);
}
return this.mRgba;
}
public void onCameraViewStarted(int paramInt1, int paramInt2) {
this.mRgba = new Mat(paramInt2, paramInt1, CvType.CV_8UC4); // paramInt1 = width, paramInt2 = height of the delivered frames
this.mDetector = new ColorBlobDetector();
this.mBlobColorRgba = new Scalar(255.0);
this.mBlobColorHsv = new Scalar(255.0);
this.CONTOUR_COLOR = new Scalar(255.0, 0.0, 0.0, 255.0); // specify the contour color
//NOTICE
this.temp = new Scalar (237.0, 169.0, 50.0, 255.0);
//yellow to be used:
// this.mBlobColorRgba.val[0] = 237;
// this.mBlobColorRgba.val[1] = 169;
// this.mBlobColorRgba.val[2] = 50;
// this.mBlobColorRgba.val[3] = 255;
// this.mSpectrum = new Mat();
// this.SPECTRUM_SIZE = new Size(200.0, 64.0);
}
public void onCreate(Bundle paramBundle) {
// Log.i("OCVSample::Activity", "called onCreate");
super.onCreate(paramBundle);
// requestWindowFeature(1); // do not show app title
// getWindow().addFlags(128);
setContentView(R.layout.activity_color_blob_detection);
this.mOpenCvCameraView = ((CameraBridgeViewBase) findViewById(R.id.HelloOpenCvView));
this.mOpenCvCameraView.setCvCameraViewListener(this);
}
public boolean onTouch(View paramView, MotionEvent paramMotionEvent)
{
int cameraViewWidth = this.mRgba.cols(); // cameraViewWidth = i
int cameraViewHeight = this.mRgba.rows(); // cameraViewHeight = j
int xOffset = (this.mOpenCvCameraView.getWidth() - cameraViewWidth) / 2;
int yOffset = (this.mOpenCvCameraView.getHeight() - cameraViewHeight) / 2;
int touchX = (int)paramMotionEvent.getX() - xOffset;
int touchY = (int)paramMotionEvent.getY() - yOffset;
// Log.i("OCVSample::Activity", "Touch image coordinates: (" + n = touchX + ", " + i1=touchY + ")");
if ((touchX < 0) || (touchY < 0) || (touchX > cameraViewWidth) || (touchY > cameraViewHeight)) {
return false;
}
Rect touchedRect = new Rect();
touchedRect.x = (touchX > 4) ? touchX - 4 : 0;
touchedRect.y = (touchY > 4) ? touchY - 4 : 0;
touchedRect.width = (touchX + 4 < cameraViewWidth) ? touchX + 4 - touchedRect.x : cameraViewWidth - touchedRect.x;
touchedRect.height = (touchY + 4 < cameraViewHeight) ? touchY + 4 - touchedRect.y : cameraViewHeight - touchedRect.y;
// Mat touchedRegionRgba = this.mRgba.submat(touchedRect);
Mat touchedRegionRgba = new Mat();
//NOTICE
Imgproc.cvtColor(new Mat(1, 1, CvType.CV_8UC3, temp), touchedRegionRgba, 71, 0); // 71 == Imgproc.COLOR_HSV2RGB_FULL
Mat touchedRegionHsv = new Mat();
Imgproc.cvtColor(touchedRegionRgba, touchedRegionHsv, Imgproc.COLOR_RGB2HSV_FULL); //67
this.mBlobColorHsv = Core.sumElems(touchedRegionHsv); //calculate average color of touched region
int pixelCount = touchedRect.width * touchedRect.height;
for (int i = 0; i < this.mBlobColorHsv.val.length; i++) {
double[] arrayOfDouble = this.mBlobColorHsv.val;
arrayOfDouble[i] /= pixelCount;
}
touchedRegionRgba.release();
touchedRegionHsv.release();
this.mBlobColorRgba = converScalarHsv2Rgba(this.mBlobColorHsv);
// Log.i("OCVSample::Activity", "Touched rgba color: (" + this.mBlobColorRgba.val[0] + ", " + this.mBlobColorRgba.val[1] + ", " + this.mBlobColorRgba.val[2] + ", " + this.mBlobColorRgba.val[3] + ")");
this.mDetector.setHsvColor(this.mBlobColorHsv);
// Imgproc.resize(this.mDetector.getSpectrum(), this.mSpectrum, this.SPECTRUM_SIZE);
this.mIsColorSelected = true;
return false;
}
public void onDestroy() {
super.onDestroy();
if (this.mOpenCvCameraView != null) {
this.mOpenCvCameraView.disableView();
}
}
public void onPause() {
super.onPause();
if (this.mOpenCvCameraView != null) {
this.mOpenCvCameraView.disableView();
}
}
public void onResume() {
super.onResume();
OpenCVLoader.initAsync("2.4.3", this, this.mLoaderCallback);
}
public void onCameraViewStopped() {
this.mRgba.release();
}
}
Any help would be appreciated, thanks!
Answer (score: 1)
Sorry, I don't know Java, but I can suggest the general logic for detecting a "yellow" color:
- Convert the RGB image to a YUV image and equalize the Y channel. Since the Y channel carries the luminance, doing this reduces the effect of illumination changes.
- Then convert the image back from YUV to RGB.
- Now convert the image to HSV.
- Now try to count only those pixels that could represent a "yellow" color. For that, use the following conditions:
  1. S > 0 (or some other value close to 0) to eliminate the white pixels that disturb the count.
  2. V > 0 to remove pixels with V = 0.
  3. If H > 22 && H < 37, increment yellowPixelCount by 1.
- Following the above steps you can count the "yellow" pixels in the image, and if the count is greater than a threshold you can conclude that the color is "yellow".
PS: Don't forget to also count the total number of pixels that satisfy conditions 1 & 2, so that you can use that value to compute the percentage of the yellow component and decide whether the image is yellow.
if (condition 1 && condition 2 satisfied)
{
    totalPixelCount++;
    if (condition 3 satisfied)
    {
        yellowPixelCount++;
    }
}
% of yellow component = yellowPixelCount / totalPixelCount * 100
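To make that concrete, here is a minimal Java/OpenCV sketch of the counting logic (not from the original answer). It assumes an RGBA input frame like mRgba from the question, OpenCV's default 0..180 hue range (Imgproc.COLOR_RGB2HSV, matching the 22..37 window above), and a hypothetical method name yellowPercentage; the per-pixel Mat.get loop is written for clarity, not speed. It reuses the question's imports plus java.util.ArrayList and java.util.List.

// Sketch only: equalize luminance, convert to HSV, and count "yellow" pixels.
// Assumes the default 0..180 hue range; the thresholds are starting points to tune.
public static double yellowPercentage(Mat rgba) {
    Mat rgb = new Mat();
    Imgproc.cvtColor(rgba, rgb, Imgproc.COLOR_RGBA2RGB);

    // Steps 1-2: equalize the Y (luminance) channel in YUV, then go back to RGB.
    Mat yuv = new Mat();
    Imgproc.cvtColor(rgb, yuv, Imgproc.COLOR_RGB2YUV);
    List<Mat> yuvChannels = new ArrayList<Mat>();
    Core.split(yuv, yuvChannels);
    Imgproc.equalizeHist(yuvChannels.get(0), yuvChannels.get(0));
    Core.merge(yuvChannels, yuv);
    Imgproc.cvtColor(yuv, rgb, Imgproc.COLOR_YUV2RGB);

    // Step 3: convert to HSV.
    Mat hsv = new Mat();
    Imgproc.cvtColor(rgb, hsv, Imgproc.COLOR_RGB2HSV);

    // Step 4: apply the three conditions per pixel.
    long totalPixelCount = 0;
    long yellowPixelCount = 0;
    for (int row = 0; row < hsv.rows(); row++) {
        for (int col = 0; col < hsv.cols(); col++) {
            double[] p = hsv.get(row, col); // p[0] = H, p[1] = S, p[2] = V
            if (p[1] > 0 && p[2] > 0) {       // conditions 1 and 2
                totalPixelCount++;
                if (p[0] > 22 && p[0] < 37) { // condition 3
                    yellowPixelCount++;
                }
            }
        }
    }
    return totalPixelCount == 0 ? 0 : 100.0 * yellowPixelCount / totalPixelCount;
}

Calling yellowPercentage(mRgba) on a whole frame, or on mRgba.submat(touchedRect) for just the touched region, and comparing the result against a threshold (say 30%) would implement the final decision step; that threshold is an assumption you would tune for your scene.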