I am using the OpenCV Color Blob Detection sample code, but the app crashes with a fatal error at Core.multiply(contour, new Scalar(4,4), contour);

libc: Fatal signal 11 (SIGSEGV), code 1, fault addr 0x0 in tid 11016 (Thread-4857)

A more detailed log is available if needed to pinpoint the exact cause. In another part of the code I use Core.add(fg, bg, markers); and the app throws the same fatal error there as well. What is going wrong, and how can I fix it? I am using OpenCV 3.1.
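For reference, the two calls in isolation would look roughly like this (a minimal sketch, not taken from the app: the point values are made up, and the types of fg, bg and markers are only a guess, because that part of the code is not posted here). It assumes the usual org.opencv.core imports and that the OpenCV native library has already been loaded:

// A tiny hand-built contour of type CV_32SC2, the same type findContours returns
MatOfPoint contour = new MatOfPoint(new Point(1, 2), new Point(3, 4), new Point(5, 6));
// First crashing call: scale every point by 4 in place
Core.multiply(contour, new Scalar(4, 4), contour);
Log.d("OpenCVTest", contour.dump());

// Second crashing call, with guessed single-channel float inputs
// (fg, bg and markers come from a part of the code that is not shown below)
Mat fg = Mat.zeros(10, 10, CvType.CV_32FC1);
Mat bg = Mat.zeros(10, 10, CvType.CV_32FC1);
Mat markers = new Mat();
Core.add(fg, bg, markers);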
The full code is below:
package com.iu.kamraapp.utils;
import android.util.Log;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;
public class ColorBlobDetector {
    // Lower and Upper bounds for range checking in HSV color space
    private Scalar mLowerBound = new Scalar(0);
    private Scalar mUpperBound = new Scalar(0);
    // Minimum contour area in percent for contours filtering
    private static double mMinContourArea = 0.1;
    // Color radius for range checking in HSV color space
    private Scalar mColorRadius = new Scalar(25,50,50,0);
    private Mat mSpectrum = new Mat();
    private List<MatOfPoint> mContours = new ArrayList<MatOfPoint>();
    // Cache
    Mat mPyrDownMat = new Mat();
    Mat mHsvMat = new Mat();
    Mat mMask = new Mat();
    Mat mDilatedMask = new Mat();
    Mat mHierarchy = new Mat();

    public void setColorRadius(Scalar radius) {
        mColorRadius = radius;
    }

    public void setHsvColor(Scalar hsvColor) {
        double minH = (hsvColor.val[0] >= mColorRadius.val[0]) ? hsvColor.val[0]-mColorRadius.val[0] : 0;
        double maxH = (hsvColor.val[0]+mColorRadius.val[0] <= 255) ? hsvColor.val[0]+mColorRadius.val[0] : 255;
        mLowerBound.val[0] = minH;
        mUpperBound.val[0] = maxH;
        mLowerBound.val[1] = hsvColor.val[1] - mColorRadius.val[1];
        mUpperBound.val[1] = hsvColor.val[1] + mColorRadius.val[1];
        mLowerBound.val[2] = hsvColor.val[2] - mColorRadius.val[2];
        mUpperBound.val[2] = hsvColor.val[2] + mColorRadius.val[2];
        mLowerBound.val[3] = 0;
        mUpperBound.val[3] = 255;
        Mat spectrumHsv = new Mat(1, (int)(maxH-minH), CvType.CV_8UC3);
        for (int j = 0; j < maxH-minH; j++) {
            byte[] tmp = {(byte)(minH+j), (byte)255, (byte)255};
            spectrumHsv.put(0, j, tmp);
        }
        Imgproc.cvtColor(spectrumHsv, mSpectrum, Imgproc.COLOR_HSV2RGB_FULL, 4);
    }

    public Mat getSpectrum() {
        return mSpectrum;
    }

    public void setMinContourArea(double area) {
        mMinContourArea = area;
    }

    public void process(Mat rgbaImage) {
        try {
            Imgproc.pyrDown(rgbaImage, mPyrDownMat);
            Imgproc.pyrDown(mPyrDownMat, mPyrDownMat);
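            // Each pyrDown halves width and height, so two calls leave the mask at
            // 1/4 of the original frame size; the contours found on it are scaled
            // back up by 4 below to match the full-size image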
            Imgproc.cvtColor(mPyrDownMat, mHsvMat, Imgproc.COLOR_RGB2HSV_FULL);
            Core.inRange(mHsvMat, mLowerBound, mUpperBound, mMask);
            Imgproc.dilate(mMask, mDilatedMask, new Mat());
            List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
            Imgproc.findContours(mDilatedMask, contours, mHierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
            // Find max contour area
            double maxArea = 0;
            Iterator<MatOfPoint> each = contours.iterator();
            while (each.hasNext()) {
                MatOfPoint wrapper = each.next();
                double area = Imgproc.contourArea(wrapper);
                if (area > maxArea)
                    maxArea = area;
            }
            // Filter contours by area and resize to fit the original image size
            mContours.clear();
            each = contours.iterator();
            while (each.hasNext()) {
                MatOfPoint contour = each.next();
                if (Imgproc.contourArea(contour) > mMinContourArea * maxArea) {
                    Core.multiply(contour, new Scalar(4, 4), contour); // issue is here
                    mContours.add(contour);
                }
            }
        }
        catch (Exception e) { e.printStackTrace(); }
    }

    public List<MatOfPoint> getContours() {
        return mContours;
    }
}

package com.iu.kamraapp;
import android.content.Context;
import android.content.res.Configuration;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.MotionEvent;
import android.view.SurfaceView;
import android.view.View;
import com.iu.kamraapp.utils.AppGlobal;
import com.iu.kamraapp.utils.ColorBlobDetector;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.JavaCameraView;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Point;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import java.util.ArrayList;
import java.util.List;
public class ColorBlobActivity extends AppCompatActivity implements View.OnTouchListener,CameraBridgeViewBase.CvCameraViewListener2 {
    private static final String TAG = "MainActivity";
    Context context;
    int screenWidth, screenHeight;
    private CameraBridgeViewBase mOpenCvCameraView;
    private boolean mIsColorSelected = false;
    private Mat mRgba;
    private Scalar mBlobColorRgba;
    private Scalar mBlobColorHsv;
    private ColorBlobDetector mDetector;
    private Mat mSpectrum;
    private Size SPECTRUM_SIZE;
    private Scalar CONTOUR_COLOR;

    BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            switch (status) {
                case LoaderCallbackInterface.SUCCESS: {
                    Log.i(TAG, "OpenCV loaded successfully");
                    mOpenCvCameraView.enableView();
                    mOpenCvCameraView.setOnTouchListener(ColorBlobActivity.this);
                }
                break;
                default: {
                    super.onManagerConnected(status);
                }
                break;
            }
        }
    };

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        context = this;
        setContentView(R.layout.activity_main);
        screenWidth = AppGlobal.getScreenResolution(context, true);
        screenHeight = AppGlobal.getScreenResolution(context, false);
        mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.tutorial1_activity_java_surface_view);
        mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
        mOpenCvCameraView.setCvCameraViewListener(this);
    }

    @Override
    public void onConfigurationChanged(Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
        if (newConfig.orientation == Configuration.ORIENTATION_LANDSCAPE) {
            screenWidth = AppGlobal.getScreenResolution(context, false);
            screenHeight = AppGlobal.getScreenResolution(context, true);
        } else if (newConfig.orientation == Configuration.ORIENTATION_PORTRAIT) {
            screenWidth = AppGlobal.getScreenResolution(context, true);
            screenHeight = AppGlobal.getScreenResolution(context, false);
        }
    }

    @Override
    public void onPause()
    {
        super.onPause();
        if (mOpenCvCameraView != null)
            mOpenCvCameraView.disableView();
    }

    @Override
    public void onResume()
    {
        super.onResume();
        if (!OpenCVLoader.initDebug()) {
            Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
            OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_1_0, this, mLoaderCallback);
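            // Note: the static in-package loader is also invoked below,
            // right after requesting async initialization via OpenCV Manager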
            OpenCVLoader.initDebug(true);
        } else {
            Log.d(TAG, "OpenCV library found inside package. Using it!");
            mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
        }
    }

    public void onDestroy() {
        super.onDestroy();
        if (mOpenCvCameraView != null)
            mOpenCvCameraView.disableView();
    }

    @Override
    public void onCameraViewStarted(int width, int height) {
        mRgba = new Mat(height, width, CvType.CV_8UC4);
        mDetector = new ColorBlobDetector();
        mSpectrum = new Mat();
        mBlobColorRgba = new Scalar(255);
        mBlobColorHsv = new Scalar(255);
        SPECTRUM_SIZE = new Size(200, 64);
        CONTOUR_COLOR = new Scalar(255,0,0,255);
    }

    public void onCameraViewStopped() {
        mRgba.release();
    }

    public boolean onTouch(View v, MotionEvent event) {
        try {
            int cols = mRgba.cols();
            int rows = mRgba.rows();
            int xOffset = (mOpenCvCameraView.getWidth() - cols) / 2;
            int yOffset = (mOpenCvCameraView.getHeight() - rows) / 2;
            int x = (int) event.getX() - xOffset;
            int y = (int) event.getY() - yOffset;
            Log.i(TAG, "Touch image coordinates: (" + x + ", " + y + ")");
            if ((x < 0) || (y < 0) || (x > cols) || (y > rows)) return false;
            Rect touchedRect = new Rect();
            touchedRect.x = (x > 4) ? x - 4 : 0;
            touchedRect.y = (y > 4) ? y - 4 : 0;
            touchedRect.width = (x + 4 < cols) ? x + 4 - touchedRect.x : cols - touchedRect.x;
            touchedRect.height = (y + 4 < rows) ? y + 4 - touchedRect.y : rows - touchedRect.y;
            Mat touchedRegionRgba = mRgba.submat(touchedRect);
            Mat touchedRegionHsv = new Mat();
            Imgproc.cvtColor(touchedRegionRgba, touchedRegionHsv, Imgproc.COLOR_RGB2HSV_FULL);
            // Calculate average color of touched region
            mBlobColorHsv = Core.sumElems(touchedRegionHsv);
            int pointCount = touchedRect.width * touchedRect.height;
            for (int i = 0; i < mBlobColorHsv.val.length; i++)
                mBlobColorHsv.val[i] /= pointCount;
            mBlobColorRgba = converScalarHsv2Rgba(mBlobColorHsv);
            Log.i(TAG, "Touched rgba color: (" + mBlobColorRgba.val[0] + ", " + mBlobColorRgba.val[1] +
                    ", " + mBlobColorRgba.val[2] + ", " + mBlobColorRgba.val[3] + ")");
            mDetector.setHsvColor(mBlobColorHsv);
            Imgproc.resize(mDetector.getSpectrum(), mSpectrum, SPECTRUM_SIZE);
            mIsColorSelected = true;
            touchedRegionRgba.release();
            touchedRegionHsv.release();
        } catch (Exception e) { e.printStackTrace(); }
        return false; // don't need subsequent touch events
    }

    @Override
    public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
        mRgba = inputFrame.rgba();
        if (mIsColorSelected) {
            mDetector.process(mRgba);
            List<MatOfPoint> contours = mDetector.getContours();
            Log.d(TAG, "Contours count: " + contours.size());
            Imgproc.drawContours(mRgba, contours, -1, CONTOUR_COLOR);
            Mat colorLabel = mRgba.submat(4, 68, 4, 68);
            colorLabel.setTo(mBlobColorRgba);
            Mat spectrumLabel = mRgba.submat(4, 4 + mSpectrum.rows(), 70, 70 + mSpectrum.cols());
            mSpectrum.copyTo(spectrumLabel);
        }
        return mRgba;
    }

    private Scalar converScalarHsv2Rgba(Scalar hsvColor) {
        Mat pointMatRgba = new Mat();
        Mat pointMatHsv = new Mat(1, 1, CvType.CV_8UC3, hsvColor);
        Imgproc.cvtColor(pointMatHsv, pointMatRgba, Imgproc.COLOR_HSV2RGB_FULL, 4);
        return new Scalar(pointMatRgba.get(0, 0));
    }
}