Good day, StackOverflow! I'm new here and I need your help.
I have an Android project with a problem that I just can't solve.
Basically, what I want to do is this: I capture an image of a diseased plant using the Android camera intent. The app then passes the bitmap to a preprocessing function (specifically, the image gets resized). The resized bitmap is then passed to another function, where it is converted into a Mat object and handed to the function triggered by an AsyncTask (with the Mat of the bitmap image as its parameter).
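The resize step itself is straightforward; it looks roughly like this (a simplified sketch, and the method name and target size here are just placeholders):

private Bitmap resizeBitmap(Bitmap bmp)
{
    //Scale the captured photo down to a fixed working size before any OpenCV processing (sketch only)
    int targetWidth = 640;
    int targetHeight = 480;
    return Bitmap.createScaledBitmap(bmp, targetWidth, targetHeight, true);
}

Here is the function that receives the resized bitmap, builds the Mat, and kicks off the AsyncTask: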
public void processImage(final Bitmap bmp)
{
    class ProcessImage extends AsyncTask<Bitmap, Void, Void> //This AsyncTask triggers the function matchAndCompare().
    {
        Mat srcImage = new Mat(bmp.getWidth(), bmp.getHeight(), CvType.CV_8UC4, new Scalar(4));

        protected void onPreExecute()
        {
            super.onPreExecute();
            plsWait = ProgressDialog.show(CameraView.this, "Dur-Yan!", "Please wait. Processing image. " +
                    "Patience is appreciated since it will definitely pass.");
        }

        @Override
        protected Void doInBackground(Bitmap... bitmaps)
        {
            matchAndCompare(srcImage); //The function where the main processing and matching takes place.
            return null;
        }

        protected void onPostExecute(Void result)
        {
            super.onPostExecute(result);
            plsWait.dismiss();
            runOnUiThread(new Runnable()
            {
                public void run()
                {
                    Utils.matToBitmap(srcImage, bmp);
                    imgPreview.setImageBitmap(bmp);
                    forUpload(srcImage, bmp);
                }
            });
        }
    }

    ProcessImage process = new ProcessImage();
    process.execute(bmp);
}
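For context, processImage() gets called once the camera intent returns; a simplified sketch of that part (the exact intent handling and photo decoding are omitted here):

@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data)
{
    super.onActivityResult(requestCode, resultCode, data);
    if (resultCode == RESULT_OK && data != null && data.getExtras() != null)
    {
        //The camera intent returns a Bitmap in the "data" extra (sketch only)
        Bitmap captured = (Bitmap) data.getExtras().get("data");
        processImage(resizeBitmap(captured));
    }
}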
This is the function where the main image processing and matching take place. It detects keypoints and computes descriptors for the source image, then loops through (queries) the entries in the database, retrieves each bitmap image, converts each one to a Mat object, detects keypoints, computes descriptors, and stores the descriptors for each image.
public void matchAndCompare(Mat source)
{
    //Initialize minimum and maximum threshold
    int MIN_MATCH_THRESHOLD = 10;
    int MAX_MATCH_THRESHOLD = 250;
    //Initialize DescriptorMatcher
    DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_SL2);
    //Initialize MatOfDMatch
    MatOfDMatch matches = new MatOfDMatch();
    MatOfDMatch goodMatches = new MatOfDMatch();
    //Initialize good matches list
    LinkedList<DMatch> listOfGoodMatches = new LinkedList<>();
    //Initialize object lists
    LinkedList<Point> refObjectList = new LinkedList<>();
    LinkedList<Point> srcObjectList = new LinkedList<>();
    Log.e(TAG, "Source image channels: " + source.channels());
    //Pre-process source image and compute for keypoints
    Imgproc.cvtColor(source, source, Imgproc.COLOR_RGB2GRAY, 4);
    MatOfKeyPoint srcKeyPoints = createDetector(source);
    Mat srcDescriptors = extractDescription(srcKeyPoints, source);
    MatOfPoint2f srcPoint2f = new MatOfPoint2f();
    //Features2d.drawKeypoints(source, srcKeyPoints, source);
    //Retrieve images from online database, pre-process and compute for keypoints
    String result = null;
    InputStream is = null;
    int i = 0;
    if (connected == true)
    {
        try
        {
            HttpClient httpclient = new DefaultHttpClient();
            HttpPost httppost = new HttpPost(url_retrieve_images);
            HttpResponse response = httpclient.execute(httppost);
            HttpEntity entity = response.getEntity();
            is = entity.getContent();
            Log.e(TAG, "Connection successful");
            //Toast.makeText(getApplicationContext(), "pass", Toast.LENGTH_SHORT).show();
        }
        catch (Exception e)
        {
            Log.e(TAG, "Error in HTTP connection" + e.toString());
            //Toast.makeText(getApplicationContext(), "Connection fail", Toast.LENGTH_SHORT).show();
        }
        //convert response to string
        try
        {
            BufferedReader reader = new BufferedReader(new InputStreamReader(is));
            StringBuilder sb = new StringBuilder();
            String line = null;
            while ((line = reader.readLine()) != null) {
                sb.append(line + "\n");
                //Toast.makeText(getApplicationContext(), "Input Reading pass", Toast.LENGTH_SHORT).show();
            }
            is.close();
            result = sb.toString();
        }
        catch (Exception e)
        {
            Log.e(TAG, "Error converting result" + e.toString());
            //Toast.makeText(getApplicationContext(), "Input reading fail", Toast.LENGTH_SHORT).show();
        }
        //parse json data
        ArrayList<Images> imgs = new ArrayList<Images>();
        try
        {
            JSONArray jArray = new JSONArray(result);
            for (i = 0; i < jArray.length(); i++)
            {
                // Obtain images from MySQL database, stream them as Bitmap, and convert them to Mat.
                JSONObject json_data = jArray.getJSONObject(i);
                Images images = new Images();
                images.setImage_Name(json_data.getString("image_name"));
                imgs.add(images);
                String ImgLoc = "http://duryan-itp.host56.com/uploads/" + images.getImage_Name();
                //Bitmap referenceImg = BitmapFactory.decodeStream(new java.net.URL(ImgLoc).openStream());
                Mat refImage = new Mat(ImageFromURL(ImgLoc).getWidth(), ImageFromURL(ImgLoc).getHeight(), CvType.CV_8UC4, new Scalar(4));
                Log.i(TAG, "Successfully converted " + ImgLoc + " to Mat()");
                //Pre-process reference image and compute for keypoints
                Log.e(TAG, "Reference image channels: " + refImage.channels());
                Imgproc.cvtColor(refImage, refImage, Imgproc.COLOR_RGB2GRAY, 4);
                MatOfKeyPoint refKeyPoints = createDetector(refImage);
                Mat refDescriptors = extractDescription(refKeyPoints, refImage);
                MatOfPoint2f refPoint2f = new MatOfPoint2f();
                //Features2d.drawKeypoints(refImage, refKeyPoints, refImage);
                // Apply brute-force matching
                Log.i(TAG, "Image matching in progress...\n" + "Matching image with template: " + images.getImage_Name());
                Log.e(TAG, srcDescriptors.toString());
                Log.e(TAG, refDescriptors.toString());
                if (srcDescriptors.empty())
                {
                    Log.e(TAG, "Source descriptor array is empty!");
                    finish();
                    break;
                }
                else if (refDescriptors.empty())
                {
                    Log.e(TAG, "Reference descriptor array is empty!");
                    finish();
                    break;
                }
                else if (srcDescriptors.empty() && refDescriptors.empty())
                {
                    Log.e(TAG, "Both descriptor arrays are empty!");
                    finish();
                    break;
                }
                else
                {
                    matcher.match(srcDescriptors, refDescriptors, matches);
                    //Initialize match list
                    double max_dist = 0;
                    double min_dist = 100;
                    List<DMatch> matchesList = matches.toList();
                    for (int r = 0; r < refDescriptors.rows(); r++)
                    {
                        Double distance = (double) matchesList.get(r).distance;
                        if (distance < min_dist)
                        {
                            min_dist = distance;
                        }
                        if (distance > max_dist)
                        {
                            max_dist = distance;
                        }
                    }
                    for (int s = 0; s < refDescriptors.rows(); s++)
                    {
                        if (matchesList.get(i).distance < 3 * min_dist)
                        {
                            listOfGoodMatches.add(matchesList.get(s));
                        }
                    }
                    goodMatches.fromList(listOfGoodMatches);
                    List<KeyPoint> refObjectListKeypoints = refKeyPoints.toList();
                    List<KeyPoint> srcObjectListKeypoints = srcKeyPoints.toList();
                    for (int t = 0; t < listOfGoodMatches.size(); t++)
                    {
                        refObjectList.addLast(refObjectListKeypoints.get(listOfGoodMatches.get(t).queryIdx).pt);
                        srcObjectList.addLast(srcObjectListKeypoints.get(listOfGoodMatches.get(t).trainIdx).pt);
                    }
                    refPoint2f.fromList(refObjectList);
                    srcPoint2f.fromList(srcObjectList);
                    String ifMatch;
                    if (listOfGoodMatches.size() > MIN_MATCH_THRESHOLD && listOfGoodMatches.size() < MAX_MATCH_THRESHOLD)
                    {
                        ifMatch = "Match found! Determining severity...";
                    }
                    else
                    {
                        ifMatch = "No match found!";
                    }
                    Log.i(TAG, ifMatch);
                }
                plsWait.dismiss();
                finish();
            }
        }
        catch (JSONException e)
        {
            Log.e(TAG, "JSON error occurred!");
            finish();
        }
        catch (IOException e)
        {
            Log.e(TAG, "IOException error occurred!");
            finish();
        }
        catch (CvException e)
        {
            Log.e(TAG, "An error has occurred during image processing!");
            finish();
        }
    }
    else
    {
        runOnUiThread(new Runnable()
        {
            public void run()
            {
                Toast.makeText(CameraView.this, "Image retrieval failed. Check your Internet connection!", Toast.LENGTH_LONG).show();
                Log.e(TAG, "Image retrieval failed. Check your Internet connection!");
            }
        });
        finish();
    }
}
public MatOfKeyPoint createDetector(Mat matrix)
{
    FeatureDetector detect = FeatureDetector.create(FeatureDetector.SIFT);
    MatOfKeyPoint keypoints = new MatOfKeyPoint();
    detect.detect(matrix, keypoints);
    return keypoints;
}

public Mat extractDescription(MatOfKeyPoint keypoint, Mat matrix)
{
    DescriptorExtractor descExtractor = DescriptorExtractor.create(FeatureDetector.SURF);
    Mat descriptors = new Mat();
    descExtractor.compute(matrix, keypoint, descriptors);
    return descriptors;
}
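In case it matters, ImageFromURL() (used in matchAndCompare() above but not shown) just downloads the file at the given URL and decodes it into a Bitmap; a simplified version of it looks like this:

public Bitmap ImageFromURL(String urlString)
{
    Bitmap bmp = null;
    try
    {
        //Open a stream to the remote image and decode it into a Bitmap (simplified sketch)
        InputStream in = new java.net.URL(urlString).openStream();
        bmp = BitmapFactory.decodeStream(in);
        in.close();
    }
    catch (IOException e)
    {
        Log.e(TAG, "Failed to load image from URL: " + e.toString());
    }
    return bmp;
}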
Now the main problem is this: when I do the error check on the descriptors of both the source and reference Mat objects, they both come back empty! On top of that, the app crashes with the following error (from LogCat):
06-02 20:08:02.319: E/Dur-Yan!(3891): Source image channels: 4
06-02 20:08:02.340: E/GED(3891): Failed to get GED Log Buf, err(0)
06-02 20:08:06.014: E/Dur-Yan!(3891): Connection successful
06-02 20:08:15.721: E/Dur-Yan!(3891): Reference image channels: 4
06-02 20:08:16.381: E/Dur-Yan!(3891): Mat [ 0*0*CV_8UC1, isCont=false, isSubmat=false, nativeObj=0xffffffffaf848548, dataAddr=0x0 ]
06-02 20:08:16.381: E/Dur-Yan!(3891): Mat [ 0*0*CV_8UC1, isCont=false, isSubmat=false, nativeObj=0xffffffffaf848628, dataAddr=0x0 ]
06-02 20:08:16.382: E/Dur-Yan!(3891): Source descriptor array is empty!
06-02 20:08:16.411: E/cv::error()(3891): OpenCV Error: Assertion failed (src.dims == 2 && info.height == (uint32_t)src.rows && info.width == (uint32_t)src.cols) in void Java_org_opencv_android_Utils_nMatToBitmap2(JNIEnv*, jclass, jlong, jobject, jboolean), file /hdd2/buildbot/slaves/slave_ardbeg1/50-SDK/opencv/modules/java/generator/src/cpp/utils.cpp, line 97
06-02 20:08:16.415: E/org.opencv.android.Utils(3891): nMatToBitmap catched cv::Exception: /hdd2/buildbot/slaves/slave_ardbeg1/50-SDK/opencv/modules/java/generator/src/cpp/utils.cpp:97: error: (-215) src.dims == 2 && info.height == (uint32_t)src.rows && info.width == (uint32_t)src.cols in function void Java_org_opencv_android_Utils_nMatToBitmap2(JNIEnv*, jclass, jlong, jobject, jboolean)
06-02 20:08:16.421: E/AndroidRuntime(3891): FATAL EXCEPTION: main
06-02 20:08:16.421: E/AndroidRuntime(3891): Process: com.yurinymous.android.duryan, PID: 3891
06-02 20:08:16.421: E/AndroidRuntime(3891): CvException [org.opencv.core.CvException: /hdd2/buildbot/slaves/slave_ardbeg1/50-SDK/opencv/modules/java/generator/src/cpp/utils.cpp:97: error: (-215) src.dims == 2 && info.height == (uint32_t)src.rows && info.width == (uint32_t)src.cols in function void Java_org_opencv_android_Utils_nMatToBitmap2(JNIEnv*, jclass, jlong, jobject, jboolean)
06-02 20:08:16.421: E/AndroidRuntime(3891): ]
06-02 20:08:16.421: E/AndroidRuntime(3891): at org.opencv.android.Utils.nMatToBitmap2(Native Method)
06-02 20:08:16.421: E/AndroidRuntime(3891): at org.opencv.android.Utils.matToBitmap(Utils.java:123)
06-02 20:08:16.421: E/AndroidRuntime(3891): at org.opencv.android.Utils.matToBitmap(Utils.java:132)
06-02 20:08:16.421: E/AndroidRuntime(3891): at com.yurinymous.android.duryan.CameraView$1ProcessImage$1.run(CameraView.java:613)
06-02 20:08:16.421: E/AndroidRuntime(3891): at android.app.Activity.runOnUiThread(Activity.java:5384)
06-02 20:08:16.421: E/AndroidRuntime(3891): at com.yurinymous.android.duryan.CameraView$1ProcessImage.onPostExecute(CameraView.java:609)
06-02 20:08:16.421: E/AndroidRuntime(3891): at com.yurinymous.android.duryan.CameraView$1ProcessImage.onPostExecute(CameraView.java:1)
06-02 20:08:16.421: E/AndroidRuntime(3891): at android.os.AsyncTask.finish(AsyncTask.java:636)
06-02 20:08:16.421: E/AndroidRuntime(3891): at android.os.AsyncTask.access$500(AsyncTask.java:177)
06-02 20:08:16.421: E/AndroidRuntime(3891): at android.os.AsyncTask$InternalHandler.handleMessage(AsyncTask.java:653)
06-02 20:08:16.421: E/AndroidRuntime(3891): at android.os.Handler.dispatchMessage(Handler.java:111)
06-02 20:08:16.421: E/AndroidRuntime(3891): at android.os.Looper.loop(Looper.java:194)
06-02 20:08:16.421: E/AndroidRuntime(3891): at android.app.ActivityThread.main(ActivityThread.java:5637)
06-02 20:08:16.421: E/AndroidRuntime(3891): at java.lang.reflect.Method.invoke(Native Method)
06-02 20:08:16.421: E/AndroidRuntime(3891): at java.lang.reflect.Method.invoke(Method.java:372)
06-02 20:08:16.421: E/AndroidRuntime(3891): at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:959)
06-02 20:08:16.421: E/AndroidRuntime(3891): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:754)
I'd like to know what I'm doing wrong here, and I hope you can help me sort it out. By the way, I'm new to OpenCV, so any insight you have would be a big help in finishing this project.
P.S.: This Android project is my final requirement before I graduate. Thank you, and more power!