Real-time object recognition on Android with the FAST detector

Date: 2013-03-11 20:04:57

Tags: android opencv

The goal of this work is to extract keypoints from an existing reference image and match them against the live video scene in real time. processFrame does not work because of the matching step; what I want is to display the corresponding keypoints as circles on the image in real time.

import java.io.IOException;

import org.opencv.android.Utils;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfDMatch;
import org.opencv.core.MatOfKeyPoint;
import org.opencv.features2d.DescriptorExtractor;
import org.opencv.features2d.DescriptorMatcher;
import org.opencv.features2d.FeatureDetector;
import org.opencv.features2d.Features2d;
import org.opencv.imgproc.Imgproc;

import android.content.Context;
import android.graphics.Bitmap;
import android.util.Log;
import android.util.TimingLogger;

class Sample1View extends SampleViewBase {
    public static final int VIEW_MODE_RGBA   = 0;
    public static final int VIEW_MODE_BLUE   = 1;
    public static final int VIEW_MODE_YELLOW = 2;
    public static final int VIEW_MODE_DE     = 3;

    // Frame buffers and helper Mats
    private Mat mYuv;
    private Mat mRgba;
    private Mat mGraySubmat;
    private Mat mResult;
    private Mat mIntermediateMat;   // descriptors of the current frame
    private Mat mIntermediateMat2;  // output image of drawMatches
    private Bitmap mBitmap;
    private int mViewMode;
    private Mat mColor;
    private Mat mHsv;
    TimingLogger timings;

    // Reference image and its precomputed features
    private Mat img1;
    private Mat descriptors;
    private MatOfKeyPoint keypoints;
    private FeatureDetector detector;
    private DescriptorExtractor descriptor;
    private DescriptorMatcher matcher;

    private static final String TAG = "Sample::View";

    public Sample1View(Context context) {
        super(context);
        mViewMode = VIEW_MODE_RGBA;

        // Load the reference image and compute its keypoints/descriptors once.
        try {
            img1 = Utils.loadResource(getContext(), R.drawable.wings);
        } catch (IOException e) {
            Log.w("Activity::LoadResource", "Unable to load resource R.drawable.wings");
            e.printStackTrace();
        }
        descriptors = new Mat();
        keypoints = new MatOfKeyPoint();
        detector = FeatureDetector.create(FeatureDetector.FAST);
        detector.detect(img1, keypoints);
        descriptor = DescriptorExtractor.create(DescriptorExtractor.ORB);
        descriptor.compute(img1, keypoints, descriptors);
        matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING);
    }

    @Override
    protected void onPreviewStarted(int previewWidth, int previewHeight) {
        Log.i(TAG, "preview Started");
        synchronized (this) {
            mYuv = new Mat(getFrameHeight() + getFrameHeight() / 2, getFrameWidth(), CvType.CV_8UC1);
            mGraySubmat = mYuv.submat(0, getFrameHeight(), 0, getFrameWidth());
            mRgba = new Mat();
            mIntermediateMat = new Mat();
            mIntermediateMat2 = new Mat();
            mBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Bitmap.Config.ARGB_8888);
            mHsv = new Mat();
            mColor = new Mat();
            mResult = new Mat();
        }
    }

    @Override
    protected void onPreviewStopped() {
        Log.i(TAG, "preview Stopped");
        if (mBitmap != null) {
            mBitmap.recycle();
        }

        synchronized (this) {
            // Explicitly deallocate Mats
            if (mYuv != null)
                mYuv.release();
            if (mRgba != null)
                mRgba.release();
            if (mGraySubmat != null)
                mGraySubmat.release();
            if (mIntermediateMat != null)
                mIntermediateMat.release();
            if (mIntermediateMat2 != null)
                mIntermediateMat2.release();
            if (mResult != null)
                mResult.release();
            if (mHsv != null)
                mHsv.release();
            if (mColor != null)
                mColor.release();
            mYuv = null;
            mRgba = null;
            mGraySubmat = null;
            mIntermediateMat = null;
            mIntermediateMat2 = null;
            mResult = null;
            mHsv = null;
            mColor = null;
        }
    }

    // processFrame: convert the YUV preview frame (ColorDetection.cvt_YUVtoRGBtoHSV),
    // then detect, describe and match features against the reference image.

    @Override
    protected Bitmap processFrame(byte[] data) {

        mYuv.put(0, 0, data);
        final int viewMode = mViewMode;

        ColorDetection.cvt_YUVtoRGBtoHSV(mYuv, mGraySubmat);

        switch (viewMode) {
        case VIEW_MODE_DE:
            MatOfKeyPoint mKeyPoints = new MatOfKeyPoint();
            MatOfDMatch matches = new MatOfDMatch();
            detector.detect(mGraySubmat, mKeyPoints);
            descriptor.compute(mGraySubmat, mKeyPoints, mIntermediateMat);
            matcher.match(mIntermediateMat, descriptors, matches);
            // mIntermediateMat2.create(resultSize, CvType.CV_8UC1); // not needed: drawMatches allocates the output Mat

            Features2d.drawMatches(mGraySubmat, mKeyPoints, mGraySubmat, mKeyPoints, matches, mIntermediateMat2);
            Imgproc.resize(mIntermediateMat2, mIntermediateMat2, mRgba.size());
            Imgproc.cvtColor(mIntermediateMat2, mRgba, Imgproc.COLOR_RGBA2BGRA, 4);
            break;
        }

        Bitmap bmp = mBitmap;

        try {
            Utils.matToBitmap(mRgba, bmp);
        } catch (Exception e) {
            Log.e("org.opencv.samples.*", "Utils.matToBitmap() throws an exception: " + e.getMessage());
            bmp.recycle();
            bmp = null;
        }
        return bmp;
    }

    public void setViewMode(int viewMode) {
        mViewMode = viewMode;
    }
}

And this is what appears in the log:

CvException [org.opencv.core.CvException: /home/reports/ci/slave/50-SDK/opencv/modules/features2d/src/draw.cpp:207: error: (-215) i1 >= 0 && i1 < static_cast<int>(keypoints1.size()) in function void cv::drawMatches(const cv::Mat&, const std::vector<KeyPoint>&, const cv::Mat&, const std::vector<KeyPoint>&, const std::vector<DMatch>&, cv::Mat&, const Scalar&, const Scalar&, const std::vector<char>&, int)]

1 Answer:

Answer 0 (score: 2)

This post is a bit old, but here is an answer anyway.

The first four arguments of your drawMatches call are incorrect: the first pair and the second pair are the same image and the same keypoints, while the matches refer to two different keypoint sets (the frame's and the reference image's). Change the first four arguments of drawMatches from

(mGraySubmat, mKeyPoints, mGraySubmat, mKeyPoints, ...)

to

(mGraySubmat, mKeyPoints, img1, keypoints, ...)

or

(img1, keypoints, mGraySubmat, mKeyPoints, ...)

using whichever order matches your matcher.match call (the image whose descriptors were the query goes first). A minimal sketch of the corrected call follows.
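Assuming the field names from the question and the OpenCV 2.4 Java API: since matcher.match(mIntermediateMat, descriptors, matches) uses the camera frame's descriptors as the query and the reference image's as the train set, the frame and its keypoints go first.

    // Sketch only: query image/keypoints first, train image/keypoints second,
    // matching the order used in matcher.match(mIntermediateMat, descriptors, matches).
    Features2d.drawMatches(
            mGraySubmat, mKeyPoints,   // current camera frame and its keypoints (query)
            img1, keypoints,           // reference image and its keypoints (train)
            matches,
            mIntermediateMat2);        // output: both images side by side with match lines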

For other readers who run into the same problem: try swapping the first pair of arguments with the second pair, e.g. from

(image1, keypointsImage1, image2, keypointsImage2, ...)

to

(image2, keypointsImage2, image1, keypointsImage1, ...)

If it is unclear which order is correct, the sanity check sketched below shows which keypoint list the match indices overrun.
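A quick way to see which list is being overrun is a small debugging helper (hypothetical, not part of the original answer; it assumes the OpenCV 2.4 Java API, where DMatch lives in org.opencv.features2d):

    import org.opencv.core.MatOfDMatch;
    import org.opencv.core.MatOfKeyPoint;
    import org.opencv.features2d.DMatch;   // org.opencv.core.DMatch in OpenCV 3.x and later
    import android.util.Log;

    class MatchCheck {
        // Warn about matches whose queryIdx/trainIdx do not fit the keypoint lists
        // that will be handed to Features2d.drawMatches as keypoints1/keypoints2.
        static void checkMatchIndices(MatOfDMatch matches, MatOfKeyPoint kp1, MatOfKeyPoint kp2) {
            int n1 = (int) kp1.total();
            int n2 = (int) kp2.total();
            for (DMatch m : matches.toArray()) {
                if (m.queryIdx < 0 || m.queryIdx >= n1 || m.trainIdx < 0 || m.trainIdx >= n2) {
                    Log.w("MatchCheck", "out-of-range match: queryIdx=" + m.queryIdx
                            + " trainIdx=" + m.trainIdx + " (kp1=" + n1 + ", kp2=" + n2 + ")");
                }
            }
        }
    }

Calling it with the same keypoint arguments you intend to pass to drawMatches, just before the drawMatches call, logs exactly which index overruns which list.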

For me, that fixed the error.
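Side note for the original goal of drawing the corresponding keypoints as circles on the live frame (rather than the side-by-side image drawMatches produces): Features2d.drawKeypoints can render circles directly. A minimal sketch under the question's field names; the helper name drawMatchedKeypoints is hypothetical, and it keeps only the frame keypoints that actually matched the reference image:

    import java.util.ArrayList;
    import java.util.List;

    import org.opencv.core.Mat;
    import org.opencv.core.MatOfDMatch;
    import org.opencv.core.MatOfKeyPoint;
    import org.opencv.features2d.DMatch;     // org.opencv.core.DMatch in OpenCV 3.x and later
    import org.opencv.features2d.Features2d;
    import org.opencv.features2d.KeyPoint;   // org.opencv.core.KeyPoint in OpenCV 3.x and later

    class KeypointOverlay {
        // Draw only the frame keypoints that matched the reference image,
        // as circles on top of the frame.
        static void drawMatchedKeypoints(Mat frame, MatOfKeyPoint frameKeypoints,
                                         MatOfDMatch matches, Mat out) {
            List<KeyPoint> all = frameKeypoints.toList();
            List<KeyPoint> matched = new ArrayList<KeyPoint>();
            for (DMatch m : matches.toArray()) {
                matched.add(all.get(m.queryIdx));   // queryIdx indexes the frame's keypoints
            }
            MatOfKeyPoint matchedKp = new MatOfKeyPoint();
            matchedKp.fromList(matched);
            Features2d.drawKeypoints(frame, matchedKp, out);   // circles at each matched keypoint
        }
    }

In processFrame this could replace the drawMatches/resize/cvtColor block, e.g. drawMatchedKeypoints(mGraySubmat, mKeyPoints, matches, mRgba) followed by the existing Utils.matToBitmap(mRgba, bmp) call.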