如何在 Android 上用 OpenCV 从特定区域获取关键点

时间:2016-07-19 19:47:40

标签: android image-processing opencv3.0 opencv4android opticalflow

我想构建一个类似 this 链接所示的应用。我希望只在画面中绘制的矩形区域内提取并绘制关键点,从而检测红色圆圈中的物体。请问该如何实现?下面是我的代码:

公共类MainActivity扩展Activity实现CvCameraViewListener2 {

private MatOfPoint features;
private Mat                  mIntermediateMat;
private Mat                  mRgba;
private Mat                  mGray;
private org.opencv.core.Size                 wsize;
Mat                          descriptors ;
FeatureDetector              featureDetector;
MatOfKeyPoint                keyPoints;
DescriptorExtractor          descriptorExtractor;
DescriptorMatcher            descriptorMatcher;

protected static final String TAG = null;
private BaseLoaderCallback mLoaderCallBack = new BaseLoaderCallback(this){

     public void onManagerConnected(int status) {
            switch (status) {
                case LoaderCallbackInterface.SUCCESS: {
                    Log.i(TAG, "OpenCV Loaded Successfully");
                    mOpenCvCameraView.enableView();
                    break;
                }
                default: {
                    super.onManagerConnected(status);
                }
            }
        }
};
private JavaCameraView mOpenCvCameraView;

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    setContentView(R.layout.activity_main);

    mOpenCvCameraView = (JavaCameraView) findViewById(R.id.main_activity_surface_view);
    mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
    mOpenCvCameraView.setCvCameraViewListener(this);
}

@Override
public boolean onCreateOptionsMenu(Menu menu) {
    // Inflate the menu; this adds items to the action bar if it is present.
    getMenuInflater().inflate(R.menu.main, menu);
    return true;
}

@Override
public boolean onOptionsItemSelected(MenuItem item) {
    int id = item.getItemId();
    if (id == R.id.action_settings) {
        return true;
    }
    return super.onOptionsItemSelected(item);
}

public void onResume(){
    super.onResume();
    OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_1_0, this, mLoaderCallBack);
}

public void onDestroy() {
    super.onDestroy();
    if (mOpenCvCameraView != null) {
        mOpenCvCameraView.disableView();
    }
}

@Override
public void onCameraViewStarted(int width, int height) {
    mRgba = new Mat(height, width, CvType.CV_8UC4);
    mIntermediateMat = new Mat(height, width, CvType.CV_8UC4);
    mGray = new Mat(height, width, CvType.CV_8UC1);
    featureDetector=FeatureDetector.create(FeatureDetector.DYNAMIC_ORB);
    descriptorExtractor=DescriptorExtractor.create(DescriptorExtractor.ORB);
    descriptorMatcher= DescriptorMatcher.create(6);
    keyPoints = new MatOfKeyPoint();
    descriptors = new Mat();
    mIntermediateMat = new Mat();
    features = new MatOfPoint();
}

@Override
public void onCameraViewStopped() {


}

@Override
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    mRgba = inputFrame.rgba();
    //mGray = inputFrame.gray();
    Mat mZoomWindow = mRgba.submat(140,320,100,380);
    Imgproc.resize(mZoomWindow, mRgba, mRgba.size());
    wsize = mZoomWindow.size();
    Scalar color = new Scalar(255);
    Imgproc.rectangle(mZoomWindow, new Point(1, 1), new Point(wsize.width - 2, wsize.height - 2), color, 2);
    //Mat mask = new Mat(mZoomWindow.size(), CvType.CV_8U);
    Mat mask = Mat.zeros(mRgba.size(), CvType.CV_8UC1);
    /*boolean use_harris = true;
    Imgproc.cvtColor(mRgba, mRgba, Imgproc.COLOR_RGBA2GRAY, 4);
    Imgproc.goodFeaturesToTrack(mRgba, features ,100, 0.01, 10, mask , 8, use_harris , 0.06);
    List<Point> drawFeature = features.toList();
    for (int i = 0; i < drawFeature.size(); i++){
        Point p = drawFeature.get(i);
        Imgproc.circle(mRgba, p, 5, new Scalar(0,255,0));
    }
    //Imgproc.cvtColor(mIntermediateMat, mRgba, Imgproc.COLOR_GRAY2RGBA, 4);
    //mIntermediateMat.convertTo(mRgba, CvType.CV_8UC4, 1); */
    Imgproc.cvtColor(mRgba, mRgba , Imgproc.COLOR_RGBA2RGB, 4);
    featureDetector.detect(mRgba, keyPoints);
    Features2d.drawKeypoints(mRgba, keyPoints, mask);
            return mRgba;
}

} `

1 个答案:

答案 0 :(得分:0)

可能我回复得太晚了,您很可能已经解决了。但按我的理解,您需要对视频逐帧处理:拿到一帧后,先裁剪出标记矩形区域对应的 Mat 对象,然后在裁剪出的图像中检测特征点,再将它们与目标物体(路面上带红点的那个物体)的特征点进行匹配——您可以事先准备一张该物体的局部图片来提取它的特征点。(顺便问一下,那到底是什么物体?)

但这是使用KeyPoints在矩形内进行跟踪。如果您想要光流,再次检测点并存储它们,然后在裁剪后的图像中使用光流算法。