How do I detect an object and compute its real-world distance from a depth map?

Asked: 2015-10-23 21:03:56

Tags: java, opencv

I have a problem. I want to calculate the real size of an object from two webcams. I compute the disparity map, call reprojectImageTo3D, and get a Mat _3dImage. How can I detect an unknown object? Do I need to do that from the disparity map or from the 3D image, or should I detect it in the 2D image from one of the cameras? How do I detect the object? Thanks, and sorry for my English. Code:

double StereoCalibrateAndRectify()
{
    // Stereo calibration.
    double err = Calib3d.stereoCalibrate(objectPoints, imagePoints, imagePoints2,
            CM1, D1, CM2, D2, vImg.get(0).size(), R, T, E, F,
            new TermCriteria(TermCriteria.MAX_ITER + TermCriteria.EPS, 30, 1e-5),
            //Calib3d.CALIB_SAME_FOCAL_LENGTH | Calib3d.CALIB_ZERO_TANGENT_DIST
            Calib3d.CALIB_FIX_FOCAL_LENGTH | Calib3d.CALIB_FIX_PRINCIPAL_POINT);
    /*
    CM1 - camera matrix of the first camera.
    CM2 - camera matrix of the second camera.
    D1  - distortion coefficient matrix of the first camera.
    D2  - distortion coefficient matrix of the second camera.
    R   - rotation matrix between the first and second camera coordinate systems.
    T   - translation vector between the coordinate systems of the cameras.
    E   - essential matrix.
    F   - fundamental matrix.
    */
    System.out.println("done, \nerrReproj = " + err);

    // Stereo rectification.
    Calib3d.stereoRectify(CM1, D1, CM2, D2, vImg.get(0).size(), R, T, R1, R2, P1, P2, Q,
            Calib3d.CALIB_ZERO_DISPARITY, -1, new Size(0, 0), ROI1, ROI2);
    /*
    R1 - 3x3 rectification transform (rotation matrix) for the first camera.
    R2 - 3x3 rectification transform (rotation matrix) for the second camera.
    P1 - 3x4 projection matrix in the new (rectified) coordinate system for the first camera.
    P2 - 3x4 projection matrix in the new (rectified) coordinate system for the second camera.
    Q  - 4x4 disparity-to-depth mapping matrix.
    */

    // Clear the accumulated calibration points so the next calibration starts fresh.
    objectPoints = new ArrayList();
    imagePoints = new ArrayList();
    imagePoints2 = new ArrayList();
    return err;
}
void Undistort(Mat img1, Mat img2, Mat img1out, Mat img2out)
{
    // Remove lens distortion from both frames (note: this does not rectify them).
    Imgproc.undistort(img1, img1out, CM1, D1);
    Imgproc.undistort(img2, img2out, CM2, D2);
}
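// Block matching assumes a row-aligned (rectified) stereo pair, while Undistort()
// above only removes lens distortion. A hedged sketch of a rectifying alternative,
// assuming the R1/R2/P1/P2 matrices filled in by stereoRectify(); the method name
// RectifyPair is hypothetical and not part of the original code:
void RectifyPair(Mat img1, Mat img2, Mat img1out, Mat img2out)
{
    Mat map1x = new Mat(), map1y = new Mat();
    Mat map2x = new Mat(), map2y = new Mat();
    // Build per-camera undistort + rectify lookup maps (these could be cached after the first call).
    Imgproc.initUndistortRectifyMap(CM1, D1, R1, P1, img1.size(), CvType.CV_32FC1, map1x, map1y);
    Imgproc.initUndistortRectifyMap(CM2, D2, R2, P2, img2.size(), CvType.CV_32FC1, map2x, map2y);
    // Warp both frames into the common rectified geometry.
    Imgproc.remap(img1, img1out, map1x, map1y, Imgproc.INTER_LINEAR);
    Imgproc.remap(img2, img2out, map2x, map2y, Imgproc.INTER_LINEAR);
}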
public Mat startSGBM(Mat undistortedLeft, Mat undistortedRight, int minDisparity, int numDisparities, int SADWindowSize,
        int preFilterCap, int uniquenessRatio, int disp12MaxDiff, int P1, int P2, int speckleRange, int speckleWindowSize)
{
    Mat disparity = new Mat();
    Mat disparity8 = new Mat();
    // Scale the incoming values into the ranges StereoSGBM expects
    // (numDisparities must be a positive multiple of 16).
    minDisparity = minDisparity * 8;
    numDisparities = numDisparities * 16;
    SADWindowSize = SADWindowSize * 8;
    preFilterCap = preFilterCap * 8;
    P1 = P1 * 100;
    P2 = P2 * 110;
    speckleWindowSize = speckleWindowSize * 10;

    StereoSGBM stereo = new StereoSGBM();
    stereo.set_SADWindowSize(SADWindowSize);
    stereo.set_minDisparity(minDisparity);
    stereo.set_numberOfDisparities(numDisparities);
    stereo.set_preFilterCap(preFilterCap);
    stereo.set_uniquenessRatio(uniquenessRatio);
    stereo.set_disp12MaxDiff(disp12MaxDiff);
    stereo.set_P1(P1);
    stereo.set_P2(P2);
    stereo.set_speckleRange(speckleRange);
    stereo.set_speckleWindowSize(speckleWindowSize);

    // Compute the disparity map (CV_16S, values scaled by 16).
    stereo.compute(undistortedLeft, undistortedRight, disparity);
    //disparity.convertTo(disparity8, CvType.CV_8U, 255/(numDisparities));
    // Normalised 8-bit copy, suitable for display.
    Core.normalize(disparity, disparity8, 0, 255, Core.NORM_MINMAX, CvType.CV_8U);
    return disparity8;
}
void Calculation3D(Mat disparity, Mat _3dImage, ArrayList<Rect> objects)
{
    // Every pixel of _3dImage receives an (X, Y, Z) coordinate in the units used
    // during calibration; handleMissingValues = true marks invalid disparities
    // with a very large Z.
    Calib3d.reprojectImageTo3D(disparity, _3dImage, Q, true);
}
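// A note on using _3dImage, offered as hedged assumptions about this code:
//  - reprojectImageTo3D should be fed the raw StereoSGBM disparity (CV_16S, scaled by 16),
//    converted e.g. with disparity.convertTo(dispF, CvType.CV_32F, 1.0 / 16.0),
//    not the 0..255 normalised map returned by startSGBM(), which is only for display.
//  - with handleMissingValues = true, pixels without a valid disparity get a very large Z.
// A minimal hypothetical helper (not in the original code) that reads the 3D point
// under one pixel and returns its Euclidean distance from the left camera, in the
// units used during calibration:
static double DistanceAt(Mat _3dImage, int row, int col)
{
    double[] xyz = _3dImage.get(row, col);          // {X, Y, Z} for that pixel
    if (xyz == null || xyz[2] >= 10000) return -1;  // missing disparities map to a very large Z
    return Math.sqrt(xyz[0] * xyz[0] + xyz[1] * xyz[1] + xyz[2] * xyz[2]);
}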
public static ArrayList<Rect> FindObj(Mat image, Mat frameForDrawing, int lowerbX, int lowerbY, int lowerbZ,
        int upperbX, int upperbY, int upperbZ) {
    Image img = toBufferedImage(image);
    ArrayList<Rect> objects = new ArrayList<Rect>();
    // Convert to HSV unless the input is already single-channel (CV_8UC1 == 0).
    if (image.type() != 0)
        Imgproc.cvtColor(image, image, Imgproc.COLOR_BGR2HSV);
    Imgproc.GaussianBlur(image, image, new Size(21, 21), 0);
    // Threshold on the given HSV range, then invert and extract the outer contours.
    Mat bw = new Mat();
    Core.inRange(image, new Scalar(lowerbX, lowerbY, lowerbZ), new Scalar(upperbX, upperbY, upperbZ), bw);
    Mat negative = negative(bw);
    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Imgproc.findContours(negative, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
    for (int i = 0; i < contours.size(); i++) {
        if (Imgproc.contourArea(contours.get(i)) > 50) {
            Rect rect = Imgproc.boundingRect(contours.get(i));
            // Pixel-size heuristics: ignore blobs that are too small or that span the whole frame.
            if (rect.height <= img.getHeight(null) - 100) {
                if (rect.height > 150 && rect.height != img.getHeight(null)) {
                    System.out.println(rect.height + rect.width); // sum of height and width, in pixels
                    Core.rectangle(frameForDrawing, new Point(rect.x, rect.y),
                            new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 0, 255));
                    Core.putText(frameForDrawing, Integer.toString(rect.height + rect.width),
                            new Point(rect.x, rect.y), Core.FONT_HERSHEY_COMPLEX, 1.0, new Scalar(0, 0, 255));
                    objects.add(rect);
                }
            }
        }
    }
    return objects;
}

I have listed the functions above in the order they are called from the main program...
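One way to turn a detected bounding box into a real-world measurement is to sample _3dImage at points inside the box: the Z value gives the distance, and differences in X and Y across the box approximate the object's metric width and height. A minimal sketch, assuming FindObj and reprojectImageTo3D were run on the same rectified left view; MeasureObject is a hypothetical name, and the edge pixels may carry the "missing disparity" Z, so in practice several valid pixels should be averaged:

static void MeasureObject(Mat _3dImage, Rect rect)
{
    int cy = rect.y + rect.height / 2;
    int cx = rect.x + rect.width / 2;
    double[] left   = _3dImage.get(cy, rect.x);                    // (X,Y,Z) at the left edge
    double[] right  = _3dImage.get(cy, rect.x + rect.width - 1);   // (X,Y,Z) at the right edge
    double[] top    = _3dImage.get(rect.y, cx);
    double[] bottom = _3dImage.get(rect.y + rect.height - 1, cx);
    double[] center = _3dImage.get(cy, cx);
    double width  = Math.abs(right[0] - left[0]);    // X span across the box
    double height = Math.abs(bottom[1] - top[1]);    // Y span across the box
    System.out.println("width ~ " + width + ", height ~ " + height
            + ", distance ~ " + center[2] + " (calibration units)");
}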

0 Answers:

No answers yet.