openCV triangulatePoints

时间:2015-03-10 18:41:41

标签: opencv point-cloud-library

首先感谢阅读。

根据openCV函数提供的信息,我在使用PCL生成点云时遇到了问题。

  1. 我正在使用两张图像,该函数识别出若干关键点。
  2. 然后我进行匹配并使用RANSAC算法计算基础矩阵(fundamental matrix)。
  3. 然后我打印每张图片中的点以查看相关点,并且我有几个匹配良好的点。
  4. 现在我正在尝试生成点云以重新投影这些点,因为下一步是制作一个具有两个以上图像的更大点云......通过2d信息进行三维重建。
  5. 我的问题是我无法填充云,因为这些点位于奇怪的位置,并且所有点似乎都非常接近......我正在使用的代码有问题吗?
  6. 下面的函数和我正在使用的矩阵:

    调用三角测量函数:

    TriangulatePoints(keypoints1, keypoints2, K.t(), P, P1, pointCloud)
    

    PopulateTheCloud

    PopulatePCLPointCloud(pointCloud);
    

    填充功能:

    void PopulatePCLPointCloud(const vector<Point3d>& pointcloud) //Populate point cloud
    {
        cout << "Creating point cloud...";
        cloud.reset(new pcl::PointCloud<pcl::PointXYZRGB>);
        for (unsigned int i = 0; i<pointcloud.size(); i++)
        {
            // get the RGB color value for the point
            Vec3b rgbv(255,255, 0);
            // check for erroneous coordinates (NaN, Inf, etc.)
            if (pointcloud[i].x != pointcloud[i].x || _isnan(pointcloud[i].x) || pointcloud[i].y != pointcloud[i].y || _isnan(pointcloud[i].y) || pointcloud[i].z != pointcloud[i].z || _isnan(pointcloud[i].z) || fabsf(pointcloud[i].x) > 10.0 || fabsf(pointcloud[i].y) > 10.0 || fabsf(pointcloud[i].z) > 10.0)
            {
                continue;
            }
            pcl::PointXYZRGB pclp;
            // 3D coordinates
            pclp.x = pointcloud[i].x;
            pclp.y = pointcloud[i].y;
            pclp.z = pointcloud[i].z;
            // RGB color, needs to be represented as an integer uint32_t
            float rgb = ((uint32_t)rgbv[2] << 16 | (uint32_t)rgbv[1] << 8 | (uint32_t)rgbv[0]);
            pclp.rgb = *reinterpret_cast<float*>(&rgb);
            cloud->push_back(pclp);
        }
        cloud->width = (uint32_t)cloud->points.size();
        // number of points
        cloud->height = 1;
        // a list of points, one row of data
    }
    

    用3D点填充点云的函数(我注释掉了reproj_error部分;这段代码是从 Mastering OpenCV 的示例中复制的,但没有起作用)。

    double TriangulatePoints(const vector<KeyPoint>& pt_set1, const vector<KeyPoint>& pt_set2, const Mat&Kinv, const Matx34d& P, const Matx34d& P1, vector<Point3d>& pointcloud) {
        vector<double> reproj_error;
    for (unsigned int i = 0; i<min(pt_set1.size(), pt_set2.size()); i++) {    //convert to normalized homogeneous coordinates   
    
        Point2f kp = pt_set1[i].pt;
        Point3d u(kp.x, kp.y, 1.0);
        Mat_<double> um = Kinv * Mat_<double>(u);
        u = (Point3d)um(0, 0);
    
        Point2f kp1 = pt_set2[i].pt;
        Point3d u1(kp1.x, kp1.y, 1.0);
        Mat_<double> um1 = Kinv * Mat_<double>(u1);
        u1 = (Point3d)um1(0, 0);
    
        //triangulate
        Mat_<double> X = LinearLSTriangulation(u, P, u1, P1);
    
        /*Mat_<double> xPt_img = Kinv.t() * Mat(P1) * X;
        Point2f xPt_img_(xPt_img(0)/xPt_img(2),xPt_img(1)/xPt_img(2));
    
        //calculate reprojection error
        reproj_error.push_back(norm(xPt_img_-kp1));    //store 3D point  */
    
        //carga la nube de puntos
        pointcloud.push_back(Point3d(X(0), X(1), X(2)));
    } //return mean reprojection error 
    /*Scalar me = mean(reproj_error);
    return me[0]; */
    
    return 0;
    

    }

    线性三角测量:

    // Linear least-squares triangulation (Hartley & Sturm style).
    // Each homogeneous image point (x, y, 1) and its 3x4 camera matrix
    // contribute two linear constraints on the 3D point X; the resulting
    // over-determined 4x3 system A*X = B is solved in the least-squares
    // sense via SVD.
    Mat_<double> LinearLSTriangulation(Point3d u,//homogenous image point (u,v,1) 
        Matx34d P,//camera 1 matrix 
        Point3d u1,//homogenous image point in 2nd camera 
        Matx34d P1//camera 2 matrix 
        ) {
        // Coefficient matrix: rows are x*P.row2 - P.row0 and y*P.row2 - P.row1
        // for each camera (first two rows camera 1, last two rows camera 2).
        Matx43d A(
            u.x*P(2, 0) - P(0, 0),    u.x*P(2, 1) - P(0, 1),    u.x*P(2, 2) - P(0, 2),
            u.y*P(2, 0) - P(1, 0),    u.y*P(2, 1) - P(1, 1),    u.y*P(2, 2) - P(1, 2),
            u1.x*P1(2, 0) - P1(0, 0), u1.x*P1(2, 1) - P1(0, 1), u1.x*P1(2, 2) - P1(0, 2),
            u1.y*P1(2, 0) - P1(1, 0), u1.y*P1(2, 1) - P1(1, 1), u1.y*P1(2, 2) - P1(1, 2));
        // Right-hand side: the translation (fourth) column of each camera matrix.
        Matx41d B(
            -(u.x*P(2, 3) - P(0, 3)),
            -(u.y*P(2, 3) - P(1, 3)),
            -(u1.x*P1(2, 3) - P1(0, 3)),
            -(u1.y*P1(2, 3) - P1(1, 3)));
        Mat_<double> X;
        solve(A, B, X, DECOMP_SVD);  // least-squares solve via SVD
        return X;
    }
    

    矩阵:

     Fundamental =
    [-5.365548729323536e-007, 0.0003108718787914248, -0.0457266834161677;
      -0.0003258809500026533, 4.695400741230473e-006, 1.295466303565132;
      0.05008017646011816, -1.300323239531621, 1]
    
    
    
    Calibration Matrix =
     [744.2366711500123, 0, 304.166818982576;
      0, 751.1308610972965, 225.3750058508892;
      0, 0, 1]
    
    Essential =
     [-0.2971914249411831, 173.7833277398352, 17.99033324690517;
      -182.1736856953757, 2.649133690692166, 899.405863948026;
      -17.51073288084396, -904.8934348365967, 0.3895173270497594]
    
    Rotation matrix =
     [-0.9243506387712034, 0.03758098759490174, -0.3796887751496749;
      0.03815782996164848, 0.9992536546828119, 0.006009460513344713;
      -0.379631237671357, 0.008933251056327281, 0.9250947629349537]
    
    Traslation matrix =
     [-0.9818733349058273;
      0.01972152607878091;
      -0.1885094576142884]
    
    P0 matrix =
     [1, 0, 0, 0;
      0, 1, 0, 0;
      0, 0, 1, 0]
    
    P1 matrix =
     [-0.9243506387712034, 0.03758098759490174, -0.3796887751496749, -0.9818733349058273;
      0.03815782996164848, 0.9992536546828119, 0.006009460513344713, 0.01972152607878091;
      -0.379631237671357, 0.008933251056327281, 0.9250947629349537, -0.1885094576142884]
    

1 个答案:

答案 0 :(得分:1)

我解决了这个问题,我有两大问题......

首先,我将未过滤的关键点传递给三角函数,所以我看到了匹配点和非有用点。也许我们会有更多无用而不是有用的点......

因此,您将在三角函数中看到我给出了我用ransacTest和SymTest过滤得到的匹配点。然后只使用关键点匹配的索引。所以一切都很好=)只是显示好匹配。

其次是triangulateFunctions错了。

这里更正了:

  double TriangulatePoints(const vector<KeyPoint>& pt_set1, const vector<KeyPoint>& pt_set2, const Mat&Kinv, const Matx34d& P, const Matx34d& P1, vector<Point3d>& pointcloud, vector<DMatch>& matches)
{
    //Mat_<double> KP1 = Kinv.inv() *Mat(P1);
    vector<double> reproj_error;
    for (unsigned int i = 0; i < matches.size(); i++)
    {    //convert to normalized homogeneous coordinates   
        Point2f kp = pt_set1[matches[i].queryIdx].pt;
        Point3d u(kp.x, kp.y, 1.0);
        Mat_<double> um = Kinv * Mat_<double>(u);
        u.x = um(0);
        u.y = um(1);
        u.z = um(2);
        Point2f kp1 = pt_set2[matches[i].trainIdx].pt;
        Point3d u1(kp1.x, kp1.y, 1.0);
        Mat_<double> um1 = Kinv * Mat_<double>(u1);
        u1.x = um1(0);
        u1.y = um1(1);
        u1.z = um1(2);
        //triangulate
        Mat_<double> X = LinearLSTriangulation(u, P, u1, P1);
        pointcloud.push_back(Point3d(X(0), X(1), X(2)));
    }
    cout << "cantidad Puntos" << pointcloud.size() << endl;
    return 1;
}