使用OpenCV投影点

时间:2017-10-18 08:20:40

标签: c++ opencv computer-vision

我试图:

  1. 对世界坐标中的几个点进行射线平面交叉
  2. 然后我得到那些相交的点
  3. 然后我尝试将这些点从世界坐标投影到图像坐标
  4. 但我得到的点的坐标都在 0.4、0.1、0.5 …… 这样的量级，而不是像素坐标。

    以下是我的具体做法，希望你能帮我找出其中的错误：

          // Build the 3x3 pinhole intrinsic matrix K (fx, fy on the diagonal,
          // principal point cx, cy in the last column).
          // NOTE(review): fx = fy ≈ 1.6 looks like a normalized / unitless
          // focal length rather than a focal length in pixels; projecting
          // through this K yields coordinates on the order of 0..1, which
          // matches the symptom described in the question above.
          Mat cameraIntrinsics( 3, 3, CV_32F );
    
          cameraIntrinsics.at<float>( 0, 0 ) = 1.6003814935684204;
          cameraIntrinsics.at<float>( 0, 1 ) = 0;
          cameraIntrinsics.at<float>( 0, 2 ) = -0.0021958351135253906;
          cameraIntrinsics.at<float>( 1, 0 ) = 0;
          cameraIntrinsics.at<float>( 1, 1 ) = 1.6003814935684204;
          cameraIntrinsics.at<float>( 1, 2 ) = -0.0044271680526435375;
          cameraIntrinsics.at<float>( 2, 0 ) = 0;
          cameraIntrinsics.at<float>( 2, 1 ) = 0;
          cameraIntrinsics.at<float>( 2, 2 ) = 1;
    
    
          // K^-1, used below to back-project (un-project) the image points.
          Mat invCameraIntrinsics = cameraIntrinsics.inv();
    
          // For every detected corner, build a point (x, y, -1) — the image
          // plane is placed at z = -1 — plus a ray from the camera origin
          // through that point.
          // NOTE(review): the ray direction uses the RAW corner coordinates,
          // while the plane point below uses the K^-1-transformed ones;
          // mixing the two coordinate systems looks inconsistent — verify.
          std::vector<cv::Point3f> points3D;
          std::vector<Ray> rays;
          for ( int i = 0; i < corners.size(); i++ )
          {
            cv::Point3f pt;
    
            pt.z = -1.0f;
    
            pt.x = corners[i].x;
            pt.y = corners[i].y;
    
            points3D.push_back( pt );
    
            Ray ray;
    
            ray.origin = Vec3f( 0, 0, 0);
            ray.direction = Vec3f( pt.x, pt.y, pt.z );
    
            rays.push_back( ray );
          }
    
          std::vector<cv::Point3f> pointsTransformed3D;
    
    
          // cv::transform multiplies each point, treated as a 3-vector,
          // by the 3x3 matrix K^-1 (back-projection).
          cv::transform( points3D, pointsTransformed3D, invCameraIntrinsics );
    
    
          // Intersect each ray with the plane of normal (0, 1, 0) passing
          // through the corresponding un-projected point; keep the hits.
          std::vector<cv::Vec3f> contacts;
    
          for ( int i = 0; i < pointsTransformed3D.size(); i++ )
          {
            Vec3f pt( pointsTransformed3D[i].x, pointsTransformed3D[i].y, pointsTransformed3D[i].z );
    
            cv::Vec3f contact;
            std::pair<bool, double> test = linePlaneIntersection( contact, rays[i].direction, rays[i].origin, Vec3f( 0, 1, 0 ), pt );
            if (test.first == true )
            {
              // NOTE(review): this 'contact' shadows the one declared above;
              // whatever linePlaneIntersection wrote into the outer variable
              // is discarded and the point is recomputed from the ray here.
              cv::Vec3f contact( rays[i].origin + ( rays[i].direction) * test.second);
              contacts.push_back( contact );
            }
          }
    
    
          // Hard-coded camera pose: rotation matrix R ...
          Mat rotationMatrix( 3, 3, CV_32F );
    
          rotationMatrix.at<float>( 0, 0 ) = 0.9115078799790896;
          rotationMatrix.at<float>( 0, 1 ) = -0.1883612409043686;
          rotationMatrix.at<float>( 0, 2 ) = -0.3656137684237178;
          rotationMatrix.at<float>( 1, 0 ) = -0.3046835686704949;
          rotationMatrix.at<float>( 1, 1 ) = 0.2878667580409447;
          rotationMatrix.at<float>( 1, 2 ) = -0.9079100465339108;
          rotationMatrix.at<float>( 2, 0 ) = 0.2762631132059388;
          rotationMatrix.at<float>( 2, 1 ) = 0.9389636694462479;
          rotationMatrix.at<float>( 2, 2 ) = 0.2050022432604093;
    
          cv::Mat rVec( 3, 1, CV_32F ); // Rotation vector
          // Convert R to the compact axis-angle vector projectPoints expects.
          Rodrigues( rotationMatrix, rVec );
          double norm = cv::norm( rVec );
    
          // NOTE(review): 3.14567898726 is not pi (pi ≈ 3.14159265), so this
          // degree conversion is slightly wrong; also note that both 'norm'
          // and 'theta' are computed but never used below.
          float theta = (float)(sqrt( rVec.at<float>(0)*rVec.at<float>( 0 ) + rVec.at<float>( 1 )*rVec.at<float>( 1 ) + rVec.at<float>( 2 )*rVec.at<float>( 2 ) ) * 180 / 3.14567898726);
    
          cv::Mat tVec( 3, 1, CV_32F ); // Translation vector
          tVec.at<float>( 0 ) = 21.408294677734375;
          tVec.at<float>( 1 ) = 531.1319580078125;
          tVec.at<float>( 2 ) = 705.74224853515625;
    
          // All-zero distortion coefficients: lens distortion is ignored.
          cv::Mat distCoeffs( 5, 1, CV_32F );   // Distortion vector
          distCoeffs.at<float>( 0 ) = 0;
          distCoeffs.at<float>( 1 ) = 0;
          distCoeffs.at<float>( 2 ) = 0;
          distCoeffs.at<float>( 3 ) = 0;
          distCoeffs.at<float>( 4 ) = 0;
    
          std::vector<cv::Point2d> projectedPoints;
          std::vector < cv::Point3d> ContactPoints;
    
          // Repackage the Vec3f contacts as Point3d for projectPoints.
          for ( int i = 0; i < contacts.size(); i++ )
          {
            cv::Point3d pt;
    
            pt.x = contacts[i][0];
            pt.y = contacts[i][1];
            pt.z = contacts[i][2];
    
            ContactPoints.push_back( pt );
          }
    
    
          // Project the 3D contact points back to the image: applies
          // [R|t] (rVec, tVec) followed by the intrinsics K above.
          cv::projectPoints( ContactPoints, rVec, tVec, cameraIntrinsics, distCoeffs, projectedPoints );
    
          // Draw each projected point as a filled magenta circle.
          for ( size_t i = 0; i < projectedPoints.size(); i++ )
          {
            cv::Point2d pt;
    
            pt.x = projectedPoints[i].x;
            pt.y = projectedPoints[i].y;
    
            cv::circle( src, pt, 10, cv::Scalar( 255, 0, 255 ), -1 );
          }
    
          imshow( "My window", src );
        }
      }
    
    // NOTE(review): single colon — 'cv:' parses as a goto label, so this is
    // actually an unqualified call to waitKey(0); should be 'cv::waitKey'.
    cv:waitKey( 0 );
      return 0;
    }
    

1 个答案:

答案 0 :(得分:0)

你的相机内参太小了：对任何物理上合理的针孔相机来说，1.6 像素的焦距都是没有意义的——这说明你的内参矩阵是归一化形式的，应先换算成像素单位再用于 projectPoints。