Computing real-world x-y coordinates from an image

Asked: 2017-10-20 13:31:00

Tags: opencv computer-vision camera-calibration pose-estimation

I'm trying to get real-world x-y coordinates from an image used in a robotics project. After a lot of research online, I found code posted here on SO that is said to work well. I used it as-is, only replacing the camera parameters and the four point correspondences. Here is the code:

#include "opencv2/opencv.hpp"
#include <stdio.h>
#include <iostream>
#include <sstream>
#include <math.h>

using namespace cv;
using namespace std;

vector<Point2f> imagePoints;   // clicked pixel positions of the four corners
vector<Point3f> objectPoints;  // matching world positions, in millimeters

int main()
{
    // the four clicked image points, in the same order as the object points below
    imagePoints.push_back(Point2f(150., 180.));
    imagePoints.push_back(Point2f(30., 365.));
    imagePoints.push_back(Point2f(604., 365.));
    imagePoints.push_back(Point2f(465., 180.));

    // matching world coordinates on the z = 0 plane, in millimeters
    objectPoints.push_back(Point3f(0., 0., 0.));
    objectPoints.push_back(Point3f(0., 320., 0.));
    objectPoints.push_back(Point3f(320., 320., 0.));
    objectPoints.push_back(Point3f(320., 0., 0.));

    // intrinsics and distortion coefficients from a prior calibration
    Mat cameraMatrix = (Mat_<double>(3, 3) <<
        6.5409089313379638e+02, 0., 320.,
        0., 6.5409089313379638e+02, 240.,
        0., 0., 1.);

    Mat distCoeffs = (Mat_<double>(5, 1) <<
        8.1771217385200712e-02, -2.5852666755450442e+00, 0., 0.,
        1.0382795831648963e+01);

    Mat rvec(1, 3, CV_64F);
    Mat tvec(1, 3, CV_64F);
    Mat rotationMatrix(3, 3, CV_64F);

    // recover the board pose, then expand rvec into a full rotation matrix
    solvePnP(objectPoints, imagePoints, cameraMatrix, distCoeffs, rvec, tvec);
    Rodrigues(rvec, rotationMatrix);

    // homogeneous pixel (u, v, 1); Mat::ones guarantees the third element
    // is 1, where an uninitialized Mat_ would leave it undefined
    Mat uvPoint = Mat::ones(3, 1, CV_64F);
    uvPoint.at<double>(0, 0) = 150.;
    uvPoint.at<double>(1, 0) = 180.;
    // solve for the projective scale s that places the back-projected
    // point on the known plane z = 0 (see the formula after the code)
    Mat tempMat, tempMat2;
    double s;
    tempMat = rotationMatrix.inv() * cameraMatrix.inv() * uvPoint;
    tempMat2 = rotationMatrix.inv() * tvec;

    s = 0 + tempMat2.at<double>(2, 0); // 0 is the known height of the plane
    s /= tempMat.at<double>(2, 0);
    cout << "P = " << rotationMatrix.inv() * (s * cameraMatrix.inv() * uvPoint - tvec) << endl;
    return 0;
}
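For reference, the back-projection this code implements follows from the pinhole model. With intrinsics K and pose (R, t) from solvePnP, a pixel (u, v) and its world point P are related by

s * [u, v, 1]^T = K * (R * P + t)

so the world point is recovered as

P = R^(-1) * (s * K^(-1) * [u, v, 1]^T - t)

where the scale s is chosen so that the z-component of P equals the known plane height Z (0 here):

s = (Z + (R^(-1) * t)_z) / ((R^(-1) * K^(-1) * [u, v, 1]^T)_z)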

The problem is that when I test it with the known point (150, 180), which should map back to the origin (0, 0, 0), I instead get P = [340.5995534946562; 730.955008122653; -5.684341886080801e-14]. Any help with this would be appreciated, as I need to solve it to build my system.
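A minimal way to narrow this down (my own sanity-check sketch, not from the original post): re-project the four object points with OpenCV's projectPoints and compare the results against the clicked pixels. If they match, solvePnP recovered a good pose and the error is in the manual back-projection step, where two things are worth checking: the third element of uvPoint must actually be 1, and cameraMatrix.inv() * uvPoint does not undistort the pixel, even though solvePnP did use distCoeffs.

#include "opencv2/opencv.hpp"
#include <iostream>

using namespace cv;
using namespace std;

int main()
{
    // same correspondences and calibration as in the question
    vector<Point3f> objectPoints = { {0, 0, 0}, {0, 320, 0}, {320, 320, 0}, {320, 0, 0} };
    vector<Point2f> imagePoints  = { {150, 180}, {30, 365}, {604, 365}, {465, 180} };

    Mat cameraMatrix = (Mat_<double>(3, 3) <<
        6.5409089313379638e+02, 0., 320.,
        0., 6.5409089313379638e+02, 240.,
        0., 0., 1.);
    Mat distCoeffs = (Mat_<double>(5, 1) <<
        8.1771217385200712e-02, -2.5852666755450442e+00, 0., 0.,
        1.0382795831648963e+01);

    Mat rvec, tvec;
    solvePnP(objectPoints, imagePoints, cameraMatrix, distCoeffs, rvec, tvec);

    // each re-projected point should land close to the clicked pixel
    // if the recovered pose is sound
    vector<Point2f> reprojected;
    projectPoints(objectPoints, rvec, tvec, cameraMatrix, distCoeffs, reprojected);
    for (size_t i = 0; i < reprojected.size(); ++i)
        cout << imagePoints[i] << " vs " << reprojected[i] << endl;
    return 0;
}

If the pose checks out, the usual next step is to undistort (u, v) with undistortPoints before the K^(-1) multiplication, so the manual back-projection and solvePnP treat distortion consistently.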

0 Answers:

No answers yet