Environment
The task at hand
Recover the relative camera pose between image A and image B. Image B is expected to exhibit an induced rotation of 25 degrees about the Z axis relative to image A.
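For reference, in the axis convention I am assuming, a pure 25-degree rotation about the Z axis corresponds to the following matrix, so a correct decomposition should report Euler angles of roughly (0, 0, 25):

    Rz(25°) = [ cos 25°  -sin 25°  0 ]     [ 0.906  -0.423  0 ]
              [ sin 25°   cos 25°  0 ]  ≈  [ 0.423   0.906  0 ]
              [   0         0      1 ]     [ 0       0      1 ]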
Implementation
Option A: estimate the fundamental matrix with cv::findFundamentalMat, form the essential matrix as E = K' * F * K, and decompose it manually via SVD (yielding four candidate rotations R1..R4).
Option B: estimate the essential matrix directly with cv::findEssentialMat and decompose it with cv::decomposeEssentialMat and cv::recoverPose.
Results
Neither of the two options above recovers the relative camera pose correctly (the expected result is a 25-degree rotation about the Z axis).
Any help would be greatly appreciated.
Full code
// Includes added so the listing compiles stand-alone on MSVC (M_PI, _tmain, _ASSERT)
#define _USE_MATH_DEFINES
#include <cmath>
#include <iostream>
#include <vector>
#include <windows.h> // IN, OUT annotations
#include <tchar.h>   // _tmain, _TCHAR
#include <crtdbg.h>  // _ASSERT
#include <opencv2/opencv.hpp>

#define RAD2DEG(rad) (((rad) * 180)/M_PI)
#define DEG2RAD(deg) (((deg) * M_PI)/180)
#define FOV2FOCAL(pixelssensorsize, fov) ((pixelssensorsize) / (2 * tan((fov) / 2)))// http://books.google.co.il/books?id=bXzAlkODwa8C&pg=PA48&lpg=PA48&dq=expressing+focal+length+in+pixels&source=bl&ots=gY4972kxAC&sig=U1BUeNHhOHmYIrDrO0YDb1DrNng&hl=en&sa=X&ei=45dLU9u9DIyv7QbN2oGIDA&ved=0CGsQ6AEwCA#v=onepage&q=expressing%20focal%20length%20in%20pixels&f=false

// Decompose a rotation matrix into X/Y/Z Euler angles (radians).
// http://nghiaho.com/?page_id=846
void DecomposeRotation(IN const cv::Mat& R, OUT float& fX, OUT float& fY, OUT float& fZ) {// Taken from MatLab
    fX = (float)atan2(R.at<double>(2, 1), R.at<double>(2, 2));
    fY = (float)atan2(-R.at<double>(2, 0), sqrt(R.at<double>(2, 1)*R.at<double>(2, 1) + R.at<double>(2, 2)*R.at<double>(2, 2)));
    fZ = (float)atan2(R.at<double>(1, 0), R.at<double>(0, 0));
}
int _tmain(int argc, _TCHAR* argv[])
{
    // 25 deg rotation in the Z axis (800x600)
    const cv::Point2f rotZ0[] = { { 109, 250 }, { 175, 266 }, { 204, 279 }, { 221, 253 }, { 324, 281 }, { 312, 319 }, { 328, 352 }, { 322, 365 } };
    const cv::Point2f rotZ25[] = { { 510, 234 }, { 569, 622 }, { 593, 278 }, { 616, 257 }, { 716, 303 }, { 698, 340 }, { 707, 377 }, { 697, 390 } };
    const cv::Point2f rotZminus15[] = { { 37, 260 }, { 106, 275 }, { 135, 286 }, { 152, 260 }, { 258, 284 }, { 248, 324 }, { 266, 356 }, { 260, 370 } };

    const double dFOV = DEG2RAD(45);
    const cv::Point2d res(800, 600);
    const cv::Point2d pntPriciplePoint(res.x / 2, res.y / 2);
    const cv::Point2d pntFocal(FOV2FOCAL(res.x, dFOV), FOV2FOCAL(res.y, dFOV));

    // Transfer the point arrays to the appropriate OpenCV structures
    const int numPoints = sizeof(rotZ0) / sizeof(rotZ0[0]);
    std::vector<cv::Point2f> vecPnt1(numPoints);
    std::vector<cv::Point2f> vecPnt2(numPoints);
    for (int i = 0; i < numPoints; i++) {
        vecPnt2[i] = rotZ0[i];
        //vecPnt2[i] = rotZminus15[i];
        vecPnt1[i] = rotZ25[i];
    }

    //// Normalize points
    //for (int i = 0; i < numPoints; i++) {
    //    vecPnt1[i].x = (vecPnt1[i].x - pntPriciplePoint.x) / pntFocal.x;
    //    vecPnt1[i].y = (vecPnt1[i].y - pntPriciplePoint.y) / pntFocal.y;
    //    vecPnt2[i].x = (vecPnt2[i].x - pntPriciplePoint.x) / pntFocal.x;
    //    vecPnt2[i].y = (vecPnt2[i].y - pntPriciplePoint.y) / pntFocal.y;
    //}
    try {
        // Intrinsic matrix K built from the assumed FOV and resolution
        // http://docs.opencv.org/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html
        cv::Mat matK = cv::Mat::zeros(3, 3, CV_64F);
        matK.at<double>(0, 0) = pntFocal.x;
        matK.at<double>(1, 1) = pntFocal.y;
        matK.at<double>(0, 2) = pntPriciplePoint.x;
        matK.at<double>(1, 2) = pntPriciplePoint.y;
        matK.at<double>(2, 2) = 1;

        float x, y, z;
        cv::Mat R1, R2, R3, R4;
        cv::Mat t;
        cv::Mat matE;
#if 1 // Option [A]
        cv::Mat matF = cv::findFundamentalMat(vecPnt1, vecPnt2);
        matE = matK.t() * matF * matK; // http://en.wikipedia.org/wiki/Essential_matrix
        cv::Mat _tmp;
        cv::Mat U;
        cv::Mat Vt;
        cv::SVD::compute(matE, _tmp, U, Vt);
        cv::Matx33d W(0, -1, 0,
                      1,  0, 0,
                      0,  0, 1);
        R1 = U*cv::Mat(W)*Vt; // See http://stackoverflow.com/questions/14150152/extract-translation-and-rotation-from-fundamental-matrix for details
        R2 = U*cv::Mat(W)*Vt.t();
        R3 = U*cv::Mat(W).t()*Vt;
        R4 = U*cv::Mat(W).t()*Vt.t();
#else // Option [B]
        matE = cv::findEssentialMat(vecPnt1, vecPnt2, pntFocal.x, pntPriciplePoint);// http://docs.opencv.org/trunk/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html
        cv::decomposeEssentialMat(matE, R1, R2, t);
        int iInliers = cv::recoverPose(matE, vecPnt1, vecPnt2, R4, t);// , pntFocal.x, pntPriciplePoint);
        R3 = cv::Mat::zeros(3, 3, CV_64F);
#endif
        DecomposeRotation(R1, x, y, z);
        std::cout << "Euler Angles R1 (X,Y,Z): " << RAD2DEG(x) << ", " << RAD2DEG(y) << ", " << RAD2DEG(z) << std::endl;
        DecomposeRotation(R2, x, y, z);
        std::cout << "             R2 (X,Y,Z): " << RAD2DEG(x) << ", " << RAD2DEG(y) << ", " << RAD2DEG(z) << std::endl;
        DecomposeRotation(R3, x, y, z);
        std::cout << "             R3 (X,Y,Z): " << RAD2DEG(x) << ", " << RAD2DEG(y) << ", " << RAD2DEG(z) << std::endl;
        DecomposeRotation(R4, x, y, z);
        std::cout << "             R4 (X,Y,Z): " << RAD2DEG(x) << ", " << RAD2DEG(y) << ", " << RAD2DEG(z) << std::endl;

        //cv::Mat res = matFrom.t() * matF * matTo;// Results in a null vector ( as it should ) http://en.wikipedia.org/wiki/Fundamental_matrix_(computer_vision)
        //res = matFrom.t() * matE * matTo;// Results in a null vector ( as it should )
    }
    catch (const cv::Exception& e) {
        std::cerr << e.what() << std::endl;
        _ASSERT(FALSE);
    }
    return 0;
}
Execution results
Option A:
Euler Angles R1 (X,Y,Z): -26.2625, 8.70029, 163.643
             R2 (X,Y,Z): 16.6929, -29.9901, -3.81642
             R3 (X,Y,Z): 5.59033, -20.841, -19.9316
             R4 (X,Y,Z): -5.76906, 7.25413, -179.086
Option B:
Euler Angles R1 (X,Y,Z): -13.8355, 3.0098, 171.451
             R2 (X,Y,Z): 2.22802, -22.3479, -11.332
             R3 (X,Y,Z): 0, -0, 0
             R4 (X,Y,Z): 2.22802, -22.3479, -11.332
Answer 0 (score: 2)
First of all, calibrate your camera instead of using predefined values; it always makes a big difference. A relative pose computed by the 8-point or 5-point algorithm suffers from a lot of noise and is by no means the final answer. That said, it is a good idea to reconstruct the points and then bundle-adjust the whole scene. Optimize your extrinsics and you should come up with a better pose.
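A minimal sketch of the reconstruct-and-check step this suggests (EvaluatePose is an illustrative helper of mine, not an OpenCV function): triangulate the correspondences under a candidate (R, t), count how many points land in front of both cameras (the cheirality test), and report the mean reprojection error in image 2. The physically valid candidate among R1..R4 is the one with all points in front and a low error; a full bundle adjustment (e.g. with Ceres Solver) would then refine R, t and the 3D points jointly.

#include <opencv2/opencv.hpp>
#include <iostream>
#include <vector>
#include <cmath>

double EvaluatePose(const cv::Mat& K, const cv::Mat& R, const cv::Mat& t,
                    const std::vector<cv::Point2f>& pts1,
                    const std::vector<cv::Point2f>& pts2)
{
    // Camera 1 at the origin, camera 2 at (R, t)
    cv::Mat P1 = K * cv::Mat::eye(3, 4, CV_64F);
    cv::Mat Rt;
    cv::hconcat(R, t, Rt);           // [R | t], 3x4
    cv::Mat P2 = K * Rt;

    cv::Mat points4D;
    cv::triangulatePoints(P1, P2, pts1, pts2, points4D);
    points4D.convertTo(points4D, CV_64F);

    int inFront = 0;
    double err = 0;
    for (int i = 0; i < points4D.cols; i++) {
        cv::Mat X = points4D.col(i) / points4D.at<double>(3, i); // homogeneous -> w = 1
        cv::Mat Xc2 = Rt * X;                                    // point in camera-2 frame
        if (X.at<double>(2) > 0 && Xc2.at<double>(2) > 0)        // cheirality: positive depth in both views
            inFront++;
        cv::Mat x2 = P2 * X;                                     // reproject into image 2
        double u = x2.at<double>(0) / x2.at<double>(2);
        double v = x2.at<double>(1) / x2.at<double>(2);
        double du = u - pts2[i].x, dv = v - pts2[i].y;
        err += std::sqrt(du * du + dv * dv);
    }
    std::cout << inFront << "/" << points4D.cols << " points in front of both cameras" << std::endl;
    return err / points4D.cols;      // mean reprojection error in pixels
}

Calling this once per candidate rotation (with the recovered t) and keeping the candidate that passes the cheirality test is essentially what cv::recoverPose does internally in Option B.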
Answer 1 (score: 0)
I think the reason none of your results equals 25 degrees is the coordinate system. The coordinate system in which the camera rotates 25 degrees about the z axis is one you defined yourself. You can picture the camera pose as a vector pointing into the scene; the R matrix decomposed from the essential matrix describes the rotation of that vector, and the result is expressed in a different coordinate system. You can verify my assumption by checking whether the two camera vectors really form a 25-degree angle.
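A quick way to run this check (an illustrative sketch; the helper name is mine, and it reuses the RAD2DEG macro from the question's code): measure the angle between the two optical axes directly, which is independent of any Euler-angle convention. Camera 2 looks along (0,0,1) in its own frame; camera 1's optical axis mapped into camera 2's frame is R*(0,0,1)^T, the third column of R, so the cosine of the angle is simply R(2,2).

#include <algorithm>
#include <cmath>

double AngleBetweenOpticalAxes(const cv::Mat& R)
{
    double cosAngle = R.at<double>(2, 2);               // dot((0,0,1), R*(0,0,1)^T)
    cosAngle = std::max(-1.0, std::min(1.0, cosAngle)); // clamp for numerical safety
    return RAD2DEG(acos(cosAngle));
}

If the motion is really a 25-degree pan, this should return about 25 for the correct candidate; if it were a roll about the optical axis itself, it would return about 0, which by itself would tell you which axis the motion is actually around in OpenCV's camera coordinates.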