I am currently trying to use the Fisheye.Calibrate method and the Fisheye.UndistorImage method from the Emgu.CV library. As far as I understand, the Calibrate method computes the camera matrix (K) and the distortion vector (D), which are then used by the UndistorImage method to undistort a fisheye image. However, when I use these two methods, the result is not convincing. This is the input image I am testing with: fisheye input image, and this is the result: fisheye output image.
When I try to inspect the values of K and D through the objects' data fields, both show up as "empty", so I am not sure whether I am using the Calibrate() method correctly. My code is as follows:
private void EmguCVUndistortFisheye()
{
    string[] fileNames = Directory.GetFiles(@"C:\Users\Test\Desktop\Jakob\ImageAnalysis\Images\Calibration", "*.png");
    Size patternSize = new Size(6, 8);
    VectorOfVectorOfPoint3D32F objPoints = new VectorOfVectorOfPoint3D32F();
    VectorOfVectorOfPointF imagePoints = new VectorOfVectorOfPointF();

    foreach (string file in fileNames)
    {
        Mat img = CvInvoke.Imread(file, ImreadModes.Grayscale);
        CvInvoke.Imshow("input", img);

        VectorOfPointF corners = new VectorOfPointF(patternSize.Width * patternSize.Height);
        bool find = CvInvoke.FindChessboardCorners(img, patternSize, corners);
        if (find)
        {
            MCvPoint3D32f[] points = new MCvPoint3D32f[patternSize.Width * patternSize.Height];
            int loopIndex = 0;
            for (int i = 0; i < patternSize.Height; i++)
            {
                for (int j = 0; j < patternSize.Width; j++)
                    points[loopIndex++] = new MCvPoint3D32f(j, i, 0);
            }
            objPoints.Push(new VectorOfPoint3D32F(points));
            imagePoints.Push(corners);
        }
    }

    Size imageSize = new Size(1280, 1024);
    Mat K = new Mat();
    Mat D = new Mat();
    Mat rotation = new Mat();
    Mat translation = new Mat();

    Fisheye.Calibrate(
        objPoints,
        imagePoints,
        imageSize,
        K,
        D,
        rotation,
        translation,
        Fisheye.CalibrationFlag.CheckCond,
        new MCvTermCriteria(30, 0.1)
    );

    foreach (string file in fileNames)
    {
        Mat img = CvInvoke.Imread(file, ImreadModes.Grayscale);
        Mat output = img.Clone();
        Fisheye.UndistorImage(img, output, K, D);
        CvInvoke.Imshow("output", output);
    }
}
Is the strange result caused by passing wrong parameters to the Calibrate method, or is it simply because I am not using enough input images?
Answer 0 (score: 0)
This looks similar to a problem I ran into recently where a Matrix was needed rather than a Mat when passing into the calibration function, and, as you found, it fails without reporting any errors. I believe you need the following:
var K = new Matrix<double>(3, 3);
var D = new Matrix<double>(4, 1);
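With K and D declared like that, the rest of your calibration call can stay the same. Here is an untested sketch of what I mean, reusing the objPoints, imagePoints, imageSize, rotation, translation, img and output variables from your method:
// Same call as in your code, but K and D are now typed Matrix<double> outputs,
// so Fisheye.Calibrate can actually fill them instead of leaving them empty.
Fisheye.Calibrate(
    objPoints,
    imagePoints,
    imageSize,
    K,
    D,
    rotation,
    translation,
    Fisheye.CalibrationFlag.CheckCond,
    new MCvTermCriteria(30, 0.1)
);
// The filled K and D are then used for the undistortion step as before.
Fisheye.UndistorImage(img, output, K, D);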
Also note that passing in Mat for the rotation and translation vectors is fine if you just want to retrieve them, but if you want to perform calculations on them you will probably need to convert back to Matrix. I was only using normal camera calibration rather than fisheye, but the following working snippet may be useful for understanding this:
var cameraMatrix = new Matrix<double>(3, 3);
var distortionCoeffs = new Matrix<double>(4, 1);
var termCriteria = new MCvTermCriteria(30, 0.1);
System.Drawing.PointF[][] imagePoints = imagePointsList.Select(p => p.ToArray()).ToArray();
MCvPoint3D32f[][] worldPoints = worldPointsList.Select(p => p.ToArray()).ToArray();
double error = CvInvoke.CalibrateCamera(worldPoints, imagePoints, imageSize, cameraMatrix, distortionCoeffs, CalibType.RationalModel, termCriteria, out Mat[] rotationVectors, out Mat[] translationVectors);
var rotation = new Matrix<double>(rotationVectors[0].Rows, rotationVectors[0].Cols, rotationVectors[0].DataPointer);
var translation = new Matrix<double>(translationVectors[0].Rows, translationVectors[0].Cols, translationVectors[0].DataPointer);
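If you then need to do calculations with the per-view rotations, one option (a small sketch, not part of my original snippet) is to expand the first rotation vector into a full 3x3 rotation matrix with CvInvoke.Rodrigues:
// Convert the 3x1 rotation vector of the first view into a 3x3 rotation matrix.
var rotationMatrix = new Matrix<double>(3, 3);
CvInvoke.Rodrigues(rotation, rotationMatrix);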