How can I do an inverse DFT in OpenCV using the magnitude and phase of an image?

Asked: 2013-12-17 12:54:03

Tags: c++ opencv

I want to show that the phase of an image conveys more information than its magnitude, so I want to swap the magnitudes of two images and then take the inverse DFT. Here is the code:

int main()
{
    Mat I1 = imread("lena.jpg", CV_LOAD_IMAGE_GRAYSCALE);
    Mat I2 = imread("peppers.png", CV_LOAD_IMAGE_GRAYSCALE);

    Mat padded1, padded2;

    // expand the input images to optimal size
    int m1 = getOptimalDFTSize( I1.rows );
    int n1 = getOptimalDFTSize( I1.cols );

    int m2 = getOptimalDFTSize( I2.rows );
    int n2 = getOptimalDFTSize( I2.cols );

    // on the border add zero values
    copyMakeBorder(I1, padded1, 0, m1 - I1.rows, 0, n1 - I1.cols, BORDER_CONSTANT, Scalar::all(0));
    copyMakeBorder(I2, padded2, 0, m2 - I2.rows, 0, n2 - I2.cols, BORDER_CONSTANT, Scalar::all(0));

    Mat planes1[] = {Mat_<float>(padded1), Mat::zeros(padded1.size(), CV_32F)};
    Mat planes2[] = {Mat_<float>(padded2), Mat::zeros(padded2.size(), CV_32F)};
    Mat complexI, complexII;

    // add to each expanded image another plane with zeros
    merge(planes1, 2, complexI);
    merge(planes2, 2, complexII);

    dft(complexI, complexI);
    dft(complexII, complexII);

    // compute the magnitude and phase
    // => magnitude: sqrt(Re(DFT(I))^2 + Im(DFT(I))^2), phase: atan2(Im(DFT(I)), Re(DFT(I)))
    split(complexI, planes1);                 // planes1[0] = Re(DFT(I1)), planes1[1] = Im(DFT(I1))
    Mat ph1, magI1;
    phase(planes1[0], planes1[1], ph1);       // ph1 = phase
    magnitude(planes1[0], planes1[1], magI1); // magI1 = magnitude
    magI1 = magI1(Rect(0, 0, magI1.cols & -2, magI1.rows & -2));
    ph1 = ph1(Rect(0, 0, ph1.cols & -2, ph1.rows & -2));

    split(complexII, planes2);                // planes2[0] = Re(DFT(I2)), planes2[1] = Im(DFT(I2))
    Mat ph2, magI2;
    phase(planes2[0], planes2[1], ph2);       // ph2 = phase
    magnitude(planes2[0], planes2[1], magI2); // magI2 = magnitude
    magI2 = magI2(Rect(0, 0, magI2.cols & -2, magI2.rows & -2));
    ph2 = ph2(Rect(0, 0, ph2.cols & -2, ph2.rows & -2));

    planes1[1] = ph1; planes1[0] = magI2;
    planes2[1] = ph2; planes2[0] = magI1;

    dft(complexI, complexI, DFT_INVERSE);
    dft(complexII, complexII, DFT_INVERSE);
    imshow("image", complexI);
    waitKey();
}

I just merge the magnitude and phase together and then take the IDFT, and the result seems completely wrong.

2 Answers:

Answer 0 (score: 1)

I suspect your problem is that something in your DFT round trip is not working. Try the following code (after adding your plane splitting) and see whether it works. The images must come back exactly the same. If something else is wrong, please show your images and results; maybe your code is correct and you are simply expecting the wrong thing.

Here is a working example:

// Load the two images
Mat I1 = imread("lena.jpg", CV_LOAD_IMAGE_GRAYSCALE);
Mat I2 = imread("peppers.png", CV_LOAD_IMAGE_GRAYSCALE);

Mat fI1;
Mat fI2;
I1.convertTo(fI1, CV_32F);
I2.convertTo(fI2, CV_32F);

//Perform DFT
Mat fourierTransform1;
Mat fourierTransform2;
dft(fI1, fourierTransform1, DFT_SCALE|DFT_COMPLEX_OUTPUT);
dft(fI2, fourierTransform2, DFT_SCALE|DFT_COMPLEX_OUTPUT);

// your plane splitting, magnitude/phase swap, and everything else go here

//Perform IDFT
Mat inverseTransform1;
Mat inverseTransform2;
dft(fourierTransform1, inverseTransform1, DFT_INVERSE|DFT_REAL_OUTPUT);
dft(fourierTransform2, inverseTransform2, DFT_INVERSE|DFT_REAL_OUTPUT);  

Mat result1;
Mat result2;
inverseTransform1.convertTo(result1, CV_8U);
inverseTransform2.convertTo(result2, CV_8U);
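
To confirm that the round trip really reproduces the originals, one could append a quick check (not part of the answer's snippet; cv::norm with NORM_INF and CV_Assert are standard OpenCV calls, and I1/I2/result1/result2 refer to the variables above):

// With DFT_SCALE on the forward pass, the inverse should give back the input;
// allow at most one gray level of difference for floating-point rounding.
CV_Assert(norm(I1, result1, NORM_INF) <= 1.0);
CV_Assert(norm(I2, result2, NORM_INF) <= 1.0);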

Answer 1 (score: 1)

You cannot combine the magnitude and phase with the cv::merge() function; use cv::polarToCart() instead.
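
The core of the fix is to convert each (magnitude, phase) pair back into real and imaginary planes with cv::polarToCart() and only then merge those two planes into a complex spectrum. In essence (a minimal excerpt; mag1, ph2, planes1 and fourierTransform1 are named as in the full program below):

// Re = mag * cos(phase), Im = mag * sin(phase)
polarToCart(mag1, ph2, planes1[0], planes1[1]);
// now the planes really are Re/Im and can be merged into a complex spectrum
merge(planes1, 2, fourierTransform1);

Here is the full code that does what you want: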

using namespace cv;

// Rearrange the quadrants of a Fourier image so that the origin is at
// the image center
void shiftDFT(Mat &fImage )
{
    Mat tmp, q0, q1, q2, q3;

    // first crop the image, if it has an odd number of rows or columns

    fImage = fImage(Rect(0, 0, fImage.cols & -2, fImage.rows & -2));

    int cx = fImage.cols / 2;
    int cy = fImage.rows / 2;

    // rearrange the quadrants of Fourier image
    // so that the origin is at the image center

    q0 = fImage(Rect(0, 0, cx, cy));
    q1 = fImage(Rect(cx, 0, cx, cy));
    q2 = fImage(Rect(0, cy, cx, cy));
    q3 = fImage(Rect(cx, cy, cx, cy));

    q0.copyTo(tmp);
    q3.copyTo(q0);
    tmp.copyTo(q3);

    q1.copyTo(tmp);
    q2.copyTo(q1);
    tmp.copyTo(q2);
}

int main()
{
    // Load the two images
    Mat I1 = imread("lena.jpg", CV_LOAD_IMAGE_GRAYSCALE);
    Mat I2 = imread("pepper.jpg", CV_LOAD_IMAGE_GRAYSCALE);

    Mat fI1;
    Mat fI2;
    I1.convertTo(fI1, CV_32F);
    I2.convertTo(fI2, CV_32F);

    // expand both input images to a common optimal size
    // (the same m x n is used for both so that the two spectra can be swapped;
    //  this assumes I2 is not larger than I1)
    int m = getOptimalDFTSize( I1.rows );
    int n = getOptimalDFTSize( I1.cols );

    Mat padded1, padded2;

    // on the border add zero values
    copyMakeBorder(fI1, padded1, 0, m - I1.rows, 0, n - I1.cols, BORDER_CONSTANT, Scalar::all(0));
    copyMakeBorder(fI2, padded2, 0, m - I2.rows, 0, n - I2.cols, BORDER_CONSTANT, Scalar::all(0));

    //Perform DFT
    Mat fourierTransform1;
    Mat fourierTransform2;
    Mat planes1[2], planes2[2];

    // use the padded (equal-size) images so the two spectra can be swapped later
    dft(padded1, fourierTransform1, DFT_SCALE|DFT_COMPLEX_OUTPUT);
    dft(padded2, fourierTransform2, DFT_SCALE|DFT_COMPLEX_OUTPUT);

    shiftDFT(fourierTransform1);
    shiftDFT(fourierTransform2);

    split(fourierTransform1, planes1);// planes[0] = Re(DFT(I)), planes[1] = Im(DFT(I))
    split(fourierTransform2, planes2);// planes[0] = Re(DFT(I)), planes[1] = Im(DFT(I))

    // cartToPolar allocates its outputs, so no explicit initialization is needed
    Mat ph1, mag1;
    cartToPolar(planes1[0], planes1[1], mag1, ph1);

    Mat ph2, mag2;
    cartToPolar(planes2[0], planes2[1], mag2, ph2);

    // swap: image 1's magnitude with image 2's phase, and vice versa
    // (Re = mag * cos(phase), Im = mag * sin(phase))
    polarToCart(mag1, ph2, planes1[0], planes1[1]);
    polarToCart(mag2, ph1, planes2[0], planes2[1]);

    merge(planes1, 2, fourierTransform1);
    merge(planes2, 2, fourierTransform2);

    shiftDFT(fourierTransform1);
    shiftDFT(fourierTransform2);

    //Perform IDFT
    Mat inverseTransform1, inverseTransform2;

    dft(fourierTransform1, inverseTransform1, DFT_INVERSE|DFT_REAL_OUTPUT);
    dft(fourierTransform2, inverseTransform2, DFT_INVERSE|DFT_REAL_OUTPUT);

    namedWindow("original image 1");
    imshow("original image 1", I1);
    namedWindow("original image 2");
    imshow("original image 2", I2);
    waitKey(0);

    cv::Mat out1, out2;
    inverseTransform1.convertTo(out1, CV_8U);
    inverseTransform2.convertTo(out2, CV_8U);

    namedWindow("result image 1");
    imshow("result image 1", out1);
    namedWindow("result image 2");
    imshow("result image 2", out2);
    waitKey(0); 
}
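
One practical note (an optional tweak, not part of the code above): after the magnitudes are swapped, the reconstructed pixel values are no longer guaranteed to stay within [0, 255], and convertTo(CV_8U) simply saturates anything outside that range. If the results look clipped, the reconstructions can be rescaled before the 8-bit conversion, for example:

// Stretch each reconstruction to [0, 255] instead of clipping out-of-range values.
normalize(inverseTransform1, inverseTransform1, 0, 255, NORM_MINMAX);
normalize(inverseTransform2, inverseTransform2, 0, 255, NORM_MINMAX);
inverseTransform1.convertTo(out1, CV_8U);
inverseTransform2.convertTo(out2, CV_8U);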