我最近安装了Opencv-2.4.6 我写了一个用于鸟瞰视图转换的代码(它不是最终的代码)。 使用早期版本的Opencv(2.4.3),它正在执行。
现在我运行它时遇到了错误。
加载共享库时出现错误：**libopencv_core.so.2.4：无法打开共享对象文件：没有此类文件或目录**。没有编译错误。
代码是:
// This code will take undistorted images as input and give the bird's eye view using them
// First we need to calculate the homography matrix
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/core/core.hpp"
#include "opencv2/imgproc/imgproc_c.h"
#include <opencv2/imgproc/imgproc.hpp>
#include <iostream>
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#define heightBirdEyeView 800
#define widthBirdEyeView 800 //earlier was 300 300
using namespace cv;
using namespace std;
//global/
//camera parameters input
//resolution values
/*
float resolution_x=50, resolution_y=50;
//camera height and tilt
float height_camera = 1.25;
float tilt_camera=12;
//focal length in x and y
float focal_length_x = 354.05700;
float focal_length_y = 353.65297;*/
//intensity finding function
// find the intensity for the transformed point
/*float findintensity(float corrected_x,float corrected_y)
{
int intensity;
if((corrected_x>=1&&corrected_x<=widthBirdEyeView)&&(corrected_y>=1&&corrected_y<=heightBirdEyeView))
intensity=(1- (corrected_y-floor(corrected_y)))*(1-(corrected_x-floor(corrected_x)))*(undistor_img.at<uchar>(floor(corrected_y),floor(corrected_x)))+(1-( corrected_y-floor(corrected_y)))*(corrected_x-floor(corrected_x))*(undistor_img.at<uchar>(floor(corrected_y), ceil(corrected_x)))+(corrected_y-floor(corrected_y))*(1-(corrected_x-floor(corrected_x)))*(undistor_img.at<uchar>(ceil(corrected_y),floor(corrected_x)))+(corrected_y-floor(corrected_y))*(corrected_x-floor(corrected_x))*(undistor_img.at<uchar>(ceil(corrected_y), ceil(corrected_x)));
else
intensity=0;
return intensity;
}*/
int main(int argc, char** argv)
{
//loading the undistorted image
Mat undistor_img=imread(argv[1], 1);
namedWindow("undistorted image");
int intensity_change=0;
Mat undistor_img_hsv;
cvtColor( undistor_img,undistor_img_hsv, CV_RGB2HSV);
imshow("undistorted image", undistor_img);
imshow("hsv image",undistor_img_hsv);
cout<<"size="<<undistor_img_hsv.rows<<" "<<undistor_img_hsv.cols<<endl; // for obs7.jpg the columns are 220 and the rows are 165
//cout<<"undistorted image="<<undistor_img_hsv<<endl;
// erode the image
Mat eroded;
erode(undistor_img_hsv,eroded,Mat());
imshow("eroded",eroded);
//dilate the image
Mat dilated;
dilate(eroded,dilated,Mat());
imshow("dilated",dilated);
Mat output;
resize(undistor_img_hsv,output,cv::Size(heightBirdEyeView,widthBirdEyeView));// this will be having the orthogonal transform
int i,j;
for(i=0;i<=heightBirdEyeView;i++)
{
for(j=0;j<widthBirdEyeView;j++)
output.at<uchar>(i,j)=0;
}
imshow("output",output);
//should have size as that of height and width of the bird eye view
//camera parameters input
//resolution values
float resolution_x=50, resolution_y=50;
//camera height and tilt
float height_camera = 1.25;
float tilt_camera=12;
//focal length in x and y
float focal_length_x = 354.05700;
float focal_length_y = 353.65297;
//generate transformation matrix
float H1[3][3]={resolution_x,0,widthBirdEyeView/2+1,
0,-1*resolution_y,heightBirdEyeView,
0,0,1};
Mat transformation_matrix(3,3,CV_32FC1,H1);
cout<<"transformation matrix="<<endl<<transformation_matrix<<endl<<endl;
//generate top view matrix
float H2[3][3]={height_camera/focal_length_x,0,0,
0,0,height_camera,
0,cos(tilt_camera)/focal_length_y,sin(tilt_camera)};
Mat topview_matrix(3,3,CV_32FC1,H2);
cout<<"topview matrix="<<endl<<topview_matrix<<endl<<endl;
//generate scale matrix
float H3[3][3]={1,0,undistor_img.rows,
0,1,undistor_img.rows,
0,0,1};
Mat scale_matrix(3,3,CV_32FC1,H3);
cout<<"scale matrix="<<endl<<scale_matrix<<endl<<endl;
//generate homography matrix
Mat homography_matrix=transformation_matrix*topview_matrix/scale_matrix;
cout<<"homography matrix="<<endl<<homography_matrix<<endl<<endl;
Mat transpose_homography_matrix =homography_matrix;
Mat temp_matrix =homography_matrix;
//cout<<"temp_matrix=" <<endl<<temp_matrix<<endl<<endl;
//transpose of homography matrix
for (int i=0;i<3;i++)
{
for (int j=0;j<3;j++)
transpose_homography_matrix.at<float>(i,j)=temp_matrix.at<float>(j,i);
}
//cout<<"transpose homography matrix="<<endl<<transpose_homography_matrix<<endl<<endl;
float bev_zero[heightBirdEyeView][widthBirdEyeView]; //bev_zero will be the new image matrix
for(int i=0;i<heightBirdEyeView;i++)
{
for(int j=0;j<widthBirdEyeView;j++)
bev_zero[i][j]=0;
}
Mat new_point_matrix; // this 3x1 matrix will be used to give new point for all old points in old image
//(undistor_img_hsv.rows,undistor_img_hsv.cols,CV_32FC1,bev_zero);
//cout<<endl<<new_point_matrix<<endl<<endl;
//new_point_matrix=
//Mat new_point_matrix;
float corrected_x,corrected_y;
Mat old_point_matrix;
//conversion from point to its new point
for(int k=0;k<undistor_img_hsv.cols;k++)
{
for(int l=0;l<undistor_img_hsv.rows;l++)
{
float point[3][1]={k,
l,
1};
Mat old_point_matrix(3,1,CV_32FC1,point);
//get the new points for new view
//corrected_x=bev_zero[0][0]/bev_zero[2][0];
//corrected_y=bev_zero[1][0]/bev_zero[2][0];
new_point_matrix=transpose_homography_matrix*old_point_matrix;
cout<<"old point="<<old_point_matrix<<",new point="<<new_point_matrix<<endl; // every old point in the old image has got a new corresponding point
//cout<<"new x="<<corrected_x<<", new y="<<corrected_y<<endl<<endl;
//cout<<bev_zero[0][0]<<bev_zero[1][0]<<endl<<endl;
//cout<<"new x="<<new_point_matrix.at<float>(0,0)/new_point_matrix.at<float>(2,0)<<endl;
//cout<<", new y="<<new_point_matrix.at<float>(1,0)/new_point_matrix.at<float>(2,0)<<endl;x
//new_point_matrix=new_point_matrix/new_point_matrix.at<float>(2,0);
//find corrected values
corrected_x=abs(new_point_matrix.at<float>(0,0)/new_point_matrix.at<float>(2,0));
corrected_y=abs(new_point_matrix.at<float>(1,0)/new_point_matrix.at<float>(2,0));
//cout<<"point= ("<<corrected_x<<","<<corrected_y<<")"<<endl;
//cout<<"old point= ("<<k<<","<<l<<")"<<endl;
cout<<"corrected_x="<<corrected_x<<endl<<"corrected_y="<<corrected_y<<endl;
float intensity; // to store intensity values
// based on the corrected values, find out the intensity points
//cout<<findintensity(corrected_x,corrected_y)<<endl;
if((corrected_x>=1&&corrected_x<=widthBirdEyeView)&&(corrected_y>=1&&corrected_y<=heightBirdEyeView))
{
intensity_change++;
intensity=(1- (corrected_y-floor(corrected_y)))*(1-(corrected_x-floor(corrected_x)))*(undistor_img.at<uchar>(floor(corrected_y),floor(corrected_x)))+(1-( corrected_y-floor(corrected_y)))*(corrected_x-floor(corrected_x))*(undistor_img.at<uchar>(floor(corrected_y), ceil(corrected_x)))+(corrected_y-floor(corrected_y))*(1-(corrected_x-floor(corrected_x)))*(undistor_img.at<uchar>(ceil(corrected_y),floor(corrected_x)))+(corrected_y-floor(corrected_y))*(corrected_x-floor(corrected_x))*(undistor_img.at<uchar>(ceil(corrected_y), ceil(corrected_x)));
}
else
intensity=0;
cout<<"intensity="<<intensity<<endl;
//cout<<new_point_matrix<<endl;
cout<<floor(new_point_matrix.at<float>(0,0))<<endl;
cout<<int(new_point_matrix.at<float>(1,0))<<endl;
// now I just need to give this intensity value to the corresponding point for the old point
//output.at<uchar>(floor(new_point_matrix.at<float>(0,0)),floor(new_point_matrix.at<float>(1,0)))=intensity;
//cout<<"value="<<new_point_matrix.at<uchar>(0,150);
//cout<<"value21="<<undistor_img.at<uchar>(0,150);
//cout<<"pixel intensity at this point="<<new_point_matrix<<endl;
cout<<endl;
}
}
//cout<<"intensity changed "<<intensity_change<<" times."<<endl;
//imshow("output",output);
//cout<<"old point matrix="<<old_point_matrix<<endl;
//cout<<"new point matrix="<<new_point_matrix<<endl;
//cout<<"pixel intensity at this point="<<new_point_matrix<<endl; //ERROR HERE
//cout<<"changed new point="<<new_point_matrix<<endl;
/*
Mat p_new(3,1,CV_32FC1); // this will give the coordinates in the bird's eye view
float corrected_x,corrected_y;
//float Floor_corrected_y,Floor_corrected_x,Ceil_corrected_x,Ceil_corrected_y,Ratio_corrected_x,Ratio_corrected_y;
int a=0,b=0;
// counters for if and else blocks
//now we need matrix with coordinates of the image plane, to be projected
for(int p=0; p<heightBirdEyeView;p++)
{
uchar* data= undistor_img.ptr<uchar>(p);
uchar* hdata= birdeyeview_img.ptr<uchar>(p);
for(int q=0;q<widthBirdEyeView;q++)
{
//birdeyeview_img.at<cv::Vec3b>(q,p)[0]=transpose_homography_matrix*undistor_img.at<cv::Vec3b>(q,p)[0];
//birdeyeview_img.at<cv::Vec3b>(q,p)[1]=transpose_homography_matrix*undistor_img.at<cv::Vec3b>(q,p)[1];
//birdeyeview_img.at<cv::Vec3b>(q,p)[2]=transpose_homography_matrix*undistor_img.at<cv::Vec3b>(q,p)[2];
//birdeyeview_img.at<uchar>(q,p)=transpose_homography_matrix*undistor_img.at<uchar>(q,p);
//birdeyeview_img.at<uchar>(q,p)=transpose_homography_matrix*int(undistor_img.at<int>(q,p));
//cout<<transpose_homography_matrix*undistor_img.at<int>(q,p)<<endl;
int M[]={q,p,1};
Mat p_old(3,1,CV_32FC1,M); //holding the positions in undistorted image
//cout<<transpose_homography_matrix*p_old<<endl;
p_new=transpose_homography_matrix*p_old;
//cout<<endl<<p_new;
//cout<<endl<<p_new.at<float>(0,0);
//cout<<endl<<p_new[1];
//cout<<endl<<p_new[2];
//cout<<endl<<cvmGet(p_new,0,0);
corrected_x=p_new.at<float>(0,0)/p_new.at<float>(2,0);
corrected_y=p_new.at<float>(1,0)/p_new.at<float>(2,0);
//cout<<"the pixel intensity to be assigned="<<(1- (corrected_y-floor(corrected_y)))*(1-(corrected_x-floor(corrected_x)))*((int)undistor_img.at<uchar>(floor(corrected_y),floor(corrected_x)))
// +(1-( corrected_y-floor(corrected_y)))*(corrected_x-floor(corrected_x))*((int)undistor_img.at<uchar>(floor(corrected_y), ceil(corrected_x)))
// +( corrected_y-floor(corrected_y))*(1-(corrected_x-floor(corrected_x)))*((int)undistor_img.at<uchar>(ceil(corrected_y),floor(corrected_x)))
// +( corrected_y-floor(corrected_y))*(corrected_x-floor(corrected_x))*((int)undistor_img.at<uchar>(ceil(corrected_y), ceil(corrected_x)))<<endl;
//cout<<"values to be greater than 1"<<corrected_x<<","<<corrected_y<<endl;
//cout<<"those should be less than"<<undistor_img.rows<<"and"<<undistor_img.cols<<endl;
//cout<<corrected_x<<" "<<corrected_y<<endl;
if (((abs(corrected_y)>=1)&&(corrected_y<=undistor_img.rows))&&((abs(corrected_x)>=1)&&(corrected_x<=undistor_img.cols)))
{// Floor_corrected_y = floor(corrected_y);
//Ceil_corrected_y = ceil(corrected_y);
//Floor_corrected_x = floor(corrected_x);
//Ceil_corrected_x = ceil(corrected_x);
// Ratio_corrected_y = corrected_y-floor(corrected_y);
// Ratio_corrected_x = corrected_x-floor(corrected_x);
//birdeyeview_img.at<uchar>(p,q)=(1-(corrected_y-floor(corrected_y)))*(1-(corrected_x-floor(corrected_x)))*((int)undistor_img.at<uchar>(floor(corrected_y),floor(corrected_x)))
// +(1-( corrected_y-floor(corrected_y)))*(corrected_x-floor(corrected_x))*((int)undistor_img.at<uchar>(floor(corrected_y), ceil(corrected_x)))
// +( corrected_y-floor(corrected_y))*(1-(corrected_x-floor(corrected_x)))*((int)undistor_img.at<uchar>(ceil(corrected_y),floor(corrected_x)))
// +( corrected_y-floor(corrected_y))*(corrected_x-floor(corrected_x))*((int)undistor_img.at<uchar>(ceil(corrected_y), ceil(corrected_x)));
//cout<<"if read"<<endl;
//hdata[q]=(1- (corrected_y-floor(corrected_y)))*(1-(corrected_x-floor(corrected_x)))*data[q] +(1-( corrected_y-floor(corrected_y)))*(corrected_x-floor(corrected_x))*data[q] +( corrected_y-floor(corrected_y))*(1-(corrected_x-floor(corrected_x)))*data[q]+( corrected_y-floor(corrected_y))*(corrected_x-floor(corrected_x))*data[q]; //works to some extent
hdata[q]=(1- (corrected_y-floor(corrected_y)))*(1-(corrected_x-floor(corrected_x)))*undistor_img.at<uchar>(floor(corrected_y),floor(corrected_x)) +(1-( corrected_y-floor(corrected_y)))*(corrected_x-floor(corrected_x))*undistor_img.at<uchar>(floor(corrected_y), ceil(corrected_x))+( corrected_y-floor(corrected_y))*(1-(corrected_x-floor(corrected_x)))*undistor_img.at<uchar>(ceil(corrected_y),floor(corrected_x)) +( corrected_y-floor(corrected_y))*(corrected_x-floor(corrected_x))*undistor_img.at<uchar>(ceil(corrected_y), ceil(corrected_x));
a++;}
//birdeyeview_img.at<uchar>(q,p)=(1- (corrected_y-floor(corrected_y)))*(1-(corrected_x-floor(corrected_x)))*(undistor_img.at<uchar>(floor(corrected_y),floor(corrected_x)))
// +(1-( corrected_y-floor(corrected_y)))*(corrected_x-floor(corrected_x))*(undistor_img.at<uchar>(floor(corrected_y), ceil(corrected_x)))
// +(corrected_y-floor(corrected_y))*(1-(corrected_x-floor(corrected_x)))*(undistor_img.at<uchar>(ceil(corrected_y),floor(corrected_x)))
// +(corrected_y-floor(corrected_y))*(corrected_x-floor(corrected_x))*(undistor_img.at<uchar>(ceil(corrected_y), ceil(corrected_x)));
//}
else{
b++;
hdata[q]=data[q];
//birdeyeview_img.at<uchar>(p,q)=255;
//hdata[q]=undistor_img.at<uchar>(round(corrected_y),round(corrected_x));
//hdata[q]=(int)undistor_img.at<uchar>(q,p);//gives sm output
//hdata[q]= undistor_img.ptr<uchar>(p,q);
//hdata[q]= undistor_img.at<uchar>(p,q);//works to sm extent
//cout<<endl<<"pixel value"<<(int) undistor_img.at<uchar>(p,q);
//birdeyeview_img.at<uchar>(q,p)=undistor_img.at<uchar>(round(corrected_y),round(corrected_x));
}
//cout<<(int)birdeyeview_img.at<uchar>(p,q)<<endl;
//birdeyeview_img.at<uchar>(p,q)=0;}
//cout<<"else read"<<endl;}
//undistor_img.at<uchar>(p,q)=(int)undistor_img.at<uchar>(round(corrected_y),round(corrected_x));}
//birdeyeview_img.at<uchar>(p,q)=124;
//cout<<endl<<(int)undistor_img.at<uchar>(p,q);
//cout<<endl<<(int)birdeyeview_img.at<uchar>(p,q);
//cout<<"working here1"<<endl;
}
}
//flip(birdeyeview_img,birdeyeview_img,1);
// perspectiveTransform(undistor_img,birdeyeview_img ,homography_matrix);
//cout<<"bird"<<birdeyeview_img<<endl;
//cout<<"working here2"<<endl;
//cout<<birdeyeview_img<<endl;
cout<<"input channels="<<undistor_img.channels()<<endl;
//cout<<"grayscaled image channels="<<gray_undistor_img.channels()<<endl;
cout<<"output channels="<<birdeyeview_img.channels()<<endl;
cout<<"if was read"<<a <<"times"<<endl;
cout<<"else was read"<<b <<"times"<<endl;
imshow("bird's eye view image",birdeyeview_img);
cout<<"input size="<<undistor_img.rows<<"X"<<undistor_img.cols<<endl;
cout<<"result size="<<birdeyeview_img.rows<<"X"<<birdeyeview_img.cols<<endl;
//cout<<"working here3"<<endl;
*/
cvWaitKey();
}``
有人能帮忙看看该怎么做吗？
答案 0（得分：9）
只需打开名为/etc/ld.so.conf.d/opencv.conf
的文件,然后插入
/usr/local/opencv/
然后输入:sudo ldconfig
答案 1（得分：2）
由于无法找到核心库,因此您似乎没有正确安装OpenCV。您有几种选择:
运行 make install（在 Ubuntu 等系统上可能需要在前面加 sudo）
作为一个快速而肮脏的黑客 - 您可以在运行可执行文件之前修改LD_LIBRARY_PATH环境变量,如:
$ LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/home/myself/opencv/lib ./my_mighty_opencv_app
（这是一条写在同一行里的完整命令）
答案 2（得分：0）
就我而言,我使用的是Anaconda,而opencv库的路径是:
/home/myhome/anaconda3/envs/py27/lib
我的解决方案是:
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/home/etc...
它可能适用于其他情况。只需将LD_LIBRARY_PATH
导出到OpenCV库所在的文件夹即可。