我正在尝试创建一个 C++ JNI 文件,把该教程中的大部分流程拆成两个函数调用来执行,以便在 Google Glass 应用中使用 SURF 特征检测。我遇到的这个错误/问题与 Google Glass 本身无关,而应该与 Eclipse 构建环境有关。我在 Eclipse 中建项目还比较生疏,更不用说 Eclipse 中的 Android 项目了,所以我很确定自己遗漏了某个非常明显的地方,需要有人指出来。我的主要问题是:JNI 最终的共享库文件(.so)没有被构建出来,但中间产物和构建输出都没有显示任何明显的错误。
我知道应该可以构建出一个能在 Android 的 Java 代码中使用的 JNI 库文件,因为 JNI 在 Android 中本来就被广泛使用;我也尝试按照别人提供的两篇教程(原帖中的链接一和链接二)来操作。这些教程解释了如何构建 OpenCV 的 nonfree 模块(即包含 SURF 和 SIFT 的库)、如何把它们放进 Android 项目,以及如何在 Android 项目的 JNI 代码中使用这些库。
我的 JNI 代码是把第一篇教程(OneDrive 示例)中的代码拆分成两个函数:一个用来完成目标对象关键点的初始化设置(只需执行一次),另一个则在 Activity 的 onCameraFrame() 回调中对每一帧调用。
#include <jni.h>
#include <stdio.h>
#include <opencv2/core/core.hpp>
#include <opencv2/features2d/features2d.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/calib3d/calib3d.hpp>
#include <opencv2/nonfree/features2d.hpp>
#include <opencv2/nonfree/nonfree.hpp>
using namespace std;
using namespace cv;
extern "C" {
// SURF Vars
Mat mat_object;
int minHessian = 500;
SurfFeatureDetector detector( minHessian );
vector<KeyPoint> kp_object;
SurfDescriptorExtractor extractor;
Mat des_object;
FlannBasedMatcher matcher;
vector<Point2f> obj_corners(4);
JNIEXPORT void JNICALL Java_com_example_android_glass_surfobjrec_Init_Surf(JNIEnv*,jobject, jlong addr_mat_object);
JNIEXPORT void JNICALL Java_com_example_android_glass_surfobjrec_Proc_Surf(JNIEnv*,jobject, jlong addr_mat_scene);
// One-time setup: cache the reference object image and precompute its SURF
// keypoints, descriptors, and bounding corners for later per-frame matching.
// addr_mat_object: native address of the object's cv::Mat, passed from Java.
JNIEXPORT void JNICALL Java_com_example_android_glass_surfobjrec_Init_Surf(JNIEnv*, jobject, jlong addr_mat_object) {
    // Shallow-copy the Java-side Mat header into our module-level cache.
    mat_object = *(Mat*) addr_mat_object;

    // SURF keypoints, then their feature descriptors.
    detector.detect(mat_object, kp_object);
    extractor.compute(mat_object, kp_object, des_object);

    // Bounding corners of the object image, clockwise from top-left; these
    // get projected through the homography when localizing the object.
    obj_corners[0] = Point2f(0, 0);
    obj_corners[1] = Point2f((float) mat_object.cols, 0);
    obj_corners[2] = Point2f((float) mat_object.cols, (float) mat_object.rows);
    obj_corners[3] = Point2f(0, (float) mat_object.rows);
}
// Per-frame JNI entry point: detects SURF features in the camera frame and
// keeps the matches that pass Lowe's ratio test against the cached reference
// object (set up by Init_Surf).
// addr_mat_scene: native address of the color camera-frame Mat from Java.
JNIEXPORT void JNICALL Java_com_example_android_glass_surfobjrec_Proc_Surf(JNIEnv*, jobject, jlong addr_mat_scene) {
    // Do all the stuff in the while loop in the oneDrive example
    // EXCEPT, draw the keypoints using DrawKeypoints, don't use DrawMatches
    Mat& mat_outputImg = *(Mat*) addr_mat_scene;
    Mat mat_scene;
    Mat des_scene;
    vector<KeyPoint> kp_scene;
    vector<vector<DMatch> > matches;  // knnMatch results: up to 2 neighbours per query
    vector<DMatch> good_matches;      // matches surviving the ratio test
    // Remaining pipeline state (homography etc.) for the rest of the
    // oneDrive example loop -- declared but not yet used below.
    vector<Point2f> obj;
    vector<Point2f> scene;
    vector<Point2f> scene_corners(4);
    double max_dist = 0;
    double min_dist = 100.0;
    Mat H;

    // SURF operates on a single-channel image.
    cvtColor(mat_outputImg, mat_scene, CV_BGR2GRAY);

    // Detect and calculate keypoints on scene image
    detector.detect(mat_scene, kp_scene);
    extractor.compute(mat_scene, kp_scene, des_scene);
    matcher.knnMatch(des_object, des_scene, matches, 2);

    // Lowe's ratio test.
    // BUG FIX: the original compared against matches[k][7].distance -- index 7
    // is out of bounds for a knnMatch with k=2 (at most 2 neighbours); it must
    // be matches[k][1]. Also check the neighbour count BEFORE indexing, since
    // knnMatch may return fewer than 2 neighbours for a query; the original
    // indexed first and checked size afterwards (the segfault the old comment
    // warned about).
    for (int k = 0; k < min(des_scene.rows - 1, (int) matches.size()); k++) {
        if (matches[k].size() == 2 &&
            matches[k][0].distance < 0.6 * matches[k][1].distance) {
            good_matches.push_back(matches[k][0]);
        }
    }
}
} // extern "C"
我得到的唯一错误是在 matches[k][0].distance 处提示 "Field 'distance' could not be resolved"(无法解析字段 distance)。matches[k].size() 和 good_matches.push_back(matches[k][0]) 也报同类错误,说 matches 是无效的参数(invalid arguments)。但同样的代码(OneDrive 链接里的版本)我在 Eclipse 的普通 C++ 项目中编译运行都没有问题,而且这些错误并不会出现在构建消息里。
对于ndk-build,我已粘贴下面的Application.mk和Android.mk文件。 我的Application.mk文件如下:
# Application-wide ndk-build settings.
#APP_ABI := armeabi
APP_ABI += armeabi-v7a # you can do either armeabi or armeabi-v7a, steps are the same.
# gnustl_static: full GNU STL, required by the OpenCV C++ headers.
APP_STL := gnustl_static
# OpenCV needs RTTI and exceptions enabled.
APP_CPPFLAGS := -frtti -fexceptions
APP_PLATFORM := android-15
我的Android.mk文件如下:
LOCAL_PATH:= $(call my-dir)

# --- Prebuilt: OpenCV nonfree module (SURF/SIFT) -------------------------
include $(CLEAR_VARS)
LOCAL_MODULE := nonfree_prebuilt
LOCAL_SRC_FILES := libnonfree.so
include $(PREBUILT_SHARED_LIBRARY)

# --- Prebuilt: main OpenCV Java bindings ---------------------------------
include $(CLEAR_VARS)
LOCAL_MODULE := opencv_java_prebuilt
LOCAL_SRC_FILES := libopencv_java.so
include $(PREBUILT_SHARED_LIBRARY)

# --- The JNI wrapper library itself --------------------------------------
# BUG FIX: the original file was missing this CLEAR_VARS, so LOCAL_* variables
# from the opencv_java_prebuilt module above leaked into this module -- a
# common reason the final shared library silently fails to build.
include $(CLEAR_VARS)
LOCAL_C_INCLUDES:= /Users/samanthahuston/Development/OpenCV-2.4.8-android-sdk/sdk/native/jni/include
LOCAL_MODULE := nonfree_jni
LOCAL_CFLAGS := -Werror -O3 -ffast-math
LOCAL_LDLIBS += -llog
LOCAL_SHARED_LIBRARIES := nonfree_prebuilt opencv_java_prebuilt
LOCAL_SRC_FILES := nonfree_jni.cpp
include $(BUILD_SHARED_LIBRARY)
非常感谢任何帮助。