我真的需要一些帮助。这个错误一直困扰着我一个月,我仍然无法解决问题。
所以我试图使用android为我的文章进行面部检测。
public class MainActivity extends AppCompatActivity {
//static {System.loadLibrary("openCVLibrary310");}
ImageView imageView;
Button BtnGallery, BtnCamera, BtnProcess;
int x, y, height, width;
Imgproc imgproc = new Imgproc();
Imgcodecs imgcodecs = new Imgcodecs();
String PathImage;
Bitmap btmp, TempImage;
/**
* ATTENTION: This was auto-generated to implement the App Indexing API.
* See https://g.co/AppIndexing/AndroidStudio for more information.
*/
private GoogleApiClient client;
//System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
@Override
protected void onCreate(Bundle savedInstanceState) {
if (!OpenCVLoader.initDebug()) {
// Handle initialization error
}
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
imageView = (ImageView) findViewById(R.id.ShowImage);
BtnGallery = (Button) findViewById(R.id.btnGallery);
BtnCamera = (Button) findViewById(R.id.btnCamera);
BtnProcess = (Button) findViewById(R.id.btnProcess);
BtnGallery.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Intent pickPhoto = new Intent(Intent.ACTION_PICK, MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
startActivityForResult(pickPhoto, 1);//one can be replaced with any action code
}
});
BtnCamera.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Intent takePicture = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
startActivityForResult(takePicture, 0);//zero can be replaced with any action code
}
});
BtnProcess.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
//Uri selectedImage = imageReturnedIntent.getData();
//Uri myImageUri = imageReturnedIntent.getData;
//Uri selectedImage = getImageUri(this, UriImage);
Detector(PathImage);
imageView.setImageURI(getImageUri(MainActivity.this, btmp));
//Intent SendImage = new Intent();
//SendImage.putExtra("imageUri", imageView.toString());
//SendImage.setData(UriImage); //throw ImageUri to another function
//startActivityForResult(SendImage, 2);
}
});
// ATTENTION: This was auto-generated to implement the App Indexing API.
// See https://g.co/AppIndexing/AndroidStudio for more information.
client = new GoogleApiClient.Builder(this).addApi(AppIndex.API).build();
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent imageReturnedIntent) {
super.onActivityResult(requestCode, resultCode, imageReturnedIntent);
switch (requestCode) {
case 0:
if (resultCode == RESULT_OK) {
Uri selectedImage = imageReturnedIntent.getData();
imageView.setImageURI(selectedImage);
PathImage = selectedImage.getPath();
}
break;
case 1:
if (resultCode == RESULT_OK) {
Uri selectedImage = imageReturnedIntent.getData();
imageView.setImageURI(selectedImage);
PathImage = selectedImage.getPath();
}
break;
}
}
private void Detector(String myImagePath){
InputStream is = getResources().openRawResource(R.raw.haarcascade_frontalface_alt);
File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
File mCascadeFile = new File(cascadeDir,
"haarcascade_frontalface_alt.xml");
//String detect = Environment.getExternalStorageDirectory()+"/data/haarcascade_frontalface_alt.xml";
CascadeClassifier faceDetector = new CascadeClassifier(mCascadeFile.getAbsolutePath());
faceDetector.load(mCascadeFile.getAbsolutePath());
if (faceDetector.empty()) {
Log.w("FAILED", "Failed to load cascade classifier"+ mCascadeFile.getAbsolutePath());
faceDetector = null;
} else
Log.w("SUCCESSFULL", "Loaded cascade classifier from "
+ mCascadeFile.getAbsolutePath());
Log.e("STEP 2", "ADDING .JPEG");
String trueFilePath = myImagePath+".JPEG";
Mat image = imgcodecs.imread(myImagePath, Imgcodecs.CV_LOAD_IMAGE_COLOR);
MatOfRect facedetect = new MatOfRect();
org.opencv.core.Size MinS = new org.opencv.core.Size(0, 30);
org.opencv.core.Size MaxS = new org.opencv.core.Size(30, 0);
faceDetector.detectMultiScale(image, facedetect, 1.1, 3, 0, MinS, MaxS);
//faceDetector.detectMultiScale(image, facedetect, 1.1, 2);
Mat tmp = new Mat(height, width, CvType.CV_8U, new Scalar(4));
for (Rect rect : facedetect.toArray()) {
imgproc.rectangle(image, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height),
new Scalar(0, 255, 0));
}//create rectangle
try {
//Imgproc.cvtColor(seedsImage, tmp, Imgproc.COLOR_RGB2BGRA);
Imgproc.cvtColor(image, tmp, Imgproc.COLOR_GRAY2RGBA, 4);
btmp = Bitmap.createBitmap(tmp.cols(), tmp.rows(), Bitmap.Config.ARGB_8888);
Utils.matToBitmap(tmp, btmp);
//change Mat to Bitmap
} catch (CvException e) {
Log.d("Exception", e.getMessage());
}
}
public Uri getImageUri(Context inContext, Bitmap inImage) {
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
inImage.compress(Bitmap.CompressFormat.JPEG, 100, bytes);
String path = MediaStore.Images.Media.insertImage(inContext.getContentResolver(), inImage, "Title", null);
return Uri.parse(path);
}
所以我的应用程序会询问用户的图像,它可以是来自图库的图像,也可以是来自摄像头的图像,之后应用程序会将Uri转换为String Image以进行OpenCV Haarcascade处理,然后再将其显示在主菜单。我认为我的形象Uri to String也有点不对,但我不知道如何使OpenCV能够处理旁边的图像,当我试图运行程序时,我得到了2个错误。第一个是
CvException [org.opencv.core.CvException:cv :: Exception:/Volumes/Linux/builds/master_pack-android/opencv/modules/objdetect/src/cascadedetect.cpp:1639:错误:( - 215)!函数中的empty()void cv :: CascadeClassifier :: detectMultiScale(cv :: InputArray,std :: vector>&,double,int,int,cv :: Size,
第二个是这个。我试图制作这行代码后得到了这个代码
org.opencv.core.Size MinS = new org.opencv.core.Size(0, 30);
org.opencv.core.Size MaxS = new org.opencv.core.Size(30, 0);
faceDetector.detectMultiScale(image, facedetect, 1.1, 3, 0, MinS, MaxS);
java.lang.NullPointerException:尝试调用虚方法' void org.opencv.objdetect.CascadeClassifier.detectMultiScale(org.opencv.core.Mat,org.opencv.core.MatOfRect,double,int,int ,org.opencv.core.Size,org.opencv.core.Size)'在一个空对象上
我认为错误是因为我的程序无法读取haarcascade_frontalface_alt.xml。我将XML文件放在android资源上,在android外部和内部设备上,并尝试使用函数Environment.getExternalStorageDirectory()和Environment.getDataDirectory()调用它们,但仍然无法检测它们。
请帮助我非常绝望。我的讲师建议使用网络服务,但我没有告诉我应该学会做什么。我的意思是我不知道我应该先学会做什么。我真的很感激这里的任何输入。谢谢