Camera crash: Fatal signal 6 (SIGABRT) with JNI code

Time: 2018-02-24 09:30:55

Tags: java android c++ opencv java-native-interface

I am currently developing an application that overlays an image (a shirt) once the camera detects a body. However, after a body is detected the camera crashes, and my logcat says Fatal signal 6 (SIGABRT), code -6 in tid 23908 (Thread-39833).

Here is the code I have so far.

nerds_thesis_clartips_OpencvClass.h // header file

#include <jni.h>
#include <opencv2/opencv.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <iostream>
#include <stdio.h>

using namespace cv;
using namespace std;

#ifndef _Included_nerds_thesis_clartips_OpencvClass
#define _Included_nerds_thesis_clartips_OpencvClass
#ifdef __cplusplus
extern "C" {
#endif

void detectHuman(Mat& frame);
Mat putShirt(Mat frame, Point center, Size humanSize);

JNIEXPORT void JNICALL Java_nerds_thesis_clartips_OpencvClass_humanDetection
  (JNIEnv *, jclass, jlong);

#ifdef __cplusplus
}
#endif
#endif

nerds_thesis_clartips_OpencvClass.cpp // C++ file

#include "nerds_thesis_clartips_OpencvClass.h"

JNIEXPORT void JNICALL Java_nerds_thesis_clartips_OpencvClass_humanDetection
  (JNIEnv *, jclass, jlong addrRgba){
    Mat& frame = *(Mat*)addrRgba;

    detectHuman(frame);
}

  void detectHuman(Mat& frame){
    // assign xml file to a variable
    String human_cascade_name = "/storage/emulated/0/data/haarcascade_upperbody.xml";
    CascadeClassifier human_cascade;
    // load xml file
    if(!human_cascade.load( human_cascade_name ) ) { printf("--(!)Error loading\n"); return; };
    std::vector<Rect> humans;
    Mat frame_gray;
    //convert input to grayscale
    cvtColor( frame, frame_gray, CV_BGR2GRAY );
    //increase image contrast
    equalizeHist( frame_gray, frame_gray);
    //Detect Human
    human_cascade.detectMultiScale( frame_gray, humans, 1.1, 2, 0 | CV_HAAR_SCALE_IMAGE, Size(100, 100) );

    // Draw the mask over all rectangles
    for (size_t i = 0; i < humans.size(); i++){

        Rect r = humans[i];

        Mat humanROI = frame_gray( humans[i] ); //image of the upper body

        int h_temp = humans[i].height;    // storing original height
        int x = humans[i].x;
        int y = humans[i].y - h_temp*(-0.6); // y is increased by 0.6*h
        int w = humans[i].width;
        int h = h_temp; // height detected

        rectangle(frame, Point(x,y), Point(x + w,y +h),Scalar(255,0,255));

        Point center( humans[i].x + humans[i].width*0.5, humans[i].y + humans[i].height*0.5 );
        frame = putShirt(frame,center,Size( humans[i].width, humans[i].height));
    }
  }

  Mat putShirt(Mat frame, Point center, Size humanSize){
        Mat mask = imread("C:/Users/Requinala/AndroidStudioProjects/CLARTIPS/app/src/main/res/drawable/bluevelvet.png");
        Mat mask1,src1;
        resize(mask,mask1,humanSize);

        // ROI selection
        Rect roi(center.x - humanSize.width/2, center.y - humanSize.width/2, humanSize.width, humanSize.width);
        frame(roi).copyTo(src1);

        // to make the white region transparent
        Mat mask2,m,m1;
        cvtColor(mask1,mask2,CV_BGR2GRAY);
        threshold(mask2,mask2,230,255,CV_THRESH_BINARY_INV);

        vector<Mat> maskChannels(3),result_mask(3);
        split(mask1, maskChannels);
        bitwise_and(maskChannels[0],mask2,result_mask[0]);
        bitwise_and(maskChannels[1],mask2,result_mask[1]);
        bitwise_and(maskChannels[2],mask2,result_mask[2]);
        merge(result_mask,m );         //    imshow("m",m);

        mask2 = 255 - mask2;
        vector<Mat> srcChannels(3);
        split(src1, srcChannels);
        bitwise_and(srcChannels[0],mask2,result_mask[0]);
        bitwise_and(srcChannels[1],mask2,result_mask[1]);
        bitwise_and(srcChannels[2],mask2,result_mask[2]);
        merge(result_mask,m1 );        //    imshow("m1",m1);

        addWeighted(m,1,m1,1,0,m1);    //    imshow("m2",m1);

        m1.copyTo(frame(roi));
        return frame;
  }

OpencvCamera.java // Java class for the camera

public class OpencvCamera extends AppCompatActivity implements CameraBridgeViewBase.CvCameraViewListener2{

    Mat mRgba;
    @Override
    public void onCameraViewStarted(int width, int height) {
        mRgba = new Mat(height, width, CvType.CV_8UC4);
    }

    @Override
    public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
        mRgba = inputFrame.rgba();
        OpencvClass.humanDetection(mRgba.getNativeObjAddr());
        return mRgba;
    }
}

OpencvClass.java // Java class for JNI

public class OpencvClass {
    public native static void humanDetection(long addrRgba);
}

Logcat:

02-24 16:45:36.269 13615-23908/? A/libc: Fatal signal 6 (SIGABRT), code -6 in tid 23908 (Thread-39833)
02-24 16:45:36.372 280-280/? I/DEBUG: *** *** *** *** *** *** *** *** *** *** *** *** *** *** *** ***
02-24 16:45:36.372 280-280/? I/DEBUG: Build fingerprint: 'CMCC/M631Y/M631Y:5.1.1/LMY47V/M631Y_02.24.00RPD_HK.00:user/release-keys'
02-24 16:45:36.372 280-280/? I/DEBUG: Revision: '0'
02-24 16:45:36.372 280-280/? I/DEBUG: ABI: 'arm'
02-24 16:45:36.373 280-280/? I/DEBUG: pid: 13615, tid: 23908, name: Thread-39833  >>> nerds.thesis.clartips <<<
02-24 16:45:36.373 280-280/? I/DEBUG: signal 6 (SIGABRT), code -6 (SI_TKILL), fault addr --------
02-24 16:45:36.396 280-280/? I/DEBUG:     r0 00000000  r1 00005d64  r2 00000006  r3 00000000
02-24 16:45:36.396 280-280/? I/DEBUG:     r4 a4674dd8  r5 00000006  r6 00000000  r7 0000010c
02-24 16:45:36.396 280-280/? I/DEBUG:     r8 00000047  r9 00000001  sl a4674400  fp 0000203e
02-24 16:45:36.397 280-280/? I/DEBUG:     ip 00005d64  sp a46732a0  lr b6dbfc2d  pc b6de5f3c  cpsr 600f0010
02-24 16:45:36.397 280-280/? I/DEBUG: backtrace:
02-24 16:45:36.397 280-280/? I/DEBUG:     #00 pc 00039f3c  /system/lib/libc.so (tgkill+12)
02-24 16:45:36.397 280-280/? I/DEBUG:     #01 pc 00013c29  /system/lib/libc.so (pthread_kill+52)
02-24 16:45:36.397 280-280/? I/DEBUG:     #02 pc 00014847  /system/lib/libc.so (raise+10)
02-24 16:45:36.397 280-280/? I/DEBUG:     #03 pc 00010fd5  /system/lib/libc.so (__libc_android_abort+36)
02-24 16:45:36.397 280-280/? I/DEBUG:     #04 pc 0000f534  /system/lib/libc.so (abort+4)
02-24 16:45:36.397 280-280/? I/DEBUG:     #05 pc 008a2e50  /data/app/nerds.thesis.clartips-2/lib/arm/libopencv_java3.so (_ZN9__gnu_cxx27__verbose_terminate_handlerEv+344)
02-24 16:45:36.398 280-280/? I/DEBUG:     #06 pc 0087914c  /data/app/nerds.thesis.clartips-2/lib/arm/libopencv_java3.so (_ZN10__cxxabiv111__terminateEPFvvE+4)
02-24 16:45:36.398 280-280/? I/DEBUG:     #07 pc 0087918c  /data/app/nerds.thesis.clartips-2/lib/arm/libopencv_java3.so (_ZSt9terminatev+16)
02-24 16:45:36.398 280-280/? I/DEBUG:     #08 pc 00878b68  /data/app/nerds.thesis.clartips-2/lib/arm/libopencv_java3.so (__cxa_throw+168)
02-24 16:45:36.398 280-280/? I/DEBUG:     #09 pc 001c8305  /data/app/nerds.thesis.clartips-2/lib/arm/libopencv_java3.so (_ZN2cv5errorERKNS_9ExceptionE+244)
02-24 16:45:36.398 280-280/? I/DEBUG:     #10 pc 001c8445  /data/app/nerds.thesis.clartips-2/lib/arm/libopencv_java3.so (_ZN2cv5errorEiRKNS_6StringEPKcS4_i+96)
02-24 16:45:36.398 280-280/? I/DEBUG:     #11 pc 0037660f  /data/app/nerds.thesis.clartips-2/lib/arm/libopencv_java3.so (_ZN2cv6resizeERKNS_11_InputArrayERKNS_12_OutputArrayENS_5Size_IiEEddi+406)
02-24 16:45:36.399 280-280/? I/DEBUG:     #12 pc 0000fbf5  /data/app/nerds.thesis.clartips-2/lib/arm/libMyLibs.so (putShirt+172)
02-24 16:45:36.399 280-280/? I/DEBUG:     #13 pc 0000f7ab  /data/app/nerds.thesis.clartips-2/lib/arm/libMyLibs.so (detectHuman+842)
02-24 16:45:36.399 280-280/? I/DEBUG:     #14 pc 0015fc6d  /data/dalvik-cache/arm/data@app@nerds.thesis.clartips-2@base.apk@classes.dex

I am a beginner. I have searched for SIGABRT and it is said to have something to do with memory problems. I cannot find where my code goes wrong.

This is very important. Any help/ideas would be appreciated, as this will be my final project at university.

1 Answer:

Answer 0 (score: 1):

Your putShirt() reads the png from your PC's disk. No wonder it fails. You should:

  • Always check that the results of file reads, etc. are OK (see the sketch right after this list)
  • Read the picture from phone storage or from the app assets
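
For the first point, here is a minimal sketch of what such a check could look like in putShirt() from the .cpp file above. The device path used for the shirt image is only an assumed example (it mirrors the cascade path already used in detectHuman()); use wherever you actually copied the image on the phone.

#include <android/log.h>   // extra include for logging from native code

Mat putShirt(Mat frame, Point center, Size humanSize){
    // Read from device storage; "C:/Users/..." does not exist on the phone.
    Mat mask = imread("/storage/emulated/0/data/bluevelvet.png");   // assumed device path
    if (mask.empty()) {
        // imread() does not throw on failure, it returns an empty Mat.
        // Calling resize() on that empty Mat throws the cv::Exception that,
        // uncaught inside JNI code, terminates the process with SIGABRT.
        __android_log_print(ANDROID_LOG_ERROR, "clartips", "could not load shirt image");
        return frame;   // leave the camera frame untouched instead of crashing
    }
    Mat mask1;
    resize(mask, mask1, humanSize);
    // ... rest of the overlay code unchanged ...
    return frame;
}

__android_log_print() comes from the NDK's liblog, so the native library has to be linked with -llog for this to build.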

You can use Java to extract the image from your resources (or assets) to local storage, or you can use the Android native assets API to read such images directly from the APK.

Assets seem better suited for your case, because drawable resources may be adapted to the screen resolution.

Using the native assets API together with imread() is doable, but tricky.
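
As a rough illustration of that route (not code from the project above): the sketch below assumes the shirt image is shipped as an asset named bluevelvet.png, that the Java declaration of humanDetection() is extended with an android.content.res.AssetManager parameter, and it decodes the bytes held in memory with imdecode() instead of imread().

#include <android/asset_manager.h>
#include <android/asset_manager_jni.h>
#include <vector>

// Hypothetical variant of the native method that also receives the AssetManager:
//   public native static void humanDetection(long addrRgba, AssetManager assets);
JNIEXPORT void JNICALL Java_nerds_thesis_clartips_OpencvClass_humanDetection
  (JNIEnv *env, jclass, jlong addrRgba, jobject assetManager){
    Mat& frame = *(Mat*)addrRgba;

    AAssetManager* mgr = AAssetManager_fromJava(env, assetManager);
    AAsset* asset = AAssetManager_open(mgr, "bluevelvet.png", AASSET_MODE_BUFFER);
    if (asset == NULL) return;                    // asset not found: skip the overlay

    // Copy the compressed PNG bytes out of the APK ...
    std::vector<uchar> buffer(AAsset_getLength(asset));
    AAsset_read(asset, buffer.data(), buffer.size());
    AAsset_close(asset);

    // ... and decode them with OpenCV.
    Mat shirt = imdecode(buffer, IMREAD_COLOR);
    if (shirt.empty()) return;                    // decoding failed: skip the overlay

    // From here the existing pipeline can run; threading `shirt` through to
    // putShirt() (instead of the imread() call there) requires adding a Mat
    // parameter to detectHuman()/putShirt(), which is left out of this sketch.
    detectHuman(frame);
}

The asset functions live in the NDK's libandroid (link with -landroid), and on the Java side the extra argument would be the AssetManager returned by getAssets() on a Context.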