Android OpenCV: updating camera frames while an AsyncTask is executing

Date: 2014-07-21 09:09:00

Tags: android opencv android-asynctask camera

I created an onClick() handler that calls an AsyncTask to process several camera frames, but the camera preview freezes until the AsyncTask finishes executing.

Is there any way to avoid this behavior? That is, can the camera frames keep updating while the AsyncTask is executing?

AsyncTask code:

private class ReadCodeTask extends AsyncTask<Mat[], List<Mat>, List<Bitmap>> {

    protected Mat[] onPreExecute(List<Mat> frames){
        Mat[] output = new Mat[4];
        for(int i=0; i<frames.size(); i++){
            mH = new Mat();
            List<Mat> channels = new ArrayList<Mat>(3);
            // convert inputFrame to HSV for segmentation
            Mat hsvFrame = new Mat(frames.get(i).height(), frames.get(i).width(), CvType.CV_8UC3, new Scalar(4));
            Imgproc.cvtColor(frames.get(i), hsvFrame, Imgproc.COLOR_RGB2HSV, 0);

            Core.split(hsvFrame, channels);

            // get H channel of the input image
            mH = channels.get(0);

            if(mH!=null){
                double meanH = 0;
                double size = mH.rows()*mH.cols();
                // average hue in frame
                for(int k=0; k<mH.rows(); k++){
                    for(int j=0; j<mH.cols(); j++){
                        meanH += mH.get(k, j)[0]/size;
                    }
                }
                if(meanH<=0.2*180){ // if WHITE BACKGROUND
                    output[i] = frames.get(i);
                    continue;
                }
            }else{
                Log.e("ERR", "Error converting to HSV");
            }
            gray = new Mat(frames.get(i).height(), frames.get(i).width(), CvType.CV_8UC1, new Scalar(4));
            Imgproc.cvtColor(frames.get(i), gray, Imgproc.COLOR_RGB2GRAY, 0);
            Mat grayBin = new Mat();
            Scalar lowG = new Scalar(0.9*255);
            Scalar highG = new Scalar(1*255);
            Core.inRange(gray, lowG, highG, grayBin);
            Imgproc.findContours(grayBin, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_NONE);
            for(int c=0; c<contours.size(); c++){
                if(couldItBeASquare(contours.get(c))){
                    boundingRect = Imgproc.boundingRect(contours.get(c));
                }
            }
            Mat qrSubIm = new Mat();
            try{
                qrSubIm = frames.get(i).submat(boundingRect);
            }catch(NullPointerException e){
                //          Log.e("ERR", e.getMessage());
            }
            if(qrSubIm != null){
                output[i] = qrSubIm;
            }else{
                output[i] = frames.get(i);
            }
        }
        return output;
    }

    @Override
    protected List<Bitmap> doInBackground(Mat[]... params) {
        List<Bitmap> bmp = new ArrayList<Bitmap>();

        for(int i=0; i<params[0].length; i++){
            Mat mRgb = params[0][i];
            qr = new Mat();
            mQrFinder.qrFindJNI(mRgb.getNativeObjAddr(), qr.getNativeObjAddr());
            Bitmap aux = null;
            if(qr.cols()!=0 || qr.rows()!=0){
                try {
                    aux = Bitmap.createBitmap(qr.cols(), qr.rows(), Bitmap.Config.ARGB_8888);
                    Utils.matToBitmap(qr, aux);
                    bmp.add(aux);
                }catch (CvException e){
                    Log.e("Exception",e.getMessage());
                }
            }
        }
        return bmp;
    }

    @Override
    protected void onPostExecute(List<Bitmap> bmp) {
        List<String> results = new ArrayList<String>(bmp.size());
        for(int i=0; i<bmp.size(); i++){
            try {
                String aux = readQRCode(bmp.get(i));
                results.add(aux);
            } catch (FileNotFoundException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            } catch (NotFoundException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            } catch (IOException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
        }
        for(int i=0; i<results.size(); i++){
            if(!results.get(i).contentEquals("NOTHING")){
                Toast toast = Toast.makeText(mContext, results.get(i), android.widget.Toast.LENGTH_SHORT);
                toast.show();
            }
        }
        return;
    }
}

Note that the onPreExecute() method is not overridden; I have to call it manually:

ReadCodeTask task = new ReadCodeTask();
task.execute(task.onPreExecute(frames));
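
For reference, this is roughly how the click handler drives the task (a sketch only: the button id R.id.readButton and the surrounding Activity are assumptions, not my actual code). Everything before execute() returns runs on the UI thread, which is why the preview cannot receive new frames while the manual onPreExecute(frames) call is still running:

findViewById(R.id.readButton).setOnClickListener(new View.OnClickListener() {
    @Override
    public void onClick(View v) {
        ReadCodeTask task = new ReadCodeTask();
        // UI thread: the whole HSV/contour preprocessing happens here and
        // blocks the camera preview for its entire duration.
        Mat[] preprocessed = task.onPreExecute(frames);
        // Only from here on does the work move to a background thread.
        task.execute(preprocessed);
    }
});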

Finally, the following code is the camera listener I use to retrieve the frames:

camListener = new CvCameraViewListener2() {

        @Override
        public void onCameraViewStopped() {
            // TODO Auto-generated method stub
        }

        @Override
        public void onCameraViewStarted(int width, int height) {
            //              
        }

        @Override
        public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
            rgb = inputFrame.rgba();
            if(frames != null){
                if(frames.size()<4){
                    frames.add(rgb);
                }
                if(frames.size()==4){
                    List<Mat> aux = frames;
                    for (int k = 0; k<aux.size()-1; k++){
                        frames.set(k, aux.get(k+1));
                    }
                    frames.set(aux.size()-1, rgb);
                }
            }
            return rgb;
        }
    };
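
Two details of this listener are easy to miss: `List<Mat> aux = frames;` copies only the reference (so the shift loop reads elements it has already overwritten), and the Mat handed back by inputFrame.rgba() is typically backed by a buffer the camera view reuses for the next frame. A minimal, hedged sketch of a sliding window that sidesteps both, assuming `frames` is a `List<Mat>` field of the enclosing class:

@Override
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    Mat rgb = inputFrame.rgba();
    if (frames != null) {
        if (frames.size() == 4) {
            frames.remove(0);        // drop the oldest frame
        }
        frames.add(rgb.clone());     // keep an independent copy of the buffer
    }
    return rgb;                      // the original Mat is still drawn on screen
}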

1 Answer:

Answer 0 (score: 1)

What I mean is something like this. Be careful with mContext: you should pass in a Context, or, if your AsyncTask is a private class inside a class that extends Activity, you can show the Toast using the enclosing Activity instance ('YourActivityName.this').

private class ReadCodeTask extends AsyncTask<List<Mat>, Void, List<Bitmap>> {       

    @Override
    protected List<Bitmap> doInBackground(List<Mat>... frames) {

         Mat[] output = new Mat[4];
         for(int i=0; i<frames[0].size(); i++){
             mH = new Mat();
             List<Mat> channels = new ArrayList<Mat>(3);
             // convert inputFrame to HSV for segmentation
             Mat hsvFrame = new Mat(frames[0].get(i).height(), frames[0].get(i).width(), CvType.CV_8UC3, new Scalar(4));
             Imgproc.cvtColor(frames[0].get(i), hsvFrame, Imgproc.COLOR_RGB2HSV, 0);

             Core.split(hsvFrame, channels);

             // get H channel of the input image
             mH = channels.get(0);

             if(mH!=null){
                 double meanH = 0;
                 double size = mH.rows()*mH.cols();
                 // average hue in frame
                 for(int k=0; k<mH.rows(); k++){
                     for(int j=0; j<mH.cols(); j++){
                         meanH += mH.get(k, j)[0]/size;
                     }
                 }
                 if(meanH<=0.2*180){ // if WHITE BACKGROUND
                     output[i] = frames[0].get(i);
                     continue;
                 }
             }else{
                 Log.e("ERR", "Error converting to HSV");
             }
             gray = new Mat(frames[0].get(i).height(), frames[0].get(i).width(), CvType.CV_8UC1, new Scalar(4));
             Imgproc.cvtColor(frames[0].get(i), gray, Imgproc.COLOR_RGB2GRAY, 0);
             Mat grayBin = new Mat();
             Scalar lowG = new Scalar(0.9*255);
             Scalar highG = new Scalar(1*255);
             Core.inRange(gray, lowG, highG, grayBin);
             Imgproc.findContours(grayBin, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_NONE);
             for(int c=0; c<contours.size(); c++){
                 if(couldItBeASquare(contours.get(c))){
                     boundingRect = Imgproc.boundingRect(contours.get(c));
                 }
             }
             Mat qrSubIm = new Mat();
             try{
                 qrSubIm = frames[0].get(i).submat(boundingRect);
             }catch(NullPointerException e){
                 //          Log.e("ERR", e.getMessage());
             }
             if(qrSubIm != null){
                 output[i] = qrSubIm;
             }else{
                 output[i] = frames[0].get(i);
             }
         }

        List<Bitmap> bmp = new ArrayList<Bitmap>();

        for(int i=0; i<output.length; i++){
            Mat mRgb = output[i];
            qr = new Mat();
            mQrFinder.qrFindJNI(mRgb.getNativeObjAddr(), qr.getNativeObjAddr());
            Bitmap aux = null;
            if(qr.cols()!=0 || qr.rows()!=0){
                try {
                    aux = Bitmap.createBitmap(qr.cols(), qr.rows(), Bitmap.Config.ARGB_8888);
                    Utils.matToBitmap(qr, aux);
                    bmp.add(aux);
                }catch (CvException e){
                    Log.e("Exception",e.getMessage());
                }
            }
        }
        return bmp;
    }

    @Override
    protected void onPostExecute(List<Bitmap> bmp) {
        List<String> results = new ArrayList<String>(bmp.size());
        for(int i=0; i<bmp.size(); i++){
            try {
                String aux = readQRCode(bmp.get(i));
                results.add(aux);
            } catch (FileNotFoundException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            } catch (NotFoundException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            } catch (IOException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
        }
        for(int i=0; i<results.size(); i++){
            if(!results.get(i).contentEquals("NOTHING")){
                Toast toast = Toast.makeText(mContext, results.get(i), android.widget.Toast.LENGTH_SHORT);
                toast.show();
            }
        }
        return;
    }
}
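
With everything moved into doInBackground(), starting the task is a single call on the UI thread; for the Toast, the enclosing Activity can serve as the context. A short usage sketch (the name CameraActivity is an assumption):

// Usage sketch: CameraActivity is an assumed name for the enclosing Activity
// and `frames` is the List<Mat> filled by onCameraFrame(). Starting the task
// is now the only thing that happens on the UI thread.
new ReadCodeTask().execute(frames);

// Inside onPostExecute(), the Toast can use the enclosing Activity instead of
// a stored mContext:
Toast.makeText(CameraActivity.this, results.get(i), Toast.LENGTH_SHORT).show();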

I am not sure whether I have misspelled anything. Why this way? Normally onPreExecute is declared void, so when you overload it and call it yourself as you did, the work runs on the UI thread and blocks it. In any case, I would give this a try.

Keep in mind that the onPreExecute method runs on the UI thread. See the documentation: http://developer.android.com/reference/android/os/AsyncTask.html#onPreExecute()
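
For comparison, the standard override is void and is meant only for quick UI preparation before doInBackground() starts (a sketch; the progress bar field is an assumption):

@Override
protected void onPreExecute() {
    // Runs on the UI thread just before doInBackground(); keep it light.
    mProgressBar.setVisibility(View.VISIBLE);  // mProgressBar is an assumed field
}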