How do I change the screen orientation from landscape to portrait?

Date: 2018-03-19 11:17:30

Tags: java android opencv

I applied the answer from the following post, but it did not solve my problem: OpenCV camera orientation issue. I am getting an exception. This is the OpenCV library function (deliverAndDrawFrame) in which the exception is thrown:

protected void deliverAndDrawFrame(CvCameraViewFrame frame) {
    Mat modified;

    if (mListener != null) {
        modified = mListener.onCameraFrame(frame);
    } else {
        modified = frame.rgba();
    }

    boolean bmpValid = true;
    Canvas canvas = null ;
    if (modified != null) {
        try {
            // Utils.matToBitmap() is the call that raises the logged exception;
            // it fails when the Mat's size or type does not match mCacheBitmap.
            Utils.matToBitmap(modified, mCacheBitmap);
            canvas = getHolder().lockCanvas();
        } catch(Exception e) {
            Log.e(TAG, "Mat type: " + modified);
            Log.e(TAG, "Bitmap type: " + mCacheBitmap.getWidth() + "*" + mCacheBitmap.getHeight());
            Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
            bmpValid = false;
        }
    }

    if (bmpValid && mCacheBitmap != null) {

        if (canvas != null) {
            canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
            Log.d(TAG, "mStretch value: " + mScale);

            if (mScale != 0) {
                canvas.drawBitmap(mCacheBitmap, new Rect(0,0,mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
                        new Rect((int)((canvas.getWidth() - mScale*mCacheBitmap.getWidth()) / 2),
                                (int)((canvas.getHeight() - mScale*mCacheBitmap.getHeight()) / 2),
                                (int)((canvas.getWidth() - mScale*mCacheBitmap.getWidth()) / 2 + mScale*mCacheBitmap.getWidth()),
                                (int)((canvas.getHeight() - mScale*mCacheBitmap.getHeight()) / 2 + mScale*mCacheBitmap.getHeight())), null);
            } else {
                canvas.drawBitmap(mCacheBitmap, new Rect(0,0,mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
                        new Rect((canvas.getWidth() - mCacheBitmap.getWidth()) / 2,
                                (canvas.getHeight() - mCacheBitmap.getHeight()) / 2,
                                (canvas.getWidth() - mCacheBitmap.getWidth()) / 2 + mCacheBitmap.getWidth(),
                                (canvas.getHeight() - mCacheBitmap.getHeight()) / 2 + mCacheBitmap.getHeight()), null);
            }

            if (mFpsMeter != null) {
                mFpsMeter.measure();
                mFpsMeter.draw(canvas, 20, 30);
            }
            getHolder().unlockCanvasAndPost(canvas);
        }
    }
}
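
As I understand it, the change suggested in the linked post amounts to drawing the cached frame bitmap rotated by 90 degrees so that a landscape frame fills a portrait canvas. The sketch below is my own paraphrase of that idea, not the exact code from the answer; the helper class name FrameRotation and the 90-degree angle are assumptions (a front camera will likely need 270 degrees plus a mirror flip):

import android.graphics.Bitmap;
import android.graphics.Matrix;

public final class FrameRotation {

    private FrameRotation() {}

    // Returns a copy of the source bitmap rotated by the given angle.
    // For 90 or 270 degrees the result has its width and height swapped,
    // so any Rect built from the original dimensions must be rebuilt from it.
    public static Bitmap rotate(Bitmap source, float degrees) {
        Matrix matrix = new Matrix();
        matrix.postRotate(degrees);
        return Bitmap.createBitmap(source, 0, 0,
                source.getWidth(), source.getHeight(), matrix, true);
    }
}

Inside deliverAndDrawFrame the rotated bitmap would then be passed to canvas.drawBitmap(...) in place of mCacheBitmap, with the destination Rect computed from the rotated bitmap's dimensions rather than the original ones.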

This is my main activity, which uses the OpenCV library and in which this method ends up being called:

package org.opencv.samples.facedetect;
import android.Manifest;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.PackageManager;
import android.graphics.Camera;
import android.media.AudioManager;
import android.net.ConnectivityManager;
import android.os.Build;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.provider.Settings;
import android.support.v4.app.ActivityCompat;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentTransaction;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.TextView;
import android.widget.Toast;

import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import org.opencv.objdetect.CascadeClassifier;
import org.opencv.samples.facedetect.DetectionBasedTracker;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.text.DateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

public class FdActivity extends AppCompatActivity implements CameraBridgeViewBase.CvCameraViewListener2 , SharedPreferences.OnSharedPreferenceChangeListener {

    private static final String TAG = "OCVSample::Activity";
    private static final Scalar FACE_RECT_COLOR = new Scalar(0, 255, 0, 255);
    public static final int JAVA_DETECTOR = 0;
    public static final int NATIVE_DETECTOR = 1;

    //private MenuItem mItemExit;
    private MenuItem mItemSettings ;
    private MenuItem showCounterValues ;

    private TextView faceCounterTv ;
    private Button resetButton ;
    private Button savebtn ;
    private Button quitButton ;

    private Mat mRgba;
    private Mat mGray;
    private File mCascadeFile;
    private CascadeClassifier mJavaDetector;
    private DetectionBasedTracker mNativeDetector;

    private int mDetectorType = JAVA_DETECTOR;
//    private String[] mDetectorName;

    private float mRelativeFaceSize = 0.2f;
    private int mAbsoluteFaceSize = 0;
    private float scaleFactor ;
    private int minNeighbour ;
    private int delayTime ;

    private boolean isFaces_detect ;
    private boolean isFaces_detect_pre ;
    private boolean count_Face_Logic ;
    private float countFace = 0.0f ;
    private long startTime ;

    private AudioManager mAudioManager ;

    private static final int MY_PERMISSIONS_REQUEST_ACCOUNTS = 1;
    private CameraBridgeViewBase mOpenCvCameraView;

    private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            switch (status) {
                case LoaderCallbackInterface.SUCCESS: {
                    Log.i(TAG, "OpenCV loaded successfully");

                    // Load native library after(!) OpenCV initialization
                    System.loadLibrary("detectionBasedTracker");

                    try {
                        // load cascade file from application resources
                        InputStream is = getResources().openRawResource(R.raw.haarcascade_frontalface_default);
                        File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
                        mCascadeFile = new File(cascadeDir, "haarcascade_frontalface_default.xml");
                        FileOutputStream os = new FileOutputStream(mCascadeFile);

                        byte[] buffer = new byte[4096];
                        int bytesRead;
                        while ((bytesRead = is.read(buffer)) != -1) {
                            os.write(buffer, 0, bytesRead);
                        }
                        is.close();
                        os.close();

                        mJavaDetector = new CascadeClassifier(mCascadeFile.getAbsolutePath());
                        if (mJavaDetector.empty()) {
                            Log.e(TAG, "Failed to load cascade classifier");
                            mJavaDetector = null;
                        } else
                            Log.i(TAG, "Loaded cascade classifier from " + mCascadeFile.getAbsolutePath());

                        mNativeDetector = new DetectionBasedTracker(mCascadeFile.getAbsolutePath(), 0);

                        cascadeDir.delete();

                    } catch (IOException e) {
                        e.printStackTrace();
                        Log.e(TAG, "Failed to load cascade. Exception thrown: " + e);
                    }

                    mOpenCvCameraView.enableView();
                }
                break;
                default: {
                    super.onManagerConnected(status);
                }
                break;
            }
        }
    };

    public FdActivity() {

        isFaces_detect = false ;
        isFaces_detect_pre = false ;
        count_Face_Logic = true ;

        startTime = System.currentTimeMillis();

        mAbsoluteFaceSize = 200 ;
        scaleFactor = 1.2f ;
        minNeighbour = 1 ;
        delayTime = 1 ;
        Log.i(TAG, "Instantiated new " + this.getClass());
    }

    /**
     * Called when the activity is first created.
     */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        Log.i(TAG, "called onCreate");
        super.onCreate(savedInstanceState);
        //getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

        setContentView(R.layout.face_detect_surface_view);
        setSupportActionBar((Toolbar) findViewById(R.id.toolbar));

        mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.fd_activity_surface_view);
        faceCounterTv = (TextView) findViewById(R.id.faceCountertv);
        resetButton = (Button) findViewById(R.id.resetbtn);
        savebtn = (Button) findViewById(R.id.savebtn);
        quitButton = (Button) findViewById(R.id.quitbtn);

        mAudioManager = (AudioManager) getSystemService(AUDIO_SERVICE);

        resetButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                countFace = 0 ;
                Toast.makeText(getApplicationContext() , "Reset the Face Counter" , Toast.LENGTH_LONG).show();
            }
        });

        savebtn.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                saveFaceCounter();
                Toast.makeText(getApplicationContext() , "Counter Value Saved" , Toast.LENGTH_SHORT).show();
            }
        });
        quitButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {

                System.exit(0);

            }
        });

        mOpenCvCameraView.setVisibility(CameraBridgeViewBase.VISIBLE);

       // mOpenCvCameraView.setAlpha(0);
        mOpenCvCameraView.setCameraIndex(1);
        mOpenCvCameraView.setCvCameraViewListener(this);

        //if (checkAndRequestPermissions()){

          //  Toast.makeText(getApplicationContext() , "OnCreate" , Toast.LENGTH_LONG).show();
            //setSharedPreferences();
        //}

            // check current state first
//            boolean state = isAirplaneMode();
//            // toggle the state
//            if (state)
//                toggleAirplaneMode(0, state);
//            else
//                toggleAirplaneMode(1, state);
    }

    @Override
    public void onPause() {
        super.onPause();
        if (mOpenCvCameraView != null)
            mOpenCvCameraView.disableView();
    }

    @Override
    public void onResume() {
        super.onResume();
        if (!OpenCVLoader.initDebug()) {
            Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
            OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback);
        } else {
            Log.d(TAG, "OpenCV library found inside package. Using it!");
            mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
        }
    }

    public void onDestroy() {
        super.onDestroy();
        mOpenCvCameraView.disableView();
        PreferenceManager.getDefaultSharedPreferences(this).unregisterOnSharedPreferenceChangeListener(this);
    }

    public void onCameraViewStarted(int width, int height) {
        mGray = new Mat();
        mRgba = new Mat();
    }

    public void onCameraViewStopped() {
        mGray.release();
        mRgba.release();
    }

    public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {

        mRgba = inputFrame.rgba();
//        Mat mRgbaT = mRgba.t();
//        Core.flip(mRgba.t(), mRgbaT, 1);
//        Imgproc.resize(mRgbaT, mRgbaT, mRgba.size());
        mGray = inputFrame.gray();

        //Core.transpose(mGray, mGray);
        //Core.flip(mGray, mGray, 0);

        if (mAbsoluteFaceSize == 0) {
            int height = mGray.rows();
            if (Math.round(height * mRelativeFaceSize) > 0) {
                mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
            }
            mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
        }

        MatOfRect faces = new MatOfRect();

        if (mDetectorType == JAVA_DETECTOR) {
            if (mJavaDetector != null)
                mJavaDetector.detectMultiScale(mGray, faces, scaleFactor, minNeighbour, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                        new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
        } else if (mDetectorType == NATIVE_DETECTOR) {
            if (mNativeDetector != null)
                mNativeDetector.detect(mGray, faces);
        } else {
            Log.e(TAG, "Detection method is not selected!");
        }

        Rect[] facesArray = faces.toArray();

        for (int i = 0; i < facesArray.length; i++) {
            Imgproc.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);
        }

        countDetectedFace(faces);
        runOnUiThread(new Runnable() {
            @Override
            public void run() {

                faceCounterTv.setText(String.valueOf(countFace));
            }
        });

        return mRgba;
    }

    public void countDetectedFace(MatOfRect faces){

//        do{

            // This block is to make sure the it only count face when it appears. e.g. : no detected face --> face --> no detected face (count as 1)
            if (faces.empty()){

                isFaces_detect = isFaces_detect_pre = false ;

            }
            else{
                isFaces_detect = true ;
            }

            // Only count when previous frame = 0 and current frame = 1. Eliminate counting when successive frame have face detected
            if ((isFaces_detect_pre == false) && (isFaces_detect == true) && (count_Face_Logic == true)){

                countFace += 0.25 ; // four times it detect face equal to 1
                startTime = System.currentTimeMillis(); // store new time value so that it do not count every miliseconds
                isFaces_detect_pre = true ;

                Log.d(TAG , String.valueOf(countFace));

            }

            if ((System.currentTimeMillis() - startTime) < delayTime){ // to make sure it doesnt count every frame, buffer of 1 seconds

                count_Face_Logic = false ;

            }
            else{
                count_Face_Logic = true ;
            }


//        }while(!isAppExit);

    }
}
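
The rotation I experimented with (the commented-out Core.transpose / Core.flip lines at the top of onCameraFrame) follows the usual transpose-then-flip pattern. Below is a standalone sketch of that pattern; the helper name rotate90Clockwise and the flip code of 1 (flip around the y-axis) are my assumptions for a 90-degree clockwise rotation:

import org.opencv.core.Core;
import org.opencv.core.Mat;

public final class MatRotation {

    private MatRotation() {}

    // Rotates a frame 90 degrees clockwise: t() transposes the matrix (swapping
    // rows and columns) and Core.flip with flipCode = 1 mirrors it around the
    // y-axis, which together give a clockwise rotation. The result has its
    // width and height swapped relative to the input.
    public static Mat rotate90Clockwise(Mat src) {
        Mat dst = src.t();
        Core.flip(dst, dst, 1);
        return dst;
    }
}

If this rotation is applied to mRgba and mGray, the Mat returned from onCameraFrame no longer matches the size of mCacheBitmap, which I suspect is why Utils.matToBitmap() throws.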


How can I get rid of this exception?

0 Answers:

No answers yet.