" JavaCameraView"相机定位问题

Date: 2017-04-23 20:14:42

Tags: android opencv

I am new to Android development. I am creating a simple application with a single Activity. In this Activity I try to get frames from the camera and process them in real time, but I have a camera orientation problem: the received image is rotated by 90 degrees. There are many solutions for this problem, but I could not find one for "JavaCameraView". So please help me figure out how to fix the orientation problem specifically for "JavaCameraView".

Here is my code:

public class MainActivity extends AppCompatActivity implements CameraBridgeViewBase.CvCameraViewListener2 {

private static final String TAG = "MainActivity";
private static final int CAMERA_PERMISSION_REQUEST_CODE = 1; // missing from the snippet; any request code works
JavaCameraView javaCameraView;
private Mat frame; // missing from the snippet; used in onCameraViewStarted/onCameraFrame

private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
    @Override
    public void onManagerConnected(int status) {
        switch (status) {
            case LoaderCallbackInterface.SUCCESS: {
                javaCameraView.enableView();
            }
            break;
            default: {
                super.onManagerConnected(status);
            }
            break;
        }
    }
};

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    //if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA)!= PackageManager.PERMISSION_GRANTED) {
    ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.CAMERA}, CAMERA_PERMISSION_REQUEST_CODE);
    //}

    javaCameraView = (JavaCameraView) findViewById(R.id.java_camera_view);
    javaCameraView.setVisibility(View.VISIBLE);
    javaCameraView.setCvCameraViewListener(this);
}

@Override
protected void onPause() {
    super.onPause();
    if (javaCameraView != null)
        javaCameraView.disableView();
}

@Override
protected void onDestroy() {
    super.onDestroy();
    if (javaCameraView != null)
        javaCameraView.disableView();
}

@Override
protected void onResume() {

    super.onResume();
    if (OpenCVLoader.initDebug()) {
        Log.i(TAG, "OpenCV loaded successfully.");
        mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
    } else {
        Log.i(TAG, "OpenCV not loaded.");
        OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_1_0, this, mLoaderCallback);
    }

}

@Override
public void onCameraViewStarted(int width, int height) {
    frame = new Mat(height, width, CvType.CV_8UC4);


}

@Override
public void onCameraViewStopped() {
    frame.release();
}

@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    frame = inputFrame.rgba();
    //frame=processFrame();
    //Imgcodecs.imwrite("/storage/emulated/0/aaaaa+.jpg", frame);
    return frame;

}
}
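
For reference, the 90-degree rotation itself can be applied to the Mat returned by inputFrame.rgba() with a transpose plus flip. The sketch below is an illustration only (not from the original post), and note that CameraBridgeViewBase still draws the returned Mat assuming the original landscape dimensions, so this alone may not fix the on-screen preview:

    @Override
    public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
        Mat rgba = inputFrame.rgba();
        Mat rotated = new Mat();
        Core.transpose(rgba, rotated);   // swap rows and columns
        Core.flip(rotated, rotated, 1);  // flip around the y-axis -> 90 degrees clockwise
        return rotated;
    }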

2 Answers:

Answer 0 (score: 3)

I have solved this problem: use the class below instead of JavaCameraView:

public class PortraitCameraView extends CameraBridgeViewBase implements Camera.PreviewCallback {

private static final int MAGIC_TEXTURE_ID = 10;
private static final String TAG = "JavaCameraView";

private byte mBuffer[];
private Mat[] mFrameChain;
private int mChainIdx = 0;
private Thread mThread;
private boolean mStopThread;

public Camera mCamera;
protected JavaCameraFrame[] mCameraFrame;
private SurfaceTexture mSurfaceTexture;
private int mCameraId;
Handler handler;
boolean callBuffer = false;
Camera.Size bestSize = null;
Camera.Size pictureSize = null;
private LayoutMode mLayoutMode;
private int mCenterPosX = -1;
private int mCenterPosY;

public static enum LayoutMode {
    FitToParent, // Scale to the size that no side is larger than the parent
    NoBlank // Scale to the size that no side is smaller than the parent
}

public static class JavaCameraSizeAccessor implements ListItemAccessor {

    public int getWidth(Object obj) {
        Camera.Size size = (Camera.Size) obj;
        return size.width;
    }

    public int getHeight(Object obj) {
        Camera.Size size = (Camera.Size) obj;
        return size.height;
    }
}

public PortraitCameraView(Context context, int cameraId) {
    super(context, cameraId);
}

public PortraitCameraView(Context context, AttributeSet attrs) {
    super(context, attrs);
}

protected boolean initializeCamera(int width, int height) {

    handler = new Handler();
    Log.d(TAG, "Initialize java camera");
    boolean result = true;
    synchronized (this) {
        mCamera = null;

        boolean connected = false;
        int numberOfCameras = android.hardware.Camera.getNumberOfCameras();
        android.hardware.Camera.CameraInfo cameraInfo = new android.hardware.Camera.CameraInfo();
        for (int i = 0; i < numberOfCameras; i++) {
            android.hardware.Camera.getCameraInfo(i, cameraInfo);
            if (cameraInfo.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK) {
                try {
                    mCamera = Camera.open(i);
                    mCameraId = i;
                    connected = true;
                } catch (RuntimeException e) {
                    Log.e(TAG, "Camera #" + i + "failed to open: " + e.getMessage());
                }
                if (connected) break;
            }
        }

        if (mCamera == null) return false;

    /* Now set camera parameters */
        try {
            Camera.Parameters params = mCamera.getParameters();
            List<Camera.Size> sizes = params.getSupportedPreviewSizes();
            List<Camera.Size> Picturesizes = params.getSupportedPictureSizes();
            pictureSize = Picturesizes.get(0);

            List<Camera.Size> sizeList = sizes;
            bestSize = sizeList.get(0);
            Log.d(TAG, "getSupportedPreviewSizes()  " + bestSize.width + "  " + bestSize.height);
            Log.d(TAG, "Picturesizes()  " + pictureSize.width + "  " + pictureSize.height);

       //                bestSize.width = GlobalArea.display_width;
     ////                bestSize.height = GlobalArea.display_height;
            for (int i = 1; i < sizeList.size(); i++) {

                if ((sizeList.get(i).width * sizeList.get(i).height) > (bestSize.width * bestSize.height)) {
                    Log.d(TAG, "getSupportedPreviewSizes()   " + sizeList.get(i).width + "  " + sizeList.get(i).height);
                    bestSize = sizeList.get(i);
                }
            }


            if (sizes != null) {
            /* Select the size that fits surface considering maximum size allowed */
                Size frameSize = calculateCameraFrameSize(sizes, new JavaCameraSizeAccessor(), height, width); //use turn around values here to get the correct prev size for portrait mode

                params.setPreviewFormat(ImageFormat.NV21);
                Log.e(TAG, "Set preview size to " + Integer.valueOf((int) bestSize.width) + " x " + Integer.valueOf((int) bestSize.height));
                Log.e(TAG, "Set preview size to " + width + " x " + height);
                params.setPreviewSize((int) bestSize.width, (int) bestSize.height);
                params.setPictureSize((int) pictureSize.width, (int) pictureSize.height);

                if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH)
                    params.setRecordingHint(true);

                List<String> FocusModes = params.getSupportedFocusModes();
                if (FocusModes != null && FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
                    params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
                }
                boolean hasFlash = SevenBitsDemo.getInstance().getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA_FLASH);
                if (hasFlash) {
     //            mOpenCvCameraView.flashOn();
                    params.setFlashMode(Camera.Parameters.FLASH_MODE_TORCH);

                }
                List<int[]> ints = params.getSupportedPreviewFpsRange();
                for (int i = 0; i < ints.size(); i++) {
                    Log.e("privew size", String.valueOf(ints.get(i).length));
                }
      //                    params.setPreviewFpsRange(10000,10000);
                mCamera.setParameters(params);

     //                   boolean mSurfaceConfiguring = adjustSurfaceLayoutSize(bestSize, true, width, height);

                params = mCamera.getParameters();
                GlobalArea.preview_size = params.getPreviewSize();
                mFrameWidth = params.getPreviewSize().height; //the frame width and height of the super class are used to generate the cached bitmap and they need to be the size of the resulting frame
                mFrameHeight = params.getPreviewSize().width;

                int realWidth = mFrameHeight; //the real width and height are the width and height of the frame received in onPreviewFrame ...
                int realHeight = mFrameWidth;
                if ((getLayoutParams().width == LinearLayout.LayoutParams.MATCH_PARENT) && (getLayoutParams().height == LinearLayout.LayoutParams.MATCH_PARENT))
                    mScale = Math.min(((float) height) / mFrameHeight, ((float) width) / mFrameWidth);
                else
                    mScale = 0;

                if (mFpsMeter != null) {
                    mFpsMeter.setResolution((int) pictureSize.width, (int) pictureSize.height);
                }

                int size = mFrameWidth * mFrameHeight;
                size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
                mBuffer = new byte[size];

                mCamera.addCallbackBuffer(mBuffer);
                mCamera.setPreviewCallbackWithBuffer(this);

                mFrameChain = new Mat[2];
                mFrameChain[0] = new Mat(realHeight + (realHeight / 2), realWidth, CvType.CV_8UC1); //the frame chain is still in landscape
                mFrameChain[1] = new Mat(realHeight + (realHeight / 2), realWidth, CvType.CV_8UC1);

                AllocateCache();

                mCameraFrame = new JavaCameraFrame[2];
                mCameraFrame[0] = new JavaCameraFrame(mFrameChain[0], mFrameWidth, mFrameHeight); //the camera frame is in portrait
                mCameraFrame[1] = new JavaCameraFrame(mFrameChain[1], mFrameWidth, mFrameHeight);

                if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
                    mSurfaceTexture = new SurfaceTexture(MAGIC_TEXTURE_ID);
                    mCamera.setPreviewTexture(mSurfaceTexture);
                } else
                    mCamera.setPreviewDisplay(null);

            /* Finally we are ready to start the preview */
                Log.d(TAG, "startPreview");
                mCamera.startPreview();
            } else
                result = false;
        } catch (Exception e) {
            result = false;
            e.printStackTrace();
        }
    }

    return result;
}

protected void releaseCamera() {
    synchronized (this) {
        if (mCamera != null) {
            mCamera.stopPreview();
            mCamera.setPreviewCallback(null);

            mCamera.release();
        }
        mCamera = null;
        if (mFrameChain != null) {
            mFrameChain[0].release();
            mFrameChain[1].release();
        }
        if (mCameraFrame != null) {
            mCameraFrame[0].release();
            mCameraFrame[1].release();
        }
    }
}

@Override
protected boolean connectCamera(int width, int height) {

/* 1. We need to instantiate camera
 * 2. We need to start thread which will be getting frames
 */
/* First step - initialize camera connection */
    Log.d(TAG, "Connecting to camera");
    if (!initializeCamera(width, height))
        return false;

/* now we can start update thread */
    Log.d(TAG, "Starting processing thread");
    mStopThread = false;
    mThread = new Thread(new CameraWorker());
    mThread.start();

    return true;
}

protected void disconnectCamera() {
/* 1. We need to stop thread which updating the frames
 * 2. Stop camera and release it
 */
    Log.d(TAG, "Disconnecting from camera");
    try {
        mStopThread = true;
        Log.d(TAG, "Notify thread");
        synchronized (this) {
            this.notify();
        }
        Log.d(TAG, "Wating for thread");
        if (mThread != null)
            mThread.join();
    } catch (InterruptedException e) {
        e.printStackTrace();
    } finally {
        mThread = null;
    }

/* Now release camera */
    releaseCamera();
}

public void onPreviewFrame(byte[] frame, Camera arg1) {
    synchronized (this) {
        mFrameChain[1 - mChainIdx].put(0, 0, frame);
        this.notify();
    }
    if (mCamera != null)
        mCamera.addCallbackBuffer(mBuffer);
}

private class JavaCameraFrame implements CvCameraViewFrame {
    private Mat mYuvFrameData;
    private Mat mRgba;
    private int mWidth;
    private int mHeight;
    private Mat mRotated;

    public Mat gray() {
        if (mRotated != null) mRotated.release();
        mRotated = mYuvFrameData.submat(0, mWidth, 0, mHeight);
        // submat with reversed width and height because it is done on the landscape frame
        mRotated = mRotated.t();
        Core.flip(mRotated, mRotated, 1);
        return mRotated;
    }

    public Mat rgba() {
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2BGR_NV12, 4);
        if (mRotated != null) mRotated.release();
        mRotated = mRgba.t();
        Core.flip(mRotated, mRotated, 1);
        return mRotated;
    }

    public JavaCameraFrame(Mat Yuv420sp, int width, int height) {
        super();
        mWidth = width;
        mHeight = height;
        mYuvFrameData = Yuv420sp;
        mRgba = new Mat();
    }

    public void release() {
        mRgba.release();
        if (mRotated != null) mRotated.release();
    }
}

private class CameraWorker implements Runnable {

    public void run() {
        do {
            synchronized (PortraitCameraView.this) {
                try {
                    PortraitCameraView.this.wait();
                } catch (InterruptedException e) {
                    Log.e(TAG, "CameraWorker interrupted", e);
                }
            }

            if (!mStopThread) {
                if (!mFrameChain[mChainIdx].empty())
                    deliverAndDrawFrame(mCameraFrame[mChainIdx]);
                mChainIdx = 1 - mChainIdx;
            }
        } while (!mStopThread);
        Log.d(TAG, "Finish processing thread");
    }


}
 }

So now use PortraitCameraView in your xml and java files, because in this class I have converted the JavaCameraView to portrait mode.
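
For illustration, the Activity then only needs the field type and the cast changed. This sketch assumes the layout XML tag was also replaced with the fully qualified name of the PortraitCameraView class (whatever package you put it in):

    PortraitCameraView javaCameraView;

    // in onCreate(), after setContentView():
    javaCameraView = (PortraitCameraView) findViewById(R.id.java_camera_view);
    javaCameraView.setVisibility(View.VISIBLE);
    javaCameraView.setCvCameraViewListener(this);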

Answer 1 (score: 0)

You can use the setMaxFrameSize() function.

javaCameraView.setMaxFrameSize(480, 640);

480 is the width and 640 is the height. Now the JavaCameraView is in portrait orientation.
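
As a minimal sketch of the placement (the limit has to be set before the camera is connected, i.e. before enableView() runs from the loader callback, so onCreate() is the usual place):

    javaCameraView = (JavaCameraView) findViewById(R.id.java_camera_view);
    javaCameraView.setMaxFrameSize(480, 640); // limit the preview size before the camera is connected
    javaCameraView.setVisibility(View.VISIBLE);
    javaCameraView.setCvCameraViewListener(this);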