Android Camera Preview

Date: 2016-12-15 08:19:24

Tags: android android-asynctask android-camera

I am developing an Android app that has a camera preview Activity. It uses a timer to call takePicture() every 2 seconds and does some processing on the captured image in the PictureCallback. From the Android documentation, I understand that the PictureCallback is delivered on the same thread that called Camera.open().

It is also recommended to call takePicture() on a separate thread (see "What's the best way to call StartPreview() after an image is captured?").

I want the processing of each capture to happen on a different thread, while the camera preview continues on the main UI thread. What is the best way to achieve this using AsyncTask?
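For example, as I understand the documentation, if the camera were opened from a dedicated HandlerThread, the callbacks would arrive on that thread instead of the UI thread (a sketch with illustrative names, not my actual code):

// Illustrative sketch: Camera callbacks are delivered to the Looper of the
// thread that called Camera.open(), so opening the camera from a
// HandlerThread would move PictureCallback off the UI thread.
HandlerThread cameraThread = new HandlerThread("CameraThread");
cameraThread.start();
new Handler(cameraThread.getLooper()).post(new Runnable() {
    @Override
    public void run() {
        mCamera = Camera.open(); // callbacks now arrive on cameraThread
    }
});

My current code is below: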

public class CameraActivity extends AppCompatActivity {

public static final int MEDIA_TYPE_IMAGE = 1;
public static final int MEDIA_TYPE_VIDEO = 2;
public static String TAG = "Exception";
int viewWidth = 0;
int viewHeight = 0;
private Camera mCamera;
private CameraPreview mPreview;
private ImageView iv;
private RelativeLayout rl;
private Camera.PictureCallback mPicture;
private MRZ_OCR mrz = null;

@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_camera);

    rl = (RelativeLayout) findViewById(R.id.rel_camera);
    iv = (ImageView) findViewById(R.id.black_above);
    viewWidth = iv.getWidth();
    viewHeight = rl.getHeight() - 2 * iv.getHeight();

    // Create an instance of Camera
    mCamera = getCameraInstance();

    mPreview = new CameraPreview(this, mCamera);
    FrameLayout preview = (FrameLayout) findViewById(R.id.camera_preview);
    preview.addView(mPreview);

    new Timer().schedule(new TimerTask() {
        @Override
        public void run() {
            mCamera.startPreview();
            mrz = new MRZ_OCR();
            mrz.execute();
        }
    }, 4000, 4000);

    mPicture = new Camera.PictureCallback() {
        @Override
        public void onPictureTaken(byte[] data, Camera camera) {
            // Crop to get only the MRZ region
            Bitmap bm = BitmapFactory.decodeByteArray(data, 0, data.length);
            bm = Bitmap.createBitmap(bm, 0, pxFromDp(CameraActivity.this, 120), viewWidth, viewHeight);

            // Verify if it has MRZ
            bm = MRZ.getMRZ(bm);

            if (bm != null) {
                ByteArrayOutputStream stream = new ByteArrayOutputStream();
                bm.compress(Bitmap.CompressFormat.PNG, 100, stream);
                byte[] byteArray = stream.toByteArray();
                createImageFile(byteArray);
                Toast.makeText(getApplicationContext(), "Pic Saved", Toast.LENGTH_LONG).show();
            }
        }
    };
}

@Override
protected void onPause() {
    super.onPause();
    releaseCamera();              // release the camera immediately on pause event
}

private void releaseCamera() {
    if (mCamera != null) {
        mCamera.release();        // release the camera for other applications
        mCamera = null;
    }
}

private class MRZ_OCR extends AsyncTask<Void, Void, Void> {
    private byte[] data;

    @Override
    protected Void doInBackground(Void... params) {
        mCamera.takePicture(null, null, mPicture);

        // Sleep for however long; you could store this in a variable and
        // have it updated by a menu item which the user selects.
        try {
            Thread.sleep(3000); // 3 second preview
        } catch (InterruptedException e) {
            e.printStackTrace();
        }

        return null;
    }

    @Override
    protected void onPostExecute(Void result) {
        // This returns the preview back to the live camera feed
        mCamera.startPreview();
    }
}

public static int pxFromDp(final Context context, final float dp) {
    return (int) (dp * context.getResources().getDisplayMetrics().density);
}

/**
 * A safe way to get an instance of the Camera object.
 */
public static Camera getCameraInstance() {
    Camera c = null;
    try {
        c = Camera.open(); // attempt to get a Camera instance
    } catch (Exception e) {
        // Camera is not available (in use or does not exist)
    }
    return c; // returns null if camera is unavailable
}

private static File getOutputMediaFile(int type) {
    File mediaStorageDir = new File(Environment.getExternalStoragePublicDirectory(
            Environment.DIRECTORY_PICTURES), "MyCameraApp");

    // Create the storage directory if it does not exist
    if (!mediaStorageDir.exists()) {
        if (!mediaStorageDir.mkdirs()) {
            Log.d("MyCameraApp", "failed to create directory");
            return null;
        }
    }

    // Create a media file name
    String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
    File mediaFile;
    if (type == MEDIA_TYPE_IMAGE) {
        mediaFile = new File(mediaStorageDir.getPath() + File.separator +
                "IMG_" + timeStamp + ".jpg");
    } else if (type == MEDIA_TYPE_VIDEO) {
        mediaFile = new File(mediaStorageDir.getPath() + File.separator +
                "VID_" + timeStamp + ".mp4");
    } else {
        return null;
    }

    return mediaFile;
}

private static void createImageFile(byte[] byteArray) {
    // create an empty image file
    File pictureFile = getOutputMediaFile(MEDIA_TYPE_IMAGE);
    if (pictureFile == null) {
        Log.d(TAG, "Error creating media file, check storage permissions: ");
        return;
    }

    try {
        FileOutputStream fos = new FileOutputStream(pictureFile);
        fos.write(byteArray);
        fos.close();
    } catch (FileNotFoundException e) {
        Log.d(TAG, "File not found: " + e.getMessage());
    } catch (IOException e) {
        Log.d(TAG, "Error accessing file: " + e.getMessage());
    }
}

} 

2 Answers:

Answer 0 (score: 0)

I'm not familiar with the takePicture() API, but I think what you need to do is put this code on a separate thread:

Bitmap bm = BitmapFactory.decodeByteArray(data, 0, data.length);
bm = Bitmap.createBitmap(bm, 0, pxFromDp(CameraActivity.this, 120), viewWidth, viewHeight);

// Verify if it has MRZ
bm = MRZ.getMRZ(bm);

if (bm != null) {
    ByteArrayOutputStream stream = new ByteArrayOutputStream();
    bm.compress(Bitmap.CompressFormat.PNG, 100, stream);
    byte[] byteArray = stream.toByteArray();
    createImageFile(byteArray);
}

Decoding the bitmap is a time-consuming operation, especially in your app where it runs every 2 seconds; it will block the main thread. As for why it is recommended to call takePicture() on a separate thread, I think it is for the same reason.
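As a rough sketch of that idea (the class name MrzTask and the wiring are my assumptions; it reuses the helpers from your Activity), the decode/crop/save work could run in an AsyncTask so that onPictureTaken stays cheap:

// Sketch with an assumed class name: offload decode/crop/save to a
// background thread; onPictureTaken only hands the JPEG bytes over.
private class MrzTask extends AsyncTask<byte[], Void, Void> {
    @Override
    protected Void doInBackground(byte[]... jpegs) {
        byte[] data = jpegs[0];
        Bitmap bm = BitmapFactory.decodeByteArray(data, 0, data.length);
        bm = Bitmap.createBitmap(bm, 0, pxFromDp(CameraActivity.this, 120), viewWidth, viewHeight);
        bm = MRZ.getMRZ(bm);
        if (bm != null) {
            ByteArrayOutputStream stream = new ByteArrayOutputStream();
            bm.compress(Bitmap.CompressFormat.PNG, 100, stream);
            createImageFile(stream.toByteArray());
        }
        return null;
    }
}

// In onPictureTaken: new MrzTask().execute(data);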

Answer 1 (score: 0)

You have already answered your own question: pass the byte[] data to an AsyncTask:

private class PictureConverter extends AsyncTask<Void, Void, Void> {
    private byte[] data;
    private Camera camera;

    public PictureConverter(byte[] _data, Camera _camera) {
        data = _data;
        camera = _camera;
    }

    @Override
    protected Void doInBackground(Void... params) {
        Camera.Parameters parameters = camera.getParameters();

        // Convert the raw frame to a JPEG, then decode it to a Bitmap
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        YuvImage yuvImage = new YuvImage(data, parameters.getPreviewFormat(), parameters.getPreviewSize().width, parameters.getPreviewSize().height, null);
        yuvImage.compressToJpeg(new Rect(0, 0, parameters.getPreviewSize().width, parameters.getPreviewSize().height), 90, out);
        byte[] imageBytes = out.toByteArray();
        Bitmap bitmap = BitmapFactory.decodeByteArray(imageBytes, 0, imageBytes.length);

        try {
            out.flush();
            out.close();
        } catch (IOException e) {
            e.printStackTrace();
        }

        //TODO save the image

        return null;
    }

    @Override
    protected void onProgressUpdate(Void... values) {
    }

    @Override
    protected void onPostExecute(Void result) {
        //TODO report that the image got saved
    }
}
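One usage note (the wiring below is my assumption, not part of the answer above): since the converter builds a YuvImage from the camera's preview format and preview size, it expects a raw preview frame (NV21 by default) rather than the JPEG bytes delivered by takePicture(), so it would be started from a preview callback, for example:

// Assumed wiring: grab a single preview frame and hand it to the converter.
mCamera.setOneShotPreviewCallback(new Camera.PreviewCallback() {
    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        new PictureConverter(data, camera).execute();
    }
});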