Displaying and processing Android camera frames

Posted: 2012-12-11 17:19:55

Tags: java android opencv android-camera

I have an application that processes frames from the camera and displays them on a layout. The class that captures and manages the camera frames is the following:

package org.opencv.face;

import java.io.IOException;
import java.util.List;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.ImageFormat;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

public abstract class SampleViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable {
private static final String TAG = "Sample::SurfaceView";

public Camera              mCamera;
private SurfaceHolder       mHolder;
private int                 mFrameWidth;
private int                 mFrameHeight;
private byte[]              mFrame;
private boolean             mThreadRun;
private byte[]              mBuffer;


public SampleViewBase(Context context) {
    super(context);
    mHolder = getHolder();
    mHolder.addCallback(this);
    Log.i(TAG, "Instantiated new " + this.getClass());
}

public int getFrameWidth() {
    return mFrameWidth;
}

public int getFrameHeight() {
    return mFrameHeight;
}

public void setPreview() throws IOException {
    mCamera.setPreviewDisplay(null);
}

public boolean openCamera() {
    Log.i(TAG, "openCamera");
    releaseCamera();
    mCamera = Camera.open();
    if(mCamera == null) {
        Log.e(TAG, "Can't open camera!");
        return false;
    }

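    // The camera fills mBuffer, the callback copies the data into mFrame,
    // wakes the processing thread via notify(), and then returns the buffer
    // to the camera so it can be reused for the next frame.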
    mCamera.setPreviewCallbackWithBuffer(new PreviewCallback() {
        public void onPreviewFrame(byte[] data, Camera camera) {
            synchronized (SampleViewBase.this) {
                System.arraycopy(data, 0, mFrame, 0, data.length);
                SampleViewBase.this.notify(); 
            }
            camera.addCallbackBuffer(mBuffer);
        }
    });
    return true;
}

public void releaseCamera() {
    Log.i(TAG, "releaseCamera");
    mThreadRun = false;
    synchronized (this) {
        if (mCamera != null) {
            mCamera.stopPreview();
            mCamera.setPreviewCallback(null);
            mCamera.release();
            mCamera = null;
        }
    }
    onPreviewStopped();
}

public void setupCamera(int width, int height) {
    Log.i(TAG, "setupCamera");
    synchronized (this) {
        if (mCamera != null) {
            Camera.Parameters params = mCamera.getParameters();
            List<Camera.Size> sizes = params.getSupportedPreviewSizes();
            mFrameWidth = width;
            mFrameHeight = height;

            // selecting optimal camera preview size
            {
                int  minDiff = Integer.MAX_VALUE;
                for (Camera.Size size : sizes) {
                    if (Math.abs(size.height - height) < minDiff) {
                        mFrameWidth = size.width;
                        mFrameHeight = size.height;
                        minDiff = Math.abs(size.height - height);
                    }
                }
            }

            params.setPreviewSize(getFrameWidth(), getFrameHeight());

            List<String> FocusModes = params.getSupportedFocusModes();
            if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
            {
                params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
            }            

            mCamera.setParameters(params);

            /* Now allocate the buffer */
            params = mCamera.getParameters();
            int size = params.getPreviewSize().width * params.getPreviewSize().height;
            size  = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8 * 2;
            mBuffer = new byte[size];
            /* The buffer where the current frame will be copied */
            mFrame = new byte [size];
            mCamera.addCallbackBuffer(mBuffer);

            try {
                setPreview();
            } catch (IOException e) {
                Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
            }

            /* Notify that the preview is about to be started and deliver preview size */
            onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height);

            /* Now we can start a preview */
            mCamera.startPreview();
        }
    }
}

public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
    Log.i(TAG, "surfaceChanged");
    setupCamera(width, height);
}

public void surfaceCreated(SurfaceHolder holder) {
    Log.i(TAG, "surfaceCreated");
    (new Thread(this)).start();
}

public void surfaceDestroyed(SurfaceHolder holder) {
    Log.i(TAG, "surfaceDestroyed");
    releaseCamera();
}


/* The bitmap returned by this method shall be owned by the child and released in onPreviewStopped() */
protected abstract Bitmap processFrame(byte[] data);

/**
 * This method is called when the preview process is being started. It is called before the first frame is delivered and before processFrame is invoked.
 * It receives the width and height of the preview and can be used to prepare the data needed during frame processing.
 * @param previewWidth - the width of the preview frames that will be delivered via processFrame
 * @param previewHeight - the height of the preview frames that will be delivered via processFrame
 */
protected abstract void onPreviewStarted(int previewWidth, int previewHeight);

/**
 * This method is called when the preview is stopped. By the time it is called, the preview has stopped and all frame processing has completed.
 * If the Bitmap object returned via processFrame is cached, this is a good time to recycle it.
 * Any other resources used during the preview can be released here.
 */
protected abstract void onPreviewStopped();

public void run() {
    mThreadRun = true;
    Log.i(TAG, "Starting processing thread");
    while (mThreadRun) {
        Bitmap bmp = null;

        synchronized (this) {
            try {
                this.wait();
                bmp = processFrame(mFrame);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }

        if (bmp != null) {
            Canvas canvas = mHolder.lockCanvas();
            if (canvas != null) {
                canvas.drawBitmap(bmp, (canvas.getWidth() - getFrameWidth()) / 2, (canvas.getHeight() - getFrameHeight()) / 2, null);
                mHolder.unlockCanvasAndPost(canvas);
            }
        }
    }
}

}

The screen does not display anything unless I replace the setPreview() method with the following one:

@TargetApi(11)
public void setPreview() throws IOException {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
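        // 10 is just an arbitrary GL texture name: the texture is never
        // rendered, it only gives the camera a valid preview target.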
        mSf = new SurfaceTexture(10);
        mCamera.setPreviewTexture( mSf );
    }
    else
        mCamera.setPreviewDisplay(null);
}

Now it works, but only on Android versions 3.0 and above; I want it to work on any version.

How can I solve this?

1 answer:

Answer 0 (score: 0):

If you want to receive preview callbacks, you must display the preview: use setPreviewDisplay() with a non-null argument. The API is designed to force you to show this preview on screen. Any workaround that lets you create a dummy SurfaceView that is never rendered may stop working on some devices or after the next upgrade.
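For illustration, here is a minimal sketch of that intended usage, assuming a hypothetical SurfaceView named previewView that is attached to the window and visible. Note that the class in the question locks its own holder's canvas to draw the processed frames, so the raw preview would need its own surface:

// Sketch only: previewView is an assumed, visible SurfaceView.
SurfaceHolder previewHolder = previewView.getHolder();
// Required on pre-3.0 devices (deprecated and ignored on later versions),
// normally set right after getHolder(), before the surface is created:
previewHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
mCamera.setPreviewDisplay(previewHolder);   // non-null holder: preview is shown
mCamera.startPreview();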

Before Honeycomb it was possible to create the preview SurfaceView off-screen (by pushing its position far off to the right), so that the preview was never shown. This bug was fixed later.

Fortunately, with 3.0+ you can use setPreviewTexture(), and the platform cannot force you to actually display the texture.
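Putting the two paths together, a hedged sketch of a version-aware setPreview() could look like the following. It assumes the imports already used by the question's snippet (android.annotation.TargetApi, android.os.Build, android.graphics.SurfaceTexture) plus a hypothetical small but visible SurfaceView named mDummyPreview in the layout for pre-Honeycomb devices, whose holder is already configured with SURFACE_TYPE_PUSH_BUFFERS:

@TargetApi(11)
public void setPreview() throws IOException {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
        // 3.0+: an off-screen SurfaceTexture satisfies the camera;
        // nothing ever has to be drawn on screen.
        mSf = new SurfaceTexture(10);
        mCamera.setPreviewTexture(mSf);
    } else {
        // Pre-3.0: the camera needs a real, displayed surface, so pass the
        // holder of the (assumed) tiny dummy SurfaceView instead of null.
        mCamera.setPreviewDisplay(mDummyPreview.getHolder());
    }
}

As the answer warns, how far the pre-Honeycomb view can be hidden without breaking the preview varies by device, so keeping it genuinely on screen, even if very small, is the safer choice.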