In my Android code I have a camera SurfaceView class. When I show the preview from this camera class it is a live camera scene, but how do I add a method that creates a Bitmap of the current camera frame, so that I can fetch the captured bitmap from another class?
Any guidance would be appreciated~
Here is the code of my camera class:
import android.content.Context;
import android.hardware.Camera;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

import java.io.IOException;

public class CameraSurfaceView extends SurfaceView implements SurfaceHolder.Callback {

    Camera mCamera;
    boolean isPreviewRunning = false;

    CameraSurfaceView(Context context) {
        super(context);
        SurfaceHolder mHolder = getHolder();
        mHolder.addCallback(this);
        mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    }

    public void surfaceCreated(SurfaceHolder holder) {
        synchronized (this) {
            mCamera = Camera.open();
            try {
                mCamera.setPreviewDisplay(holder);
            } catch (IOException e) {
                Log.e("Camera", "mCamera.setPreviewDisplay(holder);");
            }
            mCamera.setDisplayOrientation(90);
            mCamera.startPreview();
        }
    }

    public void surfaceDestroyed(SurfaceHolder holder) {
        synchronized (this) {
            try {
                if (mCamera != null) {
                    mCamera.stopPreview();
                    isPreviewRunning = false;
                    mCamera.release();
                }
            } catch (Exception e) {
                Log.e("Camera", e.getMessage());
            }
        }
    }

    public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
    }
}
Answer 0 (score: 1)
Note that the code below captures the preview frame at the resolution it is displayed with, not a full camera capture. That means the resulting bitmap will be roughly the preview/screen size (e.g. 1080x768), not the multi-megapixel high-resolution image a camera app produces. If you want a full-resolution image like the camera app, use the takePicture method instead.
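If the full-resolution route is what you need, here is a minimal sketch of the takePicture approach. The StillCaptureHelper class and OnBitmapReady interface are illustrative names, not part of the original answer; they only wrap the standard android.hardware.Camera API.

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.hardware.Camera;

// Hypothetical helper (not from the original answer): captures one
// full-resolution still from a Camera that is already previewing.
public class StillCaptureHelper {

    public interface OnBitmapReady {
        void onBitmapReady(Bitmap bitmap);
    }

    public static void capture(final Camera camera, final OnBitmapReady listener) {
        camera.takePicture(null, null, new Camera.PictureCallback() {
            @Override
            public void onPictureTaken(byte[] data, Camera cam) {
                // The jpeg callback delivers the full sensor-resolution JPEG bytes.
                Bitmap picture = BitmapFactory.decodeByteArray(data, 0, data.length);
                // takePicture stops the preview, so restart it for the live view.
                cam.startPreview();
                listener.onBitmapReady(picture);
            }
        });
    }
}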
To capture the current preview frame shown in the SurfaceView, you can use a class like the one below:
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.hardware.Camera;
import android.util.AttributeSet;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.List;

public class CameraSurfaceView extends SurfaceView implements SurfaceHolder.Callback {

    private static final String TAG = "CameraSurfaceView";

    private SurfaceHolder mSurfaceHolder;
    private Camera mCamera = null;
    private Bitmap mBitmap;
    private Context mContext;
    private Camera.Parameters mParameters;
    private byte[] byteArray;
    private List<Camera.Size> mSupportedPreviewSizes;
    private Camera.Size mPreviewSize;

    public CameraSurfaceView(Context context) {
        this(context, null);
    }

    public CameraSurfaceView(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public CameraSurfaceView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        mContext = context;
        try {
            mSurfaceHolder = getHolder();
            mSurfaceHolder.addCallback(this);
            mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    @Override
    public void surfaceCreated(final SurfaceHolder surfaceHolder) {
        if (mCamera == null) {
            try {
                mCamera = Camera.open();
            } catch (RuntimeException ignored) {
            }
        }

        try {
            if (mCamera != null) {
                mCamera.setPreviewDisplay(mSurfaceHolder);
            }
        } catch (Exception e) {
            if (mCamera != null)
                mCamera.release();
            mCamera = null;
        }

        if (mCamera == null) {
            return;
        } else {
            // Keep a reference to the most recent preview frame (raw YUV bytes).
            mCamera.setPreviewCallback(new Camera.PreviewCallback() {
                @Override
                public void onPreviewFrame(byte[] bytes, Camera camera) {
                    if (mParameters == null) {
                        return;
                    }
                    byteArray = bytes;
                }
            });
        }

        setWillNotDraw(false);
    }

    @Override
    public void surfaceChanged(SurfaceHolder surfaceHolder, int format, int width, int height) {
        try {
            mParameters = mCamera.getParameters();

            // Pick the largest supported preview size.
            List<Camera.Size> cameraSize = mParameters.getSupportedPreviewSizes();
            mPreviewSize = cameraSize.get(0);
            for (Camera.Size s : cameraSize) {
                if ((s.width * s.height) > (mPreviewSize.width * mPreviewSize.height)) {
                    mPreviewSize = s;
                }
            }
            mParameters.setPreviewSize(mPreviewSize.width, mPreviewSize.height);

            mCamera.setParameters(mParameters);
            mCamera.startPreview();
        } catch (Exception e) {
            if (mCamera != null) {
                mCamera.release();
                mCamera = null;
            }
        }
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
        if (mCamera != null) {
            mCamera.setPreviewCallback(null);
            mCamera.stopPreview();
            mCamera.release();
            mCamera = null;
        }
    }

    // Converts the latest preview frame (YUV) to a JPEG, then decodes it to a Bitmap.
    public Bitmap getBitmap() {
        try {
            if (mParameters == null)
                return null;

            if (mPreviewSize == null)
                return null;

            int format = mParameters.getPreviewFormat();
            YuvImage yuvImage = new YuvImage(byteArray, format, mPreviewSize.width, mPreviewSize.height, null);
            ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
            Rect rect = new Rect(0, 0, mPreviewSize.width, mPreviewSize.height);
            yuvImage.compressToJpeg(rect, 75, byteArrayOutputStream);

            BitmapFactory.Options options = new BitmapFactory.Options();
            options.inPurgeable = true;
            options.inInputShareable = true;
            mBitmap = BitmapFactory.decodeByteArray(byteArrayOutputStream.toByteArray(), 0, byteArrayOutputStream.size(), options);

            byteArrayOutputStream.flush();
            byteArrayOutputStream.close();
        } catch (IOException ioe) {
            ioe.printStackTrace();
        }

        return mBitmap;
    }

    public Camera getCamera() {
        return mCamera;
    }
}
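To fetch the captured bitmap from another class, call getBitmap() on the view instance. Below is a minimal usage sketch under the assumption that the view is created in code and shown as the activity's content; the CaptureActivity class name and captureCurrentFrame method are illustrative, and the CAMERA permission must already be declared in the manifest.

import android.app.Activity;
import android.graphics.Bitmap;
import android.os.Bundle;

// Hypothetical activity (not from the original answer) showing how another
// class can pull the current frame out of CameraSurfaceView.
public class CaptureActivity extends Activity {

    private CameraSurfaceView mCameraSurfaceView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mCameraSurfaceView = new CameraSurfaceView(this);
        setContentView(mCameraSurfaceView);
    }

    // Call this (e.g. from a button's onClick) once the preview is running.
    private void captureCurrentFrame() {
        Bitmap frame = mCameraSurfaceView.getBitmap();
        if (frame != null) {
            // Use the bitmap: display it in an ImageView, save it to disk, etc.
        }
    }
}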