I'm new to Android app development. I want to build something like the Google Lens camera screen: it shows a dot animation while it processes the image in the background. To get started I created a simple camera activity, but I don't know how to show that dot animation on top of the camera preview, process the captured image in the background by calling an API, and open the next fragment only when the result is ready. I've added my Java code below for reference, and after the code I've sketched roughly what I imagine the animation overlay and the background call should look like.
package com.example.camera2api;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;
import android.Manifest;
import android.content.Context;
import android.content.pm.PackageManager;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.util.Size;
import android.util.SparseIntArray;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;
import android.widget.Toast;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
public class MainActivity extends AppCompatActivity {
Button button;
TextureView textureView;
private static final SparseIntArray orientation = new SparseIntArray();
static {
orientation.append(Surface.ROTATION_0, 90);
orientation.append(Surface.ROTATION_90, 0);
orientation.append(Surface.ROTATION_180, 270);
orientation.append(Surface.ROTATION_270, 180);
}
private String cameraId;
CameraDevice cameraDevice;
CameraCaptureSession cameraCaptureSession;
CaptureRequest captureRequest;
CaptureRequest.Builder captureRequestBuilder;
private Size imageDimension;
private ImageReader imageReader;
private File file;
Handler mBackgroundHandler;
HandlerThread mBackgroundThread;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
textureView = findViewById(R.id.texture);
button = findViewById(R.id.button_capture);
textureView.setSurfaceTextureListener(textureListener);
button.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
try {
takePicture();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
});
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if (requestCode == 101) {
if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_DENIED) {
Toast.makeText(getApplicationContext(), "Sorry, the camera permission is necessary", Toast.LENGTH_SHORT).show();
}
}
}
TextureView.SurfaceTextureListener textureListener = new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
try {
openCamera();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
return false;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
}
};
private final CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice camera) {
cameraDevice = camera;
try {
createCameraPreview();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onDisconnected(@NonNull CameraDevice camera) {
cameraDevice.close();
cameraDevice = null;
}
@Override
public void onError(@NonNull CameraDevice camera, int error) {
cameraDevice.close();
cameraDevice = null;
}
};
private void createCameraPreview() throws CameraAccessException {
SurfaceTexture texture = textureView.getSurfaceTexture();
texture.setDefaultBufferSize(imageDimension.getWidth(), imageDimension.getHeight());
Surface surface = new Surface(texture);
captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
captureRequestBuilder.addTarget(surface);
cameraDevice.createCaptureSession(Arrays.asList(surface), new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession session) {
if(cameraDevice== null)
{
return;
}
cameraCaptureSession = session ;
updatePreview();
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession session) {
Toast.makeText(getApplicationContext(), "Camera configuration failed", Toast.LENGTH_SHORT).show();
}
},null);
}
private void updatePreview() {
if(cameraDevice==null)
return ;
captureRequestBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
try {
cameraCaptureSession.setRepeatingRequest(captureRequestBuilder.build(),null,mBackgroundHandler) ;
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
protected void onResume() {
super.onResume();
startBackgroundThread();
if(textureView.isAvailable())
{
try {
openCamera();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
else
{
textureView.setSurfaceTextureListener(textureListener);
}
}
private void startBackgroundThread() {
mBackgroundThread = new HandlerThread("Camera Background");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
@Override
protected void onPause() {
// Release the camera and stop the background thread while the activity is not in the foreground.
if (cameraDevice != null) {
cameraDevice.close();
cameraDevice = null;
}
try {
stopBackgroundThread();
} catch (InterruptedException e) {
e.printStackTrace();
}
super.onPause();
}
protected void stopBackgroundThread() throws InterruptedException {
mBackgroundThread.quitSafely();
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
}
private void openCamera() throws CameraAccessException {
CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
cameraId = manager.getCameraIdList()[0];
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
imageDimension = map.getOutputSizes(SurfaceTexture.class)[0];
// Request both permissions if either one is still missing.
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED
|| ActivityCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(MainActivity.this, new String[]{Manifest.permission.CAMERA, Manifest.permission.WRITE_EXTERNAL_STORAGE}, 101);
return;
}
manager.openCamera(cameraId, stateCallback, null);
}
private void takePicture() throws CameraAccessException {
if (cameraDevice == null)
return;
CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraDevice.getId());
Size[] jpegSizes = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP).getOutputSizes(ImageFormat.JPEG);
int width = 640;
int height = 480;
if (jpegSizes != null && jpegSizes.length > 0) {
width = jpegSizes[0].getWidth();
height = jpegSizes[0].getHeight();
}
ImageReader reader = ImageReader.newInstance(width, height, ImageFormat.JPEG, 1);
List<Surface> outputSurfaces = new ArrayList<>(2);
outputSurfaces.add(reader.getSurface());
outputSurfaces.add(new Surface(textureView.getSurfaceTexture()));
final CaptureRequest.Builder captureBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(reader.getSurface());
captureBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
int rotation = getWindowManager().getDefaultDisplay().getRotation();
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, orientation.get(rotation));
Long tsLong = System.currentTimeMillis() / 1000;
String ts = tsLong.toString();
file = new File(Environment.getExternalStorageDirectory() + "/" + ts + ".jpg");
ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Image image = null;
try {
image = reader.acquireLatestImage();
if (image == null) {
return;
}
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
save(bytes);
} catch (IOException e) {
e.printStackTrace();
} finally {
if (image != null) {
image.close();
}
}
}
};
reader.setOnImageAvailableListener(readerListener,mBackgroundHandler);
final CameraCaptureSession.CaptureCallback captureListener = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
// This callback runs on the background handler, so post UI work back to the main thread.
runOnUiThread(new Runnable() {
@Override
public void run() {
Toast.makeText(getApplicationContext(), "Saved", Toast.LENGTH_SHORT).show();
}
});
try {
createCameraPreview();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
};
cameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession session) {
try {
session.capture(captureBuilder.build(), captureListener, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession session) {
}
},mBackgroundHandler);
}
private void save(byte[] bytes) throws IOException {
OutputStream outputStream = new FileOutputStream(file);
try {
outputStream.write(bytes);
} finally {
outputStream.close();
}
}
}
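For the dot animation, the rough idea I have is to wrap the TextureView in a FrameLayout and stack a small custom view on top of it that I only show while the image is being processed. This is just a sketch of what I imagine, not code from my project; DotOverlayView, the three-dot pulse and all the sizes are placeholder choices of mine:
import android.animation.ValueAnimator;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.util.AttributeSet;
import android.view.View;

// Placeholder overlay: draws three pulsing dots on top of the camera preview.
// Meant to be visible only while the captured image is being processed.
public class DotOverlayView extends View {
    private final Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG);
    private float phase = 0f; // animation progress in [0, 1)
    private ValueAnimator animator;

    public DotOverlayView(Context context, AttributeSet attrs) {
        super(context, attrs);
        paint.setColor(Color.WHITE);
    }

    public void startDots() {
        animator = ValueAnimator.ofFloat(0f, 1f);
        animator.setDuration(900);
        animator.setRepeatCount(ValueAnimator.INFINITE);
        animator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
            @Override
            public void onAnimationUpdate(ValueAnimator animation) {
                phase = (float) animation.getAnimatedValue();
                invalidate(); // redraw with the new phase
            }
        });
        animator.start();
        setVisibility(VISIBLE);
    }

    public void stopDots() {
        if (animator != null) {
            animator.cancel();
        }
        setVisibility(GONE);
    }

    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);
        float cx = getWidth() / 2f;
        float cy = getHeight() / 2f;
        float spacing = 60f;
        // Each dot pulses slightly out of phase with its neighbours.
        for (int i = 0; i < 3; i++) {
            float dotPhase = (phase + i * 0.33f) % 1f;
            float radius = 10f + 10f * (float) Math.sin(dotPhase * Math.PI);
            canvas.drawCircle(cx + (i - 1) * spacing, cy, radius, paint);
        }
    }
}
The layout would then be a FrameLayout with the TextureView first and this view on top of it, initially gone. Is that a reasonable way to do it, or is there a standard component for this kind of scanning animation?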
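And for the background part, this is roughly what I picture happening after save(bytes): start the dots, hand the JPEG bytes to whatever recognition API I end up using on the existing mBackgroundHandler, and only swap in the result fragment once the response is back. Again just a sketch with placeholders; dotOverlayView, analyzeWithApi(), R.id.fragment_container and ResultFragment don't exist in my project yet:
// Rough sketch (would live inside MainActivity): process the captured JPEG off the UI
// thread and show the next fragment only when the result is ready.
// dotOverlayView, analyzeWithApi(), R.id.fragment_container and ResultFragment are placeholders.
private void processImageInBackground(final byte[] jpegBytes) {
    runOnUiThread(new Runnable() {
        @Override
        public void run() {
            dotOverlayView.startDots(); // show the pulsing dots over the preview
        }
    });
    mBackgroundHandler.post(new Runnable() {
        @Override
        public void run() {
            // Placeholder for the real network call (Retrofit, OkHttp, a vision API, ...).
            final String resultJson = analyzeWithApi(jpegBytes);
            runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    dotOverlayView.stopDots();
                    // Only now, with the result ready, open the next screen.
                    getSupportFragmentManager()
                            .beginTransaction()
                            .replace(R.id.fragment_container, ResultFragment.newInstance(resultJson))
                            .addToBackStack(null)
                            .commit();
                }
            });
        }
    });
}
Is it okay to run the API call on mBackgroundHandler like this, or should I use a separate thread or executor so the camera's background thread isn't blocked while the request is in flight?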