I am working on a camera project and have implemented tap-to-focus, so the camera focuses on the part of the screen I press. When I touch the screen it works at first: the camera focuses on that area, but only for a few milliseconds, and then it loses the focus and keeps refocusing back and forth. Only after I take a picture does the camera focus again.
How can I make the camera focus on the spot I tap and then keep that focus?
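From what I have read, the AF trigger is supposed to be sent only once with capture(), while the repeating preview request stays trigger-free, otherwise every preview frame restarts the scan. Below is a minimal sketch of that flow as I understand it (lockFocusAt and sensorFocusRect are just placeholder names I made up; captureRequestBuilder, cameraCaptureSession and mBackgroundHandler are the fields from my activity further down, and the rectangle is assumed to already be converted into the sensor's active-array coordinate system):

// Sketch only: point autofocus at a region once and keep it locked afterwards.
private void lockFocusAt(Rect sensorFocusRect) throws CameraAccessException {
    MeteringRectangle region =
            new MeteringRectangle(sensorFocusRect, MeteringRectangle.METERING_WEIGHT_MAX);

    // The repeating preview carries the AF mode and region but no trigger,
    // so it does not restart the focus scan on every frame.
    captureRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
    captureRequestBuilder.set(CaptureRequest.CONTROL_AF_REGIONS, new MeteringRectangle[]{region});
    captureRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_IDLE);
    cameraCaptureSession.setRepeatingRequest(captureRequestBuilder.build(), null, mBackgroundHandler);

    // Fire the AF trigger exactly once with capture(); in CONTROL_AF_MODE_AUTO the
    // focus should then stay locked until the next trigger or cancel.
    captureRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START);
    cameraCaptureSession.capture(captureRequestBuilder.build(), null, mBackgroundHandler);

    // Reset the trigger on the builder so later rebuilds do not start a new scan.
    captureRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_IDLE);
}

Is that the right idea, and if so, where does my code below go wrong in keeping the lock?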
Here is the whole activity:
public class PictureActivity extends AppCompatActivity {
private static final int statePreview = 0;
private static final int waitLock = 1;
private int mState;
private ImageButton button;
private TextureView textureView;
private static final SparseIntArray orientation = new SparseIntArray();
private CameraDevice cameraDevice;
private String cameraID;
private Size dimension;
private CameraCaptureSession cameraCaptureSession;
private CaptureRequest captureRequest;
private CaptureRequest.Builder captureRequestBuilder;
private ImageReader imageReader;
private Handler mBackgroundHandler;
private HandlerThread handlerThread;
private File file;
private Bitmap bitmap;
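// Maps the display rotation to the JPEG orientation applied when taking a picture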
static {
orientation.append(Surface.ROTATION_0,90);
orientation.append(Surface.ROTATION_90,0);
orientation.append(Surface.ROTATION_180,270);
orientation.append(Surface.ROTATION_270,180);
}
@SuppressLint({"ClickableViewAccessibility", "ResourceType"})
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_picture);
textureView = (TextureView) findViewById(R.id.texture);
button = (ImageButton) findViewById(R.id.takePic);
textureView.setSurfaceTextureListener(textureListener);
button.setOnClickListener(new View.OnClickListener() {
@RequiresApi(api = Build.VERSION_CODES.O)
@Override
public void onClick(View view) {
try {
takePic();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
});
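// Tap-to-focus: build an AF metering region around the touch point and re-trigger autofocus on the preview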
textureView.setOnTouchListener(new View.OnTouchListener() {
@Override
public boolean onTouch(View view, MotionEvent motionEvent) {
int pointerId = motionEvent.getPointerId(0);
int pointerIndex = motionEvent.findPointerIndex(pointerId);
float x = motionEvent.getX(pointerIndex);
float y = motionEvent.getY(pointerIndex);
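// 200x200 px focus box centered on the touch point, in TextureView (view) coordinates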
Rect touchRect = new Rect((int)(x-100),(int)(y-100),(int)(x+100),(int)(y+100));
if(cameraID == null) return false;
//other stuff
MeteringRectangle focusArea = new MeteringRectangle(touchRect,MeteringRectangle.METERING_WEIGHT_DONT_CARE);
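// Cancel any AF scan in progress, point CONTROL_AF_REGIONS at the touch box, then restart autofocus on the repeating preview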
captureRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
captureRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_OFF);
//captureRequestBuilder.set(CaptureRequest.CONTROL_AE_REGIONS, new MeteringRectangle[]{focusArea});
captureRequestBuilder.set(CaptureRequest.CONTROL_AF_REGIONS, new MeteringRectangle[]{focusArea});
captureRequestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
captureRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
captureRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,CameraMetadata.CONTROL_AF_TRIGGER_START);
// captureRequestBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_START);
try {
cameraCaptureSession.setRepeatingRequest(captureRequestBuilder.build(), null, mBackgroundHandler);
/*mManualFocusEngaged = true;*/
} catch (CameraAccessException e) {
// error handling
}catch (IllegalStateException e){
Toast.makeText(getApplicationContext(),"Not focused yet",Toast.LENGTH_SHORT).show();
}
return false;
}
});
}
TextureView.SurfaceTextureListener textureListener = new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
try {
openCamera();
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
return false;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
}
};
@Override
public void onRequestPermissionsResult(int requestCode,@NonNull String[] permissions,@NonNull int[] grantResults){
if(requestCode == 101){
if(grantResults[0]==PackageManager.PERMISSION_DENIED){
Toast.makeText(getApplicationContext(),"Sorry, the camera permission is necessary",Toast.LENGTH_LONG).show();
// finish();
}
}
}
CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice camera) {
cameraDevice = camera;
try {
createCameraPreview();
}catch (CameraAccessException e){
e.printStackTrace();
}
}
@Override
public void onDisconnected(@NonNull CameraDevice camera) {
cameraDevice.close();
}
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
@Override
public void onError(@NonNull CameraDevice camera, int error) {
cameraDevice.close();
cameraDevice = null;
}
};
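// Builds the repeating preview request that renders into the TextureView and opens the capture session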
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
private void createCameraPreview() throws CameraAccessException {
//MeteringRectangle focusAreaTouch = new MeteringRectangle(Math.max(x- 150,0),
// Math.max())
SurfaceTexture texture = textureView.getSurfaceTexture();
texture.setDefaultBufferSize(dimension.getWidth(),dimension.getHeight());
Surface surface = new Surface(texture);
captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
captureRequestBuilder.addTarget(surface);
// captureRequestBuilder.set(CaptureRequest.CONTROL_AF_REGIONS, new MeteringRectangle[]{focusAreaTouch}); // focusAreaTouch only exists in the commented-out lines above
captureRequestBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
captureRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,CaptureRequest.CONTROL_AF_MODE_AUTO);
captureRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,CaptureRequest.CONTROL_AF_TRIGGER_START);
captureRequestBuilder.setTag("FOCUS TAG");
cameraDevice.createCaptureSession(Arrays.asList(surface), new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
if (cameraDevice == null) {
return;
}
cameraCaptureSession = session;
try {
updatePreview();
}catch (CameraAccessException e){
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession session) {
Toast.makeText(getApplicationContext(), "Configuration failed", Toast.LENGTH_LONG).show();
}
},null);
}
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
private void updatePreview() throws CameraAccessException {
if(cameraDevice==null){
return;
}
//captureRequestBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
cameraCaptureSession.setRepeatingRequest(captureRequestBuilder.build(),null,mBackgroundHandler);
}
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
private void openCamera() throws CameraAccessException {
CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
cameraID = manager.getCameraIdList()[0];
CameraCharacteristics cc = manager.getCameraCharacteristics(cameraID);
StreamConfigurationMap map = cc.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
dimension = map.getOutputSizes(SurfaceTexture.class)[0];
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED
&& ActivityCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE)!= PackageManager.PERMISSION_GRANTED)
{
ActivityCompat.requestPermissions(PictureActivity.this,new String[]{Manifest.permission.CAMERA, Manifest.permission.WRITE_EXTERNAL_STORAGE},101);
return;
}
manager.openCamera(cameraID, stateCallback, null);
}
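// Captures a still JPEG into an ImageReader, rotates the decoded bitmap and hands it to check()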
@RequiresApi(api = Build.VERSION_CODES.O)
private void takePic() throws CameraAccessException {
//lockFocus();
//CameraStateHandler csh = new CameraStateHandler(getActivity(),this);
if(cameraDevice==null){
return;
}
CameraManager manager = (CameraManager)getSystemService(Context.CAMERA_SERVICE);
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraDevice.getId());
Size[] jpegSize = null;
jpegSize = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP).getOutputSizes(ImageFormat.JPEG);
int width = 640;
int height = 480;
if(jpegSize!=null && jpegSize.length>0){
width = jpegSize[0].getWidth();
height = jpegSize[0].getHeight();
}
final ImageReader reader = ImageReader.newInstance(width,height,ImageFormat.JPEG,1);
List<Surface> outputSurface = new ArrayList<>(2);
outputSurface.add(reader.getSurface());
outputSurface.add(new Surface(textureView.getSurfaceTexture()));
final CaptureRequest.Builder captureBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(reader.getSurface());
//captureBuilder.set(CaptureRequest.CONTROL_MODE,CameraMetadata.CONTROL_MODE_AUTO);
int rotation = getWindowManager().getDefaultDisplay().getRotation();
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION,orientation.get(rotation));
Long tsLong = System.currentTimeMillis()/1000;
file = new File(Environment.getExternalStorageDirectory()+"/"+tsLong.toString()+".jpg");
ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader imageReader) {
Image image = null;
image = reader.acquireLatestImage();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
Matrix matrix = new Matrix();
matrix.postRotate(90);
bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length, null);
bitmap = Bitmap.createBitmap(bitmap,0,0,bitmap.getWidth(),bitmap.getHeight(),matrix,true);
check();
if (image != null) {
image.close();
}
}
};
reader.setOnImageAvailableListener(readerListener,mBackgroundHandler);
final CameraCaptureSession.CaptureCallback captureListener = new CameraCaptureSession.CaptureCallback() {
private void process(CaptureResult result){
switch (mState){
case statePreview:
break;
case waitLock:
Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
if (afState == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED) {
//unLockFocus();
//Toast.makeText(getApplicationContext(), "Focus Locked", Toast.LENGTH_LONG).show();
}
break;
}
}
@Override
public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, long timestamp, long frameNumber) {
super.onCaptureStarted(session, request, timestamp, frameNumber);
}
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
process(result);
try {
createCameraPreview();
}catch (CameraAccessException e){
e.printStackTrace();
}
}
@Override
public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) {
super.onCaptureFailed(session, request, failure);
Toast.makeText(getApplicationContext(), "Focus not locked", Toast.LENGTH_LONG).show();
}
};
cameraDevice.createCaptureSession(outputSurface, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession cameraCaptureSession) {
try {
cameraCaptureSession.capture(captureBuilder.build(),captureListener,mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession cameraCaptureSession) {
}
},mBackgroundHandler);
}
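// Decodes the Base64-encoded JPEG and writes it to the timestamped file created in takePic()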
private void save(String meta) throws IOException{
// Bitmap bitmap = BitmapFactory.decodeFile(file.getAbsolutePath());
// imageView.setImageBitmap(bitmap);
byte[] bytes = Base64.decode(meta.getBytes(),Base64.DEFAULT);
Toast.makeText(getApplicationContext(),"Saved",Toast.LENGTH_LONG).show();
OutputStream outputStream = null;
outputStream = new FileOutputStream(file);
outputStream.write(bytes);
outputStream.close();
addImagetoGallery(file.getAbsolutePath(), PictureActivity.this);
}
@RequiresApi(api = Build.VERSION_CODES.O)
private static String toBase64(Bitmap img){
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
img.compress(Bitmap.CompressFormat.JPEG,40,byteArrayOutputStream);
byte[] bytes = byteArrayOutputStream.toByteArray();
return Base64.encodeToString(bytes,Base64.DEFAULT);
}
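// Shows the captured bitmap in a dialog: OK saves it, Again just dismisses the dialog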
public void check(){
ImageView imageView = new ImageView(this);
imageView.setImageBitmap(bitmap);
AlertDialog.Builder adb = new AlertDialog.Builder(this);
adb.setPositiveButton("OK", new DialogInterface.OnClickListener() {
@RequiresApi(api = Build.VERSION_CODES.O)
@Override
public void onClick(DialogInterface dialogInterface, int i) {
System.out.println("OK button pressed");
try {
save(toBase64(bitmap));
} catch (IOException e) {
e.printStackTrace();
}
}
}).setView(imageView);
adb.setNegativeButton("Again", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialogInterface, int i) {
}
});
adb.create().show();
}
@Override
protected void onResume() {
super.onResume();
startBackgroundThread();
if (textureView.isAvailable()){
try {
openCamera();
}catch (CameraAccessException e){
e.printStackTrace();
}
}else{
textureView.setSurfaceTextureListener(textureListener);
}
}
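// Background thread whose handler receives the camera and ImageReader callbacks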
private void startBackgroundThread(){
handlerThread = new HandlerThread("Camera Background");
handlerThread.start();
mBackgroundHandler = new Handler(handlerThread.getLooper());
}
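// Registers the saved file with the MediaStore so it shows up in the gallery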
public static void addImagetoGallery(final String filePath,final Context context){
ContentValues values = new ContentValues();
values.put(MediaStore.Images.Media.DATE_TAKEN, System.currentTimeMillis());
values.put(MediaStore.Images.Media.MIME_TYPE,"image/jpeg");
values.put(MediaStore.MediaColumns.DATA,filePath);
context.getContentResolver().insert(MediaStore.Images.Media.EXTERNAL_CONTENT_URI,values);
}
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2)
@Override
protected void onPause(){
try {
stopBackgroundThread();
}catch (InterruptedException e){
e.printStackTrace();
}
super.onPause();
}
@SuppressLint("NewApi")
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2)
protected void stopBackgroundThread() throws InterruptedException {
handlerThread.quitSafely();
handlerThread.join();
handlerThread = null;
mBackgroundHandler = null;
}
}