我正在尝试测试相机,但是在低光下我的图像太暗(R <5 G <5 B <5)。请帮我弄清楚我哪里出错了
初始化摄像头捕获回调
// Capture callback for the one-shot still capture. Both the success and the
// failure path release the camera via closeCamera(), which in turn (through
// onClosed) advances to the next camera in the queue.
private final CameraCaptureSession.CaptureCallback captureListener = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request,
@NonNull TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
// NOTE(review): onCaptureCompleted only means capture metadata is ready;
// the JPEG may not have reached the ImageReader listener yet, so
// lastEntry() can still be null here.
if (mPicturesTaken.lastEntry() != null) {
Log.i(TAG, "Done taking picture from camera " + mCameraDevice.getId());
}
closeCamera();
}
@Override
public void onCaptureFailed(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request,
@NonNull CaptureFailure failure) {
super.onCaptureFailed(session, request, failure);
// Log why the capture failed before tearing the camera down.
Log.i(TAG, "Error in Camera " + mCameraDevice.getId() + " capture session \n was image captured " + failure.wasImageCaptured() +
"Reason for failure : " + failure.getReason() );
closeCamera();
}
};
初始化侦听器以获取图像可用性
// Fired when the ImageReader has a JPEG frame ready. Copies the bytes out
// and records them against the camera currently being captured from.
private final ImageReader.OnImageAvailableListener onImageAvailableListener = new ImageReader.OnImageAvailableListener() {
    @Override
    public void onImageAvailable(ImageReader imReader) {
        // BUGFIX: acquireLatestImage() may return null (e.g. the image was
        // already acquired or dropped); the original would NPE here.
        final Image image = imReader.acquireLatestImage();
        if (image == null) {
            Log.w(TAG, "onImageAvailable fired but no image was available");
            return;
        }
        try {
            // BUGFIX: use remaining() rather than capacity() — the valid JPEG
            // data in the plane buffer can be smaller than its capacity, and
            // capacity() would copy trailing garbage bytes.
            final ByteBuffer buffer = image.getPlanes()[0].getBuffer();
            final byte[] bytes = new byte[buffer.remaining()];
            buffer.get(bytes);
            // Update the list of pictures taken.
            updatePictureTaken(bytes);
        } finally {
            // Always release the Image; with maxImages = 1 a leaked Image
            // permanently stalls the reader.
            image.close();
        }
    }
};
CameraManager.openCamera()的回调
// State callback passed to CameraManager.openCamera(). Drives the
// open -> delayed capture -> close -> next-camera state machine.
private final CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
    @Override
    public void onOpened(@NonNull CameraDevice camera) {
        mCameraClosed = false;
        Log.d(TAG, "camera " + camera.getId() + " opened");
        mCameraDevice = camera;
        // Delay the handler by 500 ms to take clear pictures.
        // NOTE(review): a fixed delay does NOT let auto-exposure converge —
        // no frames are streaming while we wait, so in low light the single
        // still capture comes out near-black. The robust fix is a repeating
        // preview request plus a CONTROL_AE_PRECAPTURE_TRIGGER sequence.
        final int delay = 500;
        Log.i(TAG, "Taking picture from camera " + camera.getId());
        // BUGFIX: a bare `new Handler()` throws on threads without a Looper
        // and is deprecated on API 30+; bind explicitly to the main looper.
        new Handler(android.os.Looper.getMainLooper()).postDelayed(new Runnable() {
            @Override
            public void run() {
                try {
                    takePicture();
                } catch (CameraAccessException e) {
                    // BUGFIX: log with context instead of printStackTrace().
                    Log.e(TAG, "exception while taking picture from camera " + camera.getId(), e);
                }
            }
        }, delay);
    }
    @Override
    public void onDisconnected(@NonNull CameraDevice camera) {
        Log.d(TAG, " camera " + camera.getId() + " disconnected");
        if (mCameraDevice != null && !mCameraClosed) {
            mCameraClosed = true;
            mCameraDevice.close();
        }
    }
    @Override
    public void onClosed(@NonNull CameraDevice camera) {
        mCameraClosed = true;
        Log.d(TAG, "camera " + camera.getId() + " closed");
        // Once the current camera has been closed, either start capturing
        // from the next camera or finish and analyze all results.
        if (!mCameraIds.isEmpty()) {
            takeAnotherPicture();
        } else {
            onDoneCapturingAllPhotos(mPicturesTaken);
        }
    }
    @Override
    public void onError(@NonNull CameraDevice camera, int error) {
        Log.e(TAG, "camera in error, int code " + error);
        // close() triggers onClosed(), which sets mCameraClosed and advances
        // to the next camera.
        if (mCameraDevice != null && !mCameraClosed) {
            mCameraDevice.close();
        }
    }
};
这会捕获图像并存储字节。这是从cordova插件调用的。
/**
 * Entry point (called from the cordova plugin): starts capturing an image
 * from the first camera in the queue. Seeds the result maps pessimistically;
 * the verdict is flipped to true only if a non-black pixel is found later.
 */
public void captureImagesFromCamera() {
    sResultMap.put(mCameraOrientation, false);
    sResultDescriptionMap.put(mCameraOrientation, "Capturing Image");
    Log.d(TAG, "Camera orientation = " + mCameraOrientation + " and Initial result is = "
            + sResultMap.get(mCameraOrientation));
    final boolean cameraPermissionGranted =
            ActivityCompat.checkSelfPermission(mContext, Manifest.permission.CAMERA)
                    == PackageManager.PERMISSION_GRANTED;
    if (!cameraPermissionGranted) {
        Log.d(TAG, "Camera permissions are missing");
        sResultDescriptionMap.put(mCameraOrientation, "Camera permissions are missing");
        return;
    }
    Log.d(TAG, "Permission for camera has been granted");
    mPicturesTaken = new TreeMap<>();
    if (mCameraIds.isEmpty()) {
        // No camera detected!
        Log.e(TAG, "Camera list received from plugin is empty");
        onDoneCapturingAllPhotos(mPicturesTaken);
        sResultDescriptionMap.put(mCameraOrientation, "No Camera Detected!");
        return;
    }
    Log.d(TAG, "camera ids list is not empty and size = " + mCameraIds.size());
    mCurrentCameraId = mCameraIds.poll();
    Log.d(TAG, "Current camera id = " + mCurrentCameraId);
    openCamera();
}
/**
* Manager.openCamera() is called from here after checking for permissions.
*/
/**
 * Manager.openCamera() is called from here after checking for permissions.
 * On permission failure or a CameraAccessException the result maps are
 * marked as failed for this orientation.
 */
private void openCamera() {
    // This check needs to be there as it is compiler imposed.
    final int permissionState =
            ActivityCompat.checkSelfPermission(mContext, Manifest.permission.CAMERA);
    if (permissionState != PackageManager.PERMISSION_GRANTED) {
        Log.d(TAG, "Camera permissions are not granted");
        sResultMap.put(mCameraOrientation, false);
        sResultDescriptionMap.put(mCameraOrientation, "Camera permissions are not granted");
        return;
    }
    try {
        Log.d(TAG, "Camera permissions are granted. Assigning state call back now");
        // Only openCamera() can throw CameraAccessException, so the try
        // block is scoped to it alone.
        mManager.openCamera(mCurrentCameraId, stateCallback, null);
    } catch (CameraAccessException e) {
        Log.e(TAG, " exception occurred while opening camera " + mCurrentCameraId, e);
        sResultMap.put(mCameraOrientation, false);
        sResultDescriptionMap.put(mCameraOrientation, "exception occurred while opening camera");
    }
}
/**
* Method to read the image from the camera in jpeg format and assign the image listeners.
*
* @throws CameraAccessException
*/
/**
 * Reads a JPEG still from the current camera and wires up the image listener.
 *
 * Low-light note: a single TEMPLATE_STILL_CAPTURE request gives the
 * auto-exposure algorithm no frames to converge on, which is the most likely
 * reason dark scenes come out near-black (R/G/B &lt; 5). The robust fix is a
 * repeating preview request followed by CONTROL_AE_PRECAPTURE_TRIGGER before
 * the still capture.
 *
 * @throws CameraAccessException if characteristics or the session cannot be accessed
 */
private void takePicture() throws CameraAccessException {
    if (null == mCameraDevice) {
        Log.d(TAG, "Camera device is null");
        return;
    }
    final CameraCharacteristics characteristics = mManager.getCameraCharacteristics(mCameraDevice.getId());
    Size[] jpegSizes = null;
    StreamConfigurationMap streamConfigurationMap = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    if (streamConfigurationMap != null) {
        jpegSizes = streamConfigurationMap.getOutputSizes(ImageFormat.JPEG);
    }
    // Fall back to VGA when the device reports no JPEG sizes.
    final boolean jpegSizesNotEmpty = jpegSizes != null && 0 < jpegSizes.length;
    final int width = jpegSizesNotEmpty ? jpegSizes[0].getWidth() : 640;
    final int height = jpegSizesNotEmpty ? jpegSizes[0].getHeight() : 480;
    final ImageReader reader = ImageReader.newInstance(width, height, ImageFormat.JPEG, 1);
    // BUGFIX: keep the reader in the field so closeCamera() can actually
    // close it — the original left mImageReader null and leaked the reader.
    mImageReader = reader;
    // Define output surfaces to host the image.
    final List<Surface> outputSurfaces = new ArrayList<>();
    outputSurfaces.add(reader.getSurface());
    final CaptureRequest.Builder captureBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
    captureBuilder.addTarget(reader.getSurface());
    captureBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
    captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, getOrientation());
    captureBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
    captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
    // BUGFIX: the original hard-coded CONTROL_AE_EXPOSURE_COMPENSATION = 12
    // without consulting the device's supported range; out-of-range values
    // have undefined behavior. Clamp to CONTROL_AE_COMPENSATION_RANGE.
    final android.util.Range<Integer> aeCompRange =
            characteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
    if (aeCompRange != null) {
        captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION,
                Math.min(12, aeCompRange.getUpper()));
    }
    captureBuilder.set(CaptureRequest.CONTROL_AWB_MODE, CaptureRequest.CONTROL_AWB_MODE_AUTO);
    // BUGFIX: dropped CONTROL_SCENE_MODE_PARTY — scene modes are ignored
    // unless CONTROL_MODE is CONTROL_MODE_USE_SCENE_MODE, and PARTY is not
    // guaranteed to be in CONTROL_AVAILABLE_SCENE_MODES on every device.
    reader.setOnImageAvailableListener(onImageAvailableListener, null);
    Log.d(TAG, "Will attempt to create camera capture session now");
    // Creating the picture capture session.
    mCameraDevice.createCaptureSession(outputSurfaces,
            new CameraCaptureSession.StateCallback() {
                @Override
                public void onConfigured(@NonNull CameraCaptureSession session) {
                    try {
                        Log.d(TAG, "Session capture calling");
                        session.capture(captureBuilder.build(), captureListener, null);
                        Log.d(TAG, "Session capture called");
                    } catch (CameraAccessException e) {
                        Log.e(TAG, " exception occurred while accessing " + mCurrentCameraId, e);
                    }
                }
                @Override
                public void onConfigureFailed(@NonNull CameraCaptureSession session) {
                    Log.d(TAG, "Configuration failed");
                }
            }
            , null
    );
}
// Take another pic if there is another camera.
// Take another pic if there is another camera.
// Pops the next camera id off the queue and re-enters the open/capture cycle.
// NOTE(review): poll() can return null if the queue emptied after the
// caller's isEmpty() check — callers (onClosed) guard with !isEmpty() first.
private void takeAnotherPicture() {
Log.d(TAG, "Attempting to take another picture");
mCurrentCameraId = mCameraIds.poll();
Log.d(TAG, "Current Camera id = " + mCurrentCameraId);
openCamera();
}
/**
 * Releases the current camera device and the image reader, if any.
 * Closing the device triggers stateCallback.onClosed(), which advances to
 * the next camera or finishes the run.
 */
private void closeCamera() {
    // BUGFIX: the original logged mCameraDevice.getId() before the null
    // check and would NPE when the device was already null.
    if (null != mCameraDevice && !mCameraClosed) {
        Log.d(TAG, "closing camera " + mCameraDevice.getId());
        Log.d(TAG, "Camera closed");
        mCameraDevice.close();
    }
    if (null != mImageReader) {
        Log.d(TAG, "Image reader closed");
        mImageReader.close();
        mImageReader = null;
    }
}
/**
* We've finished taking pictures from all cameras.
*/
private void onDoneCapturingAllPhotos(TreeMap<String, byte[]> picturesTaken) {
Log.d(TAG, "Capturing all images done. Currently no non-black pixel found");
sResultDescriptionMap.put(mCameraOrientation, "Reading Capturted Image!");
boolean foundNonBlackPixel = false;
long imageProcessingStartTime = System.nanoTime();
Log.d(TAG, "Pictures taken is null? = " + (picturesTaken == null) + "Is picture taken empty? = " + picturesTaken.isEmpty());
if (picturesTaken != null && !picturesTaken.isEmpty()) {
Log.d(TAG, "Iterate over all pictures");
StringBuffer description = new StringBuffer("");
for (Map.Entry<String, byte[]> entryMap : picturesTaken.entrySet()) {
Log.d(TAG, "Compressing image");
// Get the byte stream from bytes array to decode into bitmap.
final ByteArrayInputStream stream = new ByteArrayInputStream(entryMap.getValue());
final BitmapFactory.Options largeOption = new BitmapFactory.Options();
// This makes sure only bitmap information is fetched and not the bitmap itself.
largeOption.inJustDecodeBounds = true;
BitmapFactory.decodeStream(stream, null, largeOption);
// The new size we want to scale to
final int requiredWidth = 200;
final int requiredHeight = 200;
// Find the correct scale value. It should be the power of 2.
int scale = 1;
while (largeOption.outWidth / scale >= requiredWidth && largeOption.outHeight / scale >= requiredHeight) {
scale *= 2;
}
Bitmap bitmapLarge = BitmapFactory.decodeByteArray(entryMap.getValue(), 0, entryMap.getValue().length);
int width = bitmapLarge.getWidth();
int height = bitmapLarge.getHeight();
// Scale down the bitmap to iterate over all pixels.
int scaledWidth = 160;
int scaledHeight = ((160 * height) / width);
Bitmap bitmap = Bitmap.createScaledBitmap(bitmapLarge, 160, ((160 * height) / width), true);
description.append("\n Bitmap width = " + scaledWidth + " and height = " + scaledHeight + "\n");
foundNonBlackPixel = false;
for (int x = 0; (x < scaledWidth && !foundNonBlackPixel); x++) {
for (int y = 0; (y < scaledHeight && !foundNonBlackPixel); y++) {
int pixel = bitmap.getPixel(x, y);
Log.d(TAG, "Pixel value = " + pixel);
Log.d(TAG, "Max Red pixel value = " + Color.RED);
int redValue = Color.red(pixel);
int blueValue = Color.blue(pixel);
int greenValue = Color.green(pixel);
// A range of 5 is allowed to determine black pixels.
if (!(redValue <= 5 && blueValue <= 5 && greenValue <= 5)) {
foundNonBlackPixel = true;
long endTime = System.nanoTime();
description.append("Camera pixel : Red color = " + redValue + " Green color = " +
greenValue + " Blue color = " + blueValue + " found non black pixel = " + foundNonBlackPixel +
"\n Total Time Taken = " + (cameraTestStartTime - endTime) +
"ns \n Time Taken to Capture Image = " + (cameraTestStartTime - imageProcessingStartTime) +
" \n Time Taken to find Non Black Pixel = " + (imageProcessingStartTime - endTime));
sResultDescriptionMap.put(mCameraOrientation, description.toString());
}
Log.d(TAG, "Camera pixel : Red color = " + redValue + " Green color = " +
greenValue + " Blue color = " + blueValue + " found non black pixel = " + foundNonBlackPixel);
}
}
if (foundNonBlackPixel) {
break;
}
}
}
Log.d(TAG, "Putting " + foundNonBlackPixel + " into " + mCameraOrientation);
sResultMap.put(mCameraOrientation, foundNonBlackPixel);
if (!foundNonBlackPixel) {
sResultDescriptionMap.put(mCameraOrientation, "Found All Black Pixels!");
}
}
// Stores the captured JPEG bytes keyed by the id of the camera currently
// being captured from (mPicturesTaken is a TreeMap, so entries stay sorted
// by camera id).
private void updatePictureTaken(final byte[] bytes) {
mPicturesTaken.put(mCurrentCameraId, bytes);
}