Using the new CameraX API, I am trying to take a picture in memory with the public void takePicture(final OnImageCapturedListener listener)
method and then convert the resulting image to an OpenCV Mat.
While I am able to convert the image to a Mat successfully in the image analyzer, a problem appears when I try to capture the image in high quality: getPlanes returns an array with only a single item (whereas in the image analyzer I get three SurfacePlane items), and the result looks broken:
package com.example.scanner;
import android.content.pm.PackageManager;
import android.graphics.ImageFormat;
import android.media.Image;
import android.os.Bundle;
import android.util.Rational;
import android.util.Size;
import android.view.TextureView;
import android.view.ViewGroup;
import android.widget.Toast;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import java.nio.ByteBuffer;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import androidx.camera.core.CameraX;
import androidx.camera.core.ImageAnalysis;
import androidx.camera.core.ImageAnalysisConfig;
import androidx.camera.core.ImageCapture;
import androidx.camera.core.ImageCaptureConfig;
import androidx.camera.core.ImageProxy;
import androidx.camera.core.Preview;
import androidx.camera.core.PreviewConfig;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import androidx.lifecycle.LifecycleOwner;
public class CameraXActivity extends AppCompatActivity {
private final String[] REQUIRED_PERMISSIONS = new String[]{"android.permission.CAMERA", "android.permission.WRITE_EXTERNAL_STORAGE"};
//array w/ permissions from manifest
TextureView mSurfaceView;
private int REQUEST_CODE_PERMISSIONS = 10; //arbitrary number, can be changed accordingly
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_gallery);
mSurfaceView = findViewById(R.id.action_sync);
if (allPermissionsGranted()) {
startCamera(); //start camera if permission has been granted by user
} else {
ActivityCompat.requestPermissions(this, REQUIRED_PERMISSIONS, REQUEST_CODE_PERMISSIONS);
}
}
private void startCamera() {
CameraX.unbindAll();
/* start preview */
int aspRatioW = mSurfaceView.getWidth(); // get width of screen
int aspRatioH = mSurfaceView.getHeight(); // get height
Rational asp = new Rational(aspRatioW, aspRatioH); // aspect ratio
Size screen = new Size(aspRatioW, aspRatioH); // size of the screen
PreviewConfig pConfig = new PreviewConfig.Builder()
.setTargetAspectRatio(asp)
.setTargetResolution(screen)
.setLensFacing(CameraX.LensFacing.BACK)
.build();
Preview preview = new Preview(pConfig); // build the preview use case
preview.setOnPreviewOutputUpdateListener(
new Preview.OnPreviewOutputUpdateListener() {
@Override
public void onUpdated(Preview.PreviewOutput output) {
mSurfaceView.setSurfaceTexture(output.getSurfaceTexture());
}
});
ImageAnalysisConfig imgAConfig = new ImageAnalysisConfig.Builder()
.setImageReaderMode(ImageAnalysis.ImageReaderMode.ACQUIRE_LATEST_IMAGE)
.setLensFacing(CameraX.LensFacing.BACK)
.setTargetResolution(new Size(2480, 3508))
.build();
ImageAnalysis analysis = new ImageAnalysis(imgAConfig);
analysis.setAnalyzer(
new ImageAnalysis.Analyzer() {
@Override
public void analyze(ImageProxy image, int rotationDegrees) {
Mat mat = imageToMat(image.getImage()); // no errors here!
}
});
preview.setOnPreviewOutputUpdateListener(
new Preview.OnPreviewOutputUpdateListener() {
//to update the surface texture we have to destroy it first, then re-add it
@Override
public void onUpdated(Preview.PreviewOutput output) {
ViewGroup parent = (ViewGroup) mSurfaceView.getParent();
parent.removeView(mSurfaceView);
parent.addView(mSurfaceView, 0);
mSurfaceView.setSurfaceTexture(output.getSurfaceTexture());
}
});
ImageCaptureConfig imgCapConfig =
new ImageCaptureConfig.Builder()
.setCaptureMode(ImageCapture.CaptureMode.MIN_LATENCY)
.setLensFacing(CameraX.LensFacing.BACK)
.setTargetResolution(new Size(2480, 3508))
.build();
final ImageCapture imgCap = new ImageCapture(imgCapConfig);
// call after 5 seconds of starting
new Thread(() -> {
try {
Thread.sleep(5000);
imgCap.takePicture(new ImageCapture.OnImageCapturedListener() {
@Override
public void onCaptureSuccess(ImageProxy image, int rotationDegrees) {
Mat mat = imageToMat(image.getImage()); // ERROR HERE!
}
@Override
public void onError(ImageCapture.UseCaseError useCaseError, String message, @Nullable Throwable cause) {
// Error
}
});
} catch (Exception e) {
System.err.println(e);
}
}).start();
//bind to lifecycle:
CameraX.bindToLifecycle((LifecycleOwner) this, analysis, imgCap, preview);
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
//start camera when permissions have been granted otherwise exit app
if (requestCode == REQUEST_CODE_PERMISSIONS) {
if (allPermissionsGranted()) {
startCamera();
} else {
Toast.makeText(this, "Permissions not granted by the user.", Toast.LENGTH_SHORT).show();
finish();
}
}
}
private boolean allPermissionsGranted() {
//check if req permissions have been granted
for (String permission : REQUIRED_PERMISSIONS) {
if (ContextCompat.checkSelfPermission(this, permission) != PackageManager.PERMISSION_GRANTED) {
return false;
}
}
return true;
}
public static Mat imageToMat(Image image) {
ByteBuffer buffer;
int rowStride;
int pixelStride;
int width = image.getWidth();
int height = image.getHeight();
int offset = 0;
Image.Plane[] planes = image.getPlanes();
byte[] data = new byte[image.getWidth() * image.getHeight() * ImageFormat.getBitsPerPixel(ImageFormat.YUV_420_888) / 8];
byte[] rowData = new byte[planes[0].getRowStride()];
for (int i = 0; i < planes.length; i++) {
buffer = planes[i].getBuffer();
rowStride = planes[i].getRowStride();
pixelStride = planes[i].getPixelStride();
int w = (i == 0) ? width : width / 2;
int h = (i == 0) ? height : height / 2;
for (int row = 0; row < h; row++) {
int bytesPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.YUV_420_888) / 8;
if (pixelStride == bytesPerPixel) {
int length = w * bytesPerPixel;
buffer.get(data, offset, length);
if (h - row != 1) {
buffer.position(buffer.position() + rowStride - length);
}
offset += length;
} else {
if (h - row == 1) {
buffer.get(rowData, 0, width - pixelStride + 1);
} else {
buffer.get(rowData, 0, rowStride);
}
for (int col = 0; col < w; col++) {
data[offset++] = rowData[col * pixelStride];
}
}
}
}
Mat mat = new Mat(height + height / 2, width, CvType.CV_8UC1);
mat.put(0, 0, data);
return mat;
}
}
I tried this with 1.0.0-alpha01 as well as with the latest version of the camera libraries (1.0.0-alpha03). At first I suspected the problem was the YUV-to-Mat conversion or the image being too large, but that turned out not to be the case.
The phone used is a Huawei P20 Pro.
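For reference, a minimal diagnostic sketch (not part of my app; the log tag is arbitrary) that can be dropped into onCaptureSuccess to see what takePicture actually delivers:

@Override
public void onCaptureSuccess(ImageProxy image, int rotationDegrees) {
    // Log the reported format (ImageFormat.JPEG = 256, ImageFormat.YUV_420_888 = 35),
    // the number of planes (1 for JPEG, 3 for YUV_420_888) and the dimensions.
    android.util.Log.d("CaptureDebug", "format=" + image.getFormat()
            + " planes=" + image.getPlanes().length
            + " size=" + image.getWidth() + "x" + image.getHeight());
    image.close();
}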
Answer 0 (score: 0)
Found what is causing the issue. The captured image is not YUV but JPEG, as described in the documentation.
To convert the JPEG to a Mat, the following code can be used (Imgcodecs lives in org.opencv.imgcodecs and MatOfByte in org.opencv.core):
imgCap.takePicture(new ImageCapture.OnImageCapturedListener() {
@Override
public void onCaptureSuccess(ImageProxy image, int rotationDegrees) {
ByteBuffer bb = image.getPlanes()[0].getBuffer();
byte[] buf = new byte[bb.remaining()];
bb.get(buf);
Mat mat = Imgcodecs.imdecode(new MatOfByte(buf), Imgcodecs.IMREAD_UNCHANGED);
// Do something with Mat...
image.close();
}
@Override
public void onError(
ImageCapture.UseCaseError error, String message, @Nullable Throwable cause) {
// silently ignore error
}
});
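If the same callback code ever needs to handle both formats (for example when reusing it for ImageAnalysis frames), a defensive sketch could branch on the reported format; this assumes the imageToMat helper from the question is still in scope:

// Sketch: pick a decode path based on what the ImageProxy reports.
// Assumes imageToMat(...) from the question is available for the YUV case.
int format = image.getFormat();
Mat mat = null;
if (format == ImageFormat.JPEG) {
    // A single plane containing the compressed JPEG bytes.
    ByteBuffer bb = image.getPlanes()[0].getBuffer();
    byte[] buf = new byte[bb.remaining()];
    bb.get(buf);
    mat = Imgcodecs.imdecode(new MatOfByte(buf), Imgcodecs.IMREAD_UNCHANGED);
} else if (format == ImageFormat.YUV_420_888) {
    // Three planes (Y, U, V); pack them into one single-channel Mat as before.
    mat = imageToMat(image.getImage());
}
image.close();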