I am using the camera2 API for video recording.
With the rear camera of a Lenovo tab everything works fine, but as soon as I switch to the front camera the TextureView freezes and the app stops.
The only error I see is a buffer error ("BufferQueue has been abandoned" in logcat).
The relevant code is below.
private TextureView.SurfaceTextureListener mSurfaceTextureListener = new TextureView.SurfaceTextureListener() {
    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
        setupCamera(width, height);
        connectCamera();
    }

    @Override
    public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
    }

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
        Log.e(TAG, "onSurfaceTextureDestroyed");
        if (mCameraDevice != null) {
            closeCamera();
            mCameraDevice = null;
        }
        return true;
    }

    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture surface) {
    }
};
private CameraDevice mCameraDevice;

private CameraDevice.StateCallback mCameraDeviceStateCallback = new CameraDevice.StateCallback() {
    @Override
    public void onOpened(@NonNull CameraDevice camera) {
        mCameraDevice = camera;
        mMediaRecorder = new MediaRecorder();
        if (mIsRecording) {
            try {
                createVideoFileName();
            } catch (IOException e) {
                e.printStackTrace();
            }
            startRecord();
            mMediaRecorder.start();
            runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    mChronometer.setBase(SystemClock.elapsedRealtime());
                    mChronometer.setVisibility(View.VISIBLE);
                    mChronometer.start();
                }
            });
        } else {
            startPreview();
        }
        // Toast.makeText(getApplicationContext(),
        //         "Camera connection made!", Toast.LENGTH_SHORT).show();
    }

    @Override
    public void onDisconnected(@NonNull CameraDevice camera) {
        // Release the device when the camera service disconnects it.
        camera.close();
        mCameraDevice = null;
    }

    @Override
    public void onError(@NonNull CameraDevice camera, int error) {
        // Release the device on a fatal camera error.
        camera.close();
        mCameraDevice = null;
    }
};
private void setupCamera(int width, int height) {
    CameraManager cameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
    try {
        if (cameraManager != null) {
            for (String cameraId : cameraManager.getCameraIdList()) {
                CameraCharacteristics cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
                Integer lensFacing = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING);
                // Skip back-facing cameras; we want the front camera here.
                if (lensFacing != null && lensFacing == CameraCharacteristics.LENS_FACING_BACK) {
                    continue;
                }
                StreamConfigurationMap map = cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
                int deviceOrientation = getWindowManager().getDefaultDisplay().getRotation();
                mTotalRotation = sensorToDeviceRotation(cameraCharacteristics, deviceOrientation);
                boolean swapRotation = mTotalRotation == 90 || mTotalRotation == 270;
                int rotatedWidth = width;
                int rotatedHeight = height;
                if (swapRotation) {
                    rotatedWidth = height;
                    rotatedHeight = width;
                }
                assert map != null;
                mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), rotatedWidth, rotatedHeight);
                mVideoSize = chooseOptimalSize(map.getOutputSizes(MediaRecorder.class), rotatedWidth, rotatedHeight);
                mImageSize = chooseOptimalSize(map.getOutputSizes(ImageFormat.JPEG), rotatedWidth, rotatedHeight);
                mImageReader = ImageReader.newInstance(mImageSize.getWidth(), mImageSize.getHeight(), ImageFormat.JPEG, 1);
                mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);
                mCameraId = cameraId;
                return;
            }
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
private void connectCamera() {
    CameraManager cameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
    try {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
            if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA) ==
                    PackageManager.PERMISSION_GRANTED) {
                assert cameraManager != null;
                cameraManager.openCamera(mCameraId, mCameraDeviceStateCallback, mBackgroundHandler);
            } else {
                if (shouldShowRequestPermissionRationale(Manifest.permission.CAMERA)) {
                    Toast.makeText(this,
                            "Video app required access to camera", Toast.LENGTH_SHORT).show();
                }
                requestPermissions(new String[] {Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO},
                        REQUEST_CAMERA_PERMISSION_RESULT);
            }
        } else {
            assert cameraManager != null;
            cameraManager.openCamera(mCameraId, mCameraDeviceStateCallback, mBackgroundHandler);
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
private void startRecord() {
    try {
        if (mIsRecording) {
            setupMediaRecorder();
        } else if (mIsTimelapse) {
            setupTimelapse();
        }
        SurfaceTexture surfaceTexture = mTextureView.getSurfaceTexture();
        surfaceTexture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
        Surface previewSurface = new Surface(surfaceTexture);
        Surface recordSurface = mMediaRecorder.getSurface();
        mCaptureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
        mCaptureRequestBuilder.addTarget(previewSurface);
        mCaptureRequestBuilder.addTarget(recordSurface);
        mCameraDevice.createCaptureSession(Arrays.asList(previewSurface, recordSurface, mImageReader.getSurface()),
                new CameraCaptureSession.StateCallback() {
                    @Override
                    public void onConfigured(CameraCaptureSession session) {
                        mRecordCaptureSession = session;
                        try {
                            mRecordCaptureSession.setRepeatingRequest(
                                    mCaptureRequestBuilder.build(), null, null);
                        } catch (CameraAccessException e) {
                            e.printStackTrace();
                        }
                    }

                    @Override
                    public void onConfigureFailed(CameraCaptureSession session) {
                        Log.d(TAG, "onConfigureFailed: startRecord");
                    }
                }, null);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
private void startPreview() {
    SurfaceTexture surfaceTexture = mTextureView.getSurfaceTexture();
    surfaceTexture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
    Surface previewSurface = new Surface(surfaceTexture);
    try {
        mCaptureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        mCaptureRequestBuilder.addTarget(previewSurface);
        mCameraDevice.createCaptureSession(Arrays.asList(previewSurface, mImageReader.getSurface()),
                new CameraCaptureSession.StateCallback() {
                    @Override
                    public void onConfigured(CameraCaptureSession session) {
                        Log.d(TAG, "onConfigured: startPreview");
                        mPreviewCaptureSession = session;
                        try {
                            mPreviewCaptureSession.setRepeatingRequest(mCaptureRequestBuilder.build(),
                                    null, mBackgroundHandler);
                        } catch (CameraAccessException e) {
                            e.printStackTrace();
                        }
                    }

                    @Override
                    public void onConfigureFailed(CameraCaptureSession session) {
                        Log.d(TAG, "onConfigureFailed: startPreview");
                    }
                }, null);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
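One thing I am not sure about is whether the SurfaceTexture is still alive by the time startRecord() / startPreview() run. A minimal guard I am considering looks like the sketch below; it is not part of the code above, and isSurfaceReady is just an illustrative name:

// Sketch only, not in the posted code: bail out early if the TextureView
// no longer has a live SurfaceTexture before building a capture session.
private boolean isSurfaceReady() {
    return mTextureView != null
            && mTextureView.isAvailable()                // TextureView currently has a surface
            && mTextureView.getSurfaceTexture() != null; // surface has not been released
}

Would calling something like this at the top of those two methods be the right direction, or is the problem elsewhere?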
While this runs, logcat produces the output below; I am posting it to help pinpoint the actual problem.
Logcat:
> E/BufferQueueProducer: [SurfaceTexture-1-29637-2](this:0x7b3776e800,id:3,api:4,p:336,c:-1) queueBuffer: BufferQueue has been abandoned
> 06-20 16:59:02.445 29637-29650/com.tsa.exam E/BufferQueueProducer: [SurfaceTexture-1-29637-2](this:0x7b3776e800,id:3,api:4,p:336,c:-1) dequeueBuffer: BufferQueue has been abandoned
> 06-20 16:59:02.493 29637-29659/com.tsa.exam D/BufferQueueProducer: [SurfaceTexture-1-29637-2](this:0x7b3776e800,id:3,api:4,p:336,c:-1) cancelBuffer: slot 0
> 06-20 16:59:02.493 29637-29659/com.tsa.exam E/BufferQueueProducer: [SurfaceTexture-1-29637-2](this:0x7b3776e800,id:3,api:4,p:336,c:-1) cancelBuffer: BufferQueue has been abandoned
> 06-20 16:59:02.493 29637-29649/com.tsa.exam D/BufferQueueProducer: [SurfaceTexture-1-29637-2](this:0x7b3776e800,id:3,api:4,p:336,c:-1) cancelBuffer: slot 2
> 06-20 16:59:02.493 29637-29649/com.tsa.exam E/BufferQueueProducer: [SurfaceTexture-1-29637-2](this:0x7b3776e800,id:3,api:4,p:336,c:-1) cancelBuffer: BufferQueue has been abandoned
> 06-20 16:59:02.541 29637-29681/com.tsa.exam I/BufferQueueProducer: [SurfaceTexture-1-29637-2](this:0x7b3776e800,id:3,api:4,p:-1,c:-1) disconnect(P): api 4
> 06-20 16:59:02.548 29637-29681/com.tsa.exam D/Camera-JNI: (tid:29681)[release camera] - X context=0x7b342b69e0
> 06-20 16:59:02.556 29637-29650/com.tsa.exam D/Camera-JNI: (tid:29650)[~MtkJNICameraContext] this:0x7b342b69e0
> 06-20 16:59:02.826 29637-29654/com.tsa.exam D/OpenGLRenderer: CacheTexture 4 upload: x, y, width height = 192, 35, 9, 20
> 06-20 16:59:03.820 29637-29654/com.tsa.exam D/OpenGLRenderer: CacheTexture 4 upload: x, y, width height = 112, 386, 15, 21
> 06-20 16:59:04.843 29637-29654/com.tsa.exam D/OpenGLRenderer: CacheTexture 4 upload: x, y, width height = 112, 406, 14, 22
> 06-20 16:59:05.836 29637-29654/com.tsa.exam D/OpenGLRenderer: CacheTexture 4 upload: x, y, width height = 112, 427, 16, 20
> 06-20 16:59:06.828 29637-29654/com.tsa.exam D/OpenGLRenderer: CacheTexture 4 upload: x, y, width height = 112, 446, 15, 21
> 06-20 16:59:07.849 29637-29654/com.tsa.exam D/OpenGLRenderer: CacheTexture 4 upload: x, y, width height = 112, 466, 14, 21
> 06-20 16:59:08.838 29637-29654/com.tsa.exam D/OpenGLRenderer: CacheTexture 4 upload: x, y, width height = 112, 486, 15, 20
> 06-20 16:59:09.865 29637-29654/com.tsa.exam D/OpenGLRenderer: CacheTexture 4 upload: x, y, width height = 200, 0, 14, 22
> 06-20 16:59:10.854 29637-29654/com.tsa.exam D/OpenGLRenderer: CacheTexture 4 upload: x, y, width height = 200, 21, 14, 22
> 06-20 17:00:01.407 29637-29654/com.tsa.exam I/GLConsumer: [SurfaceTexture-0-29637-0] detachFromContext
> 06-20 17:00:01.407 29637-29637/com.tsa.exam E/Camera2VideoImageActivi: onSurfaceTextureDestroyed
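Reading the log, it looks as if frames are still being queued to the preview SurfaceTexture after onSurfaceTextureDestroyed fires, which is when the "BufferQueue has been abandoned" errors start. I am wondering whether I need to stop the session before the surface is torn down, along these lines (a sketch of what I am considering, not what the code above currently does):

// Sketch only: stop camera output before the SurfaceTexture goes away,
// e.g. at the start of onSurfaceTextureDestroyed.
if (mRecordCaptureSession != null) {
    try {
        mRecordCaptureSession.stopRepeating();  // stop the repeating capture request
        mRecordCaptureSession.abortCaptures();  // drop any in-flight captures
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
    mRecordCaptureSession.close();
    mRecordCaptureSession = null;
}
closeCamera();

Is something like this needed here, or does the front-camera freeze point to a different problem?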