I am still fairly new to Android, and I need an app that records video with audio for a predetermined amount of time. The code is based on Google's example. However, the camera does not record and the application crashes.
So far I have been trying suggestions from people more knowledgeable than me (checking the libraries, re-checking the process of opening and closing the camera, and so on), until I finally adapted my code to follow Google's example, which resulted in the code below. However, the same errors keep occurring, and I cannot figure out why or how to get past them.
package com.example.filipa.eyetracker;
import android.Manifest;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.MediaRecorder;
import android.os.Build;
import android.os.Bundle;
import android.os.CountDownTimer;
import android.os.Environment;
import android.os.Handler;
import android.os.HandlerThread;
import android.support.annotation.NonNull;
import android.support.v4.app.ActivityCompat;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.util.Size;
import android.util.SparseIntArray;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.widget.ImageView;
import android.widget.Toast;
import java.io.File;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.List;
import java.util.concurrent.Semaphore;
public class Calibration extends AppCompatActivity {
private static final String TAG = "Camera2Video";
private static final SparseIntArray DEFAULT_ORIENTATIONS = new SparseIntArray();
private static final SparseIntArray INVERSE_ORIENTATIONS = new SparseIntArray();
static {
DEFAULT_ORIENTATIONS.append(Surface.ROTATION_0, 90);
DEFAULT_ORIENTATIONS.append(Surface.ROTATION_90, 0);
DEFAULT_ORIENTATIONS.append(Surface.ROTATION_180, 270);
DEFAULT_ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
static {
INVERSE_ORIENTATIONS.append(Surface.ROTATION_0, 270);
INVERSE_ORIENTATIONS.append(Surface.ROTATION_90, 180);
INVERSE_ORIENTATIONS.append(Surface.ROTATION_180, 90);
INVERSE_ORIENTATIONS.append(Surface.ROTATION_270, 0);
}
private static final int REQUEST_CAMERA_PERMISSION_RESULT = 1;
private static final int REQUEST_RECORD_AUDIO_PERMISSION_RESULT = 1;
private static final int REQUEST_WRITE_EXTERNAL_STORAGE_PERMISSION_RESULT = 1;
private TextureView mTextureView; //A {@link TextureView} for camera preview.
private CameraDevice mCameraDevice; //A reference to the opened {@link android.hardware.camera2.CameraDevice}
// Handles several lifecycle events on a {@link TextureView}
private TextureView.SurfaceTextureListener mSurfaceTextureListener
= new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width, int height) {
openCamera(width, height);
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture, int width, int height) {
configureTransform(width, height);
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
return true;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {
}
};
private Size mPreviewSize; //The {@link android.util.Size} of camera preview.
private Size mVideoSize; //The {@link android.util.Size} of video recording.
private MediaRecorder mMediaRecorder; //MediaRecorder
private boolean mIsRecordingVideo; //Whether the app is recording video now
private HandlerThread mBackgroundThread; //An additional thread for running tasks that shouldn't block the UI.
private Handler mBackgroundHandler; //A {@link Handler} for running tasks in the background.
private Semaphore mCameraOpenCloseLock
= new Semaphore(1); //A {@link Semaphore} to prevent the app from exiting before closing the camera.
private int milliseconds;
//{@link CameraDevice.StateCallback} is called when {@link CameraDevice} changes its status.
private CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice cameraDevice) {
mCameraDevice = cameraDevice;
startPreview();
mCameraOpenCloseLock.release();
if (null != mTextureView) {
configureTransform(mTextureView.getWidth(), mTextureView.getHeight());
}
}
@Override
public void onDisconnected(@NonNull CameraDevice cameraDevice) {
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
}
@Override
public void onError(@NonNull CameraDevice cameraDevice, int i) {
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
}
};
private String mCameraId;
private CameraCharacteristics characteristics;
private File mVideoFolder;
private String mNextVideoAbsolutePath;
private CaptureRequest.Builder mPreviewBuilder;
private CameraCaptureSession mPreviewSession; //A reference to the current {@link android.hardware.camera2.CameraCaptureSession} for preview.
private int cdotnumber;
int[] cdots = {R.drawable.calgelo, R.drawable.calgelo, R.drawable.calgelo};
/*
* In this sample, we choose a video size with 3x4 aspect ratio. Also, we don't use sizes
* larger than 1080p, since MediaRecorder cannot handle such a high-resolution video.
*
* @param choices The list of available sizes
* @return The video size
*/
private static Size chooseVideoSize(Size[] choices) {
for (Size size : choices) {
if (size.getWidth() == size.getHeight() * 4 / 3 && size.getWidth() <= 1080) {
return size;
}
}
Log.e(TAG, "Couldn't find any suitable video size");
return choices[choices.length - 1];
}
/*
* Given {@code choices} of {@code Size}s supported by a camera, chooses the smallest one whose
* width and height are at least as large as the respective requested values, and whose aspect
* ratio matches with the specified value.
*
* @param choices The list of sizes that the camera supports for the intended output class
* @param width The minimum desired width
* @param height The minimum desired height
* @param aspectRatio The aspect ratio
* @return The optimal {@code Size}, or an arbitrary one if none were big enough
*/
private static Size chooseOptimalSize(Size[] choices, int width, int height, Size aspectRatio) {
// Collect the supported resolutions that are at least as big as the preview Surface
List<Size> bigEnough = new ArrayList<>();
int w = aspectRatio.getWidth();
int h = aspectRatio.getHeight();
for (Size option : choices) {
if (option.getHeight() == option.getWidth() * h / w &&
option.getWidth() >= width && option.getHeight() >= height) {
bigEnough.add(option);
}
}
// Pick the smallest of those, assuming we found any
if (bigEnough.size() > 0) {
return Collections.min(bigEnough, new CompareSizesByArea());
} else {
Log.e(TAG, "Couldn't find any suitable preview size");
return choices[0];
}
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
View decorView = getWindow().getDecorView();
/* Hide both the navigation bar and the status bar.
SYSTEM_UI_FLAG_FULLSCREEN is only available on Android 4.1 and higher, but as
a general rule, you should design your app to hide the status bar whenever you
hide the navigation bar.*/
int uiOptions = View.SYSTEM_UI_FLAG_HIDE_NAVIGATION
| View.SYSTEM_UI_FLAG_FULLSCREEN;
decorView.setSystemUiVisibility(uiOptions);
setContentView(R.layout.activity_calibration);
mTextureView = (TextureView) findViewById(R.id.texture);
if (mVideoFolder == null){createVideoFolder();}
mMediaRecorder = new MediaRecorder();
new CountDownTimer(15000, 3000) {
@Override
public void onTick(long millisUntilFinished) {
delay(1);
openCamera(mTextureView.getWidth(), mTextureView.getHeight());
startRecordingVideo();
}
@Override
public void onFinish() {
stopRecordingVideo();
closeCamera();
}
}.start();
}
public void delay(int seconds){
milliseconds = seconds * 1000;
runOnUiThread(new Runnable() {
@Override
public void run() {
final Handler handler = new Handler();
handler.postDelayed(new Runnable() {
@Override
public void run() {
nextDot(); // advance to the next calibration dot once the delay has elapsed
}
}, milliseconds);
}
});
}
public void nextDot() {
if (cdotnumber < (cdots.length)) {
((ImageView) findViewById(R.id.imageView2)).setImageDrawable(getDrawable(cdots[cdotnumber]));
} else {
stopRecordingVideo();
closeCamera();
Intent intent = new Intent(this, PostCalibration.class);
startActivity(intent);
}
cdotnumber++;
}
@Override
public void onResume() {
super.onResume();
startBackgroundThread();
if (mTextureView.isAvailable()) {
openCamera(mTextureView.getWidth(), mTextureView.getHeight());
} else {
mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
}
}
@Override
public void onPause() {
closeCamera();
stopBackgroundThread();
super.onPause();
}
/*
* Starts a background thread and its {@link Handler}.
*/
private void startBackgroundThread() {
mBackgroundThread = new HandlerThread("CameraBackground");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
/*
* Stops the background thread and its {@link Handler}.
*/
private void stopBackgroundThread() {
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions,
@NonNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if(requestCode == REQUEST_CAMERA_PERMISSION_RESULT) {
if(grantResults[0] != PackageManager.PERMISSION_GRANTED) {
Toast.makeText(getApplicationContext(),
"Application will not run without camera services", Toast.LENGTH_SHORT).show();
}
}
if(requestCode == REQUEST_RECORD_AUDIO_PERMISSION_RESULT) {
if(grantResults[0] != PackageManager.PERMISSION_GRANTED) {
Toast.makeText(getApplicationContext(),
"Application will not run without audio permission", Toast.LENGTH_SHORT).show();
}
}
if(requestCode == REQUEST_WRITE_EXTERNAL_STORAGE_PERMISSION_RESULT) {
if(grantResults[0] == PackageManager.PERMISSION_GRANTED) {
try {
createVideoFileName();
} catch (IOException e) {
e.printStackTrace();
}
Toast.makeText(this,
"Permission successfully granted!", Toast.LENGTH_SHORT).show();
}
else {
Toast.makeText(this,
"Application will not run with storage permission", Toast.LENGTH_SHORT).show();
}
}
}
private void openCamera(int width, int height) {
CameraManager cameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
mCameraId = cameraManager.getCameraIdList()[1]; // assumes index 1 is the front-facing camera
// Choose the sizes for camera preview and video recording
characteristics = cameraManager.getCameraCharacteristics(mCameraId);
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (map == null) {
throw new RuntimeException("Cannot get available preview/video sizes");
}
/* TODO: Increase camera exposure
Range<Integer> range1 = characteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
minExposure = range1.getLower();
maxExposure = range1.getUpper();
*/
mVideoSize = chooseVideoSize(map.getOutputSizes(MediaRecorder.class));
mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
width, height, mVideoSize);
configureTransform(width, height);
mMediaRecorder = new MediaRecorder();
if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
// Add permission for camera and let user grant the permission
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED &&
ActivityCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED &&
ActivityCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(Calibration.this,
new String[]{Manifest.permission.CAMERA,
Manifest.permission.RECORD_AUDIO,
Manifest.permission.WRITE_EXTERNAL_STORAGE}, REQUEST_CAMERA_PERMISSION_RESULT);
}
cameraManager.openCamera(mCameraId, mStateCallback, null);
} else {
cameraManager.openCamera(mCameraId, mStateCallback, null);
}
} catch (CameraAccessException e) {
e.printStackTrace();
} catch (NullPointerException e) {
// Currently an NPE is thrown when the Camera2API is used but not supported on the
// device this code runs.
Toast.makeText(getApplicationContext(),
"NullPointerException has occurred", Toast.LENGTH_SHORT).show();
}
}
private void closeCamera() {
try {
mCameraOpenCloseLock.acquire();
closePreviewSession();
if (null != mCameraDevice) {
mCameraDevice.close();
mCameraDevice = null;
}
if (null != mMediaRecorder) {
mMediaRecorder.release();
mMediaRecorder = null;
}
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera closing.");
} finally {
mCameraOpenCloseLock.release();
}
}
/*
* Start the camera preview
*/
private void startPreview() {
if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
return;
}
try {
closePreviewSession();
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
Surface previewSurface = new Surface(texture);
mPreviewBuilder.addTarget(previewSurface);
mCameraDevice.createCaptureSession(Collections.singletonList(previewSurface),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession session) {
mPreviewSession = session;
updatePreview();
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession session) {
Toast.makeText(getApplicationContext(),
"Unable to setup camera preview", Toast.LENGTH_SHORT).show();
}
}, mBackgroundHandler);
}catch (CameraAccessException e) {
e.printStackTrace();
}
}
/*
* Update the camera preview. {@link #startPreview()} needs to be called in advance.
*/
private void updatePreview() {
if (null == mCameraDevice){
return;
}
try{
setUpCaptureRequestBuilder(mPreviewBuilder);
HandlerThread thread = new HandlerThread("CameraPreview");
thread.start();
mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), null, mBackgroundHandler);
} catch (CameraAccessException e){
e.printStackTrace();
}
}
private void closePreviewSession() {
if (mPreviewSession != null){
mPreviewSession.close();
mPreviewSession = null;
}
}
private void setUpCaptureRequestBuilder(CaptureRequest.Builder builder) {
builder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
}
/*
* Configures the necessary {@link android.graphics.Matrix} transformation to `mTextureView`.
* This method should not to be called until the camera preview size is determined in
* openCamera, or until the size of `mTextureView` is fixed.
*
* @param viewWidth The width of `mTextureView`
* @param viewHeight The height of `mTextureView`
*/
//TODO: Code for configureTransform - don't know if needed
private void configureTransform(int viewWidth, int viewHeight) {
if(null == mTextureView || null == mPreviewSize){
return;
}
}
private void setUpMediaRecorder() throws IOException{
mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
if (mNextVideoAbsolutePath == null || mNextVideoAbsolutePath.isEmpty()){
createVideoFileName();
}
mMediaRecorder.setOutputFile(mNextVideoAbsolutePath);
mMediaRecorder.setVideoEncodingBitRate(10000000);
mMediaRecorder.setVideoFrameRate(30);
mMediaRecorder.setVideoSize(mVideoSize.getWidth(), mVideoSize.getHeight());
mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
mMediaRecorder.prepare();
}
private void startRecordingVideo(){
if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize){
return;
}
try{
closePreviewSession();
setUpMediaRecorder();
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
List<Surface> surfaces = new ArrayList<>();
//Set up Surface for the camera preview
Surface previewSurface = new Surface(texture);
surfaces.add(previewSurface);
mPreviewBuilder.addTarget(previewSurface);
//Set up Surface for the MediaRecorder
Surface recorderSurface = mMediaRecorder.getSurface();
surfaces.add(recorderSurface);
mPreviewBuilder.addTarget(recorderSurface);
/* TODO: Increase the camera exposure
mCaptureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
builder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
exp = (int)(maxExposure * 0.35);
mCaptureRequestBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, exp);
mCaptureRequestBuilder.addTarget(recordSurface);*/
//Start a capture session
//Once the session starts, we can update the UI and start recording
mCameraDevice.createCaptureSession(surfaces, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
mPreviewSession = cameraCaptureSession;
updatePreview();
runOnUiThread(new Runnable() {
@Override
public void run() {
mIsRecordingVideo = true;
mMediaRecorder.start();
}
});
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
}
}, mBackgroundHandler);
} catch (CameraAccessException | IOException e) {
e.printStackTrace();
}
}
private void stopRecordingVideo(){
mIsRecordingVideo = false;
//Stop recording
mMediaRecorder.stop();
mMediaRecorder.reset();
mNextVideoAbsolutePath = null;
startPreview();
}
private void createVideoFolder() {
File movieFile = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_MOVIES);
mVideoFolder = new File(movieFile, "camera2VideoImage");
if(!mVideoFolder.exists()) {
mVideoFolder.mkdirs();
}
}
private void createVideoFileName() throws IOException{
if (mVideoFolder == null){
createVideoFolder();
}
String timestamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
String prepend = "VIDEO_" +"CAL_"+ timestamp + "_";
File videoFile = File.createTempFile(prepend, ".mp4", mVideoFolder);
mNextVideoAbsolutePath = videoFile.getAbsolutePath();
}
static class CompareSizesByArea implements Comparator<Size> {
@Override
public int compare(Size lhs, Size rhs) {
// We cast here to ensure the multiplications won't overflow
return Long.signum((long) lhs.getWidth() * lhs.getHeight() -
(long) rhs.getWidth() * rhs.getHeight());
}
}
}
To give you some context, before it reaches the code that throws the errors, the app runs through a few activities that let the user set some parameters used later on. When it reaches this particular activity it starts off as expected, but after a few seconds it crashes and, while debugging, it goes into a loop of trying to record and failing.
The intended behaviour is for the screen to display an image of a white plus sign for the person to look at, while recording with the front-facing camera.
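One thing I am not sure about: I hard-coded getCameraIdList()[1] as the front camera, but as far as I know index 1 is not guaranteed to be the front-facing camera on every device. Below is a minimal, untested sketch of picking the id by its LENS_FACING characteristic instead (the helper name findFrontCameraId is just for illustration):

    // Untested helper: pick the front-facing camera by LENS_FACING instead of
    // assuming it is getCameraIdList()[1].
    private String findFrontCameraId(CameraManager cameraManager) throws CameraAccessException {
        for (String id : cameraManager.getCameraIdList()) {
            CameraCharacteristics cc = cameraManager.getCameraCharacteristics(id);
            Integer facing = cc.get(CameraCharacteristics.LENS_FACING);
            if (facing != null && facing == CameraMetadata.LENS_FACING_FRONT) {
                return id;
            }
        }
        // Fall back to the first camera if no front-facing camera is reported.
        return cameraManager.getCameraIdList()[0];
    }

I have not wired this into openCamera yet, so I do not know whether it changes anything about the crash.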
There are some errors flagged while debugging that I cannot figure out. Here is the full stack trace. Any help/suggestions would be greatly appreciated. Thank you very much!