I am trying to record a video and process the frames before saving them. I followed this: http://nezarobot.blogspot.com/2016/03/android-surfacetexture-camera2-opencv.html and created a C++ function that draws circles on the image and also renders to the Surface that is used as input to the native function.
The next step is to save everything to a video. My current approach is to use a MediaRecorder to capture both video and audio, with VideoSource.SURFACE as the video source for the MediaRecorder. The Surface obtained from the MediaRecorder is then used in the native function for the processing.
This works: both audio and video are captured, and the circles show up in the recorded video as well.
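To make the data flow concrete, here is a minimal sketch of the wiring (field names as in the snippets below):

// Camera frames are delivered to an ImageReader; the native code draws on
// them and renders the result into the MediaRecorder's input Surface.
Surface recorderInput = mAudioRecorder.getSurface(); // only valid after prepare()
CaptureRequest.Builder builder =
        mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
builder.addTarget(mImageReader.get().getSurface()); // frames for the native processing
// recorderInput is later passed to the native drawCircle() as the destination Surface.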
One problem remains: I cannot see the rendering on the screen. I have created a preview surface with
Surface surfacePreview = new Surface(texture);
where texture is the SurfaceTexture from my TextureView. I attach surfacePreview to the MediaRecorder with
mAudioRecorder.setPreviewDisplay(surfacePreview);
I can see the video, but not the rendering, in this case the circles.
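For reference, the preview wiring in full (per the MediaRecorder docs, setPreviewDisplay() must be called before prepare()):

SurfaceTexture texture = mTextureView.getSurfaceTexture();
Surface surfacePreview = new Surface(texture);
mAudioRecorder.setPreviewDisplay(surfacePreview); // must be called before prepare()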
Here is the setup of the MediaRecorder:
MediaRecorder mAudioRecorder;

private void initRecorder(File outputFile, Surface surface) {
    mAudioRecorder = new MediaRecorder();
    mAudioRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
    mAudioRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
    mAudioRecorder.setInputSurface(surface); // expects a persistent input surface (API 23+)
    mAudioRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
    mAudioRecorder.setOutputFile(outputFile.toString());
    mAudioRecorder.setVideoEncodingBitRate(10000000);
    mAudioRecorder.setVideoFrameRate(30);
    mAudioRecorder.setVideoSize(1280, 720);
    mAudioRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
    mAudioRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
    mAudioRecorder.setPreviewDisplay(surfacePreview); // must be set before prepare()
    try {
        mAudioRecorder.prepare();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
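For clarity, the intended call order is the following (a sketch; getSurface() may only be called after prepare() has succeeded):

File outputFile = new File(Environment.getExternalStoragePublicDirectory(
        Environment.DIRECTORY_DCIM), "testSurface.mp4");
initRecorder(outputFile, surfacePreview); // configures and prepares the recorder
mSurface = mAudioRecorder.getSurface();   // the input Surface the native code renders into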
And here is createCameraPreviewSession:
private void createCameraPreviewSession() {
    try {
        SurfaceTexture texture = mTextureView.getSurfaceTexture();
        assert texture != null;
        // We configure the size of the default buffer to be the size of the camera preview we want.
        texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
        // This is the output Surface we need to start the preview.
        surfacePreview = new Surface(texture);
        File audioFile = new File(Environment.getExternalStoragePublicDirectory(
                Environment.DIRECTORY_DCIM), "testSurface.mp4");
        initRecorder(audioFile, surfacePreview); // <---
        mSurface = mAudioRecorder.getSurface(); // Get the input surface from the MediaRecorder (after prepare())
        // We set up a CaptureRequest.Builder with the output Surfaces.
        mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
        // mPreviewRequestBuilder.addTarget(mSurface);
        mPreviewRequestBuilder.addTarget(mImageReader.get().getSurface());
        mPreviewRequestBuilder.addTarget(surfacePreview);
        BlockingSessionCallback sessionCallback = new BlockingSessionCallback();
        List<Surface> outputSurfaces = new ArrayList<>();
        outputSurfaces.add(mImageReader.get().getSurface());
        outputSurfaces.add(surfacePreview);
        mCameraDevice.createCaptureSession(outputSurfaces, sessionCallback, mBackgroundHandler);
        try {
            Log.d(TAG, "waiting on session");
            mCaptureSession = sessionCallback.waitAndGetSession(SESSION_WAIT_TIMEOUT_MS);
            try {
                mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
                Log.d(TAG, "setting repeating request");
                mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(), mCaptureCallback, mBackgroundHandler);
            } catch (CameraAccessException e) {
                e.printStackTrace();
            }
        } catch (TimeoutRuntimeException e) {
            showToast("Failed to configure capture session");
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
Then, in the OnImageAvailableListener, I call
JNIUtils.drawCircle(image, mSurface, mFileName, mTakePicture, isRecording);
which calls this native function:
extern "C"
JNIEXPORT void JNICALL
Java_com_erikbylow_mycamera3_JNIUtils_drawCircle(
JNIEnv *env, jobject type, jint srcWidth, jint srcHeight, jobject srcBuf, jobject dstSurface,
jstring path_, jint saveFile, jboolean isRecording) {
uint8_t *srcLumaPtr = reinterpret_cast<uint8_t *>(env->GetDirectBufferAddress(srcBuf));
if (srcLumaPtr == nullptr){
LOGE("blit NULL pointer Error");
}
int dstWidth;
int dstHeight;
cv::Mat mYuv(srcHeight + srcHeight / 2, srcWidth, CV_8UC1, srcLumaPtr);
uint8_t*srcChromaUVInterleavedptr = nullptr;
bool swapDstUV;
ANativeWindow *win = ANativeWindow_fromSurface(env, dstSurface);
ANativeWindow_Buffer buf;
dstWidth = srcHeight;
dstHeight = srcWidth;
ANativeWindow_setBuffersGeometry(win, dstWidth, dstHeight, 0);
if (int32_t err = ANativeWindow_lock(win, &buf, NULL)){
LOGE("ANativeWindow_lock failed with error code %d\n", err);
ANativeWindow_release(win);
}
uint8_t *dstLumaPtr = reinterpret_cast<uint8_t*>(buf.bits);
cv::Mat dstRgba(dstHeight, buf.stride, CV_8UC4, dstLumaPtr);
cv::Mat srcRgba(srcHeight, srcWidth, CV_8UC4);
cv::Mat flipRgba(dstHeight, dstWidth, CV_8UC4);
// Convert YUV to RGBA
cv::cvtColor(mYuv, srcRgba, CV_YUV2RGBA_NV21);
// Rotate 90 degree
// cv::transpose(srcRgba, flipRgba);
cv::flip(srcRgba, flipRgba, 1);
cv::flip(flipRgba, flipRgba, 0);
// flipRgba = srcRgba;
// LaneDetect(flipRgba, str, savefile, outStr);
// Copy to TextureView surface
uchar *dbuf;
uchar *sbuf;
dbuf = dstRgba.data;
sbuf = flipRgba.data;
for (int i = 0; i < flipRgba.rows; i++){
dbuf = dstRgba.data + i * buf.stride*4;
memcpy(dbuf, sbuf, buf.stride*4); // Is this one row at a time?
sbuf += flipRgba.cols * 4;
}
cv::Point p1(100,100);
int radius = 10;
cv::circle(flipRgba, p1, radius, cv::Scalar(255,255,255), CV_FILLED);
cv::circle(dstRgba, p1, radius, cv::Scalar(255,255,255), CV_FILLED);
if (isRecording) {
LOGE("RECORDING");
// mWriter.write(flipRgba);
}
ANativeWindow_unlockAndPost(win);
ANativeWindow_release(win);
}
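For completeness, the listener that drives this call looks roughly like the sketch below (not my exact code; the JNIUtils.drawCircle overload that unpacks the Image into the width, height, and buffer arguments of the native function is assumed):

private final ImageReader.OnImageAvailableListener mOnImageAvailableListener =
        new ImageReader.OnImageAvailableListener() {
            @Override
            public void onImageAvailable(ImageReader reader) {
                Image image = reader.acquireLatestImage();
                if (image == null) {
                    return;
                }
                try {
                    // The wrapper extracts width, height and the direct Y-plane
                    // buffer from the Image and forwards them to the native code.
                    JNIUtils.drawCircle(image, mSurface, mFileName, mTakePicture, isRecording);
                } finally {
                    image.close(); // release the Image so the reader does not stall
                }
            }
        };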