Here is my full code, and here is my project. When I call MediaRecorder #stop in #onCreate, it raises:
java.lang.RuntimeException: stop failed.
at android.media.MediaRecorder.stop(Native Method)
at ro.rotry.TestMr.onCreate(TestMr.java:39)
at android.app.Activity.performCreate(Activity.java:6237)
at android.app.Instrumentation.callActivityOnCreate(Instrumentation.java:1107)
at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2369)
at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:2476)
at android.app.ActivityThread.-wrap11(ActivityThread.java)
at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1344)
at android.os.Handler.dispatchMessage(Handler.java:102)
at android.os.Looper.loop(Looper.java:148)
at android.app.ActivityThread.main(ActivityThread.java:5417)
at java.lang.reflect.Method.invoke(Native Method)
at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:726)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:616)
How can I fix this?
Answer 0 (score: 0)
Instead of:
mr.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mr.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
mr.setVideoSize(1920, 1080);
mr.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
try using a camcorder profile:
try {
    // Let MediaRecorder take control of the camera
    camera.unlock();
    recorder.setPreviewDisplay(holder.getSurface());
    recorder.setCamera(camera);
    recorder.setOrientationHint(cameraRotationDegree);
    recorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
    recorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
    // Apply a complete, device-supported configuration in one call
    recorder.setProfile(CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH));
    recorder.setVideoEncodingBitRate(5000000);
    recorder.setOutputFile(getFilesDir() + "/try.mp4");
    recorder.prepare();
    Thread.sleep(1000);
    recorder.start();
} catch (IOException | InterruptedException e) {
    // Thread.sleep() throws InterruptedException, so it must be caught here as well
    e.printStackTrace();
}
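Since the original problem is a failing stop(), it may also help to guard the teardown; stop() throws a RuntimeException when no valid frames were written. A minimal teardown sketch, assuming the same recorder and camera objects and the output path from the snippet above:

try {
    recorder.stop();                 // throws RuntimeException if nothing valid was recorded
} catch (RuntimeException e) {
    // stop() failed: no usable data was written, so discard the empty output file
    new java.io.File(getFilesDir() + "/try.mp4").delete();
} finally {
    recorder.reset();                // return the recorder to its idle state
    recorder.release();
    camera.lock();                   // take the camera back from MediaRecorder
}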
Answer 1 (score: 0)
The cause was that I was missing #createVirtualDisplay before #start. Here is my complete activity:
package ro.rotry;
import android.content.Context;
import android.content.Intent;
import android.hardware.display.DisplayManager;
import android.hardware.display.VirtualDisplay;
import android.media.MediaRecorder;
import android.media.projection.MediaProjection;
import android.media.projection.MediaProjectionManager;
import android.os.Bundle;
import android.os.Environment;
import android.support.v7.app.AppCompatActivity;
import android.util.DisplayMetrics;
import java.io.IOException;
public class TestMr extends AppCompatActivity {
    MediaRecorder mr;
    MediaProjection mp;
    int dw = 720;
    int dh = 1280;
    MediaProjectionManager mpm;
    DisplayMetrics metrics;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_test_mr);
        mr = new MediaRecorder();
        initRecorder();
        mpm = (MediaProjectionManager) getSystemService(Context.MEDIA_PROJECTION_SERVICE);
        // Ask the user for permission to capture the screen
        startActivityForResult(mpm.createScreenCaptureIntent(), 1000);
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (requestCode != 1000 || resultCode != RESULT_OK) {
            return; // screen-capture permission was not granted
        }
        metrics = new DisplayMetrics();
        getWindowManager().getDefaultDisplay().getMetrics(metrics);
        mp = mpm.getMediaProjection(resultCode, data);
        // The missing piece: route the projection into the recorder's surface
        // before start(), otherwise stop() fails because no frames were captured.
        mp.createVirtualDisplay(getClass().getName(), dw, dh, metrics.densityDpi,
                DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR, mr.getSurface(),
                new Callback(), null);
        mr.start();
        try {
            // Record for three seconds (blocks the main thread; acceptable only for this test)
            Thread.sleep(3000);
        } catch (InterruptedException e) {
            throw new RuntimeException(e);
        }
        mr.stop();
    }

    class Callback extends VirtualDisplay.Callback {
    }

    private void initRecorder() {
        try {
            mr.setVideoSource(MediaRecorder.VideoSource.SURFACE);
            // Note: the container is 3GPP even though the file is named .mp4
            mr.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
            mr.setOutputFile(Environment
                    .getExternalStoragePublicDirectory(Environment.DIRECTORY_DOWNLOADS)
                    + "/video.mp4");
            mr.setVideoSize(dw, dh);
            mr.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
            mr.setVideoEncodingBitRate(12 * 1000 * 1000);
            mr.setVideoFrameRate(60);
            mr.prepare();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
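For completeness, the recorder and the projection should also be released once recording is finished. A minimal teardown sketch, assuming the VirtualDisplay returned by createVirtualDisplay is stored in a field named vd (it is not kept in the code above):

// Hypothetical helper; 'vd' is an assumed VirtualDisplay field, not part of the activity above.
private void stopRecording() {
    mr.stop();          // throws RuntimeException if no valid frames were captured
    mr.reset();
    mr.release();
    if (vd != null) {
        vd.release();   // free the virtual display
    }
    if (mp != null) {
        mp.stop();      // end the screen-capture session
    }
}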