I'm developing an application that saves video to an .mpeg file on an Android device. I've been working from vanevery's MJPEG project on GitHub with some success (https://github.com/vanevery/Android-MJPEG-Video-Capture-FFMPEG/blob/master/src/com/mobvcasting/mjpegffmpeg/MJPEGFFMPEGTest.java).
Here is my code so far:
public class VideoCapture extends Activity implements OnClickListener, SurfaceHolder.Callback, Camera.PreviewCallback {

    public static final String LOGTAG = "VIDEOCAPTURE";

    // MJPEG multipart boundary pieces written before each JPEG frame
    String szBoundaryStart = "\r\n\r\n--myboundary\r\nContent-Type: image/jpeg\r\nContent-Length: ";
    String szBoundaryDeltaTime = "\r\nDelta-time: 110";
    String szBoundaryEnd = "\r\n\r\n";

    private SurfaceHolder holder;
    private Camera camera;
    private CamcorderProfile camcorderProfile;

    boolean bRecording = false;
    boolean bPreviewRunning = false;

    byte[] previewCallbackBuffer;

    File mjpegFile;
    FileOutputStream fos;
    BufferedOutputStream bos;

    Button btnRecord;
    Camera.Parameters p;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // Create the output .mjpeg file on external storage
        Date T = new Date();
        SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");
        String szFileName = "videocapture-" + sdf.format(T) + "-";
        try {
            mjpegFile = File.createTempFile(szFileName, ".mjpeg", Environment.getExternalStorageDirectory());
        } catch (Exception e) {
            Log.v(LOGTAG, e.getMessage());
            finish();
        }

        requestWindowFeature(Window.FEATURE_NO_TITLE);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                WindowManager.LayoutParams.FLAG_FULLSCREEN);
        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);

        setContentView(R.layout.main);

        btnRecord = (Button) this.findViewById(R.id.RecordButton);
        btnRecord.setOnClickListener(this);

        camcorderProfile = CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH);

        SurfaceView cameraView = (SurfaceView) findViewById(R.id.CameraView);
        holder = cameraView.getHolder();
        holder.addCallback(this);
        holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);

        cameraView.setClickable(true);
        cameraView.setOnClickListener(this);
    }

    // Toggles recording: opens the output streams on start, flushes and closes them on stop
    public void onClick(View v) {
        if (bRecording) {
            bRecording = false;
            try {
                bos.flush();
                bos.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
            Log.v(LOGTAG, "Recording Stopped");
        } else {
            try {
                fos = new FileOutputStream(mjpegFile);
                bos = new BufferedOutputStream(fos);
                bRecording = true;
                Log.v(LOGTAG, "Recording Started");
            } catch (FileNotFoundException e) {
                e.printStackTrace();
            }
        }
    }

    public void surfaceCreated(SurfaceHolder holder) {
        Log.v(LOGTAG, "surfaceCreated");
        camera = Camera.open();
    }

    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
        Log.v(LOGTAG, "surfaceChanged");

        if (!bRecording) {
            if (bPreviewRunning) {
                camera.stopPreview();
            }
            try {
                // Match the preview size and frame rate to the camcorder profile
                p = camera.getParameters();
                p.setPreviewSize(camcorderProfile.videoFrameWidth, camcorderProfile.videoFrameHeight);
                p.setPreviewFrameRate(camcorderProfile.videoFrameRate);
                camera.setParameters(p);

                camera.setPreviewDisplay(holder);
                camera.setPreviewCallback(this);

                Log.v(LOGTAG, "startPreview");
                camera.startPreview();
                bPreviewRunning = true;
            } catch (IOException e) {
                Log.e(LOGTAG, e.getMessage());
                e.printStackTrace();
            }
        }
    }

    public void surfaceDestroyed(SurfaceHolder holder) {
        Log.v(LOGTAG, "surfaceDestroyed");

        if (bRecording) {
            bRecording = false;
            try {
                bos.flush();
                bos.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        bPreviewRunning = false;
        camera.release();
        finish();
    }

    // Compresses each NV21 preview frame to JPEG and writes it to the .mjpeg file,
    // preceded by the multipart boundary header
    public void onPreviewFrame(byte[] b, Camera c) {
        if (bRecording) {
            // Assuming ImageFormat.NV21
            if (p.getPreviewFormat() == ImageFormat.NV21) {
                Log.v(LOGTAG, "Started Writing Frame");
                try {
                    ByteArrayOutputStream jpegByteArrayOutputStream = new ByteArrayOutputStream();
                    YuvImage im = new YuvImage(b, ImageFormat.NV21, p.getPreviewSize().width, p.getPreviewSize().height, null);
                    Rect r = new Rect(0, 0, p.getPreviewSize().width, p.getPreviewSize().height);
                    im.compressToJpeg(r, 5, jpegByteArrayOutputStream);

                    byte[] jpegByteArray = jpegByteArrayOutputStream.toByteArray();
                    byte[] boundaryBytes = (szBoundaryStart + jpegByteArray.length + szBoundaryDeltaTime + szBoundaryEnd).getBytes();

                    bos.write(boundaryBytes);
                    bos.write(jpegByteArray);
                    bos.flush();
                } catch (IOException e) {
                    e.printStackTrace();
                }
                Log.v(LOGTAG, "Finished Writing Frame");
            } else {
                Log.v(LOGTAG, "NOT THE RIGHT FORMAT");
            }
        }
    }

    @Override
    public void onConfigurationChanged(Configuration conf) {
        super.onConfigurationChanged(conf);
    }
}
I suspect the problem may be in the JPEG formatting (parsing) inside onPreviewFrame(). Any help or suggestions would be greatly appreciated. Thanks in advance.
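In case it helps narrow things down, one check I've been considering is dumping a single compressed frame to its own .jpg file and seeing whether it opens in a normal image viewer; if that one frame is broken, the problem is the JPEG compression itself rather than the MJPEG boundary framing. This is only a rough sketch, and dumpSingleFrame is just an illustrative name, not part of the code above:

    // Hypothetical helper, not in the project above: write one preview frame to a
    // standalone .jpg so it can be inspected on its own.
    private void dumpSingleFrame(byte[] nv21, Camera.Parameters params, int quality) {
        int w = params.getPreviewSize().width;
        int h = params.getPreviewSize().height;
        try {
            File out = new File(Environment.getExternalStorageDirectory(), "single-frame.jpg");
            FileOutputStream frameOut = new FileOutputStream(out);
            YuvImage yuv = new YuvImage(nv21, ImageFormat.NV21, w, h, null);
            // compressToJpeg() returns false if the compression fails
            if (!yuv.compressToJpeg(new Rect(0, 0, w, h), quality, frameOut)) {
                Log.e(LOGTAG, "compressToJpeg failed");
            }
            frameOut.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

I would call this once from onPreviewFrame() while recording, passing the current preview byte[], the camera parameters, and the same quality value used for the .mjpeg file.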
Answer 0 (score: 0)
There seems to be a threshold on the JPEG quality required for the MJPEG file to play back. Changing
im.compressToJpeg(r, 5, jpegByteArrayOutputStream);
to
im.compressToJpeg(r, 75, jpegByteArrayOutputStream);
produces a valid MJPEG file.
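For reference, a minimal sketch of how that compression step might look with the quality pulled into a named constant (JPEG_QUALITY is just an illustrative name, not from the code above). compressToJpeg() takes a quality hint from 0 to 100 and returns false if compression fails, which is worth logging:

    // Illustrative constant; quality 75 was enough here to produce a playable MJPEG file
    private static final int JPEG_QUALITY = 75;

    // Inside onPreviewFrame(), using the same fields as in the question
    ByteArrayOutputStream jpegByteArrayOutputStream = new ByteArrayOutputStream();
    YuvImage im = new YuvImage(b, ImageFormat.NV21,
            p.getPreviewSize().width, p.getPreviewSize().height, null);
    Rect r = new Rect(0, 0, p.getPreviewSize().width, p.getPreviewSize().height);
    if (!im.compressToJpeg(r, JPEG_QUALITY, jpegByteArrayOutputStream)) {
        Log.e(LOGTAG, "compressToJpeg failed for this frame");
    }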