I want to build a simple video streaming setup. The pieces involved are an Android app and a simple media player written in C#. Most of the sample code I have reviewed POSTs the data to an IP server. Also, my application runs over a GSM network, so the media has to go out as data packets while it is being transmitted.
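For context, the POST-based samples I have seen do roughly the following on the Android side. This is only a rough sketch of the idea, not my working code; the endpoint URL is a placeholder, not something my server actually exposes yet.

// Rough sketch of the POST approach seen in samples (placeholder endpoint URL).
import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;

public class ClipUploader {
    public static int postClip(String filePath, String endpoint) throws Exception {
        HttpURLConnection conn = (HttpURLConnection) new URL(endpoint).openConnection();
        conn.setDoOutput(true);
        conn.setRequestMethod("POST");
        conn.setRequestProperty("Content-Type", "video/mp4");
        conn.setChunkedStreamingMode(8192); // avoid buffering the whole file in memory

        InputStream in = new BufferedInputStream(new FileInputStream(filePath));
        OutputStream out = conn.getOutputStream();
        byte[] buffer = new byte[8192];
        int read;
        while ((read = in.read(buffer)) != -1) {
            out.write(buffer, 0, read); // stream the file body to the server
        }
        out.flush();
        out.close();
        in.close();

        int status = conn.getResponseCode(); // e.g. 200 if the server accepted the upload
        conn.disconnect();
        return status;
    }
}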
My first step was to have an Android app that can record video. That step is done. Here is my code.
import java.io.File;
import java.io.IOException;
import java.net.Socket;

import android.app.Activity;
import android.hardware.Camera;
import android.media.MediaRecorder;
import android.media.MediaRecorder.OnErrorListener;
import android.media.MediaRecorder.OnInfoListener;
import android.net.LocalSocket;
import android.os.Bundle;
import android.os.Environment;
import android.os.SystemClock;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.View;
import android.widget.Button;
import android.widget.Chronometer;
import android.widget.MediaController;
import android.widget.TextView;
import android.widget.Toast;
import android.widget.VideoView;

public class Videotest1Activity extends Activity implements
SurfaceHolder.Callback, OnInfoListener, OnErrorListener{
Camera camera;
VideoView videoView;
SurfaceHolder holder;
TextView msg;
Button initBtn, startBtn, stopBtn, playBtn, stprevBtn;
MediaRecorder recorder;
String outputFileName;
static final String TAG = "RecordVideo";
int maxDuration = 7000;//7sec
int frameRate = 1;//15
String serverIP = "172.19.117.12";
int serverPort = 2000;
Socket socket;
int mCount;
//TimerThread mTimer;
Chronometer chronometer;
LocalSocket receiver,sender;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_videotest1);
videoView = (VideoView) findViewById(R.id.videoView1);
initBtn = (Button) findViewById(R.id.initialize);
startBtn = (Button) findViewById(R.id.button1);
stopBtn = (Button) findViewById(R.id.button3);
msg = (TextView) findViewById(R.id.textView1);
playBtn = (Button) findViewById(R.id.reviewBtn);
stprevBtn = (Button) findViewById(R.id.stprevBtn);
chronometer = (Chronometer) findViewById(R.id.chronometer1);
/*
mTimer= new TimerThread();
mTimer.setOnAlarmListener(mSTimer_OnAlarm);
mTimer.setPeriod(100);
*/
}
public void buttonTapped(View view){
switch(view.getId()){
case R.id.initialize:
initRecorder();
break;
case R.id.button1:
beginRecording();
break;
case R.id.button3:
stopRecording();
break;
case R.id.reviewBtn:
playRecording();
break;
case R.id.stprevBtn:
stopPlayback();
break;
}
}
@Override
public void onError(MediaRecorder mr, int what, int extra) {
Log.e(TAG, "Record error");
stopRecording();
Toast.makeText(this, "Recording limit reached", 2500).show();
}
@Override
public void onInfo(MediaRecorder mr, int what, int extra) {
Log.i(TAG, "recording event");
if(what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_DURATION_REACHED){
Log.i(TAG, "...max duration reached");
stopRecording();
Toast.makeText(this, "Recording limit info", 2500).show();
}
}
@Override
public void surfaceChanged(SurfaceHolder arg0, int arg1, int arg2, int arg3) {
}
@Override
public void surfaceCreated(SurfaceHolder arg0) {
Log.v(TAG, "in surfaceCreated");
try{
camera.setPreviewDisplay(holder);
camera.startPreview();
}catch(IOException e){
Log.v(TAG, "Could not start the preview");
e.printStackTrace();
}
initBtn.setEnabled(true);
startBtn.setEnabled(true);
stopBtn.setEnabled(true);
}
@Override
public void surfaceDestroyed(SurfaceHolder arg0) {
}
protected void onResume(){
Log.v(TAG, "in onResume");
super.onResume();
initBtn.setEnabled(false);
startBtn.setEnabled(false);
stopBtn.setEnabled(false);
playBtn.setEnabled(false);
stprevBtn.setEnabled(false);
if(!initCamera())
finish();
}
public boolean initCamera(){
try{
camera = Camera.open();
Camera.Parameters camParam = camera.getParameters();
camera.lock();
holder = videoView.getHolder();
holder.addCallback(this);
holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
//Thread thread = new Thread(new hantarThread());
//thread.start();
}catch(RuntimeException re){
Log.v(TAG, "Could not initialize the camera");
re.printStackTrace();
return false;
}
return true;
}
public void initRecorder(){
if(recorder != null)return;
outputFileName = Environment.getExternalStorageDirectory() + "/videooutput.mp4";
File outputFile = new File(outputFileName);
if(outputFile.exists())
outputFile.delete(); // remove any previous recording before re-initializing
try{
Toast.makeText(this, "InitRecord", 2500).show();
camera.stopPreview();
camera.unlock();
recorder = new MediaRecorder();
recorder.setCamera(camera);
recorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
recorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
recorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
recorder.setVideoSize(176, 144);
recorder.setVideoFrameRate(15);//15
recorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);//mpeg_4
recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
recorder.setMaxDuration(60000);
recorder.setPreviewDisplay(holder.getSurface());
recorder.setOutputFile(outputFileName);
recorder.prepare();
Log.v(TAG, "MediaRecorder initialized");
initBtn.setEnabled(false);
startBtn.setEnabled(true);
}catch(Exception e){
Log.v(TAG, "MediaRecorder failed");
e.printStackTrace();
}
}
public void beginRecording(){
//mCount = 0;
//mTimer.start();
try{
Log.v(TAG, "start Recording begin");
int stoppedMilliseconds = 0;
String chronoText = chronometer.getText().toString();
String array[] = chronoText.split(":");
if (array.length == 2) {
stoppedMilliseconds = Integer.parseInt(array[0]) * 60 * 1000
+ Integer.parseInt(array[1]) * 1000;
} else if (array.length == 3) {
stoppedMilliseconds = Integer.parseInt(array[0]) * 60 * 60 * 1000
+ Integer.parseInt(array[1]) * 60 * 1000
+ Integer.parseInt(array[2]) * 1000;
}
chronometer.setBase(SystemClock.elapsedRealtime() - stoppedMilliseconds);
//chronometer.setBase(SystemClock.elapsedRealtime());
chronometer.start();
Log.v(TAG, "timer start");
long dTime = chronometer.getDrawingTime();
long autoLink = chronometer.getAutoLinkMask();
Log.v(TAG, "getDrawingTime: " + dTime);
Log.v(TAG, "AutoLink: " + autoLink);
recorder.setOnInfoListener(this);
recorder.setOnErrorListener(this);
recorder.start();
msg.setText("Recording");
startBtn.setEnabled(false);
stopBtn.setEnabled(true);
}catch(Exception e){
Log.v(TAG, "start Recording failed");
e.printStackTrace();
}
}
public void stopRecording(){
if(recorder != null){
recorder.setOnErrorListener(null);
recorder.setOnInfoListener(null);
try{
recorder.stop();
Log.v(TAG, "stop Record Begin");
chronometer.stop();
Log.v(TAG, "Timer stop");
//mTimer.stop();
}catch(IllegalStateException e){
Log.e(TAG, "stop is ILLEGAL");
}
releaseRecorder();
msg.setText("");
releaseCamera();
startBtn.setEnabled(false);
stopBtn.setEnabled(false);
playBtn.setEnabled(true);
}
else{
Log.v(TAG, "video cannot stop. Video null");
long autoLink = chronometer.getAutoLinkMask();
Log.v(TAG, "stop aLink: " + autoLink);
long elapsedMillis = SystemClock.elapsedRealtime() - chronometer.getBase();
Log.v(TAG, "elapsedMillis: " + elapsedMillis);
}
}
private void releaseCamera(){
if(camera != null){
try{
camera.reconnect();
}catch(IOException e){
e.printStackTrace();
}
camera.release();
camera = null;
}
}
private void releaseRecorder(){
if(recorder != null){
recorder.release();
recorder = null;
}
}
private void playRecording(){
MediaController mc = new MediaController(this);
videoView.setMediaController(mc);
videoView.setVideoPath(outputFileName);
videoView.start();
stprevBtn.setEnabled(true);
}
private void stopPlayback(){
videoView.stopPlayback();
}
}
The next step was to get the media player to play videooutput.mp4. To play it, the player first has to load the video and then start playback. That was the second step, and it also works well. Here is my code.
private void button6_Click_1(object sender, EventArgs e)
{
if (openFileDialog1.ShowDialog() == DialogResult.OK)
{
// create video source
FileVideoSource fileSource = new FileVideoSource(openFileDialog1.FileName);
// open it
OpenVideoSource(fileSource);
}
}
// Open video source
private void OpenVideoSource(IVideoSource source)
{
// set busy cursor
this.Cursor = Cursors.WaitCursor;
// stop current video source
CloseCurrentVideoSource();
// start new video source
videoSourcePlayer.VideoSource = source;
videoSourcePlayer.Start();
// reset stop watch
stopWatch = null;
// start timer
videoTimer.Start();
this.Cursor = Cursors.Default;
}
// New frame received by the player
private void videoSourcePlayer_NewFrame(object sender, ref Bitmap image)
{
DateTime now = DateTime.Now;
Graphics g = Graphics.FromImage(image);
// paint current time
SolidBrush brush = new SolidBrush(Color.Red);
g.DrawString(now.ToString(), this.Font, brush, new PointF(5, 5));
brush.Dispose();
g.Dispose();
}
// Close video source if it is running
private void CloseCurrentVideoSource()
{
if (videoSourcePlayer.VideoSource != null)
{
videoSourcePlayer.SignalToStop();
// wait ~ 3 seconds
for (int i = 0; i < 30; i++)
{
if (!videoSourcePlayer.IsRunning)
break;
System.Threading.Thread.Sleep(100);
}
if (videoSourcePlayer.IsRunning)
{
videoSourcePlayer.Stop();
}
videoSourcePlayer.VideoSource = null;
}
}
I am using AForge for the media player in C#. All I did was add references to AForge.Controls, AForge.Video and AForge.Video.DirectShow.
Now, what I need to do is send the media data byte by byte to the media player (C#), and I do not know how to achieve this. I also found sample code for pushing video to a server (an IP camera style Android sample), but I still have no clue how to do the same in my application. Something like the sketch below is what I have in mind so far.
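On the Android side I imagine something roughly like this, reusing the serverIP, serverPort, socket and outputFileName fields already declared in my activity. It is only a sketch that pushes the finished file over a plain TCP socket after stopRecording(), not real live streaming, and it assumes the C# side is listening on that port and knows what to do with the raw MP4 bytes (that part I have not written).

// Sketch only: send the recorded file to the server over TCP, byte by byte.
// Needs java.io.FileInputStream and java.io.OutputStream imports in addition to the ones above.
// This transmits the clip after recording has finished; it is not live streaming.
// Must run off the UI thread, hence the background Thread.
private void sendRecordingOverSocket() {
    new Thread(new Runnable() {
        @Override
        public void run() {
            try {
                socket = new Socket(serverIP, serverPort); // fields declared at the top of the activity
                OutputStream out = socket.getOutputStream();
                FileInputStream in = new FileInputStream(outputFileName);
                byte[] buffer = new byte[4096];
                int read;
                while ((read = in.read(buffer)) != -1) {
                    out.write(buffer, 0, read); // raw MP4 bytes, no extra framing
                }
                out.flush();
                in.close();
                socket.close();
                Log.v(TAG, "clip sent to " + serverIP + ":" + serverPort);
            } catch (IOException e) {
                Log.e(TAG, "sending the clip failed", e);
            }
        }
    }).start();
}

The IP camera style samples I mentioned do it differently: they create a LocalSocket pair and hand sender.getFileDescriptor() to recorder.setOutputFile(), so the recorder writes straight into the socket instead of a file. I think that is where the LocalSocket receiver and sender fields in my activity come from, but I have not managed to get that approach talking to the C# player either.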
Please note that the GSM link will need the number of the SIM card being used as well as the port number of the GSM modem.
Can anyone help me, or maybe give me some ideas on how to modify the code? Any help is appreciated. Thanks in advance.