I have been trying to set up an MJPEG stream in ASP.NET. I want to retrieve an MJPEG stream from a URL and send every frame to each connected client. The examples I was able to find only read a set file rather than a continuous stream from a URL, and they send the whole file through MultiStreamContent. Since I retrieve the stream frame by frame, I can't do that. I would like to know whether what I want to do is possible with ASP.NET MVC. I am currently using AForge.Video to retrieve the MJPEG stream from the link.
My controller class code:
using System.Net.Http;
using System.Web.Http;
using AForge.Video;
namespace VideoPrototypeMVC.Controllers
{
    public class CameraController : ApiController
    {
        int framecounter = 0;
        MJPEGStream stream = new MJPEGStream();

        [HttpGet]
        public void GetVideoContent()
        {
            stream.Source = @"http://127.0.0.1:5002/stream";
            stream.NewFrame += new NewFrameEventHandler(showFrame);
            stream.Start();

            MultipartContent content = new MultipartContent();
            while (stream.IsRunning)
            {
                // Continuous streaming should happen here?
            }
        }

        // Can be used to detect when a frame is available
        private void showFrame(object sender, NewFrameEventArgs eventArgs)
        {
            framecounter++;
            System.Diagnostics.Debug.WriteLine("New frame event: " + framecounter);
        }

        // Should be called at the end of the stream
        private void stopStream(object sender, ReasonToFinishPlaying reason)
        {
            System.Diagnostics.Debug.WriteLine("Stop stream");
            stream.Stop();
            framecounter = 0;
        }
    }
}
This code is not final; I just need something to move forward with. I have found examples using socket servers, but I would like to stick with MVC because it lets me set up the rest of the server more easily.
Answer 0 (score: 1)
To make sure others can use this as well: I managed to combine what @Evk said (thanks again) with the information I found here: creating my own MJPEG stream.
Note: the code below is only a prototype / proof of concept! When I run it, my CPU shoots to 100% because of the endless loop in StartStream. I will work on making this more event-based, but I think the code below is easier to explain.
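As a rough sketch of the event-based direction mentioned above (an illustration, not part of the original answer): the busy loop can be replaced with a ManualResetEventSlim so the writer thread blocks until a frame arrives instead of spinning. The member names (mjpegStream, frame, WriteFrame, stopStream) mirror the full code further down; everything else is an assumption.

// Sketch only: signal the streaming loop instead of polling for frames.
private readonly ManualResetEventSlim frameReady = new ManualResetEventSlim(false);

private void showFrameEvent(object sender, NewFrameEventArgs eventArgs)
{
    frame = new Bitmap(eventArgs.Frame);
    frameReady.Set(); // wake up the writer loop
}

private void StartStream(Stream stream, HttpContent httpContent, TransportContext transportContext)
{
    while (mjpegStream.IsRunning && HttpContext.Current.Response.IsClientConnected)
    {
        // Block until a new frame is signalled; time out periodically so the
        // connection check above still runs even if the camera stalls.
        if (!frameReady.Wait(TimeSpan.FromSeconds(5)))
            continue;

        frameReady.Reset();
        WriteFrame(stream, frame);
    }
    stopStream();
}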
Answer 1 (score: 0)
Good answer from Arastelion, but I noticed that if you navigate away from the application, there is still a request being processed in the background, which can be a waste of resources. Adding stream.FlushAsync(); stream.Close(); stream.Dispose(); after stopStream seems to solve that problem.
using System;
using System.IO;
using System.Net;
using System.Web;
using System.Net.Http;
using System.Web.Http;
using AForge.Video;
using System.Drawing;
using System.Text;
using System.Drawing.Imaging;
using System.Threading;
namespace VideoPrototypeMVC.Controllers
{
    public class CameraController : ApiController
    {
        private MJPEGStream mjpegStream = new MJPEGStream();
        private bool frameAvailable = false;
        private Bitmap frame = null;
        private string BOUNDARY = "frame";

        /// <summary>
        /// Initializer for the MJPEGStream.
        /// </summary>
        public CameraController()
        {
            mjpegStream.Source = @"{{INSERT STREAM URL}}";
            mjpegStream.NewFrame += new NewFrameEventHandler(showFrameEvent);
        }

        [HttpGet]
        public HttpResponseMessage GetVideoContent()
        {
            mjpegStream.Start();
            var response = Request.CreateResponse();
            response.Content = new PushStreamContent((Action<Stream, HttpContent, TransportContext>)StartStream);
            response.Content.Headers.ContentType = System.Net.Http.Headers.MediaTypeHeaderValue.Parse("multipart/x-mixed-replace; boundary=" + BOUNDARY);
            return response;
        }

        /// <summary>
        /// Create an appropriate multipart header for a frame.
        /// </summary>
        /// <param name="length">Length of the JPEG data in bytes.</param>
        /// <returns>Header bytes.</returns>
        private byte[] CreateHeader(int length)
        {
            string header =
                "--" + BOUNDARY + "\r\n" +
                "Content-Type:image/jpeg\r\n" +
                "Content-Length:" + length + "\r\n\r\n";
            return Encoding.ASCII.GetBytes(header);
        }

        public byte[] CreateFooter()
        {
            return Encoding.ASCII.GetBytes("\r\n");
        }

        /// <summary>
        /// Write the given frame to the stream.
        /// </summary>
        /// <param name="stream">Stream</param>
        /// <param name="frame">Bitmap format frame</param>
        private void WriteFrame(Stream stream, Bitmap frame)
        {
            // prepare image data
            byte[] imageData = null;
            // this is to make sure the memory stream is disposed after use
            using (MemoryStream ms = new MemoryStream())
            {
                frame.Save(ms, ImageFormat.Jpeg);
                imageData = ms.ToArray();
            }
            // prepare header
            byte[] header = CreateHeader(imageData.Length);
            // prepare footer
            byte[] footer = CreateFooter();
            // start writing data
            stream.Write(header, 0, header.Length);
            stream.Write(imageData, 0, imageData.Length);
            stream.Write(footer, 0, footer.Length);
        }

        /// <summary>
        /// While the MJPEGStream is running and the client is connected,
        /// continue sending frames.
        /// </summary>
        /// <param name="stream">Stream to write to.</param>
        /// <param name="httpContent">The content information.</param>
        /// <param name="transportContext"></param>
        private void StartStream(Stream stream, HttpContent httpContent, TransportContext transportContext)
        {
            while (mjpegStream.IsRunning && HttpContext.Current.Response.IsClientConnected)
            {
                if (frameAvailable)
                {
                    try
                    {
                        WriteFrame(stream, frame);
                        frameAvailable = false;
                    }
                    catch (Exception e)
                    {
                        System.Diagnostics.Debug.WriteLine(e);
                    }
                }
                else
                {
                    Thread.Sleep(30);
                }
            }
            stopStream();
            stream.FlushAsync();
            stream.Close();
            stream.Dispose();
        }

        /// <summary>
        /// This event is raised when a new frame is received by the MJPEGStream.
        /// </summary>
        /// <param name="sender">Object that is sending the event</param>
        /// <param name="eventArgs">Data from the event, including the frame</param>
        private void showFrameEvent(object sender, NewFrameEventArgs eventArgs)
        {
            frame = new Bitmap(eventArgs.Frame);
            frameAvailable = true;
        }

        /// <summary>
        /// Stop the stream.
        /// </summary>
        private void stopStream()
        {
            System.Diagnostics.Debug.WriteLine("Stop stream");
            mjpegStream.Stop();
        }

        protected override void Dispose(bool disposing)
        {
            if (disposing)
            {
                stopStream();
            }
            base.Dispose(disposing);
        }
    }
}
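As a quick way to sanity-check the endpoint (not part of either answer), a small console client can open the multipart/x-mixed-replace response and confirm that bytes keep arriving. The URL below assumes the default Web API route api/{controller}; adjust host, port and route to match your project.

// Hypothetical test client: opens the stream and reports received bytes.
using System;
using System.Net.Http;
using System.Threading.Tasks;

class MjpegStreamCheck
{
    static async Task Main()
    {
        using (var client = new HttpClient())
        using (var response = await client.GetAsync(
            "http://localhost:5000/api/camera",          // assumed route
            HttpCompletionOption.ResponseHeadersRead))    // do not buffer the endless body
        using (var body = await response.Content.ReadAsStreamAsync())
        {
            var buffer = new byte[8192];
            long total = 0;
            int read;
            while ((read = await body.ReadAsync(buffer, 0, buffer.Length)) > 0)
            {
                total += read;
                Console.WriteLine("Received " + total + " bytes so far");
            }
        }
    }
}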