我有一个程序使用AForge库来保持内存缓冲区中来自网络摄像头的60秒图像的运行流。在检测到移动时,它将接下来的30秒记录到同一缓冲区中,最后覆盖前30秒。实际上,在任何尚未录制的动作的任何一侧,您都可以播放30秒的视频,总共可以播放60秒的视频。
问题是,即使经过压缩,60秒的AForge位图图像在RAM中仍占用大约3GB。而最终生成的avi文件却只有大约3MB。这是一个巨大的差异!
谁能看到我可能出错的地方?按照这个速度,每次将视频直接录制到磁盘一小时更有利,并为任何事件手动循环播放它们!
该系统由以下三个部分组成:
CameraController.cs - 为每个连接的网络摄像头排序初始化。我留下了注释掉的组件,以便了解以前使用过的设置。
public class CameraController : ServiceBase
{
    private FilterInfoCollection _VideoCaptureDevices;
    private MotionDetector _MotionDetector;
    // Streams keyed by the device moniker string; devices keyed by enumeration order.
    private Dictionary<string, Recording> _Streams = new Dictionary<string, Recording>();
    private Dictionary<int, VideoCaptureDevice> _Devices = new Dictionary<int, VideoCaptureDevice>();
    private int _Framerate;
    private int _MaxVideoLength;        // seconds of footage kept per motion event
    private float _MotionSensitivity;   // motion level (0..1) above which recording starts

    /// <summary>
    /// Service Control Manager entry point. The original code declared
    /// <c>public virtual void OnStart()</c>, which HIDES
    /// <see cref="ServiceBase"/>'s <c>protected override void OnStart(string[])</c>
    /// instead of overriding it, so the SCM never invoked it when the service
    /// started. This override is the one the SCM actually calls.
    /// </summary>
    protected override void OnStart(string[] args)
    {
        Start(60, 0.4f);
    }

    // Kept for backward compatibility with any existing direct callers of the
    // old (never-SCM-invoked) entry point.
    public virtual void OnStart()
    {
        Start(60, 0.4f);
    }

    /// <summary>
    /// Configures limits and initializes all attached webcams.
    /// </summary>
    /// <param name="maxVideoLength">Maximum recording length per event, in seconds.</param>
    /// <param name="motionSensitivity">Motion threshold above which recording begins.</param>
    public void Start(int maxVideoLength, float motionSensitivity)
    {
        _MaxVideoLength = maxVideoLength;
        _MotionSensitivity = motionSensitivity;
        Init();
    }

    /// <summary>
    /// Enumerates every attached video input device, creates a Recording
    /// (ring buffer) for it, hooks the frame callback and starts capture.
    /// </summary>
    public void Init()
    {
        try
        {
            _MotionDetector = GetDefaultMotionDetector();
            _VideoCaptureDevices = new FilterInfoCollection(FilterCategory.VideoInputDevice);
            int counter = 0;
            foreach (FilterInfo device in _VideoCaptureDevices)
            {
                var videoDevice = new VideoCaptureDevice(device.MonikerString);
                // Previously derived from the device capabilities; pinned to 15 fps
                // because AverageFrameRate was unreliable (0) on some devices:
                //_Framerate = videoDevice.VideoCapabilities[0].AverageFrameRate == 0
                //    ? 25
                //    : videoDevice.VideoCapabilities[0].AverageFrameRate;
                _Framerate = 15;
                _Streams.Add(videoDevice.Source, new Recording(counter, device.Name, videoDevice.Source, _MaxVideoLength, _Framerate));
                videoDevice.NewFrame += new NewFrameEventHandler(NewFrame);
                videoDevice.Start();
                _Devices.Add(counter++, videoDevice);
            }
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.Message);
        }
    }

    /// <summary>
    /// Per-frame callback (hot path, ~15x/sec per camera). Buffers the frame,
    /// then either advances an active recording or runs motion detection.
    /// </summary>
    public void NewFrame(object sender, NewFrameEventArgs eventArgs)
    {
        try
        {
            var device = (VideoCaptureDevice) sender;
            // Look the stream up ONCE; the original performed up to five
            // dictionary lookups per frame on the same key.
            Recording stream;
            if (!_Streams.TryGetValue(device.Source, out stream))
                return;
            stream.AddBitmap((Bitmap) eventArgs.Frame.Clone());
            if (stream.IsRecording)
            {
                stream.CheckRecording();
                if (stream.SaveRequired)
                    stream.WriteToFile();
            }
            else
            {
                var motion = _MotionDetector.ProcessFrame(stream.Bitmap);
                if (motion > _MotionSensitivity)
                    stream.StartRecording();
            }
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.Message);
        }
    }

    /// <summary>
    /// Closes any in-flight recordings and optionally signals the webcams to stop.
    /// </summary>
    /// <param name="stopWebcams">When true, also stops the capture devices themselves.</param>
    public void StopVideo(bool stopWebcams = false)
    {
        foreach (var device in _Devices)
        {
            var stream = _Streams[device.Value.Source];
            if (stream.IsRecording)
                stream.FileWriter.Close();
            if (device.Value.IsRunning && stopWebcams)
                device.Value.SignalToStop();
        }
    }

    /// <summary>
    /// Builds the motion-detection pipeline: a background-modeling detector
    /// plus blob counting to ignore objects smaller than 10x10 pixels.
    /// Commented-out alternatives retained to show previously tried settings.
    /// </summary>
    public static AForge.Vision.Motion.MotionDetector GetDefaultMotionDetector()
    {
        AForge.Vision.Motion.IMotionDetector detector = null;
        AForge.Vision.Motion.IMotionProcessing processor = null;
        AForge.Vision.Motion.MotionDetector motionDetector = null;
        //detector = new AForge.Vision.Motion.TwoFramesDifferenceDetector()
        //{
        //    DifferenceThreshold = 15,
        //    SuppressNoise = true
        //};
        //detector = new AForge.Vision.Motion.CustomFrameDifferenceDetector()
        //{
        //    DifferenceThreshold = 15,
        //    KeepObjectsEdges = true,
        //    SuppressNoise = true
        //};
        detector = new AForge.Vision.Motion.SimpleBackgroundModelingDetector()
        {
            DifferenceThreshold = 10,
            FramesPerBackgroundUpdate = 10,
            KeepObjectsEdges = true,
            MillisecondsPerBackgroundUpdate = 10,
            SuppressNoise = true
        };
        //processor = new AForge.Vision.Motion.GridMotionAreaProcessing()
        //{
        //    HighlightColor = System.Drawing.Color.Red,
        //    HighlightMotionGrid = true,
        //    GridWidth = 100,
        //    GridHeight = 100,
        //    MotionAmountToHighlight = 100F
        //};
        processor = new AForge.Vision.Motion.BlobCountingObjectsProcessing()
        {
            //HighlightColor = System.Drawing.Color.Red,
            //HighlightMotionRegions = true,
            MinObjectsHeight = 10,
            MinObjectsWidth = 10
        };
        motionDetector = new AForge.Vision.Motion.MotionDetector(detector, processor);
        return motionDetector;
    }
}
然后是Recording.cs - 控制何时停止/开始/写录音
/// <summary>
/// Controls when a single camera's recording starts, stops and is written to
/// disk, and owns that camera's in-memory frame ring buffer.
/// </summary>
public class Recording
{
    public int Id { get; set; }
    public string Name { get; set; }
    public string Source { get; set; }
    // Most recent raw frame; disposed when the next frame replaces it (see AddBitmap).
    public Bitmap Bitmap { get; set; }
    public bool IsRecording { get; set; }
    public bool SaveRequired { get; set; }
    public int TimeLimitSec { get; set; }
    public int FrameRate { get; set; }
    // NOTE(review): the key really is spelled "DesinationFolder" (sic) — it must
    // match App.config, so it cannot be silently corrected here.
    public string DirString = ConfigurationManager.AppSettings["DesinationFolder"].ToString();
    public Stopwatch Timer = new Stopwatch();
    public VideoFileWriter FileWriter = new VideoFileWriter();
    public VideoBuffer VideoBuffer;
    public int BufferPosition { get; set; }

    /// <summary>
    /// Creates the recording state and a ring buffer sized for the configured
    /// time limit and frame rate.
    /// </summary>
    public Recording(int id, string name, string source, int timeLimit, int framerate)
    {
        Id = id;
        Name = name;
        Source = source;
        IsRecording = false;
        SaveRequired = false;
        TimeLimitSec = timeLimit;
        FrameRate = framerate;
        VideoBuffer = new VideoBuffer(timeLimit, framerate);
    }

    public string FileName { get; set; }

    /// <summary>Begins a timed recording window.</summary>
    public void StartRecording()
    {
        IsRecording = true;
        Timer.Restart(); // Restart == Reset + Start
    }

    /// <summary>Ends the recording window and flags the buffer for saving.</summary>
    public void StopRecording()
    {
        IsRecording = false;
        SaveRequired = true;
        // Reset() already stops the stopwatch; the original's extra Stop() was a no-op.
        Timer.Reset();
    }

    /// <summary>
    /// Decompresses every buffered frame and writes them to a new AVI file.
    /// The writer is closed in a finally block so a failure mid-write no longer
    /// leaves the file handle open.
    /// </summary>
    public void WriteToFile()
    {
        try
        {
            if (!Directory.Exists(DirString))
                Directory.CreateDirectory(DirString);
            FileName = Path.Combine(DirString, "Video_" + Id + "_" + Name + "_" + DateTime.Now.ToFileTime() + ".avi");
            FileWriter.Open(FileName, Bitmap.Width, Bitmap.Height, FrameRate, VideoCodec.Default);
            try
            {
                for (int frame = 0; frame < VideoBuffer.BufferPosition; frame++)
                {
                    // Dispose each decompressed Bitmap immediately: Bitmap wraps
                    // unmanaged GDI+ memory, and the original leaked one per frame.
                    using (var decompressed = Compression.Decompress<Bitmap>(VideoBuffer.Buffer[frame]))
                    {
                        FileWriter.WriteVideoFrame(decompressed);
                    }
                }
            }
            finally
            {
                FileWriter.Close();
            }
            SaveRequired = false;
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.Message);
        }
    }

    /// <summary>
    /// Stores the newest frame, pushes it into the compressed ring buffer, and
    /// disposes the frame it replaced.
    /// </summary>
    public void AddBitmap(Bitmap bitmap)
    {
        try
        {
            var previous = this.Bitmap;
            this.Bitmap = bitmap;
            this.VideoBuffer.AddBitmap(bitmap);
            // Fix: the original simply dropped the old reference, leaking a GDI+
            // handle ~15 times per second per camera until the finalizer ran.
            if (previous != null && !ReferenceEquals(previous, bitmap))
                previous.Dispose();
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.Message);
        }
    }

    /// <summary>Stops the recording once the configured time limit has elapsed.</summary>
    public void CheckRecording()
    {
        try
        {
            if (IsRecording && Timer.Elapsed.TotalSeconds > TimeLimitSec)
                StopRecording();
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.Message);
        }
    }

    // Debug helper; currently unused. NOTE(review): hard-coded path — should use
    // the configured destination folder if this is ever enabled.
    private void SaveImage()
    {
        Bitmap.Save(@"D:\Storage\IMG_" + Id + "_" + Name + "_" + DateTime.Now.ToFileTime() + ".jpg");
    }
}
最后VideoBuffer.cs - 控制Bitmaps的运行缓冲区。请注意,位图也已压缩为byte []。
/// <summary>
/// Running buffer of compressed frames (each entry is the byte[] produced by
/// Compression.Compress). Sized at twice the event length: pre-roll before
/// motion plus the recording itself.
/// </summary>
public class VideoBuffer
{
    public int BufferLengthSeconds { get; set; }
    // Compressed frames, oldest first.
    public byte[][] Buffer { get; set; }
    public int BufferPosition { get; set; }
    public int MaxPosition { get; set; }
    public bool Recorded { get; set; }

    /// <summary>
    /// Allocates slots for twice the requested duration (pre-event + event).
    /// </summary>
    public VideoBuffer(int secondsToBuffer, int framerate)
    {
        MaxPosition = secondsToBuffer * framerate * 2; // Have our buffer before an event is started, as well as the length of time for the next
        Buffer = new byte[MaxPosition + 1][]; // +1 spare slot retained from the original shift-down scheme
        BufferPosition = 0;
    }

    /// <summary>
    /// Compresses and appends a frame; once full, shifts the window down one
    /// slot so the newest frames are always retained.
    /// </summary>
    public void AddBitmap(Bitmap bitmap)
    {
        try
        {
            // If we haven't reached the maximum buffer size, keep adding it as normal
            if (BufferPosition < MaxPosition)
            {
                Buffer[BufferPosition++] = Compression.Compress(bitmap);
            }
            else
            {
                // Compress first so a compression failure leaves the buffer untouched
                // (matching the original's failure behavior).
                var compressed = Compression.Compress(bitmap);
                // Fix: the original allocated a brand-new byte[MaxPosition + 1][]
                // EVERY frame and copied the reference array twice. Array.Copy is
                // documented to handle overlapping source/destination, so a single
                // in-place shift is equivalent and allocation-free.
                Array.Copy(Buffer, 1, Buffer, 0, MaxPosition);
                Buffer[MaxPosition - 1] = compressed;
                Buffer[MaxPosition] = null; // same end state as the original (spare slot empty)
                // NOTE(review): this shift is still O(n) per frame; a circular buffer
                // (head index modulo length) would make insertion O(1) with no copying.
            }
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.Message);
        }
    }
}
所以真正的问题是,如何进一步减少缓冲区的内存占用量,但是在任何时候内存中仍保留最后30秒的视频?
本周我有点倦怠,看不出可能会遗漏的东西。欢迎任何建议!
答案 0(得分:1):
一些快速的数学表明高清视频在1920x1080x24位颜色,15fps,60秒,大约5.3 GB。您正在获得一些帧压缩以消耗3GB。
VideoFileWriter(顺便问一句,为什么它不是函数的局部变量?)使用的是默认的AVI视频编解码器,该编解码器还会在帧与帧之间进行压缩。由于大部分帧基本上是静态的,因此可以节省大量空间。
我建议您找一种方法将内存中的视频保存为压缩视频流。