My goal is to capture my face with my webcam and detect my mood from my facial expression. The output should be probabilities, e.g. 70% happy, 10% sad, ...
My approach: faces that are very happy, very sad (and in other states) should be stored as Haar cascades.
My question is twofold:
1. Should the cascade be loaded from an OpenCV XML file, e.g.

String path = @"C:\Users\haarcascade-frontalface_alt2.xml";
HaarCascade cascade1 = HaarCascade.FromXml(path);

or should it be built in code with HaarCascade m = new HaarCascade(20, 20, HaarCascadeStages)? What would HaarCascadeStages be?
2. Has anyone already solved this problem and can share C# source code?
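For what it is worth, here is a minimal sketch of the first option, reusing the FromXml call from above. It assumes Accord.NET's HaarObjectDetector as the consumer of the cascade; the file path is only an example.

// Minimal sketch (assumption): load a stock OpenCV cascade from an XML file
// and hand it to Accord.NET's HaarObjectDetector.
using Accord.Vision.Detection;

static class CascadeLoadingSketch
{
    public static HaarObjectDetector CreateDetector()
    {
        // Example path only - point this at your own cascade file.
        string path = @"C:\Users\haarcascade-frontalface_alt2.xml";
        HaarCascade cascade = HaarCascade.FromXml(path);

        // 30 = minimum object size in pixels, as in the form code below.
        return new HaarObjectDetector(cascade, 30);
    }
}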
My current code:
using System;
using System.Drawing;
using System.Windows.Forms;
using AForge.Video;
using AForge.Video.DirectShow;
using Accord.Vision.Detection;
using Accord.Vision.Detection.Cascades;
using Accord.Imaging.Filters; // RectanglesMarker (used in the commented-out detection code)

public partial class Form1 : Form
{
private bool DeviceExist = false;
private FilterInfoCollection videoDevices;
private VideoCaptureDevice videoSource = null;
Bitmap picture;
HaarObjectDetector detector;
FaceHaarCascade cascade;
public Form1()
{
InitializeComponent();
}
// get the capture device names and fill the combo box
private void getCamList()
{
try
{
videoDevices = new FilterInfoCollection(FilterCategory.VideoInputDevice);
comboBox1.Items.Clear();
if (videoDevices.Count == 0)
throw new ApplicationException();
DeviceExist = true;
foreach (FilterInfo device in videoDevices)
{
comboBox1.Items.Add(device.Name);
}
comboBox1.SelectedIndex = 0; //default to the first camera
}
catch (ApplicationException)
{
DeviceExist = false;
comboBox1.Items.Add("No capture device on your system");
}
}
//unused designer stub - the start/stop toggle logic is in start_Click_1 below
private void start_Click(object sender, EventArgs e)
{
}
//event handler invoked when a new frame is ready
private void video_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
picture = (Bitmap)eventArgs.Frame.Clone();
//Rectangle[] objects = detector.ProcessFrame(picture);
//if (objects.Length > 0)
//{
// RectanglesMarker marker = new RectanglesMarker(objects, Color.Fuchsia);
// pictureBox1.Image = marker.Apply(picture);
//}
pictureBox1.Image = picture;
}
//close the device safely
private void CloseVideoSource()
{
if (videoSource != null)
{
if (videoSource.IsRunning)
videoSource.SignalToStop();
videoSource = null;
}
}
//show the number of frames received during the last one-second tick (≈ FPS)
private void timer1_Tick(object sender, EventArgs e)
{
label2.Text = "Device running... " + videoSource.FramesReceived.ToString() + " FPS";
}
//stop the device cleanly if the form is closed while it is running
private void Form1_FormClosed(object sender, FormClosedEventArgs e)
{
CloseVideoSource();
}
private void Form1_Load(object sender, EventArgs e)
{
getCamList();
// Accord.NET face detector: built-in frontal face cascade,
// minimum detection window of 30 pixels
cascade = new FaceHaarCascade();
detector = new HaarObjectDetector(cascade, 30);
detector.SearchMode = ObjectDetectorSearchMode.Average;
detector.ScalingFactor = 1.5f;
detector.ScalingMode = ObjectDetectorScalingMode.GreaterToSmaller;
detector.UseParallelProcessing = false;
detector.Suppression = 2;
}
//refresh the camera list (assumption: the refresh button re-enumerates devices)
private void rfsh_Click_1(object sender, EventArgs e)
{
getCamList();
}
private void start_Click_1(object sender, EventArgs e)
{
if (start.Text == "Start")
{
if (DeviceExist)
{
CloseVideoSource(); //release any previously opened device first
videoSource = new VideoCaptureDevice(videoDevices[comboBox1.SelectedIndex].MonikerString);
videoSource.NewFrame += new NewFrameEventHandler(video_NewFrame);
videoSource.DesiredFrameSize = new Size(640, 480);
//videoSource.DesiredFrameRate = 10;
videoSource.Start();
label2.Text = "Device running...";
start.Text = "Stop";
timer1.Enabled = true;
}
else
{
label2.Text = "Error: No Device selected.";
}
}
else
{
if (videoSource.IsRunning)
{
timer1.Enabled = false;
CloseVideoSource();
label2.Text = "Device stopped.";
start.Text = "Start";
}
}
}
}
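To frame the probability part of the question: below is a minimal, untested sketch of how several mood-specific cascades could be combined into rough percentages. Everything in it is an assumption on top of the code above: the happy.xml / sad.xml cascades are hypothetical files you would have to train yourself, and normalizing per-cascade hit counts is only a crude stand-in for real probabilities from a proper classifier, since a Haar cascade only answers "detected or not".

// Sketch only (assumption): one cascade per mood, loaded from hypothetical
// XML files; the "probability" is just a normalized per-cascade hit count
// over the frames processed so far, not a statistically sound estimate.
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Linq;
using Accord.Vision.Detection;

class MoodEstimatorSketch
{
    private readonly Dictionary<string, HaarObjectDetector> detectors;
    private readonly Dictionary<string, int> hits = new Dictionary<string, int>();

    public MoodEstimatorSketch()
    {
        // Hypothetical cascade files - these would have to be trained separately.
        detectors = new Dictionary<string, HaarObjectDetector>
        {
            { "happy", new HaarObjectDetector(HaarCascade.FromXml(@"happy.xml"), 30) },
            { "sad",   new HaarObjectDetector(HaarCascade.FromXml(@"sad.xml"),   30) }
        };
        foreach (string mood in detectors.Keys)
            hits[mood] = 0;
    }

    // Call this from video_NewFrame with the current frame.
    public void ProcessFrame(Bitmap frame)
    {
        foreach (var pair in detectors)
            if (pair.Value.ProcessFrame(frame).Length > 0)
                hits[pair.Key]++;
    }

    // Returns percentages in the spirit of "70% happy, 10% sad, ...".
    public Dictionary<string, double> Probabilities()
    {
        double total = Math.Max(1, hits.Values.Sum());
        return hits.ToDictionary(p => p.Key, p => 100.0 * p.Value / total);
    }
}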