I'm using AForge's VideoCaptureDeviceForm to show a webcam feed in my application. While the webcam is running I can capture every frame. Separately, I have an OpenTK GLControl that loads an image file into an OpenGL texture and uses a fragment shader to turn the dark (black) areas of the image red. Now I want to combine the two: whenever the webcam produces a frame, that frame should be passed to OpenGL and rendered in the GLControl. In short, the GLControl should display the live webcam video with my shader code applied.
How can I achieve this? My code is below, followed by a rough sketch of what I think the per-frame texture update might look like.
using System;
using System.Drawing;
using System.Windows.Forms;
using OpenTK.Graphics;
using OpenTK.Graphics.OpenGL;
using System.Drawing.Imaging;
using AForge.Video.DirectShow;
using AForge.Video;
namespace DemoWebCam1
{
public partial class Form2 : Form
{
Bitmap FrameData;
bool loaded = false;
string file = "content/penguine.png";
int program;
int vertShader;
int fragShader;
int buffer;
int positionLocation;
int texture;
float[] vertices = {
// First triangle (bottom-right half of the quad)
-1f, -1f, 0f,
1f, -1f, 0f,
1f, 1f, 0f,
// Second triangle (top-left half of the quad)
1f, 1f, 0f,
-1f, 1f, 0f,
-1f, -1f, 0f
};
public Form2()
{
InitializeComponent();
}
private void glControl1_Load(object sender, EventArgs e)
{
loaded = true;
Init();
}
private void Form2_Load(object sender, EventArgs e)
{
VideoCaptureDeviceForm form = new VideoCaptureDeviceForm();
if (form.ShowDialog(this) == DialogResult.OK)
{
// create video source
VideoCaptureDevice videoSource = form.VideoDevice;
// glControl1_Resize(sender, e);
// open it
OpenVideoSource(videoSource);
}
}
private void glControl1_Paint(object sender, PaintEventArgs e)
{
if (!loaded)
return;
DrawImage(texture);
}
private void OpenVideoSource(IVideoSource source)
{
// set busy cursor
this.Cursor = Cursors.WaitCursor;
// stop current video source
CloseCurrentVideoSource();
// start new video source
videoSourcePlayer.VideoSource = source;
videoSourcePlayer.Start();
this.Cursor = Cursors.Default;
source.NewFrame += new AForge.Video.NewFrameEventHandler(Video_NewFrame);
}
private void Video_NewFrame(object sender, AForge.Video.NewFrameEventArgs eventArgs)
{
// NewFrame is raised on AForge's capture thread, so copy the frame before using it
FrameData = new Bitmap(eventArgs.Frame);
// Show the frame in the PictureBox
pictureBox1.Image = FrameData;
// need to pass this FrameData to the OpenGL texture somehow
}
private void Init()
{
texture = LoadTexture(file);
CreateShaders();
CreateProgram();
InitBuffers();
}
private void CreateProgram()
{
program = GL.CreateProgram();
GL.AttachShader(program, vertShader);
GL.AttachShader(program, fragShader);
GL.LinkProgram(program);
}
private void CreateShaders()
{
/***********Vert Shader********************/
vertShader = GL.CreateShader(ShaderType.VertexShader);
GL.ShaderSource(vertShader, @"attribute vec3 a_position;
varying vec2 vTexCoord;
void main() {
vTexCoord = (a_position.xy + 1) / 2;
gl_Position = vec4(a_position, 1);
}");
GL.CompileShader(vertShader);
/***********Frag Shader ****************/
fragShader = GL.CreateShader(ShaderType.FragmentShader);
GL.ShaderSource(fragShader, @"precision highp float;
uniform sampler2D sTexture;
varying vec2 vTexCoord;
void main ()
{
vec4 color = texture2D (sTexture, vTexCoord);
if(color.r < 0.3){color.r = 1.0;}
// Save the result
gl_FragColor = color;
}");
GL.CompileShader(fragShader);
}
private void InitBuffers()
{
buffer = GL.GenBuffer();
positionLocation = GL.GetAttribLocation(program, "a_position");
GL.EnableVertexAttribArray(positionLocation);
GL.BindBuffer(BufferTarget.ArrayBuffer, buffer);
GL.BufferData(BufferTarget.ArrayBuffer, (IntPtr)(vertices.Length * sizeof(float)), vertices, BufferUsageHint.StaticDraw);
GL.VertexAttribPointer(positionLocation, 3, VertexAttribPointerType.Float, false, 0, 0);
}
public int LoadTexture(string file)
{
Bitmap bitmap = new Bitmap(file);//instead of file, need to pass FrameData here
int tex;
GL.Hint(HintTarget.PerspectiveCorrectionHint, HintMode.Nicest);
GL.GenTextures(1, out tex);
GL.BindTexture(TextureTarget.Texture2D, tex);
bitmap.RotateFlip(RotateFlipType.RotateNoneFlipY); // flip vertically: GDI+ origin is top-left, OpenGL's is bottom-left
BitmapData data = bitmap.LockBits(new System.Drawing.Rectangle(0, 0, bitmap.Width, bitmap.Height),
ImageLockMode.ReadOnly, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
GL.TexImage2D(TextureTarget.Texture2D, 0, PixelInternalFormat.Rgba, data.Width, data.Height, 0,
OpenTK.Graphics.OpenGL.PixelFormat.Bgra, PixelType.UnsignedByte, data.Scan0);
bitmap.UnlockBits(data);
GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMinFilter, (int)TextureMinFilter.Linear);
GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMagFilter, (int)TextureMagFilter.Linear);
GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureWrapS, (int)TextureWrapMode.ClampToEdge);
GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureWrapT, (int)TextureWrapMode.ClampToEdge);
return tex;
}
public void DrawImage(int image)
{
GL.Viewport(0, 0, glControl1.Width, glControl1.Height); // render into the control's full client area
GL.MatrixMode(MatrixMode.Projection);
GL.PushMatrix();
GL.LoadIdentity();
//GL.Ortho(0, 1920, 0, 1080, 0, 1);
GL.MatrixMode(MatrixMode.Modelview);
GL.PushMatrix();
GL.LoadIdentity();
GL.Disable(EnableCap.Lighting);
GL.Enable(EnableCap.Texture2D);
GL.ActiveTexture(TextureUnit.Texture0);
GL.BindTexture(TextureTarget.Texture2D, image);
RunShaders();
GL.Disable(EnableCap.Texture2D);
GL.PopMatrix();
GL.MatrixMode(MatrixMode.Projection);
GL.PopMatrix();
GL.MatrixMode(MatrixMode.Modelview);
ErrorCode ec = GL.GetError();
if (ec != ErrorCode.NoError)
System.Console.WriteLine(ec.ToString());
glControl1.SwapBuffers();
}
private void RunShaders()
{
GL.ClearColor(Color.Yellow);
GL.UseProgram(program);
GL.DrawArrays(PrimitiveType.Triangles, 0, vertices.Length / 3);
ErrorCode ec = GL.GetError();
if (ec != ErrorCode.NoError)
System.Console.WriteLine(ec.ToString());
}
private void Form2_FormClosed(object sender, FormClosedEventArgs e)
{
CloseCurrentVideoSource();
}
}
}
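This is the direction I was thinking of for the missing piece, as an untested sketch rather than working code: replace Video_NewFrame so it hands each frame over to the UI thread, and add a helper (I've called it UploadFrame, the name is just for illustration) that uploads the frame into the texture created by LoadTexture and repaints the GLControl.

// Runs on AForge's capture thread: clone the frame and hand it to the UI thread.
private void Video_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    Bitmap frame = (Bitmap)eventArgs.Frame.Clone(); // AForge reuses the event bitmap, so keep a copy
    BeginInvoke((Action)(() =>
    {
        // (the pictureBox1 preview is left out here; it would need its own copy,
        // because UploadFrame flips this bitmap)
        using (frame)
            UploadFrame(frame);
    }));
}

// "UploadFrame" is a name I made up: upload one frame into the texture
// created by LoadTexture and ask the GLControl to repaint.
private void UploadFrame(Bitmap frame)
{
    glControl1.MakeCurrent(); // make sure the control's GL context is current on this thread

    frame.RotateFlip(RotateFlipType.RotateNoneFlipY); // GDI+ origin is top-left, OpenGL's is bottom-left

    BitmapData data = frame.LockBits(
        new System.Drawing.Rectangle(0, 0, frame.Width, frame.Height),
        ImageLockMode.ReadOnly, System.Drawing.Imaging.PixelFormat.Format32bppArgb);

    GL.BindTexture(TextureTarget.Texture2D, texture);
    // Re-specify the whole image each frame; TexSubImage2D should also work if the size never changes.
    GL.TexImage2D(TextureTarget.Texture2D, 0, PixelInternalFormat.Rgba,
        data.Width, data.Height, 0,
        OpenTK.Graphics.OpenGL.PixelFormat.Bgra, PixelType.UnsignedByte, data.Scan0);

    frame.UnlockBits(data);

    glControl1.Invalidate(); // triggers glControl1_Paint -> DrawImage(texture)
}

My assumption is that marshalling to the UI thread keeps all GL calls on the thread that owns the GLControl's context, but I'm not sure whether re-uploading with TexImage2D on every frame is acceptable, or whether this is the right approach at all.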