I am working on a project that uses OpenGL off-screen rendering. After I create the OpenGL context, I find that some OpenGL extensions are not available. For example:
#include <windows.h>
#include <GL/glew.h>
#include <iostream>
#include <gl/gl.h>
#include <gl/glu.h>
#include <string>
#include <time.h>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
using namespace std;
using namespace cv;
void mGLRender()
{
    glClearColor(0.9f, 0.9f, 0.3f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    gluPerspective(30.0, 1.0, 1.0, 10.0);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    gluLookAt(0, 0, -5, 0, 0, 0, 0, 1, 0);
    glBegin(GL_TRIANGLES);
    glColor3d(1, 0, 0);
    glVertex3d(0, 1, 0);
    glColor3d(0, 1, 0);
    glVertex3d(-1, -1, 0);
    glColor3d(0, 0, 1);
    glVertex3d(1, -1, 0);
    glEnd();
    glFlush(); // remember to flush GL output!
}
void mGLRender1()
{
    glClearColor(0.3f, 0.3f, 0.3f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    gluPerspective(30.0, 1.0, 1.0, 10.0);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    gluLookAt(0, 0, -5, 0, 0, 0, 0, 1, 0);
    glBegin(GL_TRIANGLES);
    glColor3d(1, 0, 0);
    glVertex3d(0, 1, 0);
    glColor3d(0, 1, 0);
    glVertex3d(-1, -1, 0);
    glColor3d(0, 0, 1);
    glVertex3d(1, -1, 0);
    glEnd();
    glFlush(); // remember to flush GL output!
}
int main(int argc, char* argv[])
{
    clock_t clockBegin, clockEnd;
    const int WIDTH = 400;
    const int HEIGHT = 400;
    // Create a memory DC compatible with the screen
    HDC hdc = CreateCompatibleDC(0);
    if (hdc == 0) cout << "Could not create memory device context";
    // Create a bitmap compatible with the DC
    // must use CreateDIBSection(), and this means all pixel ops must be synchronised
    // using calls to GdiFlush() (see CreateDIBSection() docs)
    BITMAPINFO bmi = {
        { sizeof(BITMAPINFOHEADER), WIDTH, HEIGHT, 1, 32, BI_RGB, 0, 0, 0, 0, 0 },
        { 0 }
    };
    unsigned char *pbits; // pointer to bitmap bits
    HBITMAP hbm = CreateDIBSection(hdc, &bmi, DIB_RGB_COLORS, (void **)&pbits,
                                   0, 0);
    if (hbm == 0) cout << "Could not create bitmap";
    //HDC hdcScreen = GetDC(0);
    //HBITMAP hbm = CreateCompatibleBitmap(hdcScreen,WIDTH,HEIGHT);
    // Select the bitmap into the DC
    HGDIOBJ r = SelectObject(hdc, hbm);
    if (r == 0) cout << "Could not select bitmap into DC";
    // Choose the pixel format
    PIXELFORMATDESCRIPTOR pfd = {
        sizeof(PIXELFORMATDESCRIPTOR), // struct size
        1, // Version number
        PFD_DRAW_TO_BITMAP | PFD_SUPPORT_OPENGL, // use OpenGL drawing to BM
        PFD_TYPE_RGBA, // RGBA pixel values
        32, // color bits
        0, 0, 0, // RGB bits shift sizes...
        0, 0, 0, // Don't care about them
        0, 0, // No alpha buffer info
        0, 0, 0, 0, 0, // No accumulation buffer
        32, // depth buffer bits
        0, // No stencil buffer
        0, // No auxiliary buffers
        PFD_MAIN_PLANE, // Layer type
        0, // Reserved (must be 0)
        0, // No layer mask
        0, // No visible mask
        0, // No damage mask
    };
    int pfid = ChoosePixelFormat(hdc, &pfd);
    if (pfid == 0) cout << "Pixel format selection failed";
    // Set the pixel format
    // - must be done *after* the bitmap is selected into DC
    BOOL b = SetPixelFormat(hdc, pfid, &pfd);
    if (!b) cout << "Pixel format set failed";
    // Create the OpenGL resource context (RC) and make it current to the thread
    HGLRC hglrc = wglCreateContext(hdc);
    if (hglrc == 0) cout << "OpenGL resource context creation failed";
    wglMakeCurrent(hdc, hglrc);
    GLenum err = glewInit();
    if (GLEW_OK != err)
    {
        /* Problem: glewInit failed, something is seriously wrong. */
        std::cout << "glew init error" << std::endl;
        fprintf(stderr, "Error: %s\n", glewGetErrorString(err));
    }
    std::cout << (glewGetExtension("GL_ARB_fragment_shader") == GL_TRUE);
    std::cout << (glewGetExtension("GL_ARB_shader_objects") == GL_TRUE);
    std::cout << (glewGetExtension("GL_ARB_shading_language_100") == GL_TRUE);
    // Draw using GL - remember to sync with GdiFlush()
    clockBegin = clock();
    GdiFlush();
    mGLRender();
    //SaveBmp(hbm,"output.bmp");
    clockEnd = clock();
    printf("%d\n", clockEnd - clockBegin);
    clockBegin = clock();
    GdiFlush();
    mGLRender1();
    //SaveBmp(hbm,"output1.bmp");
    clockEnd = clock();
    printf("%d\n", clockEnd - clockBegin);
    //opencv show img
    Mat img(HEIGHT, WIDTH, CV_8UC4, (void *)pbits);
    imshow("img", img);
    waitKey();
    destroyWindow("img");
    // Clean up
    wglDeleteContext(hglrc); // Delete RC
    SelectObject(hdc, r); // Remove bitmap from DC
    DeleteObject(hbm); // Delete bitmap
    DeleteDC(hdc); // Delete DC
    system("pause");
    return 0;
}
The code above runs fine in VS2015. But this line:
std::cout << (glewGetExtension("GL_ARB_fragment_shader") == GL_TRUE);
reports that the GL_ARB_fragment_shader extension is not available. But I am sure my GPU supports this extension, because in a simple freeglut application glewGetExtension("GL_ARB_fragment_shader") returns true. Here is the code:
#include <stdlib.h>
#include <GL/glew.h>
#include <GL/glut.h>
#include <iostream>
// Window attributes
static const unsigned int WIN_POS_X = 30;
static const unsigned int WIN_POS_Y = WIN_POS_X;
static const unsigned int WIN_WIDTH = 512;
static const unsigned int WIN_HEIGHT = WIN_WIDTH;
void glInit(int, char **);
int main(int argc, char * argv[])
{
    // Initialize OpenGL
    glInit(argc, argv);
    // A valid OpenGL context has been created.
    // You can call OpenGL functions from here on.
    GLenum err = glewInit();
    if (GLEW_OK != err)
    {
        /* Problem: glewInit failed, something is seriously wrong. */
        std::cout << "glew init error" << std::endl;
        fprintf(stderr, "Error: %s\n", glewGetErrorString(err));
    }
    std::cout << (glewGetExtension("GL_ARB_fragment_shader") == GL_TRUE);
    std::cout << (glewGetExtension("GL_ARB_shader_objects") == GL_TRUE);
    std::cout << (glewGetExtension("GL_ARB_shading_language_100") == GL_TRUE);
    glutMainLoop();
    return 0;
}
void Display()
{
} // end Display()
void glInit(int argc, char ** argv)
{
    // Initialize GLUT
    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_DOUBLE);
    glutInitWindowPosition(WIN_POS_X, WIN_POS_Y);
    glutInitWindowSize(WIN_WIDTH, WIN_HEIGHT);
    glutCreateWindow("Hello OpenGL!");
    glutDisplayFunc(Display);
    return;
}
This code also runs fine in VS2015, and glewGetExtension("GL_ARB_fragment_shader") returns true. So do different OpenGL contexts have different OpenGL extensions? Please help me.
Answer 0 (score: 1)
Yes, different OpenGL contexts may support different OpenGL versions and/or extensions. In your particular case, the off-screen context you are creating ends up on the GDI software rasterizer fallback. A context created the way you create it will never be GPU accelerated!
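A quick way to see this for yourself (my own diagnostic, not part of the original answer) is to print the implementation strings right after wglMakeCurrent; Microsoft's software fallback normally identifies itself as vendor "Microsoft Corporation", renderer "GDI Generic", version 1.1:
#include <GL/glew.h>
#include <iostream>
// Call this after wglMakeCurrent(): it reports which implementation backs the
// current context. The GDI software fallback typically prints "GDI Generic".
void printContextInfo()
{
    std::cout << "GL_VENDOR:   " << glGetString(GL_VENDOR) << std::endl;
    std::cout << "GL_RENDERER: " << glGetString(GL_RENDERER) << std::endl;
    std::cout << "GL_VERSION:  " << glGetString(GL_VERSION) << std::endl;
}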
If you want to create a GPU-accelerated OpenGL context, you must either …
or …
or … (one common option is sketched below).
However, even GPU-accelerated OpenGL contexts can differ in version and extension support, even when they happen to be created on the same machine and the same GPU.
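As an illustration of one common route (my own sketch, not code from the answer itself): create an ordinary, never-shown window with a hardware pixel format (PFD_DRAW_TO_WINDOW instead of PFD_DRAW_TO_BITMAP), make its context current, and then do the actual off-screen work through that context. The helper name createHiddenGLContext and all parameter choices below are illustrative assumptions, with error checking omitted:
#include <windows.h>
#include <GL/glew.h>
// Sketch: obtain a GPU-accelerated context from a hidden window.
// A dedicated registered window class with CS_OWNDC is more robust than the
// built-in "STATIC" class used here for brevity.
HGLRC createHiddenGLContext(HDC &outDC) // hypothetical helper
{
    // A plain window that is never shown is enough to get an accelerated pixel format.
    HWND hwnd = CreateWindowA("STATIC", "gl-dummy", WS_OVERLAPPEDWINDOW,
                              0, 0, 64, 64, NULL, NULL,
                              GetModuleHandle(NULL), NULL);
    HDC hdc = GetDC(hwnd);
    PIXELFORMATDESCRIPTOR pfd = { sizeof(PIXELFORMATDESCRIPTOR), 1,
        PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_DOUBLEBUFFER,
        PFD_TYPE_RGBA, 32,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        24, 8, 0, PFD_MAIN_PLANE, 0, 0, 0, 0 };
    SetPixelFormat(hdc, ChoosePixelFormat(hdc, &pfd), &pfd);
    HGLRC hglrc = wglCreateContext(hdc);
    wglMakeCurrent(hdc, hglrc);
    outDC = hdc;
    return hglrc;
}
After glewInit() succeeds on such a context, glewGetExtension("GL_ARB_fragment_shader") should reflect what the GPU driver actually exposes, and the off-screen image itself can be produced with framebuffer objects (glGenFramebuffers etc.) and read back with glReadPixels rather than drawn into a DIB.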