Did you ever solve this? It doesn’t seem to work for me either:
// Turn on high-performance graphics for NVIDIA cards on Optimus laptops (built-in graphics + NVIDIA card)
extern "C"
{
    __declspec(dllexport) DWORD NvOptimusEnablement = 0x00000001;
}
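Side note, in case it matters: as I understand it (treat this as an assumption, not something I can point to in my copy of the docs), the driver only looks for this symbol in the executable's own export table when the process starts, so it does nothing if it ends up in a DLL, and the AMD switchable-graphics stack has an analogous export. A minimal sketch of both, assuming the block lives in the .exe project:

// Assumption: both exports must come from the .exe itself and are read at process launch.
extern "C"
{
    __declspec(dllexport) DWORD NvOptimusEnablement = 0x00000001;          // NVIDIA Optimus
    __declspec(dllexport) int AmdPowerXpressRequestHighPerformance = 1;    // AMD PowerXpress / switchable graphics
}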
Then, much later in the same file, in a constructor:
RenderPipe::RenderPipe(HDC dc)
{
    GLenum err;
    m_XMin = 0.0;
    m_XMax = 0.0;
    m_YMin = 0.0;
    m_YMax = 0.0;
    m_ZMin = 0.0;
    m_ZMax = 0.0;
    m_hrc = NULL;
    m_hdc = dc;
    m_pipe = this;
    /*static PIXELFORMATDESCRIPTOR pfd =
    {
        sizeof(PIXELFORMATDESCRIPTOR),
        1,
        PFD_DRAW_TO_BITMAP |
        PFD_SUPPORT_GDI |
        PFD_SUPPORT_OPENGL,   // support OpenGL
        PFD_TYPE_RGBA,        // RGBA type
        32,                   // 32-bit color depth
        0, 0, 0, 0, 0, 0,     // color bits ignored
        8,                    // alpha buffer bit depth
        0,                    // shift bit ignored
        0,                    // no accumulation buffer
        0, 0, 0, 0,           // accum bits ignored
        32,                   // 32-bit z-buffer
        0,                    // no stencil buffer
        0,                    // no auxiliary buffer
        PFD_MAIN_PLANE,       // main layer
        0,                    // reserved
        0, 0, 0               // layer masks ignored
    };*/
    static PIXELFORMATDESCRIPTOR pfd =
    {
        sizeof(PIXELFORMATDESCRIPTOR),
        1,
        PFD_DRAW_TO_WINDOW |  // support window
        PFD_SUPPORT_OPENGL |  // support OpenGL
        PFD_DOUBLEBUFFER,     // double buffered
        PFD_TYPE_RGBA,        // RGBA type
        32,                   // 32-bit color depth
        0, 0, 0, 0, 0, 0,     // color bits ignored
        0,                    // no alpha buffer
        0,                    // shift bit ignored
        0,                    // no accumulation buffer
        0, 0, 0, 0,           // accum bits ignored
        24,                   // 24-bit z-buffer
        0,                    // no stencil buffer
        0,                    // no auxiliary buffer
        PFD_MAIN_PLANE,       // main layer
        0,                    // reserved
        0, 0, 0               // layer masks ignored
    };
    // Get device context only once.
    // Pixel format.
    int pixelFormat = ChoosePixelFormat(m_hdc, &pfd);
    if (0 == pixelFormat)
    {
        throw GfxEngineError("Error: Could not find a pixel format that works on your computer");
    }
    if (!SetPixelFormat(m_hdc, pixelFormat, &pfd))
    {
        throw GfxEngineError("Error: Could not set the chosen pixel format");
    }
    NvAPI_Status retVal = NvAPI_Initialize();
    ASSERT(retVal == NVAPI_OK);
    HMODULE advancedGraphicsDll = LoadLibrary(L"nvapi.dll");
    NvOptimusEnablement = 0x00000001;
    HGLRC temp_hrc = wglCreateContext(m_hdc); // make a 1.0 context so we can find the pointer to make the 4.3 context. Windows is stupid.
    wglMakeCurrent(m_hdc, temp_hrc);
    const GLubyte* vendor_string = glGetString(GL_VENDOR);
    TRACE("%s\n", vendor_string);
    const GLint attribs[] = { WGL_CONTEXT_MAJOR_VERSION_ARB, 4,
                              WGL_CONTEXT_MINOR_VERSION_ARB, 3,
                              0 };
    PFNWGLCREATECONTEXTATTRIBSARBPROC wglCreateContextAttribsARB;
    wglCreateContextAttribsARB = (PFNWGLCREATECONTEXTATTRIBSARBPROC)wglGetProcAddress("wglCreateContextAttribsARB");
    ASSERT(wglCreateContextAttribsARB);
    // Create the OpenGL Rendering Context.
    m_hrc = wglCreateContextAttribsARB(m_hdc,
                                       0,        // don't share data between contexts
                                       attribs); // demand at least an OpenGL 4.3 context
    // Create the GLEW context.
    m_GlewContext = new GLEWContext();
    if (m_GlewContext == NULL)
    {
        throw GfxEngineError("Error: Could not create GLEW Context!");
    }
    wglMakeCurrent(NULL, NULL);
    wglDeleteContext(temp_hrc);
    MakeGLContextCurrent(); // use the 4.3 context
    vendor_string = glGetString(GL_VENDOR);
    TRACE("%s\n", vendor_string);
    vendor_string = glGetString(GL_RENDERER);
    TRACE("%s\n", vendor_string);
GL_VENDOR is always reported as Intel unless I go into the Nvidia control panel and force high-performance graphics. The LoadLibrary call for nvapi.dll returns a non-null value. I'm baffled. (Yes… this is an MFC program. Please… it's painful enough, don't tease me about it.)
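In case it helps anyone reproduce this, here is a small runtime check (just a sketch, not part of the project above; CheckOptimusExport is a hypothetical helper) to confirm the export actually survives into the final .exe rather than getting stripped by the linker or ending up in a DLL:

#include <windows.h>
#include <cstdio>

// Hypothetical diagnostic: look up NvOptimusEnablement in the running
// executable's own export table. Data exports are visible through
// GetProcAddress just like function exports.
static void CheckOptimusExport()
{
    HMODULE exe = GetModuleHandle(NULL);                       // handle to the .exe itself
    FARPROC sym = GetProcAddress(exe, "NvOptimusEnablement");  // NULL if the symbol was not exported
    std::printf("NvOptimusEnablement is %s\n", sym ? "exported" : "NOT exported");
}

If this reports NOT exported, the variable never made it into the executable's export table (for example because it sits in a static library the linker discarded), and the driver would never see it, which would be consistent with the Intel GL_VENDOR string.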