Bug 1088345 - Handle possibly-invalid enums for queries. - r=kamidphish

Jeff Gilbert 2014-10-23 18:55:49 -07:00
parent e15b3b3c98
commit a095bc6ede
3 changed files with 40 additions and 44 deletions
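In outline, the patch replaces ad-hoc fGetError() bookkeeping around glGetIntegerv with a new GLContext::GetPotentialInteger helper (added in the third file below), which runs the query inside a LocalErrorScope and reports whether the driver accepted the pname. A minimal sketch of the calling pattern, using names from the hunks below and assuming a valid GLContext* gl (an illustration, not an excerpt of the patch):

    GLint maxVertexOutputComponents = 0;
    if (gl->GetPotentialInteger(LOCAL_GL_MAX_VERTEX_OUTPUT_COMPONENTS,
                                &maxVertexOutputComponents)) {
        // The enum was accepted; the out-param now holds the queried value.
    } else {
        // The driver answered GL_INVALID_ENUM (e.g. a pre-3.2 desktop GL);
        // fall back to a spec-mandated minimum instead of failing init.
    }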

View File

@@ -1679,31 +1679,22 @@ WebGLContext::InitAndValidateGL()
// however these constants only entered the OpenGL standard at OpenGL 3.2. So we will try reading,
// and check OpenGL error for INVALID_ENUM.
// before we start, we check that no error already occurred, to prevent hiding it in our subsequent error handling
error = gl->fGetError();
if (error != LOCAL_GL_NO_ERROR) {
GenerateWarning("GL error 0x%x occurred during WebGL context initialization!", error);
return false;
}
// On the public_webgl list, "problematic GetParameter pnames" thread, the following formula was given:
// mGLMaxVaryingVectors = min (GL_MAX_VERTEX_OUTPUT_COMPONENTS, GL_MAX_FRAGMENT_INPUT_COMPONENTS) / 4
GLint maxVertexOutputComponents,
minFragmentInputComponents;
gl->fGetIntegerv(LOCAL_GL_MAX_VERTEX_OUTPUT_COMPONENTS, &maxVertexOutputComponents);
gl->fGetIntegerv(LOCAL_GL_MAX_FRAGMENT_INPUT_COMPONENTS, &minFragmentInputComponents);
GLint maxVertexOutputComponents = 0;
GLint maxFragmentInputComponents = 0;
error = gl->fGetError();
switch (error) {
case LOCAL_GL_NO_ERROR:
mGLMaxVaryingVectors = std::min(maxVertexOutputComponents, minFragmentInputComponents) / 4;
break;
case LOCAL_GL_INVALID_ENUM:
mGLMaxVaryingVectors = 16; // = 64/4, 64 is the min value for maxVertexOutputComponents in OpenGL 3.2 spec
break;
default:
GenerateWarning("GL error 0x%x occurred during WebGL context initialization!", error);
return false;
const bool ok = (gl->GetPotentialInteger(LOCAL_GL_MAX_VERTEX_OUTPUT_COMPONENTS,
&maxVertexOutputComponents) &&
gl->GetPotentialInteger(LOCAL_GL_MAX_FRAGMENT_INPUT_COMPONENTS,
&maxFragmentInputComponents));
if (ok) {
mGLMaxVaryingVectors = std::min(maxVertexOutputComponents,
maxFragmentInputComponents) / 4;
} else {
mGLMaxVaryingVectors = 16;
// = 64/4, 64 is the min value for maxVertexOutputComponents in OpenGL 3.2 spec
}
}
}
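For illustration, a self-contained sketch of the formula above with made-up driver values (the real values come from the two queries; 64 is only the OpenGL 3.2 minimum that the fallback assumes):

    #include <algorithm>

    int ExampleMaxVaryingVectors() {
        const int maxVertexOutputComponents = 64;  // hypothetical driver value
        const int maxFragmentInputComponents = 60; // hypothetical driver value
        // min(64, 60) / 4 == 15; if either query is rejected, the patch uses 16 (= 64/4).
        return std::min(maxVertexOutputComponents, maxFragmentInputComponents) / 4;
    }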

View File

@@ -162,10 +162,9 @@ static const char *sExtensionNames[] = {
};
static bool
ParseGLVersion(GLContext* gl, unsigned int* version)
ParseGLVersion(GLContext* gl, uint32_t* out_version)
{
GLenum error = gl->fGetError();
if (error != LOCAL_GL_NO_ERROR) {
if (gl->fGetError() != LOCAL_GL_NO_ERROR) {
MOZ_ASSERT(false, "An OpenGL error has been triggered before.");
return false;
}
@@ -175,29 +174,27 @@ ParseGLVersion(GLContext* gl, unsigned int* version)
* OpenGL 3.2. The bug is that GetIntegerv(LOCAL_GL_{MAJOR,MINOR}_VERSION)
* returns OpenGL 3.2 instead of generating an error.
*/
if (!gl->IsGLES())
{
if (!gl->IsGLES()) {
/**
* OpenGL 3.1 and OpenGL ES 3.0 both introduce GL_{MAJOR,MINOR}_VERSION
* with GetIntegerv. So we first try those constants even though we
* might not have an OpenGL context supporting them, has this is a
* might not have an OpenGL context supporting them, as this is a
* better way than parsing GL_VERSION.
*/
GLint majorVersion = 0;
GLint minorVersion = 0;
gl->fGetIntegerv(LOCAL_GL_MAJOR_VERSION, &majorVersion);
gl->fGetIntegerv(LOCAL_GL_MINOR_VERSION, &minorVersion);
const bool ok = (gl->GetPotentialInteger(LOCAL_GL_MAJOR_VERSION,
&majorVersion) &&
gl->GetPotentialInteger(LOCAL_GL_MINOR_VERSION,
&minorVersion));
// If it's not an OpenGL (ES) 3.0 context, we will have an error
error = gl->fGetError();
while (gl->fGetError() != LOCAL_GL_NO_ERROR);
if (error == LOCAL_GL_NO_ERROR &&
if (ok &&
majorVersion > 0 &&
minorVersion >= 0)
{
*version = majorVersion * 100 + minorVersion * 10;
*out_version = majorVersion * 100 + minorVersion * 10;
return true;
}
}
@@ -232,8 +229,7 @@ ParseGLVersion(GLContext* gl, unsigned int* version)
*/
const char* versionString = (const char*)gl->fGetString(LOCAL_GL_VERSION);
error = gl->fGetError();
if (error != LOCAL_GL_NO_ERROR) {
if (gl->fGetError() != LOCAL_GL_NO_ERROR) {
MOZ_ASSERT(false, "glGetString(GL_VERSION) has generated an error");
return false;
} else if (!versionString) {
@@ -248,7 +244,7 @@ ParseGLVersion(GLContext* gl, unsigned int* version)
const char* itr = versionString;
char* end = nullptr;
int majorVersion = (int)strtol(itr, &end, 10);
auto majorVersion = strtol(itr, &end, 10);
if (!end) {
MOZ_ASSERT(false, "Failed to parse the GL major version number.");
@@ -263,7 +259,7 @@ ParseGLVersion(GLContext* gl, unsigned int* version)
end = nullptr;
int minorVersion = (int)strtol(itr, &end, 10);
auto minorVersion = strtol(itr, &end, 10);
if (!end) {
MOZ_ASSERT(false, "Failed to parse GL's minor version number.");
return false;
@@ -277,7 +273,7 @@ ParseGLVersion(GLContext* gl, unsigned int* version)
return false;
}
*version = (unsigned int)(majorVersion * 100 + minorVersion * 10);
*out_version = (uint32_t)majorVersion * 100 + (uint32_t)minorVersion * 10;
return true;
}
@@ -506,8 +502,7 @@ GLContext::InitWithPrefix(const char *prefix, bool trygl)
mInitialized = LoadSymbols(&symbols[0], trygl, prefix);
MakeCurrent();
if (mInitialized) {
unsigned int version = 0;
uint32_t version = 0;
ParseGLVersion(this, &version);
#ifdef MOZ_GL_DEBUG

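The version is encoded as major * 100 + minor * 10, so OpenGL 3.3 becomes 330 (matching the "OpenGL 2.1 returns 210" example in the header below). A simplified, self-contained sketch of the GL_VERSION fallback parse, assuming a well-formed "major.minor..." prefix (the patched code also validates *end between the two strtol calls):

    #include <cstdint>
    #include <cstdlib>

    static uint32_t EncodeGLVersion(const char* versionString) {
        char* end = nullptr;
        const long major = std::strtol(versionString, &end, 10);
        const long minor = std::strtol(end + 1, &end, 10); // skip the '.'
        return (uint32_t)major * 100 + (uint32_t)minor * 10; // "3.3..." -> 330
    }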
View File

@@ -276,7 +276,7 @@ public:
* Example :
* If this a OpenGL 2.1, that will return 210
*/
inline unsigned int Version() const {
inline uint32_t Version() const {
return mVersion;
}
@@ -318,7 +318,7 @@ protected:
* mVersion store the OpenGL's version, multiplied by 100. For example, if
* the context is an OpenGL 2.1 context, mVersion value will be 210.
*/
unsigned int mVersion;
uint32_t mVersion;
nsCString mVersionString;
ContextProfile mProfile;
@@ -660,6 +660,16 @@ public:
}
};
bool GetPotentialInteger(GLenum pname, GLint* param) {
LocalErrorScope localError(*this);
fGetIntegerv(pname, param);
GLenum err = localError.GetError();
MOZ_ASSERT_IF(err != LOCAL_GL_NO_ERROR, err == LOCAL_GL_INVALID_ENUM);
return err == LOCAL_GL_NO_ERROR;
}
private:
static void GLAPIENTRY StaticDebugCallback(GLenum source,
GLenum type,