Bug 911853 - OpenGL types should be explicit-size integer types - r=jgilbert

Benoit Jacob 2013-09-04 08:14:41 -04:00
parent 70cb68de38
commit 20d3a7a3bc
2 changed files with 10 additions and 12 deletions


@@ -48,8 +48,6 @@
 #include "SurfaceTypes.h"
 #include "GLScreenBuffer.h"
-typedef char realGLboolean;
 #include "GLContextSymbols.h"
 #include "mozilla/mozalloc.h"


@@ -12,16 +12,16 @@
 #include <stddef.h>
 #include <stdint.h>
-typedef unsigned int GLenum;
-typedef unsigned int GLbitfield;
-typedef unsigned int GLuint;
-typedef int GLint;
-typedef int GLsizei;
-typedef char realGLboolean;
-typedef signed char GLbyte;
-typedef short GLshort;
-typedef unsigned char GLubyte;
-typedef unsigned short GLushort;
+typedef uint32_t GLenum;
+typedef uint32_t GLbitfield;
+typedef uint32_t GLuint;
+typedef int32_t GLint;
+typedef int32_t GLsizei;
+typedef int8_t realGLboolean;
+typedef int8_t GLbyte;
+typedef int16_t GLshort;
+typedef uint8_t GLubyte;
+typedef uint16_t GLushort;
 typedef float GLfloat;
 typedef float GLclampf;
 #ifndef GLdouble_defined
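
The point of the change is that the classic GL typedefs (unsigned int, short, char, ...) only happen to have the widths the GL ABI requires, whereas the <stdint.h> types state those widths explicitly. A minimal sketch of how that assumption could be verified at build time; these static_asserts are illustrative and are not part of the commit:

#include <stdint.h>

// Explicit-size typedefs as introduced by the diff above.
typedef uint32_t GLenum;
typedef int32_t  GLint;
typedef int8_t   realGLboolean;

// On the platforms Gecko targets, the classic GL types have exactly these
// widths, so switching to <stdint.h> types does not change the ABI. These
// checks (hypothetical, not in the commit) would fail the build on any
// platform where that assumption stops holding.
static_assert(sizeof(GLenum) == sizeof(unsigned int),
              "GLenum must remain ABI-compatible with unsigned int");
static_assert(sizeof(GLint) == sizeof(int),
              "GLint must remain ABI-compatible with int");
static_assert(sizeof(realGLboolean) == sizeof(char),
              "realGLboolean must remain ABI-compatible with char");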