Problem description:

I'm using Marmalade C++ and OpenGL ES 2.0. I'm working through several tutorials and I keep getting the same error:

EXC_BAD_ACCESS on the call to glGetShaderInfoLog()

Here's my code. How do I proceed in a situation like this?

#include <stdio.h>
#include <stdlib.h>
#include "IwGL.h"   // Marmalade GL wrapper: IwGLInit / IwGLTerminate
#include "s3e.h"    // Marmalade system APIs: s3eDebug*, s3eKeyboard*, s3eSurface*

static int eglInit()
{
    if (!IwGLInit())
    {
        s3eDebugErrorShow(S3E_MESSAGE_CONTINUE, "eglInit failed");
        return 1;
    }
    return 0;
}

const char* vShaderStr =
    "attribute vec4 vPosition;\n"
    "void main()\n"
    "{\n"
    "    gl_Position = vPosition;\n"
    "};\n";  // note: the stray ';' after '}' is rejected by many GLSL ES
             // compilers, which sends LoadShader into its error path below

const char* fShaderStr =
    "precision mediump float;\n"
    "void main()\n"
    "{\n"
    "    gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);\n"
    "}\n";

GLuint LoadShader(GLenum type, const char* shaderSrc)
{
    GLuint shader;
    GLint compiled;

    // Create the shader object
    shader = glCreateShader(type);
    if (shader == 0)
        return 0;

    // Load the shader source
    glShaderSource(shader, 1, &shaderSrc, NULL);

    // Compile the shader
    glCompileShader(shader);
    glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);

    if (!compiled)
    {
        GLint infoLen = 0;
        glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen);

        if (infoLen > 1)
        {
            char* infoLog = (char*)malloc(sizeof(char) * infoLen);

            // EXC_BAD_ACCESS happens on this call (see the answer below)
            glGetShaderInfoLog(shader, infoLen, NULL, infoLog);
            printf("Error compiling shader:\n%s\n", infoLog);

            free(infoLog);
        }
        glDeleteShader(shader);
        return 0;
    }
    return shader;
}

int main()
{
    if (eglInit())
        return 1;

    printf("Screen BPP: %d\n", s3eSurfaceGetInt(S3E_SURFACE_PIXEL_TYPE) & S3E_SURFACE_PIXEL_SIZE_MASK);
    printf("\n");
    printf("Vendor     : %s\n", (char*)glGetString(GL_VENDOR));
    printf("Renderer   : %s\n", (char*)glGetString(GL_RENDERER));
    printf("Version    : %s\n", (char*)glGetString(GL_VERSION));
    printf("Extensions : %s\n", (char*)glGetString(GL_EXTENSIONS));
    printf("\n");

    GLuint vertexShader;
    vertexShader = LoadShader(GL_VERTEX_SHADER, vShaderStr);

    bool quit = false;
    int numFrames = 0;

    while (!quit) {
        s3eKeyboardUpdate();
        s3eDeviceYield(0);

        if (s3eDeviceCheckQuitRequest())
            quit = true;
        if (s3eKeyboardGetState(s3eKeyEsc) & S3E_KEY_STATE_PRESSED)
            quit = true;

        numFrames++;
    }

    // Shut down the GL system
    IwGLTerminate();
    return 0;
}

Answer:

In:

glGetShaderInfoLog(shader, infoLen, NULL, infoLog);

you are passing NULL as the third parameter, which expects a GLsizei*. That parameter is populated with the actual length of the info log. Although the spec allows NULL there (it just means the length is not returned), some implementations dereference the pointer unconditionally, so on those it must be a valid pointer, and passing NULL produces exactly this kind of EXC_BAD_ACCESS.

To solve the issue you can just do:

GLsizei info_length = 0;
glGetShaderInfoLog(shader, infoLen, &info_length, infoLog);
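
Putting that together, the error branch inside LoadShader would look like this (a minimal sketch based on the question's code; the only change is the extra GLsizei variable):

    if (infoLen > 1)
    {
        char* infoLog = (char*)malloc(sizeof(char) * infoLen);

        // Pass a real GLsizei* so drivers that write the length
        // unconditionally have somewhere valid to write it
        GLsizei info_length = 0;
        glGetShaderInfoLog(shader, infoLen, &info_length, infoLog);
        printf("Error compiling shader:\n%s\n", infoLog);

        free(infoLog);
    }

With that in place the info log prints instead of crashing, and in this case it will likely flag the stray ';' after the closing brace of the vertex shader's main(), which is what makes the compilation fail in the first place.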