GPUShader: Bind shader for interface creation
This fixes the bindpoints not being set at interface creation.
parent ea92f8207a
commit beef67d1df
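The message doesn't spell out why the bind matters, so here is a minimal sketch of the OpenGL behaviour the fix relies on, assuming (as the symptom suggests) that the interface code assigns sampler bindpoints with plain glUniform1i(). That call writes into whatever program is currently bound with glUseProgram(); the helper and the "tex_name" sampler below are hypothetical, for illustration only.

    #include <GL/gl.h>

    /* Illustrative sketch, not code from this commit. glUniform1i() only
     * affects the currently bound program, so assigning a bindpoint without
     * binding first silently targets the wrong (or no) program. */
    static void assign_sampler_binding(GLuint program)
    {
      GLint loc = glGetUniformLocation(program, "tex_name"); /* hypothetical name */
      glUseProgram(program); /* what this commit adds: bind before assigning */
      if (loc != -1) {
        glUniform1i(loc, 0); /* bindpoint now lands in `program`, texture unit 0 */
      }
    }

An alternative that needs no bind would be glProgramUniform1i() (GL 4.1 direct state access), but this commit instead binds the program before creating the interface, as the hunks below show.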
@@ -331,6 +331,8 @@ GPUShader *GPU_shader_load_from_binary(const char *binary,
   glGetProgramiv(program, GL_LINK_STATUS, &success);
 
   if (success) {
+    glUseProgram(program);
+
     GPUShader *shader = MEM_callocN(sizeof(*shader), __func__);
     shader->interface = GPU_shaderinterface_create(program);
     shader->program = program;

@@ -572,6 +574,7 @@ GPUShader *GPU_shader_create_ex(const char *vertexcode,
     return NULL;
   }
 
+  glUseProgram(shader->program);
   shader->interface = GPU_shaderinterface_create(shader->program);
 
   return shader;

@@ -220,6 +220,12 @@ static int sampler_binding(int32_t program,
 
 GPUShaderInterface *GPU_shaderinterface_create(int32_t program)
 {
+#ifndef NDEBUG
+  GLint curr_program;
+  glGetIntegerv(GL_CURRENT_PROGRAM, &curr_program);
+  BLI_assert(curr_program == program);
+#endif
+
   GLint max_attr_name_len = 0, attr_len = 0;
   glGetProgramiv(program, GL_ACTIVE_ATTRIBUTE_MAX_LENGTH, &max_attr_name_len);
   glGetProgramiv(program, GL_ACTIVE_ATTRIBUTES, &attr_len);
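Taken together, the hunks make a rule out of what was previously implicit: GPU_shaderinterface_create() must be called with the program already bound, and the new NDEBUG block turns a violation into an assert in debug builds. A sketch of the resulting calling convention; the caller code is illustrative, only the two called functions come from the diff above.

    /* Illustrative caller following the convention this commit enforces. */
    glUseProgram(program); /* must come first, matching the assert */
    GPUShaderInterface *iface = GPU_shaderinterface_create(program);
    /* In debug builds, forgetting glUseProgram() now trips
     * BLI_assert(curr_program == program) instead of failing silently. */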