Fix T88365: GPUTexture.read returning a buffer with wrong size

The number of pixel components per texel was not being taken into account when computing the size of the returned buffer.
This commit is contained in:
Germano Cavalcante 2021-05-18 10:01:29 -03:00
parent 729c579030
commit 92178c7e0e
Notes: blender-bot 2023-02-13 18:40:08 +01:00
Referenced by issue #88365, GPUTexture.read does not respect texture color-depth
1 changed file with 8 additions and 3 deletions

View File

@@ -349,11 +349,12 @@ PyDoc_STRVAR(pygpu_texture_read_doc,
static PyObject *pygpu_texture_read(BPyGPUTexture *self)
{
BPYGPU_TEXTURE_CHECK_OBJ(self);
eGPUTextureFormat tex_format = GPU_texture_format(self->tex);
/* #GPU_texture_read is restricted in combining 'data_format' with 'tex_format'.
* So choose data_format here. */
eGPUDataFormat best_data_format;
switch (GPU_texture_format(self->tex)) {
switch (tex_format) {
case GPU_DEPTH_COMPONENT24:
case GPU_DEPTH_COMPONENT16:
case GPU_DEPTH_COMPONENT32F:
@@ -389,8 +390,12 @@ static PyObject *pygpu_texture_read(BPyGPUTexture *self)
}
void *buf = GPU_texture_read(self->tex, best_data_format, 0);
const Py_ssize_t shape[2] = {GPU_texture_height(self->tex), GPU_texture_width(self->tex)};
return (PyObject *)BPyGPU_Buffer_CreatePyObject(best_data_format, shape, ARRAY_SIZE(shape), buf);
const Py_ssize_t shape[3] = {GPU_texture_height(self->tex),
GPU_texture_width(self->tex),
GPU_texture_component_len(tex_format)};
int shape_len = (shape[2] == 1) ? 2 : 3;
return (PyObject *)BPyGPU_Buffer_CreatePyObject(best_data_format, shape, shape_len, buf);
}
#ifdef BPYGPU_USE_GPUOBJ_FREE_METHOD