Fix T78529: Blend file corrupted during save caused by high Cubemap Size

This only avoids the corruption. A proper fix still needs to be done.

See P1564
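For context, a rough sketch of the arithmetic behind the corruption, following the data_size computation visible in write_lightcache_texture() below. The 4-component float layout and the dimensions are illustrative assumptions, not values taken from the report:

#include <limits.h>
#include <stdio.h>

int main(void)
{
  /* Illustrative lightcache texture dimensions (assumed, not from the report):
   * 4 float components at a large cubemap resolution. */
  const size_t components = 4;
  const size_t tex_size[3] = {4096, 4096, 32};

  size_t data_size = components * tex_size[0] * tex_size[1] * tex_size[2];
  data_size *= sizeof(float);

  /* The .blend writer stores block lengths in a 32-bit int (hence the
   * INT_MAX guard in the diff), so a block this large cannot be written
   * correctly; the commit simply skips the write in that case instead of
   * corrupting the file. */
  printf("data_size = %zu bytes, fits in a 32-bit length: %s\n",
         data_size, data_size < (size_t)INT_MAX ? "yes" : "no");
  return 0;
}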
Clément Foucault 2020-08-05 02:25:31 +02:00
parent 6be8b6af40
commit 6390b530d0
Notes: blender-bot 2023-02-14 02:58:19 +01:00
Referenced by issue #78529, Blend file corrupted during save caused by high Cubemap Size
Referenced by issue #77348, Blender LTS: Maintenance Task 2.83
2 changed files with 11 additions and 2 deletions


@@ -6032,7 +6032,7 @@ static void direct_link_lightcache_texture(BlendDataReader *reader, LightCacheTe
   if (lctex->data) {
     BLO_read_data_address(reader, &lctex->data);
-    if (BLO_read_requires_endian_switch(reader)) {
+    if (lctex->data && BLO_read_requires_endian_switch(reader)) {
       int data_size = lctex->components * lctex->tex_size[0] * lctex->tex_size[1] *
                       lctex->tex_size[2];
@@ -6044,6 +6044,10 @@ static void direct_link_lightcache_texture(BlendDataReader *reader, LightCacheTe
       }
     }
   }
+
+  if (lctex->data == NULL) {
+    zero_v3_int(lctex->tex_size);
+  }
 }

 static void direct_link_lightcache(BlendDataReader *reader, LightCache *cache)
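As an aside on the loader-side change above: when the writer skips an oversized data block, the stored pointer cannot be resolved on load and lctex->data comes back NULL. Below is a minimal standalone sketch of the invariant being restored, using simplified stand-ins for the struct and for zero_v3_int() rather than Blender's actual definitions:

#include <stddef.h>

/* Simplified stand-in for LightCacheTexture, reduced to the fields used here. */
typedef struct SketchLightCacheTexture {
  void *data;
  int tex_size[3];
} SketchLightCacheTexture;

/* Stand-in for BLI's zero_v3_int(): set a 3-int vector to zero. */
static void sketch_zero_v3_int(int v[3])
{
  v[0] = v[1] = v[2] = 0;
}

/* If the payload is missing, zero the dimensions too, so any later size
 * computation (components * tex_size[0] * tex_size[1] * tex_size[2])
 * evaluates to zero instead of describing data that was never written. */
static void sketch_sanitize_lightcache_texture(SketchLightCacheTexture *lctex)
{
  if (lctex->data == NULL) {
    sketch_zero_v3_int(lctex->tex_size);
  }
}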


@@ -2497,7 +2497,12 @@ static void write_lightcache_texture(BlendWriter *writer, LightCacheTexture *tex
     else if (tex->data_type == LIGHTCACHETEX_UINT) {
       data_size *= sizeof(uint);
     }
-    BLO_write_raw(writer, data_size, tex->data);
+
+    /* FIXME: We can't save more than what 32bit systems can handle.
+     * The solution would be to split the texture but it is too late for 2.90. (see T78529) */
+    if (data_size < INT_MAX) {
+      BLO_write_raw(writer, data_size, tex->data);
+    }
   }
 }