Fix failing alembic test after IDProperty UI data refactor
The default float IDProperty min value for when there is no UI data (introduced in rB8b9a3b94fc148d19) was FLT_MIN instead of -FLT_MAX, which meant that animated custom property values couldn't be less than zero unless they had their UI data values edited previously. That's a mistake I won't make again! Also change the int minimums from -INT_MAX to INT_MIN to sanitize the whole situation.
This commit is contained in:
parent
400605c3a6
commit
276a862ebc
|
@@ -1665,9 +1665,9 @@ IDPropertyUIData *IDP_ui_data_ensure(IDProperty *prop)
|
|||
}
|
||||
case IDP_UI_DATA_TYPE_INT: {
|
||||
IDPropertyUIDataInt *ui_data = MEM_callocN(sizeof(IDPropertyUIDataInt), __func__);
|
||||
ui_data->min = -INT_MAX;
|
||||
ui_data->min = INT_MIN;
|
||||
ui_data->max = INT_MAX;
|
||||
ui_data->soft_min = -INT_MAX;
|
||||
ui_data->soft_min = INT_MIN;
|
||||
ui_data->soft_max = INT_MAX;
|
||||
ui_data->step = 1;
|
||||
prop->ui_data = (IDPropertyUIData *)ui_data;
|
||||
|
|
|
@@ -1333,7 +1333,7 @@ void RNA_property_float_range(PointerRNA *ptr, PropertyRNA *prop, float *hardmin
|
|||
*hardmax = (float)ui_data->max;
|
||||
}
|
||||
else {
|
||||
*hardmin = FLT_MIN;
|
||||
*hardmin = -FLT_MAX;
|
||||
*hardmax = FLT_MAX;
|
||||
}
|
||||
return;
|
||||
|
@@ -1377,7 +1377,7 @@ void RNA_property_float_ui_range(PointerRNA *ptr,
|
|||
*precision = (float)ui_data->precision;
|
||||
}
|
||||
else {
|
||||
*softmin = FLT_MIN;
|
||||
*softmin = -FLT_MAX;
|
||||
*softmax = FLT_MAX;
|
||||
*step = 1.0f;
|
||||
*precision = 3.0f;
|
||||
|
|
Loading…
Reference in New Issue