NLA: Extract ..get_inverted_upper_snapshot()

Extracts `nlasnapshot_blend_get_inverted_upper_snapshot()` from
`BKE_animsys_nla_remap_keyframe_values()`

This introduces a new struct member:
`NlaEvalChannelSnapshot->remap_domain` and marks which values of
`blended_snapshot` are processed for remapping/used-for-inverting.
Effectively, it marks which values have successfully been remapped and
can be further used for remapping.

`nlasnapshot_blend_get_inverted_upper_snapshot()`:
output snapshot `r_upper_snapshot` has each channel's `remap_domain`
written to, which effectively marks the successfully remapped values.
The only reason a value is not in the remap domain is if inversion
failed or it wasn't marked to be remapped.

`..get_inverted_upper_snapshot()` has a variant `nlasnapshot_blend()`
from {D10220}, but this patch doesn't depend on it at all. A third
variant will later be added `..get_inverted_lower_snapshot()`.
Altogether, these three functions allow solving for any of
(lower_snapshot, upper_snapshot, blended_snapshot) given the other two.
The function `..get_inverted_lower_snapshot()` will also similarly
process the remap domain of the blended and lower snapshot.

Added assertions within `nlasnapshot_blend()` and
`..get_inverted_upper_snapshot()` to future-proof branches dealing with
blend modes and mix modes (suggested by sybren).

No user-visible functional changes.

Reviewed By: sybren

Differential Revision: https://developer.blender.org/D10222
This commit is contained in:
Wayde Moss 2021-05-11 16:04:18 -04:00 committed by Wayde Moss
parent 65244ac1c3
commit cbeeca8167
2 changed files with 433 additions and 91 deletions

View File

@ -1040,6 +1040,7 @@ static NlaEvalChannelSnapshot *nlaevalchan_snapshot_new(NlaEvalChannel *nec)
nec_snapshot->channel = nec;
nec_snapshot->length = length;
nlavalidmask_init(&nec_snapshot->blend_domain, length);
nlavalidmask_init(&nec_snapshot->remap_domain, length);
return nec_snapshot;
}
@ -1050,6 +1051,7 @@ static void nlaevalchan_snapshot_free(NlaEvalChannelSnapshot *nec_snapshot)
BLI_assert(!nec_snapshot->is_base);
nlavalidmask_free(&nec_snapshot->blend_domain);
nlavalidmask_free(&nec_snapshot->remap_domain);
MEM_freeN(nec_snapshot);
}
@ -1649,6 +1651,355 @@ static bool nla_combine_quaternion_get_inverted_strip_values(const float lower_v
return true;
}
/* ---------------------- */
/* Assert necs and necs->channel is nonNull. */
/* Ensure both the snapshot and its backing channel exist (no-op in release builds). */
static void nlaevalchan_assert_nonNull(NlaEvalChannelSnapshot *necs)
{
  UNUSED_VARS_NDEBUG(necs);
  BLI_assert(necs != NULL);
  BLI_assert(necs->channel != NULL);
}
/* Assert that the channels given can be blended or combined together. */
static void nlaevalchan_assert_blendOrcombine_compatible(NlaEvalChannelSnapshot *lower_necs,
NlaEvalChannelSnapshot *upper_necs,
NlaEvalChannelSnapshot *blended_necs)
{
UNUSED_VARS_NDEBUG(lower_necs, upper_necs, blended_necs);
BLI_assert(!ELEM(NULL, lower_necs, blended_necs));
BLI_assert(upper_necs == NULL || lower_necs->length == upper_necs->length);
BLI_assert(lower_necs->length == blended_necs->length);
}
/* Assert that the channels given can be blended or combined together as a quaternion. */
static void nlaevalchan_assert_blendOrcombine_compatible_quaternion(
NlaEvalChannelSnapshot *lower_necs,
NlaEvalChannelSnapshot *upper_necs,
NlaEvalChannelSnapshot *blended_necs)
{
nlaevalchan_assert_blendOrcombine_compatible(lower_necs, upper_necs, blended_necs);
BLI_assert(lower_necs->length == 4);
}
/** Copy all values from \a src into \a dst.
 *
 * Both snapshots must store the same number of values: the copy is sized by
 * `src->length`, so a shorter \a dst would be written out of bounds. */
static void nlaevalchan_copy_values(NlaEvalChannelSnapshot *dst, NlaEvalChannelSnapshot *src)
{
  BLI_assert(dst->length == src->length);
  memcpy(dst->values, src->values, src->length * sizeof(float));
}
/** Copies lower necs to blended necs if upper necs is NULL or has zero influence.
* \return true if copied. */
static bool nlaevalchan_blendOrcombine_try_copy_lower(NlaEvalChannelSnapshot *lower_necs,
NlaEvalChannelSnapshot *upper_necs,
const float upper_influence,
NlaEvalChannelSnapshot *r_blended_necs)
{
const bool has_influence = !IS_EQF(upper_influence, 0.0f);
if (upper_necs != NULL && has_influence) {
return false;
}
nlaevalchan_copy_values(r_blended_necs, lower_necs);
return true;
}
/** Based on blendmode, blend lower necs with upper necs into blended necs.
*
* Each upper value's blend domain determines whether to blend or to copy directly
* from lower.
*/
/** Based on blendmode, blend lower necs with upper necs into blended necs.
 *
 * Per value, the upper snapshot's blend domain decides between blending and a
 * straight copy of the lower value. */
static void nlaevalchan_blend_value(NlaEvalChannelSnapshot *lower_necs,
                                    NlaEvalChannelSnapshot *upper_necs,
                                    const int upper_blendmode,
                                    const float upper_influence,
                                    NlaEvalChannelSnapshot *r_blended_necs)
{
  nlaevalchan_assert_blendOrcombine_compatible(lower_necs, upper_necs, r_blended_necs);
  if (nlaevalchan_blendOrcombine_try_copy_lower(
          lower_necs, upper_necs, upper_influence, r_blended_necs)) {
    return;
  }

  for (int index = 0; index < lower_necs->length; index++) {
    if (BLI_BITMAP_TEST_BOOL(upper_necs->blend_domain.ptr, index)) {
      r_blended_necs->values[index] = nla_blend_value(upper_blendmode,
                                                      lower_necs->values[index],
                                                      upper_necs->values[index],
                                                      upper_influence);
    }
    else {
      /* Value not marked for blending; the lower value passes through untouched. */
      r_blended_necs->values[index] = lower_necs->values[index];
    }
  }
}
/** Based on mixmode, provided by one the necs, combines lower necs with upper necs into blended
* necs.
*
* Each upper value's blend domain determines whether to blend or to copy directly
* from lower.
*/
/** Based on mixmode, provided by one the necs, combines lower necs with upper necs into blended
 * necs.
 *
 * Each upper value's blend domain determines whether to blend or to copy directly
 * from lower.
 */
static void nlaevalchan_combine_value(NlaEvalChannelSnapshot *lower_necs,
                                      NlaEvalChannelSnapshot *upper_necs,
                                      const float upper_influence,
                                      NlaEvalChannelSnapshot *r_blended_necs)
{
  nlaevalchan_assert_blendOrcombine_compatible(lower_necs, upper_necs, r_blended_necs);
  if (nlaevalchan_blendOrcombine_try_copy_lower(
          lower_necs, upper_necs, upper_influence, r_blended_necs)) {
    return;
  }

  /* Assumes every base is the same. */
  float *base_values = lower_necs->channel->base_snapshot.values;
  const int length = lower_necs->length;
  /* Mix mode is a property of the channel, shared by all three snapshots. */
  const char mix_mode = lower_necs->channel->mix_mode;

  for (int j = 0; j < length; j++) {
    /* Values outside the upper blend domain pass the lower value through unchanged. */
    if (!BLI_BITMAP_TEST_BOOL(upper_necs->blend_domain.ptr, j)) {
      r_blended_necs->values[j] = lower_necs->values[j];
      continue;
    }

    r_blended_necs->values[j] = nla_combine_value(
        mix_mode, base_values[j], lower_necs->values[j], upper_necs->values[j], upper_influence);
  }
}
/** Quaternion combines lower necs with upper necs into blended necs.
*
* Each upper value's blend domain determines whether to blend or to copy directly
* from lower.
*/
/** Quaternion combines lower necs with upper necs into blended necs.
 *
 * Each upper value's blend domain determines whether to blend or to copy directly
 * from lower.
 */
static void nlaevalchan_combine_quaternion(NlaEvalChannelSnapshot *lower_necs,
                                           NlaEvalChannelSnapshot *upper_necs,
                                           const float upper_influence,
                                           NlaEvalChannelSnapshot *r_blended_necs)
{
  nlaevalchan_assert_blendOrcombine_compatible_quaternion(lower_necs, upper_necs, r_blended_necs);
  if (nlaevalchan_blendOrcombine_try_copy_lower(
          lower_necs, upper_necs, upper_influence, r_blended_necs)) {
    return;
  }

  /** No need to check per index. We limit to all or nothing combining for quaternions.
   * Testing bit 0 alone is therefore sufficient. */
  if (!BLI_BITMAP_TEST_BOOL(upper_necs->blend_domain.ptr, 0)) {
    nlaevalchan_copy_values(r_blended_necs, lower_necs);
    return;
  }

  nla_combine_quaternion(
      lower_necs->values, upper_necs->values, upper_influence, r_blended_necs->values);
}
/** Based on blendmode and mixmode, blend lower necs with upper necs into blended necs.
*
* Each upper value's blend domain determines whether to blend or to copy directly
* from lower.
*
* \param lower_necs: Never NULL.
* \param upper_necs: Can be NULL.
* \param upper_blendmode: Enum value in eNlaStrip_Blend_Mode.
* \param upper_influence: Value in range [0, 1].
* \param upper_necs: Never NULL.
*
*/
/** Based on blendmode and mixmode, blend lower necs with upper necs into blended necs.
 *
 * Each upper value's blend domain determines whether to blend or to copy directly
 * from lower.
 *
 * \param lower_necs: Never NULL.
 * \param upper_necs: Can be NULL.
 * \param upper_blendmode: Enum value in eNlaStrip_Blend_Mode.
 * \param upper_influence: Value in range [0, 1].
 * \param r_blended_necs: Never NULL.
 */
static void nlaevalchan_blendOrcombine(NlaEvalChannelSnapshot *lower_necs,
                                       NlaEvalChannelSnapshot *upper_necs,
                                       const int upper_blendmode,
                                       const float upper_influence,
                                       NlaEvalChannelSnapshot *r_blended_necs)
{
  nlaevalchan_assert_nonNull(r_blended_necs);

  switch (upper_blendmode) {
    case NLASTRIP_MODE_COMBINE: {
      /* Combine mode dispatches further on the channel's mix mode. */
      switch (r_blended_necs->channel->mix_mode) {
        case NEC_MIX_QUATERNION: {
          nlaevalchan_combine_quaternion(lower_necs, upper_necs, upper_influence, r_blended_necs);
          return;
        }
        case NEC_MIX_ADD:
        case NEC_MIX_AXIS_ANGLE:
        case NEC_MIX_MULTIPLY: {
          nlaevalchan_combine_value(lower_necs, upper_necs, upper_influence, r_blended_necs);
          return;
        }
        default:
          /* Negate the string literal: a bare string is always truthy, which would
           * make this assert a silent no-op. */
          BLI_assert(!"Mix mode should've been handled");
      }
      return;
    }
    case NLASTRIP_MODE_ADD:
    case NLASTRIP_MODE_SUBTRACT:
    case NLASTRIP_MODE_MULTIPLY:
    case NLASTRIP_MODE_REPLACE: {
      nlaevalchan_blend_value(
          lower_necs, upper_necs, upper_blendmode, upper_influence, r_blended_necs);
      return;
    }
    default:
      BLI_assert(!"Blend mode should've been handled");
  }
}
/** Based on blendmode, solve for the upper values such that when lower blended with upper then we
* get blended values as a result.
*
* Only processes blended values in the remap domain. Successfully remapped upper values are placed
* in the remap domain so caller knows which values are usable.
*/
/** Based on blendmode, solve for the upper values such that when lower blended with upper then we
 * get blended values as a result.
 *
 * Only processes blended values in the remap domain. Successfully remapped upper values are placed
 * in the remap domain so caller knows which values are usable.
 */
static void nlaevalchan_blend_value_get_inverted_upper_evalchan(
    NlaEvalChannelSnapshot *lower_necs,
    NlaEvalChannelSnapshot *blended_necs,
    const int upper_blendmode,
    const float upper_influence,
    NlaEvalChannelSnapshot *r_upper_necs)
{
  nlaevalchan_assert_nonNull(r_upper_necs);
  nlaevalchan_assert_blendOrcombine_compatible(lower_necs, r_upper_necs, blended_necs);

  const int length = lower_necs->length;
  for (int j = 0; j < length; j++) {
    /* Blended values outside the remap domain are not inverted; mark the
     * corresponding upper value as unusable. */
    if (!BLI_BITMAP_TEST_BOOL(blended_necs->remap_domain.ptr, j)) {
      BLI_BITMAP_DISABLE(r_upper_necs->remap_domain.ptr, j);
      continue;
    }

    const bool success = nla_blend_get_inverted_strip_value(upper_blendmode,
                                                            lower_necs->values[j],
                                                            blended_necs->values[j],
                                                            upper_influence,
                                                            &r_upper_necs->values[j]);
    /* Record per-value inversion success so the caller knows which results to trust. */
    BLI_BITMAP_SET(r_upper_necs->remap_domain.ptr, j, success);
  }
}
/** Based on mixmode, solve for the upper values such that when lower combined with upper then we
* get blended values as a result.
*
* Only processes blended values in the remap domain. Successfully remapped upper values are placed
* in the remap domain so caller knows which values are usable.
*/
/** Based on mixmode, solve for the upper values such that when lower combined with upper then we
 * get blended values as a result.
 *
 * Only processes blended values in the remap domain. Successfully remapped upper values are placed
 * in the remap domain so caller knows which values are usable.
 */
static void nlaevalchan_combine_value_get_inverted_upper_evalchan(
    NlaEvalChannelSnapshot *lower_necs,
    NlaEvalChannelSnapshot *blended_necs,
    const float upper_influence,
    NlaEvalChannelSnapshot *r_upper_necs)
{
  nlaevalchan_assert_nonNull(r_upper_necs);
  nlaevalchan_assert_blendOrcombine_compatible(lower_necs, r_upper_necs, blended_necs);

  /* Assumes every channel's base is the same. */
  float *base_values = lower_necs->channel->base_snapshot.values;
  const int length = lower_necs->length;
  const char mix_mode = lower_necs->channel->mix_mode;

  for (int j = 0; j < length; j++) {
    /* Blended values outside the remap domain are not inverted; mark the
     * corresponding upper value as unusable. */
    if (!BLI_BITMAP_TEST_BOOL(blended_necs->remap_domain.ptr, j)) {
      BLI_BITMAP_DISABLE(r_upper_necs->remap_domain.ptr, j);
      continue;
    }

    const bool success = nla_combine_get_inverted_strip_value(mix_mode,
                                                              base_values[j],
                                                              lower_necs->values[j],
                                                              blended_necs->values[j],
                                                              upper_influence,
                                                              &r_upper_necs->values[j]);
    /* Record per-value inversion success so the caller knows which results to trust. */
    BLI_BITMAP_SET(r_upper_necs->remap_domain.ptr, j, success);
  }
}
/** Solve for the upper values such that when lower quaternion combined with upper then we get
* blended values as a result.
*
* All blended values must be in the remap domain. If successfully remapped, then all upper values
* are placed in the remap domain so caller knows the result is usable.
*/
/** Solve for the upper values such that when lower quaternion combined with upper then we get
 * blended values as a result.
 *
 * All blended values must be in the remap domain. If successfully remapped, then all upper values
 * are placed in the remap domain so caller knows the result is usable.
 */
static void nlaevalchan_combine_quaternion_get_inverted_upper_evalchan(
    NlaEvalChannelSnapshot *lower_necs,
    NlaEvalChannelSnapshot *blended_necs,
    const float upper_influence,
    NlaEvalChannelSnapshot *r_upper_necs)
{
  nlaevalchan_assert_nonNull(r_upper_necs);
  nlaevalchan_assert_blendOrcombine_compatible_quaternion(lower_necs, r_upper_necs, blended_necs);

  /* Must check each domain index individually in case animator had a non-combine NLA strip with a
   * subset of quaternion channels and remapping through any of them failed and thus potentially
   * has undefined values. */
  for (int j = 0; j < 4; j++) {
    if (!BLI_BITMAP_TEST_BOOL(blended_necs->remap_domain.ptr, j)) {
      /* Quaternions invert all-or-nothing: one missing component invalidates the result. */
      BLI_bitmap_set_all(r_upper_necs->remap_domain.ptr, false, 4);
      return;
    }
  }

  const bool success = nla_combine_quaternion_get_inverted_strip_values(
      lower_necs->values, blended_necs->values, upper_influence, r_upper_necs->values);
  /* Mark all four components usable (or not) as a unit. */
  BLI_bitmap_set_all(r_upper_necs->remap_domain.ptr, success, 4);
}
/** Based on blendmode and mix mode, solve for the upper values such that when lower blended or
* combined with upper then we get blended values as a result.
*
* Only processes blended values in the remap domain. Successfully remapped upper values are placed
* in the remap domain so caller knows which values are usable.
*
* \param lower_necs: Never NULL.
* \param blended_necs: Never NULL.
* \param upper_blendmode: Enum value in eNlaStrip_Blend_Mode.
* \param upper_influence: Value in range [0, 1].
* \param r_upper_necs: Never NULL.
*/
/** Based on blendmode and mix mode, solve for the upper values such that when lower blended or
 * combined with upper then we get blended values as a result.
 *
 * Only processes blended values in the remap domain. Successfully remapped upper values are placed
 * in the remap domain so caller knows which values are usable.
 *
 * \param lower_necs: Never NULL.
 * \param blended_necs: Never NULL.
 * \param upper_blendmode: Enum value in eNlaStrip_Blend_Mode.
 * \param upper_influence: Value in range [0, 1].
 * \param r_upper_necs: Never NULL.
 */
static void nlaevalchan_blendOrcombine_get_inverted_upper_evalchan(
    NlaEvalChannelSnapshot *lower_necs,
    NlaEvalChannelSnapshot *blended_necs,
    const int upper_blendmode,
    const float upper_influence,
    NlaEvalChannelSnapshot *r_upper_necs)
{
  nlaevalchan_assert_nonNull(r_upper_necs);

  /* With zero influence the upper strip has no effect on the blend, so no upper
   * value can be recovered; mark every value as failed. */
  if (IS_EQF(upper_influence, 0.0f)) {
    BLI_bitmap_set_all(r_upper_necs->remap_domain.ptr, false, r_upper_necs->length);
    return;
  }

  switch (upper_blendmode) {
    case NLASTRIP_MODE_COMBINE: {
      /* Combine mode dispatches further on the channel's mix mode. */
      switch (r_upper_necs->channel->mix_mode) {
        case NEC_MIX_QUATERNION: {
          nlaevalchan_combine_quaternion_get_inverted_upper_evalchan(
              lower_necs, blended_necs, upper_influence, r_upper_necs);
          return;
        }
        case NEC_MIX_ADD:
        case NEC_MIX_AXIS_ANGLE:
        case NEC_MIX_MULTIPLY: {
          nlaevalchan_combine_value_get_inverted_upper_evalchan(
              lower_necs, blended_necs, upper_influence, r_upper_necs);
          return;
        }
        default:
          /* Negate the string literal: a bare string is always truthy, which would
           * make this assert a silent no-op. */
          BLI_assert(!"Mix mode should've been handled");
      }
      return;
    }
    case NLASTRIP_MODE_ADD:
    case NLASTRIP_MODE_SUBTRACT:
    case NLASTRIP_MODE_MULTIPLY:
    case NLASTRIP_MODE_REPLACE: {
      nlaevalchan_blend_value_get_inverted_upper_evalchan(
          lower_necs, blended_necs, upper_blendmode, upper_influence, r_upper_necs);
      return;
    }
    default:
      BLI_assert(!"Blend mode should've been handled");
  }
}
/* ---------------------- */
/* F-Modifier stack joining/separation utilities -
* should we generalize these for BLI_listbase.h interface? */
@ -2513,11 +2864,7 @@ void nlasnapshot_blend(NlaEvalData *eval_data,
{
nlaeval_snapshot_ensure_size(r_blended_snapshot, eval_data->num_channels);
const bool zero_upper_influence = IS_EQF(upper_influence, 0.0f);
LISTBASE_FOREACH (NlaEvalChannel *, nec, &eval_data->channels) {
const int length = nec->base_snapshot.length;
NlaEvalChannelSnapshot *upper_necs = nlaeval_snapshot_get(upper_snapshot, nec->index);
NlaEvalChannelSnapshot *lower_necs = nlaeval_snapshot_get(lower_snapshot, nec->index);
if (upper_necs == NULL && lower_necs == NULL) {
@ -2530,49 +2877,43 @@ void nlasnapshot_blend(NlaEvalData *eval_data,
}
NlaEvalChannelSnapshot *result_necs = nlaeval_snapshot_ensure_channel(r_blended_snapshot, nec);
nlaevalchan_blendOrcombine(
lower_necs, upper_necs, upper_blendmode, upper_influence, result_necs);
}
}
/** Always copy \a lower_snapshot to result, irrelevant of whether \a upper_snapshot has a
* corresponding channel. This only matters when \a lower_snapshot not the same as
* \a r_blended_snapshot. */
memcpy(result_necs->values, lower_necs->values, length * sizeof(float));
if (upper_necs == NULL || zero_upper_influence) {
/** Using \a blended_snapshot and \a lower_snapshot, we can solve for the \a r_upper_snapshot.
*
* Only channels that exist within \a blended_snapshot are inverted.
*
* For \a r_upper_snapshot, disables \a NlaEvalChannelSnapshot->remap_domain for failed inversions.
* Only values within the \a remap_domain are processed.
*/
void nlasnapshot_blend_get_inverted_upper_snapshot(NlaEvalData *eval_data,
NlaEvalSnapshot *lower_snapshot,
NlaEvalSnapshot *blended_snapshot,
const short upper_blendmode,
const float upper_influence,
NlaEvalSnapshot *r_upper_snapshot)
{
nlaeval_snapshot_ensure_size(r_upper_snapshot, eval_data->num_channels);
LISTBASE_FOREACH (NlaEvalChannel *, nec, &eval_data->channels) {
NlaEvalChannelSnapshot *blended_necs = nlaeval_snapshot_get(blended_snapshot, nec->index);
if (blended_necs == NULL) {
/** We assume the caller only wants a subset of channels to be inverted, those that exist
* within \a blended_snapshot. */
continue;
}
if (upper_blendmode == NLASTRIP_MODE_COMBINE) {
const int mix_mode = nec->mix_mode;
if (mix_mode == NEC_MIX_QUATERNION) {
if (!BLI_BITMAP_TEST_BOOL(upper_necs->blend_domain.ptr, 0)) {
continue;
}
nla_combine_quaternion(
lower_necs->values, upper_necs->values, upper_influence, result_necs->values);
}
else {
for (int j = 0; j < length; j++) {
if (!BLI_BITMAP_TEST_BOOL(upper_necs->blend_domain.ptr, j)) {
continue;
}
result_necs->values[j] = nla_combine_value(mix_mode,
nec->base_snapshot.values[j],
lower_necs->values[j],
upper_necs->values[j],
upper_influence);
}
}
NlaEvalChannelSnapshot *lower_necs = nlaeval_snapshot_get(lower_snapshot, nec->index);
if (lower_necs == NULL) {
lower_necs = nlaeval_snapshot_find_channel(lower_snapshot->base, nec);
}
else {
for (int j = 0; j < length; j++) {
if (!BLI_BITMAP_TEST_BOOL(upper_necs->blend_domain.ptr, j)) {
continue;
}
result_necs->values[j] = nla_blend_value(
upper_blendmode, lower_necs->values[j], upper_necs->values[j], upper_influence);
}
}
NlaEvalChannelSnapshot *result_necs = nlaeval_snapshot_ensure_channel(r_upper_snapshot, nec);
nlaevalchan_blendOrcombine_get_inverted_upper_evalchan(
lower_necs, blended_necs, upper_blendmode, upper_influence, result_necs);
}
}
@ -2670,74 +3011,64 @@ bool BKE_animsys_nla_remap_keyframe_values(struct NlaKeyframingContext *context,
return false;
}
/* Find the evaluation channel for the NLA stack below current strip. */
/** Create \a blended_snapshot and fill with input \a values. */
NlaEvalData *eval_data = &context->lower_eval_data;
NlaEvalSnapshot blended_snapshot;
nlaeval_snapshot_init(&blended_snapshot, eval_data, NULL);
NlaEvalChannelKey key = {
.ptr = *prop_ptr,
.prop = prop,
};
/**
* Remove lower NLA stack effects.
*
* Using the tweak strip's blended result and the lower snapshot value, we can solve for the
* tweak strip value it must evaluate to.
*/
NlaEvalData *const lower_eval_data = &context->lower_eval_data;
NlaEvalChannel *const lower_nec = nlaevalchan_verify_key(lower_eval_data, NULL, &key);
if ((lower_nec->base_snapshot.length != count)) {
NlaEvalChannel *nec = nlaevalchan_verify_key(eval_data, NULL, &key);
BLI_assert(nec);
if (nec->base_snapshot.length != count) {
BLI_assert(!"invalid value count");
nlaeval_snapshot_free_data(&blended_snapshot);
return false;
}
/* Invert the blending operation to compute the desired strip values. */
NlaEvalChannelSnapshot *const lower_nec_snapshot = nlaeval_snapshot_find_channel(
&lower_eval_data->eval_snapshot, lower_nec);
NlaEvalChannelSnapshot *blended_necs = nlaeval_snapshot_ensure_channel(&blended_snapshot, nec);
memcpy(blended_necs->values, values, sizeof(float) * count);
BLI_bitmap_set_all(blended_necs->remap_domain.ptr, true, count);
float *lower_values = lower_nec_snapshot->values;
/** Remove lower NLA stack effects. */
nlasnapshot_blend_get_inverted_upper_snapshot(eval_data,
&context->lower_eval_data.eval_snapshot,
&blended_snapshot,
blend_mode,
influence,
&blended_snapshot);
if (blend_mode == NLASTRIP_MODE_COMBINE) {
/* Quaternion combine handles all sub-channels as a unit. */
if (lower_nec->mix_mode == NEC_MIX_QUATERNION) {
if (r_force_all == NULL) {
return false;
}
/** Write results into \a values. */
bool successful_remap = true;
if (blended_necs->channel->mix_mode == NEC_MIX_QUATERNION &&
blend_mode == NLASTRIP_MODE_COMBINE) {
if (r_force_all != NULL) {
*r_force_all = true;
if (!nla_combine_quaternion_get_inverted_strip_values(
lower_values, values, influence, values)) {
return false;
}
index = -1;
}
else {
float *base_values = lower_nec->base_snapshot.values;
for (int i = 0; i < count; i++) {
if (ELEM(index, i, -1)) {
if (!nla_combine_get_inverted_strip_value(lower_nec->mix_mode,
base_values[i],
lower_values[i],
values[i],
influence,
&values[i])) {
return false;
}
}
}
}
}
else {
for (int i = 0; i < count; i++) {
if (ELEM(index, i, -1)) {
if (!nla_blend_get_inverted_strip_value(
blend_mode, lower_values[i], values[i], influence, &values[i])) {
return false;
}
}
successful_remap = false;
}
}
return true;
for (int i = 0; i < count; i++) {
if (!ELEM(index, i, -1)) {
continue;
}
if (!BLI_BITMAP_TEST_BOOL(blended_necs->remap_domain.ptr, i)) {
successful_remap = false;
}
values[i] = blended_necs->values[i];
}
nlaeval_snapshot_free_data(&blended_snapshot);
return successful_remap;
}
/**

View File

@ -82,6 +82,10 @@ typedef struct NlaEvalChannelSnapshot {
/** For an upper snapshot channel, marks values that should be blended. */
NlaValidMask blend_domain;
/** Only used for keyframe remapping. Any values not in the \a remap_domain will not be used
* for keyframe remapping. */
NlaValidMask remap_domain;
int length; /* Number of values in the property. */
bool is_base; /* Base snapshot of the channel. */
@ -196,6 +200,13 @@ void nlasnapshot_blend(NlaEvalData *eval_data,
const float upper_influence,
NlaEvalSnapshot *r_blended_snapshot);
/** Using \a blended_snapshot and \a lower_snapshot, solve for \a r_upper_snapshot.
 * Only channels that exist within \a blended_snapshot are inverted; failed inversions
 * are excluded from each channel's `remap_domain` in \a r_upper_snapshot. */
void nlasnapshot_blend_get_inverted_upper_snapshot(NlaEvalData *eval_data,
                                                   NlaEvalSnapshot *lower_snapshot,
                                                   NlaEvalSnapshot *blended_snapshot,
                                                   const short upper_blendmode,
                                                   const float upper_influence,
                                                   NlaEvalSnapshot *r_upper_snapshot);
#ifdef __cplusplus
}
#endif