Code refactor: motion blur cleanups, preparing for next commit.

Brecht Van Lommel 2018-03-10 00:37:07 +01:00
parent 6c4ddfc5d9
commit 267d892326
17 changed files with 125 additions and 138 deletions

View File

@@ -633,10 +633,10 @@ static void ExportCurveSegments(Scene *scene, Mesh *mesh, ParticleCurveData *CDa
}
}
-static void ExportCurveSegmentsMotion(Mesh *mesh, ParticleCurveData *CData, int time_index)
+static void ExportCurveSegmentsMotion(Mesh *mesh, ParticleCurveData *CData, int motion_step)
{
VLOG(1) << "Exporting curve motion segments for mesh " << mesh->name
<< ", time index " << time_index;
<< ", motion step " << motion_step;
/* find attribute */
Attribute *attr_mP = mesh->curve_attributes.find(ATTR_STD_MOTION_VERTEX_POSITION);
@@ -651,7 +651,7 @@ static void ExportCurveSegmentsMotion(Mesh *mesh, ParticleCurveData *CData, int
/* export motion vectors for curve keys */
size_t numkeys = mesh->curve_keys.size();
-float4 *mP = attr_mP->data_float4() + time_index*numkeys;
+float4 *mP = attr_mP->data_float4() + motion_step*numkeys;
bool have_motion = false;
int i = 0;
@@ -702,12 +702,12 @@ static void ExportCurveSegmentsMotion(Mesh *mesh, ParticleCurveData *CData, int
}
mesh->curve_attributes.remove(ATTR_STD_MOTION_VERTEX_POSITION);
}
-else if(time_index > 0) {
-VLOG(1) << "Filling in new motion vertex position for time_index "
-<< time_index;
+else if(motion_step > 0) {
+VLOG(1) << "Filling in new motion vertex position for motion_step "
+<< motion_step;
/* motion, fill up previous steps that we might have skipped because
* they had no motion, but we need them anyway now */
-for(int step = 0; step < time_index; step++) {
+for(int step = 0; step < motion_step; step++) {
float4 *mP = attr_mP->data_float4() + step*numkeys;
for(int key = 0; key < numkeys; key++) {
@@ -888,7 +888,7 @@ void BlenderSync::sync_curves(Mesh *mesh,
BL::Mesh& b_mesh,
BL::Object& b_ob,
bool motion,
-int time_index)
+int motion_step)
{
if(!motion) {
/* Clear stored curve data */
@@ -951,7 +951,7 @@ void BlenderSync::sync_curves(Mesh *mesh,
}
else {
if(motion)
-ExportCurveSegmentsMotion(mesh, &CData, time_index);
+ExportCurveSegmentsMotion(mesh, &CData, motion_step);
else
ExportCurveSegments(scene, mesh, &CData);
}
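
Aside: the pointer arithmetic above (attr_mP->data_float4() + motion_step*numkeys) assumes the motion attribute is packed as one slice of numkeys values per non-center motion step. A minimal standalone sketch of that layout, with a hypothetical MotionAttribute container standing in for Cycles' Attribute class:

#include <cstddef>
#include <cstdio>
#include <vector>

struct float4 { float x, y, z, w; };

/* Hypothetical container standing in for the motion vertex attribute:
 * (motion_steps - 1) slices of numkeys values, one per non-center step. */
struct MotionAttribute {
	std::vector<float4> data;
	size_t numkeys;

	MotionAttribute(int motion_steps, size_t numkeys)
	: data((motion_steps - 1)*numkeys), numkeys(numkeys) {}

	/* Start of the slice for a motion step, mirroring
	 * attr_mP->data_float4() + motion_step*numkeys above. */
	float4 *slice(int motion_step) { return data.data() + motion_step*numkeys; }
};

int main()
{
	MotionAttribute attr(3, 100); /* 2 stored steps, 100 keys each */
	printf("slice 1 starts %td keys in\n", attr.slice(1) - attr.slice(0)); /* 100 */
	return 0;
}
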

View File

@@ -1248,36 +1248,10 @@ void BlenderSync::sync_mesh_motion(BL::Object& b_ob,
if(mesh_synced.find(mesh) == mesh_synced.end())
return;
-/* for motion pass always compute, for motion blur it can be disabled */
-int time_index = 0;
-if(scene->need_motion() == Scene::MOTION_BLUR) {
-if(!mesh->use_motion_blur)
-return;
-/* see if this mesh needs motion data at this time */
-vector<float> object_times = object->motion_times();
-bool found = false;
-foreach(float object_time, object_times) {
-if(motion_time == object_time) {
-found = true;
-break;
-}
-else
-time_index++;
-}
-if(!found)
-return;
-}
-else {
-if(motion_time == -1.0f)
-time_index = 0;
-else if(motion_time == 1.0f)
-time_index = 1;
-else
-return;
-}
+/* Find time matching motion step required by mesh. */
+int motion_step = mesh->motion_step(motion_time);
+if(motion_step < 0) {
+return;
+}
/* skip empty meshes */
@@ -1319,9 +1293,9 @@ void BlenderSync::sync_mesh_motion(BL::Object& b_ob,
float3 *P = &mesh->verts[0];
float3 *N = (attr_N)? attr_N->data_float3(): NULL;
-memcpy(attr_mP->data_float3() + time_index*numverts, P, sizeof(float3)*numverts);
+memcpy(attr_mP->data_float3() + motion_step*numverts, P, sizeof(float3)*numverts);
if(attr_mN)
-memcpy(attr_mN->data_float3() + time_index*numverts, N, sizeof(float3)*numverts);
+memcpy(attr_mN->data_float3() + motion_step*numverts, N, sizeof(float3)*numverts);
}
}
@@ -1331,7 +1305,7 @@ void BlenderSync::sync_mesh_motion(BL::Object& b_ob,
if(attr_mP) {
float3 *keys = &mesh->curve_keys[0];
-memcpy(attr_mP->data_float3() + time_index*numkeys, keys, sizeof(float3)*numkeys);
+memcpy(attr_mP->data_float3() + motion_step*numkeys, keys, sizeof(float3)*numkeys);
}
}
@@ -1354,8 +1328,8 @@ void BlenderSync::sync_mesh_motion(BL::Object& b_ob,
new_attribute = true;
}
/* Load vertex data from mesh. */
-float3 *mP = attr_mP->data_float3() + time_index*numverts;
-float3 *mN = (attr_mN)? attr_mN->data_float3() + time_index*numverts: NULL;
+float3 *mP = attr_mP->data_float3() + motion_step*numverts;
+float3 *mN = (attr_mN)? attr_mN->data_float3() + motion_step*numverts: NULL;
/* NOTE: We don't copy more that existing amount of vertices to prevent
* possible memory corruption.
*/
@@ -1384,13 +1358,13 @@ void BlenderSync::sync_mesh_motion(BL::Object& b_ob,
if(attr_mN)
mesh->attributes.remove(ATTR_STD_MOTION_VERTEX_NORMAL);
}
-else if(time_index > 0) {
+else if(motion_step > 0) {
VLOG(1) << "Filling deformation motion for object " << b_ob.name();
/* motion, fill up previous steps that we might have skipped because
* they had no motion, but we need them anyway now */
float3 *P = &mesh->verts[0];
float3 *N = (attr_N)? attr_N->data_float3(): NULL;
-for(int step = 0; step < time_index; step++) {
+for(int step = 0; step < motion_step; step++) {
memcpy(attr_mP->data_float3() + step*numverts, P, sizeof(float3)*numverts);
if(attr_mN)
memcpy(attr_mN->data_float3() + step*numverts, N, sizeof(float3)*numverts);
@@ -1400,7 +1374,7 @@ void BlenderSync::sync_mesh_motion(BL::Object& b_ob,
else {
if(b_mesh.vertices.length() != numverts) {
VLOG(1) << "Topology differs, discarding motion blur for object "
-<< b_ob.name() << " at time " << time_index;
+<< b_ob.name() << " at time " << motion_step;
memcpy(mP, &mesh->verts[0], sizeof(float3)*numverts);
if(mN != NULL) {
memcpy(mN, attr_N->data_float3(), sizeof(float3)*numverts);
@@ -1411,7 +1385,7 @@ void BlenderSync::sync_mesh_motion(BL::Object& b_ob,
/* hair motion */
if(numkeys)
-sync_curves(mesh, b_mesh, b_ob, true, time_index);
+sync_curves(mesh, b_mesh, b_ob, true, motion_step);
/* free derived mesh */
b_data.meshes.remove(b_mesh, false, true, false);

View File

@@ -396,7 +396,6 @@ Object *BlenderSync::sync_object(BL::Object& b_parent,
/* motion blur */
if(scene->need_motion() == Scene::MOTION_BLUR && object->mesh) {
Mesh *mesh = object->mesh;
-mesh->use_motion_blur = false;
if(object_use_motion(b_parent, b_ob)) {
@@ -405,9 +404,9 @@ Object *BlenderSync::sync_object(BL::Object& b_parent,
mesh->use_motion_blur = true;
}
-vector<float> times = object->motion_times();
-foreach(float time, times)
-motion_times.insert(time);
+for(size_t step = 0; step < mesh->motion_steps - 1; step++) {
+motion_times.insert(mesh->motion_time(step));
+}
}
}
@@ -694,6 +693,11 @@ void BlenderSync::sync_motion(BL::RenderSettings& b_render,
/* note iteration over motion_times set happens in sorted order */
foreach(float relative_time, motion_times) {
+/* center time is already handled. */
+if(relative_time == 0.0f) {
+continue;
+}
VLOG(1) << "Synchronizing motion for the relative time "
<< relative_time << ".";

View File

@@ -118,7 +118,7 @@ private:
BL::Mesh& b_mesh,
BL::Object& b_ob,
bool motion,
-int time_index = 0);
+int motion_step = 0);
Object *sync_object(BL::Object& b_parent,
int persistent_id[OBJECT_PERSISTENT_ID_SIZE],
BL::DupliObject& b_dupli_ob,

View File

@@ -32,8 +32,8 @@ enum ObjectTransform {
};
enum ObjectVectorTransform {
-OBJECT_VECTOR_MOTION_PRE = 0,
-OBJECT_VECTOR_MOTION_POST = 1
+OBJECT_PASS_MOTION_PRE = 0,
+OBJECT_PASS_MOTION_POST = 1
};
/* Object to world space transformation */
@@ -68,10 +68,10 @@ ccl_device_inline Transform lamp_fetch_transform(KernelGlobals *kg, int lamp, bo
/* Object to world space transformation for motion vectors */
-ccl_device_inline Transform object_fetch_vector_transform(KernelGlobals *kg, int object, enum ObjectVectorTransform type)
+ccl_device_inline Transform object_fetch_motion_pass_transform(KernelGlobals *kg, int object, enum ObjectVectorTransform type)
{
-int offset = object*OBJECT_VECTOR_SIZE + (int)type;
-return kernel_tex_fetch(__objects_vector, offset);
+int offset = object*OBJECT_MOTION_PASS_SIZE + (int)type;
+return kernel_tex_fetch(__object_motion_pass, offset);
}
/* Motion blurred object transformations */
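
As a side note, the offset computation above encodes a simple packed layout: each object contributes OBJECT_MOTION_PASS_SIZE (= 2) consecutive transforms, pre then post. A minimal standalone sketch of the same indexing; the flat array and fetch function here are hypothetical stand-ins for the __object_motion_pass texture and kernel_tex_fetch, not the kernel's actual storage:

#include <cstdio>

#define OBJECT_MOTION_PASS_SIZE 2

enum ObjectVectorTransform {
	OBJECT_PASS_MOTION_PRE = 0,
	OBJECT_PASS_MOTION_POST = 1
};

struct Transform { float m[12]; };

/* Stand-in for the __object_motion_pass texture: pre/post pairs per object. */
static Transform object_motion_pass[OBJECT_MOTION_PASS_SIZE*64];

static Transform *fetch_motion_pass_transform(int object, ObjectVectorTransform type)
{
	int offset = object*OBJECT_MOTION_PASS_SIZE + (int)type;
	return &object_motion_pass[offset];
}

int main()
{
	/* Object 3's pre transform lives at flat index 6, its post at index 7. */
	printf("%td %td\n",
	       fetch_motion_pass_transform(3, OBJECT_PASS_MOTION_PRE) - object_motion_pass,
	       fetch_motion_pass_transform(3, OBJECT_PASS_MOTION_POST) - object_motion_pass);
	return 0;
}
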

View File

@@ -193,10 +193,10 @@ ccl_device_inline float4 primitive_motion_vector(KernelGlobals *kg, ShaderData *
* transformation was set match the world/object space of motion_pre/post */
Transform tfm;
-tfm = object_fetch_vector_transform(kg, sd->object, OBJECT_VECTOR_MOTION_PRE);
+tfm = object_fetch_motion_pass_transform(kg, sd->object, OBJECT_PASS_MOTION_PRE);
motion_pre = transform_point(&tfm, motion_pre);
-tfm = object_fetch_vector_transform(kg, sd->object, OBJECT_VECTOR_MOTION_POST);
+tfm = object_fetch_motion_pass_transform(kg, sd->object, OBJECT_PASS_MOTION_POST);
motion_post = transform_point(&tfm, motion_post);
float3 motion_center;
@@ -207,10 +207,10 @@ ccl_device_inline float4 primitive_motion_vector(KernelGlobals *kg, ShaderData *
ProjectionTransform projection = kernel_data.cam.worldtoraster;
motion_center = transform_perspective(&projection, center);
-projection = kernel_data.cam.perspective_motion.pre;
+projection = kernel_data.cam.perspective_pre;
motion_pre = transform_perspective(&projection, motion_pre);
-projection = kernel_data.cam.perspective_motion.post;
+projection = kernel_data.cam.perspective_post;
motion_post = transform_perspective(&projection, motion_post);
}
else {
@@ -220,13 +220,13 @@ ccl_device_inline float4 primitive_motion_vector(KernelGlobals *kg, ShaderData *
motion_center.x *= kernel_data.cam.width;
motion_center.y *= kernel_data.cam.height;
-tfm = kernel_data.cam.pass_motion.pre;
+tfm = kernel_data.cam.motion_pass_pre;
motion_pre = normalize(transform_point(&tfm, motion_pre));
motion_pre = float2_to_float3(direction_to_panorama(&kernel_data.cam, motion_pre));
motion_pre.x *= kernel_data.cam.width;
motion_pre.y *= kernel_data.cam.height;
-tfm = kernel_data.cam.pass_motion.post;
+tfm = kernel_data.cam.motion_pass_post;
motion_post = normalize(transform_point(&tfm, motion_post));
motion_post = float2_to_float3(direction_to_panorama(&kernel_data.cam, motion_post));
motion_post.x *= kernel_data.cam.width;

View File

@@ -54,13 +54,13 @@ ccl_device void camera_sample_perspective(KernelGlobals *kg, float raster_x, flo
* interpolated field of view.
*/
if(ray->time < 0.5f) {
-ProjectionTransform rastertocamera_pre = kernel_data.cam.perspective_motion.pre;
+ProjectionTransform rastertocamera_pre = kernel_data.cam.perspective_pre;
float3 Pcamera_pre =
transform_perspective(&rastertocamera_pre, raster);
Pcamera = interp(Pcamera_pre, Pcamera, ray->time * 2.0f);
}
else {
-ProjectionTransform rastertocamera_post = kernel_data.cam.perspective_motion.post;
+ProjectionTransform rastertocamera_post = kernel_data.cam.perspective_post;
float3 Pcamera_post =
transform_perspective(&rastertocamera_post, raster);
Pcamera = interp(Pcamera, Pcamera_post, (ray->time - 0.5f) * 2.0f);
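
For intuition, the branch above blends in two halves of the shutter interval: times in [0, 0.5) interpolate from the pre-projection result toward the center, times in [0.5, 1] from the center toward the post result, meeting at the center value at time 0.5. A scalar sketch of the same scheme; the interp helper and the P_* values are made up for illustration:

#include <cstdio>

static float interp(float a, float b, float t)
{
	return a + t * (b - a);
}

int main()
{
	const float P_pre = 0.0f, P_center = 1.0f, P_post = 4.0f;
	/* time 0.00 -> 0.0, 0.25 -> 0.5, 0.50 -> 1.0, 0.75 -> 2.5, 1.00 -> 4.0 */
	for(float time = 0.0f; time <= 1.0f; time += 0.25f) {
		float P = (time < 0.5f) ? interp(P_pre, P_center, time * 2.0f)
		                        : interp(P_center, P_post, (time - 0.5f) * 2.0f);
		printf("time %.2f -> %.2f\n", time, P);
	}
	return 0;
}
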

View File

@@ -32,7 +32,8 @@ KERNEL_TEX(float2, __prim_time)
/* objects */
KERNEL_TEX(KernelObject, __objects)
-KERNEL_TEX(Transform, __objects_vector)
+KERNEL_TEX(Transform, __object_motion_pass)
+KERNEL_TEX(uint, __object_flag)
/* triangles */
KERNEL_TEX(uint, __tri_shader)
@@ -66,7 +67,6 @@ KERNEL_TEX(KernelParticle, __particles)
/* shaders */
KERNEL_TEX(uint4, __svm_nodes)
KERNEL_TEX(KernelShader, __shaders)
-KERNEL_TEX(uint, __object_flag)
/* lookup tables */
KERNEL_TEX(float, __lookup_table)

View File

@@ -35,10 +35,10 @@
CCL_NAMESPACE_BEGIN
/* Constants */
-#define OBJECT_VECTOR_SIZE 2
-#define FILTER_TABLE_SIZE 1024
-#define RAMP_TABLE_SIZE 256
-#define SHUTTER_TABLE_SIZE 256
+#define OBJECT_MOTION_PASS_SIZE 2
+#define FILTER_TABLE_SIZE 1024
+#define RAMP_TABLE_SIZE 256
+#define SHUTTER_TABLE_SIZE 256
#define BSSRDF_MIN_RADIUS 1e-8f
#define BSSRDF_MAX_HITS 4
@@ -1205,10 +1205,12 @@ typedef struct KernelCamera {
/* Stores changes in the projeciton matrix. Use for camera zoom motion
* blur and motion pass output for perspective camera. */
-PerspectiveMotionTransform perspective_motion;
+ProjectionTransform perspective_pre;
+ProjectionTransform perspective_post;
/* Transforms for motion pass. */
-MotionTransform pass_motion;
+Transform motion_pass_pre;
+Transform motion_pass_post;
int shutter_table_offset;

View File

@@ -302,23 +302,6 @@ void Camera::update(Scene *scene)
frustum_top_normal = normalize(make_float3(0.0f, v.z, -v.y));
}
-/* TODO(sergey): Support other types of camera. */
-if(type == CAMERA_PERSPECTIVE) {
-/* TODO(sergey): Move to an utility function and de-duplicate with
-* calculation above.
-*/
-ProjectionTransform screentocamera_pre =
-projection_inverse(projection_perspective(fov_pre,
-nearclip,
-farclip));
-ProjectionTransform screentocamera_post =
-projection_inverse(projection_perspective(fov_post,
-nearclip,
-farclip));
-perspective_motion.pre = screentocamera_pre * rastertoscreen;
-perspective_motion.post = screentocamera_post * rastertoscreen;
-}
/* Compute kernel camera data. */
KernelCamera *kcam = &kernel_camera;
@@ -341,22 +324,22 @@ void Camera::update(Scene *scene)
/* TODO(sergey): Support perspective (zoom, fov) motion. */
if(type == CAMERA_PANORAMA) {
if(use_motion) {
-kcam->pass_motion.pre = transform_inverse(motion.pre);
-kcam->pass_motion.post = transform_inverse(motion.post);
+kcam->motion_pass_pre = transform_inverse(motion.pre);
+kcam->motion_pass_post = transform_inverse(motion.post);
}
else {
-kcam->pass_motion.pre = kcam->worldtocamera;
-kcam->pass_motion.post = kcam->worldtocamera;
+kcam->motion_pass_pre = kcam->worldtocamera;
+kcam->motion_pass_post = kcam->worldtocamera;
}
}
else {
if(use_motion) {
-kcam->perspective_motion.pre = cameratoraster * transform_inverse(motion.pre);
-kcam->perspective_motion.post = cameratoraster * transform_inverse(motion.post);
+kcam->perspective_pre = cameratoraster * transform_inverse(motion.pre);
+kcam->perspective_post = cameratoraster * transform_inverse(motion.post);
}
else {
-kcam->perspective_motion.pre = worldtoraster;
-kcam->perspective_motion.post = worldtoraster;
+kcam->perspective_pre = worldtoraster;
+kcam->perspective_post = worldtoraster;
}
}
}
@@ -365,8 +348,23 @@ void Camera::update(Scene *scene)
transform_motion_decompose(&kcam->motion, &motion, &matrix);
kcam->have_motion = 1;
}
-if(use_perspective_motion) {
-kcam->perspective_motion = perspective_motion;
+/* TODO(sergey): Support other types of camera. */
+if(use_perspective_motion && type == CAMERA_PERSPECTIVE) {
+/* TODO(sergey): Move to an utility function and de-duplicate with
+* calculation above.
+*/
+ProjectionTransform screentocamera_pre =
+projection_inverse(projection_perspective(fov_pre,
+nearclip,
+farclip));
+ProjectionTransform screentocamera_post =
+projection_inverse(projection_perspective(fov_post,
+nearclip,
+farclip));
+kcam->perspective_pre = screentocamera_pre * rastertoscreen;
+kcam->perspective_post = screentocamera_post * rastertoscreen;
+kcam->have_perspective_motion = 1;
}
}

View File

@@ -144,7 +144,6 @@ public:
MotionTransform motion;
bool use_motion, use_perspective_motion;
float fov_pre, fov_post;
-PerspectiveMotionTransform perspective_motion;
/* computed camera parameters */
ProjectionTransform screentoworld;

View File

@@ -1117,6 +1117,32 @@ bool Mesh::has_true_displacement() const
return false;
}
+float Mesh::motion_time(int step) const
+{
+return (motion_steps > 1) ? 2.0f * step / (motion_steps - 1) - 1.0f : 0.0f;
+}
+int Mesh::motion_step(float time) const
+{
+if(motion_steps > 1) {
+int attr_step = 0;
+for(int step = 0; step < motion_steps; step++) {
+float step_time = motion_time(step);
+if(step_time == time) {
+return attr_step;
+}
+/* Center step is stored in a separate attribute. */
+if(step != motion_steps / 2) {
+attr_step++;
+}
+}
+}
+return -1;
+}
bool Mesh::need_build_bvh() const
{
return !transform_applied || has_surface_bssrdf;
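
To make the new helpers concrete, here is a standalone sketch of the round trip between attribute steps and normalized shutter times, mirroring the functions added above with motion_steps fixed to 3. Note that the center time 0.0 is deliberately never looked up through motion_step(): sync_motion() skips relative_time == 0.0f, since the center step lives in the regular (non-motion) attribute:

#include <cstdio>

static const int motion_steps = 3;

static float motion_time(int step)
{
	return (motion_steps > 1) ? 2.0f * step / (motion_steps - 1) - 1.0f : 0.0f;
}

static int motion_step(float time)
{
	int attr_step = 0;
	for(int step = 0; step < motion_steps; step++) {
		if(motion_time(step) == time)
			return attr_step;
		/* Center step is stored separately, so it has no attribute index. */
		if(step != motion_steps / 2)
			attr_step++;
	}
	return -1;
}

int main()
{
	/* motion_time: step 0 -> -1.0 (pre), 1 -> 0.0 (center), 2 -> 1.0 (post).
	 * motion_step: -1.0 -> attribute slice 0, 1.0 -> slice 1, others -> -1. */
	for(int step = 0; step < motion_steps; step++)
		printf("motion_time(%d) = %.1f\n", step, motion_time(step));
	printf("motion_step(-1.0f) = %d\n", motion_step(-1.0f));
	printf("motion_step(1.0f)  = %d\n", motion_step(1.0f));
	printf("motion_step(0.3f)  = %d\n", motion_step(0.3f));
	return 0;
}
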

View File

@@ -304,6 +304,11 @@ public:
bool has_motion_blur() const;
bool has_true_displacement() const;
+/* Convert between normalized -1..1 motion time and index
+* in the VERTEX_MOTION attribute. */
+float motion_time(int step) const;
+int motion_step(float time) const;
/* Check whether the mesh should have own BVH built separately. Briefly,
* own BVH is needed for mesh, if:
*

View File

@@ -57,7 +57,8 @@ struct UpdateObjectTransformState {
/* Packed object arrays. Those will be filled in. */
uint *object_flag;
KernelObject *objects;
-Transform *objects_vector;
+Transform *object_motion_pass;
+DecomposedTransform *object_motion;
/* Flags which will be synchronized to Integrator. */
bool have_motion;
@@ -274,27 +275,7 @@ void Object::tag_update(Scene *scene)
scene->object_manager->need_update = true;
}
-vector<float> Object::motion_times()
-{
-/* compute times at which we sample motion for this object */
-vector<float> times;
-if(!mesh || mesh->motion_steps == 1)
-return times;
-int motion_steps = mesh->motion_steps;
-for(int step = 0; step < motion_steps; step++) {
-if(step != motion_steps / 2) {
-float time = 2.0f * step / (motion_steps - 1) - 1.0f;
-times.push_back(time);
-}
-}
-return times;
-}
-bool Object::is_traceable()
+bool Object::is_traceable() const
{
/* Mesh itself can be empty,can skip all such objects. */
if(!bounds.valid() || bounds.size() == make_float3(0.0f, 0.0f, 0.0f)) {
@@ -332,7 +313,7 @@ void ObjectManager::device_update_object_transform(UpdateObjectTransformState *s
int object_index)
{
KernelObject& kobject = state->objects[object_index];
-Transform *objects_vector = state->objects_vector;
+Transform *object_motion_pass = state->object_motion_pass;
Mesh *mesh = ob->mesh;
uint flag = 0;
@@ -434,8 +415,8 @@ void ObjectManager::device_update_object_transform(UpdateObjectTransformState *s
mtfm.post = mtfm.post * itfm;
}
-objects_vector[object_index*OBJECT_VECTOR_SIZE+0] = mtfm.pre;
-objects_vector[object_index*OBJECT_VECTOR_SIZE+1] = mtfm.post;
+object_motion_pass[object_index*OBJECT_MOTION_PASS_SIZE+0] = mtfm.pre;
+object_motion_pass[object_index*OBJECT_MOTION_PASS_SIZE+1] = mtfm.post;
}
else if(state->need_motion == Scene::MOTION_BLUR) {
if(ob->use_motion) {
@@ -528,10 +509,10 @@ void ObjectManager::device_update_transforms(DeviceScene *dscene,
state.objects = dscene->objects.alloc(scene->objects.size());
state.object_flag = dscene->object_flag.alloc(scene->objects.size());
-state.objects_vector = NULL;
+state.object_motion_pass = NULL;
if(state.need_motion == Scene::MOTION_PASS) {
-state.objects_vector = dscene->objects_vector.alloc(OBJECT_VECTOR_SIZE*scene->objects.size());
+state.object_motion_pass = dscene->object_motion_pass.alloc(OBJECT_MOTION_PASS_SIZE*scene->objects.size());
}
/* Particle system device offsets
@@ -574,7 +555,7 @@ void ObjectManager::device_update_transforms(DeviceScene *dscene,
dscene->objects.copy_to_device();
if(state.need_motion == Scene::MOTION_PASS) {
-dscene->objects_vector.copy_to_device();
+dscene->object_motion_pass.copy_to_device();
}
dscene->data.bvh.have_motion = state.have_motion;
@@ -724,7 +705,7 @@ void ObjectManager::device_update_mesh_offsets(Device *, DeviceScene *dscene, Sc
void ObjectManager::device_free(Device *, DeviceScene *dscene)
{
dscene->objects.free();
-dscene->objects_vector.free();
+dscene->object_motion_pass.free();
dscene->object_flag.free();
}

View File

@@ -70,12 +70,10 @@ public:
void compute_bounds(bool motion_blur);
void apply_transform(bool apply_to_motion);
-vector<float> motion_times();
/* Check whether object is traceable and it worth adding it to
* kernel scene.
*/
-bool is_traceable();
+bool is_traceable() const;
/* Combine object's visibility with all possible internal run-time
* determined flags which denotes trace-time visibility.

View File

@@ -60,7 +60,8 @@ DeviceScene::DeviceScene(Device *device)
curve_keys(device, "__curve_keys", MEM_TEXTURE),
patches(device, "__patches", MEM_TEXTURE),
objects(device, "__objects", MEM_TEXTURE),
objects_vector(device, "__objects_vector", MEM_TEXTURE),
object_motion_pass(device, "__object_motion_pass", MEM_TEXTURE),
object_flag(device, "__object_flag", MEM_TEXTURE),
attributes_map(device, "__attributes_map", MEM_TEXTURE),
attributes_float(device, "__attributes_float", MEM_TEXTURE),
attributes_float3(device, "__attributes_float3", MEM_TEXTURE),
@@ -72,7 +73,6 @@ DeviceScene::DeviceScene(Device *device)
particles(device, "__particles", MEM_TEXTURE),
svm_nodes(device, "__svm_nodes", MEM_TEXTURE),
shaders(device, "__shaders", MEM_TEXTURE),
object_flag(device, "__object_flag", MEM_TEXTURE),
lookup_table(device, "__lookup_table", MEM_TEXTURE),
sobol_directions(device, "__sobol_directions", MEM_TEXTURE)
{

View File

@@ -87,7 +87,8 @@ public:
/* objects */
device_vector<KernelObject> objects;
-device_vector<Transform> objects_vector;
+device_vector<Transform> object_motion_pass;
+device_vector<uint> object_flag;
/* attributes */
device_vector<uint4> attributes_map;
@@ -107,7 +108,6 @@ public:
/* shaders */
device_vector<int4> svm_nodes;
device_vector<KernelShader> shaders;
-device_vector<uint> object_flag;
/* lookup tables */
device_vector<float> lookup_table;