Fix T60379: Cycles viewport adaptive subdivision hangs after updates.

The camera world-to-raster computation was using wrong values. Also fixes
updates when changing subdivision scene settings.
Brecht Van Lommel 2019-04-04 20:06:22 +02:00
parent 59d0582a75
commit b2e2db94bd
Notes: blender-bot 2023-02-14 04:07:53 +01:00
Referenced by issue #60379, Cycles preview lockup / memory allocation loop while changing adaptive displacement parameters.
7 changed files with 38 additions and 15 deletions
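For context, a minimal standalone sketch (a simplified model, not the Cycles implementation) of the kind of estimate Camera::world_to_raster_size() makes for a perspective camera: the world-space footprint of one raster pixel grows with distance and is inversely proportional to the raster width it is computed against, which is why the hunks below switch from width/height and rastertocamera to full_width/full_height and full_rastertocamera. Values and names here are illustrative only.

/* Simplified, self-contained model of a perspective camera's per-pixel
 * footprint (illustration only; the real computation lives in
 * Camera::world_to_raster_size and uses the projection transforms). */
#include <cmath>
#include <cstdio>

/* Approximate world-space width covered by one raster pixel at the given
 * distance, for a pinhole camera with horizontal field of view 'fov'
 * (radians) and a raster that is 'raster_width' pixels wide. */
static float pixel_footprint(float fov, int raster_width, float distance)
{
  float plane_width = 2.0f * std::tan(0.5f * fov); /* image plane width at depth 1 */
  return distance * plane_width / float(raster_width);
}

int main()
{
  const float fov = 0.8726646f; /* ~50 degrees */
  /* The estimate scales with 1/raster_width, so plugging in a cropped
   * viewport/border size instead of the full frame width yields a different
   * per-pixel dicing size. */
  std::printf("full frame (1920 px): %f\n", pixel_footprint(fov, 1920, 10.0f));
  std::printf("cropped     (480 px): %f\n", pixel_footprint(fov, 480, 10.0f));
  return 0;
}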

View File

@@ -516,8 +516,6 @@ static void xml_read_mesh(const XMLReadState& state, xml_node node)
xml_read_float(&sdparams.dicing_rate, node, "dicing_rate");
sdparams.dicing_rate = std::max(0.1f, sdparams.dicing_rate);
state.scene->camera->update(state.scene);
sdparams.camera = state.scene->camera;
sdparams.objecttoworld = state.tfm;
}

View File

@@ -927,8 +927,6 @@ static void create_subd_mesh(Scene *scene,
sdparams.dicing_rate = max(0.1f, RNA_float_get(&cobj, "dicing_rate") * dicing_rate);
sdparams.max_level = max_subdivisions;
scene->dicing_camera->update(scene);
sdparams.camera = scene->dicing_camera;
sdparams.objecttoworld = get_transform(b_ob.matrix_world());
}

View File

@@ -85,10 +85,11 @@ void BlenderSync::sync_recalc(BL::Depsgraph& b_depsgraph)
* so we can do it later on if doing it immediate is not suitable. */
bool has_updated_objects = b_depsgraph.id_type_updated(BL::DriverTarget::id_type_OBJECT);
bool dicing_prop_changed = false;
if(experimental) {
/* Mark all meshes as needing to be exported again if dicing changed. */
PointerRNA cscene = RNA_pointer_get(&b_scene.ptr, "cycles");
bool dicing_prop_changed = false;
float updated_dicing_rate = preview ? RNA_float_get(&cscene, "preview_dicing_rate")
: RNA_float_get(&cscene, "dicing_rate");
@@ -104,6 +105,15 @@ void BlenderSync::sync_recalc(BL::Depsgraph& b_depsgraph)
max_subdivisions = updated_max_subdivisions;
dicing_prop_changed = true;
}
if(dicing_prop_changed) {
for(const pair<void*, Mesh*>& iter: mesh_map.key_to_scene_data()) {
Mesh *mesh = iter.second;
if(mesh->subdivision_type != Mesh::SUBDIVISION_NONE) {
mesh_map.set_recalc(iter.first);
}
}
}
}
/* Iterate over all IDs in this depsgraph. */
@@ -133,7 +143,7 @@ void BlenderSync::sync_recalc(BL::Depsgraph& b_depsgraph)
if(object_is_mesh(b_ob)) {
if(updated_geometry ||
(dicing_prop_changed && object_subdivision_type(b_ob, preview, experimental) != Mesh::SUBDIVISION_NONE))
(object_subdivision_type(b_ob, preview, experimental) != Mesh::SUBDIVISION_NONE))
{
BL::ID key = BKE_object_is_modified(b_ob)? b_ob: b_ob.data();
mesh_map.set_recalc(key);

View File

@@ -628,6 +628,11 @@ public:
b_recalc.insert(id.ptr.data);
}
void set_recalc(void *id_ptr)
{
b_recalc.insert(id_ptr);
}
bool has_recalc()
{
return !(b_recalc.empty());
@@ -723,6 +728,11 @@ public:
return deleted;
}
const map<K, T*>& key_to_scene_data()
{
return b_map;
}
protected:
vector<T*> *scene_data;
map<K, T*> b_map;
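
Taken together, the two accessors added in this header (set_recalc(void*) and key_to_scene_data()) let sync_recalc() walk the already-synced meshes and flag the subdivided ones for re-export. Below is a condensed standalone sketch of that pattern, using hypothetical simplified types rather than the real id_map/Mesh classes:

#include <map>
#include <set>

struct Mesh {
  enum SubdivisionType { SUBDIVISION_NONE, SUBDIVISION_CATMULL_CLARK };
  SubdivisionType subdivision_type = SUBDIVISION_NONE;
};

/* Minimal stand-in for the id_map container: only the members used here. */
template<typename K, typename T> struct id_map {
  /* Flag an entry for re-export by its key pointer (as set_recalc(void*) does). */
  void set_recalc(void *id_ptr) { b_recalc.insert(id_ptr); }
  bool has_recalc() const { return !b_recalc.empty(); }
  /* Read-only view of the currently synced scene data (as key_to_scene_data() does). */
  const std::map<K, T *> &key_to_scene_data() const { return b_map; }

  std::map<K, T *> b_map;
  std::set<void *> b_recalc;
};

int main()
{
  Mesh subd_mesh;
  subd_mesh.subdivision_type = Mesh::SUBDIVISION_CATMULL_CLARK;

  id_map<void *, Mesh> mesh_map;
  mesh_map.b_map[&subd_mesh] = &subd_mesh;

  /* The pattern sync_recalc() uses when a dicing property changed: mark every
   * subdivided mesh so it gets tessellated again with the new settings. */
  for (const auto &iter : mesh_map.key_to_scene_data()) {
    Mesh *mesh = iter.second;
    if (mesh->subdivision_type != Mesh::SUBDIVISION_NONE)
      mesh_map.set_recalc(iter.first);
  }

  return mesh_map.has_recalc() ? 0 : 1;
}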

View File

@@ -169,6 +169,8 @@ Camera::Camera()
cameratoworld = transform_identity();
worldtoraster = projection_identity();
full_rastertocamera = projection_identity();
dx = make_float3(0.0f, 0.0f, 0.0f);
dy = make_float3(0.0f, 0.0f, 0.0f);
@@ -251,7 +253,7 @@ void Camera::update(Scene *scene)
ProjectionTransform screentocamera = projection_inverse(cameratoscreen);
rastertocamera = screentocamera * rastertoscreen;
ProjectionTransform full_rastertocamera = screentocamera * full_rastertoscreen;
full_rastertocamera = screentocamera * full_rastertoscreen;
cameratoraster = screentoraster * cameratoscreen;
cameratoworld = matrix;
@@ -627,7 +629,7 @@ float Camera::world_to_raster_size(float3 P)
if(offscreen_dicing_scale > 1.0f) {
float3 p = transform_point(&worldtocamera, P);
float3 v = transform_perspective(&rastertocamera, make_float3(width, height, 0.0f));
float3 v = transform_perspective(&full_rastertocamera, make_float3(full_width, full_height, 0.0f));
/* Create point clamped to frustum */
float3 c;
@@ -644,8 +646,8 @@ float Camera::world_to_raster_size(float3 P)
}
else if(type == CAMERA_PERSPECTIVE) {
/* Calculate as if point is directly ahead of the camera. */
float3 raster = make_float3(0.5f*width, 0.5f*height, 0.0f);
float3 Pcamera = transform_perspective(&rastertocamera, raster);
float3 raster = make_float3(0.5f*full_width, 0.5f*full_height, 0.0f);
float3 Pcamera = transform_perspective(&full_rastertocamera, raster);
/* dDdx */
float3 Ddiff = transform_direction(&cameratoworld, Pcamera);
@@ -728,22 +730,21 @@ float Camera::world_to_raster_size(float3 P)
* point directly ahead seems to produce good enough results. */
#if 0
float2 dir = direction_to_panorama(&kernel_camera, kernel_camera_motion.data(), normalize(D));
float3 raster = transform_perspective(&cameratoraster, make_float3(dir.x, dir.y, 0.0f));
float3 raster = transform_perspective(&full_cameratoraster, make_float3(dir.x, dir.y, 0.0f));
ray.t = 1.0f;
camera_sample_panorama(&kernel_camera, kernel_camera_motion.data(), raster.x, raster.y, 0.0f, 0.0f, &ray);
if(ray.t == 0.0f) {
/* No differentials, just use from directly ahead. */
camera_sample_panorama(&kernel_camera, kernel_camera_motion.data(), 0.5f*width, 0.5f*height, 0.0f, 0.0f, &ray);
camera_sample_panorama(&kernel_camera, kernel_camera_motion.data(), 0.5f*full_width, 0.5f*full_height, 0.0f, 0.0f, &ray);
}
#else
camera_sample_panorama(&kernel_camera, kernel_camera_motion.data(), 0.5f*width, 0.5f*height, 0.0f, 0.0f, &ray);
camera_sample_panorama(&kernel_camera, kernel_camera_motion.data(), 0.5f*full_width, 0.5f*full_height, 0.0f, 0.0f, &ray);
#endif
differential_transfer(&ray.dP, ray.dP, ray.D, ray.dD, ray.D, dist);
return max(len(ray.dP.dx) * (float(width)/float(full_width)),
len(ray.dP.dy) * (float(height)/float(full_height)));
return max(len(ray.dP.dx),len(ray.dP.dy));
}
return res;

View File

@@ -160,6 +160,8 @@ public:
ProjectionTransform rastertocamera;
ProjectionTransform cameratoraster;
ProjectionTransform full_rastertocamera;
float3 dx;
float3 dy;

View File

@@ -2144,6 +2144,9 @@ void MeshManager::device_update(Device *device, DeviceScene *dscene, Scene *scen
/* Tessellate meshes that are using subdivision */
if(total_tess_needed) {
Camera *dicing_camera = scene->dicing_camera;
dicing_camera->update(scene);
size_t i = 0;
foreach(Mesh *mesh, scene->meshes) {
if(mesh->need_update &&
@@ -2159,6 +2162,7 @@ void MeshManager::device_update(Device *device, DeviceScene *dscene, Scene *scen
progress.set_status("Updating Mesh", msg);
mesh->subd_params->camera = dicing_camera;
DiagSplit dsplit(*mesh->subd_params);
mesh->tessellate(&dsplit);