Cycles: add offscreen dicing scale and dicing camera.

The offscreen dicing scale helps to significantly reduce memory usage by
gradually coarsening the dicing of objects the further they are outside
of the camera view.
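
The falloff is applied per point in Camera::world_to_raster_size: the raster
size (and with it the effective dicing rate) grows linearly with the point's
distance from the view frustum. A minimal Python sketch of that scaling, with
illustrative names rather than the actual Cycles API:

    def offscreen_raster_size(base_size, frustum_distance, offscreen_dicing_scale):
        """Grow the per-point raster size the further the point lies outside
        the camera frustum, mirroring res += res * f_dist * (scale - 1)."""
        # frustum_distance is 0.0 for points inside the frustum.
        if offscreen_dicing_scale > 1.0 and frustum_distance > 0.0:
            base_size += base_size * frustum_distance * (offscreen_dicing_scale - 1.0)
        return base_size

    print(offscreen_raster_size(1.0, 0.0, 4.0))  # 1.0, on-screen points are unchanged
    print(offscreen_raster_size(1.0, 1.0, 4.0))  # 4.0, about one frustum-width away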

A dicing camera can now be specified, to keep the geometry fixed and
avoid crawling artifacts in animation. It is also useful for debugging,
to see the tessellation from a different camera location.
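
For reference, a small bpy snippet showing how the two new settings could be
driven from script; it assumes the Cycles addon is enabled and that a camera
object named "DicingCam" exists (that name is illustrative):

    import bpy

    scene = bpy.context.scene
    cscene = scene.cycles  # CyclesRenderSettings registered by the addon

    # Coarsen dicing for geometry outside the view frustum (1.0 disables the falloff).
    cscene.offscreen_dicing_scale = 4.0

    # Keep tessellation pinned to a fixed camera while the render camera animates.
    cscene.dicing_camera = bpy.data.objects.get("DicingCam")

Both settings affect adaptive subdivision, which the UI exposes under the
Experimental feature set.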

Differential Revision: https://developer.blender.org/D2891
Mai Lavelle 2018-01-12 00:50:34 +01:00 committed by Brecht Van Lommel
parent 2dbcc17897
commit cce280dd67
8 changed files with 179 additions and 25 deletions

View File

@ -388,6 +388,23 @@ class CyclesRenderSettings(bpy.types.PropertyGroup):
default=12,
)
cls.dicing_camera = PointerProperty(
name="Dicing Camera",
description="Camera to use as reference point when subdividing geometry, useful to avoid crawling "
"artifacts in animations when the scene camera is moving",
type=bpy.types.Object,
poll=lambda self, obj: obj.type == 'CAMERA',
)
cls.offscreen_dicing_scale = FloatProperty(
name="Offscreen Dicing Scale",
description="Multiplier for dicing rate of geometry outside of the camera view. The dicing rate "
"of objects is gradually increased the further they are outside the camera view. "
"Lower values provide higher quality reflections and shadows for off screen objects, "
"while higher values use less memory",
min=1.0, soft_max=25.0,
default=4.0,
)
cls.film_exposure = FloatProperty(
name="Exposure",
description="Image brightness scale",

View File

@ -225,31 +225,32 @@ class CYCLES_RENDER_PT_geometry(CyclesButtonsPanel, Panel):
cscene = scene.cycles
ccscene = scene.cycles_curves
row = layout.row()
row.label("Volume Sampling:")
row = layout.row()
row.prop(cscene, "volume_step_size")
row.prop(cscene, "volume_max_steps")
layout.separator()
if cscene.feature_set == 'EXPERIMENTAL':
layout.label("Subdivision Rate:")
split = layout.split()
col = split.column()
sub = col.column(align=True)
sub.label("Volume Sampling:")
sub.prop(cscene, "volume_step_size")
sub.prop(cscene, "volume_max_steps")
col = split.column()
sub = col.column(align=True)
sub.label("Subdivision Rate:")
sub.prop(cscene, "dicing_rate", text="Render")
sub.prop(cscene, "preview_dicing_rate", text="Preview")
sub.separator()
sub.prop(cscene, "max_subdivisions")
else:
row = layout.row()
row.label("Volume Sampling:")
row = layout.row()
row.prop(cscene, "volume_step_size")
row.prop(cscene, "volume_max_steps")
col = split.column()
col.prop(cscene, "offscreen_dicing_scale", text="Offscreen Scale")
col.prop(cscene, "max_subdivisions")
layout.prop(cscene, "dicing_camera")
layout.separator()
layout.label("Hair:")
layout.prop(ccscene, "use_curves", text="Use Hair")
col = layout.column()
col.active = ccscene.use_curves

View File

@ -81,6 +81,8 @@ struct BlenderCamera {
BoundBox2D viewport_camera_border;
Transform matrix;
float offscreen_dicing_scale;
};
static void blender_camera_init(BlenderCamera *bcam,
@ -104,6 +106,7 @@ static void blender_camera_init(BlenderCamera *bcam,
bcam->pano_viewplane.top = 1.0f;
bcam->viewport_camera_border.right = 1.0f;
bcam->viewport_camera_border.top = 1.0f;
bcam->offscreen_dicing_scale = 1.0f;
/* render resolution */
bcam->full_width = render_resolution_x(b_render);
@ -353,7 +356,11 @@ static void blender_camera_viewplane(BlenderCamera *bcam,
}
}
static void blender_camera_sync(Camera *cam, BlenderCamera *bcam, int width, int height, const char *viewname)
static void blender_camera_sync(Camera *cam,
BlenderCamera *bcam,
int width, int height,
const char *viewname,
PointerRNA *cscene)
{
/* copy camera to compare later */
Camera prevcam = *cam;
@ -466,6 +473,9 @@ static void blender_camera_sync(Camera *cam, BlenderCamera *bcam, int width, int
cam->border = bcam->border;
cam->viewport_camera_border = bcam->viewport_camera_border;
bcam->offscreen_dicing_scale = RNA_float_get(cscene, "offscreen_dicing_scale");
cam->offscreen_dicing_scale = bcam->offscreen_dicing_scale;
/* set update flag */
if(cam->modified(prevcam))
cam->tag_update();
@ -525,7 +535,21 @@ void BlenderSync::sync_camera(BL::RenderSettings& b_render,
/* sync */
Camera *cam = scene->camera;
blender_camera_sync(cam, &bcam, width, height, viewname);
blender_camera_sync(cam, &bcam, width, height, viewname, &cscene);
/* dicing camera */
b_ob = BL::Object(RNA_pointer_get(&cscene, "dicing_camera"));
if(b_ob) {
BL::Array<float, 16> b_ob_matrix;
blender_camera_from_object(&bcam, b_engine, b_ob);
b_engine.camera_model_matrix(b_ob, bcam.use_spherical_stereo, b_ob_matrix);
bcam.matrix = get_transform(b_ob_matrix);
blender_camera_sync(scene->dicing_camera, &bcam, width, height, viewname, &cscene);
}
else {
*scene->dicing_camera = *cam;
}
}
void BlenderSync::sync_camera_motion(BL::RenderSettings& b_render,
@ -818,7 +842,22 @@ void BlenderSync::sync_view(BL::SpaceView3D& b_v3d,
b_v3d,
b_rv3d,
width, height);
blender_camera_sync(scene->camera, &bcam, width, height, "");
PointerRNA cscene = RNA_pointer_get(&b_scene.ptr, "cycles");
blender_camera_sync(scene->camera, &bcam, width, height, "", &cscene);
/* dicing camera */
BL::Object b_ob = BL::Object(RNA_pointer_get(&cscene, "dicing_camera"));
if(b_ob) {
BL::Array<float, 16> b_ob_matrix;
blender_camera_from_object(&bcam, b_engine, b_ob);
b_engine.camera_model_matrix(b_ob, bcam.use_spherical_stereo, b_ob_matrix);
bcam.matrix = get_transform(b_ob_matrix);
blender_camera_sync(scene->dicing_camera, &bcam, width, height, "", &cscene);
}
else {
*scene->dicing_camera = *scene->camera;
}
}
BufferParams BlenderSync::get_buffer_params(BL::RenderSettings& b_render,

View File

@ -920,8 +920,8 @@ static void create_subd_mesh(Scene *scene,
sdparams.dicing_rate = max(0.1f, RNA_float_get(&cobj, "dicing_rate") * dicing_rate);
sdparams.max_level = max_subdivisions;
scene->camera->update();
sdparams.camera = scene->camera;
scene->dicing_camera->update();
sdparams.camera = scene->dicing_camera;
sdparams.objecttoworld = get_transform(b_ob.matrix_world());
}

View File

@ -128,6 +128,8 @@ NODE_DEFINE(Camera)
SOCKET_FLOAT(border.bottom, "Border Bottom", 0);
SOCKET_FLOAT(border.top, "Border Top", 0);
SOCKET_FLOAT(offscreen_dicing_scale, "Offscreen Dicing Scale", 1.0f);
return type;
}
@ -273,6 +275,13 @@ void Camera::update()
full_dx = transform_direction(&cameratoworld, full_dx);
full_dy = transform_direction(&cameratoworld, full_dy);
if(type == CAMERA_PERSPECTIVE) {
float3 v = transform_perspective(&full_rastertocamera, make_float3(full_width, full_height, 1.0f));
frustum_right_normal = normalize(make_float3(v.z, 0.0f, -v.x));
frustum_top_normal = normalize(make_float3(0.0f, v.z, -v.y));
}
/* TODO(sergey): Support other types of camera. */
if(type == CAMERA_PERSPECTIVE) {
/* TODO(sergey): Move to an utility function and de-duplicate with
@ -581,8 +590,27 @@ BoundBox Camera::viewplane_bounds_get()
float Camera::world_to_raster_size(float3 P)
{
float res = 1.0f;
if(type == CAMERA_ORTHOGRAPHIC) {
return min(len(full_dx), len(full_dy));
res = min(len(full_dx), len(full_dy));
if(offscreen_dicing_scale > 1.0f) {
float3 p = transform_perspective(&worldtocamera, P);
float3 v = transform_perspective(&rastertocamera, make_float3(width, height, 0.0f));
/* Create point clamped to frustum */
float3 c;
c.x = max(-v.x, min(v.x, p.x));
c.y = max(-v.y, min(v.y, p.y));
c.z = max(0.0f, p.z);
float f_dist = len(p - c) / sqrtf((v.x*v.x+v.y*v.y)*0.5f);
if(f_dist > 0.0f) {
res += res * f_dist * (offscreen_dicing_scale - 1.0f);
}
}
}
else if(type == CAMERA_PERSPECTIVE) {
/* Calculate as if point is directly ahead of the camera. */
@ -597,14 +625,74 @@ float Camera::world_to_raster_size(float3 P)
/* dPdx */
float dist = len(transform_point(&worldtocamera, P));
float3 D = normalize(Ddiff);
return len(dist*dDdx - dot(dist*dDdx, D)*D);
res = len(dist*dDdx - dot(dist*dDdx, D)*D);
/* Decent approx distance to frustum (doesn't handle corners correctly, but not that big of a deal) */
float f_dist = 0.0f;
if(offscreen_dicing_scale > 1.0f) {
float3 p = transform_point(&worldtocamera, P);
/* Distance from the four planes */
float r = dot(p, frustum_right_normal);
float t = dot(p, frustum_top_normal);
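/* Mirror the point in x and y so the same right/top plane normals also give the distances to the left and bottom planes; p is restored right after. */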
p = make_float3(-p.x, -p.y, p.z);
float l = dot(p, frustum_right_normal);
float b = dot(p, frustum_top_normal);
p = make_float3(-p.x, -p.y, p.z);
if(r <= 0.0f && l <= 0.0f && t <= 0.0f && b <= 0.0f) {
/* Point is inside frustum */
f_dist = 0.0f;
}
else if(r > 0.0f && l > 0.0f && t > 0.0f && b > 0.0f) {
/* Point is behind frustum */
f_dist = len(p);
}
else {
/* Point may be behind or off to the side, need to check */
float3 along_right = make_float3(-frustum_right_normal.z, 0.0f, frustum_right_normal.x);
float3 along_left = make_float3(frustum_right_normal.z, 0.0f, frustum_right_normal.x);
float3 along_top = make_float3(0.0f, -frustum_top_normal.z, frustum_top_normal.y);
float3 along_bottom = make_float3(0.0f, frustum_top_normal.z, frustum_top_normal.y);
float dist[] = {r, l, t, b};
float3 along[] = {along_right, along_left, along_top, along_bottom};
bool test_o = false;
float *d = dist;
float3 *a = along;
for(int i = 0; i < 4; i++, d++, a++) {
/* Test if we should check this side at all */
if(*d > 0.0f) {
if(dot(p, *a) >= 0.0f) {
/* We are in front of the back edge of this side of the frustum */
f_dist = max(f_dist, *d);
}
else {
/* Possibly far enough behind the frustum to use distance to origin instead of edge */
test_o = true;
}
}
}
if(test_o) {
f_dist = (f_dist > 0) ? min(f_dist, len(p)) : len(p);
}
}
if(f_dist > 0.0f) {
res += len(dDdx - dot(dDdx, D)*D) * f_dist * (offscreen_dicing_scale - 1.0f);
}
}
}
else {
// TODO(mai): implement for CAMERA_PANORAMA
assert(!"pixel width calculation for panoramic projection not implemented yet");
}
return 1.0f;
return res;
}
CCL_NAMESPACE_END

View File

@ -129,6 +129,8 @@ public:
BoundBox2D viewplane;
/* width and height change during preview, so we need these for calculating dice rates. */
int full_width, full_height;
/* controls how fast the dicing rate falls off for geometry outside of view */
float offscreen_dicing_scale;
/* border */
BoundBox2D border;
@ -163,6 +165,9 @@ public:
float3 full_dx;
float3 full_dy;
float3 frustum_right_normal;
float3 frustum_top_normal;
/* update */
bool need_update;
bool need_device_update;

View File

@ -85,6 +85,7 @@ Scene::Scene(const SceneParams& params_, Device *device)
memset(&dscene.data, 0, sizeof(dscene.data));
camera = new Camera();
dicing_camera = new Camera();
lookup_tables = new LookupTables();
film = new Film();
background = new Background();
@ -155,6 +156,7 @@ void Scene::free_memory(bool final)
if(final) {
delete lookup_tables;
delete camera;
delete dicing_camera;
delete film;
delete background;
delete integrator;
@ -359,6 +361,7 @@ void Scene::reset()
/* ensure all objects are updated */
camera->tag_update();
dicing_camera->tag_update();
film->tag_update(this);
background->tag_update(this);
integrator->tag_update(this);

View File

@ -165,6 +165,7 @@ class Scene {
public:
/* data */
Camera *camera;
Camera *dicing_camera;
LookupTables *lookup_tables;
Film *film;
Background *background;