Merge branch 'temp-xr-virtual-camera-experiment' into xr-dev

Peter Kim 2022-11-23 13:51:44 +09:00
commit 3e725b55cf
25 changed files with 312 additions and 9 deletions

View File

@ -1216,6 +1216,7 @@ void BKE_nodetree_remove_layer_n(struct bNodeTree *ntree, struct Scene *scene, i
#define SH_NODE_COMBINE_COLOR 711
#define SH_NODE_SEPARATE_COLOR 712
#define SH_NODE_MIX 713
#define SH_NODE_VIRTUAL_MONITOR 714
/** \} */

View File

@ -41,6 +41,8 @@
#include "DEG_depsgraph_query.h"
#include "GPU_viewport.h"
#include "MEM_guardedalloc.h"
#include "BLO_read_write.h"
@ -83,10 +85,21 @@ static void camera_copy_data(Main *UNUSED(bmain), ID *id_dst, const ID *id_src,
}
/** Free (or release) any data used by this camera (does not free the camera itself). */
static void camera_free_runtime_data(Camera *camera)
{
if (camera->runtime.virtual_monitor_offscreen) {
GPU_offscreen_free(camera->runtime.virtual_monitor_offscreen);
camera->runtime.virtual_monitor_offscreen = NULL;
}
/* GPU texture is owned by the GPUOffScreen instance. */
camera->runtime.offscreen_color_texture = NULL;
}
static void camera_free_data(ID *id)
{
Camera *cam = (Camera *)id;
BLI_freelistN(&cam->bg_images);
camera_free_runtime_data(cam);
}
static void camera_foreach_id(ID *id, LibraryForeachIDData *data)
@ -137,6 +150,8 @@ static void camera_blend_read_data(BlendDataReader *reader, ID *id)
bgpic->flag &= ~CAM_BGIMG_FLAG_OVERRIDE_LIBRARY_LOCAL;
}
}
ca->runtime.virtual_monitor_offscreen = NULL;
ca->runtime.offscreen_color_texture = NULL;
}
static void camera_blend_read_lib(BlendLibReader *reader, ID *id)

View File

@ -4578,6 +4578,8 @@ static void registerShaderNodes()
register_node_type_sh_tex_pointdensity();
register_node_type_sh_tex_ies();
register_node_type_sh_tex_white_noise();
register_node_type_sh_virtual_camera();
}
static void registerTextureNodes()

View File

@ -99,6 +99,7 @@ void DRW_draw_render_loop_offscreen(struct Depsgraph *depsgraph,
bool is_image_render,
bool draw_background,
bool do_color_management,
bool is_virtual_camera,
struct GPUOffScreen *ofs,
struct GPUViewport *viewport);
void DRW_draw_render_loop_2d_ex(struct Depsgraph *depsgraph,

View File

@ -861,6 +861,12 @@ void EEVEE_materials_cache_populate(EEVEE_Data *vedata,
continue;
}
/* Virtual monitors can only be used in the main scene; skip materials that use one when rendering a virtual camera. */
if (DRW_state_is_virtual_camera() &&
GPU_material_flag_get(gpumat_array[i], GPU_MATFLAG_VIRTUAL_MONITOR)) {
continue;
}
/* Do not render surface if we are rendering a volume object
* and do not have a surface closure. */
if (use_volume_material &&

View File

@ -885,6 +885,10 @@ bool DRW_state_is_scene_render(void);
*/
bool DRW_state_is_opengl_render(void);
bool DRW_state_is_playback(void);
/**
* Whether we are rendering a virtual camera; false when rendering the main camera/viewport.
*/
bool DRW_state_is_virtual_camera(void);
/**
* Is the user navigating the region.
*/

View File

@ -1803,6 +1803,7 @@ void DRW_draw_render_loop_offscreen(struct Depsgraph *depsgraph,
const bool is_image_render,
const bool draw_background,
const bool do_color_management,
const bool is_virtual_camera,
GPUOffScreen *ofs,
GPUViewport *viewport)
{
@ -1826,6 +1827,7 @@ void DRW_draw_render_loop_offscreen(struct Depsgraph *depsgraph,
drw_state_prepare_clean_for_draw(&DST);
DST.options.is_image_render = is_image_render;
DST.options.draw_background = draw_background;
DST.options.is_virtual_camera = is_virtual_camera;
DRW_draw_render_loop_ex(depsgraph, engine_type, region, v3d, render_viewport, NULL);
if (draw_background) {
@ -2917,6 +2919,11 @@ bool DRW_state_is_opengl_render(void)
return DST.options.is_image_render && !DST.options.is_scene_render;
}
bool DRW_state_is_virtual_camera(void)
{
return DST.options.is_virtual_camera;
}
bool DRW_state_is_playback(void)
{
if (DST.draw_ctx.evil_C != NULL) {

View File

@ -612,6 +612,7 @@ typedef struct DRWManager {
uint is_depth : 1;
uint is_image_render : 1;
uint is_scene_render : 1;
uint is_virtual_camera : 1;
uint draw_background : 1;
uint draw_text : 1;
} options;

View File

@ -1835,6 +1835,11 @@ void DRW_shgroup_add_material_resources(DRWShadingGroup *grp, GPUMaterial *mater
DRW_shgroup_uniform_texture_ex(
grp, tex->sampler_name, *tex->sky, eGPUSamplerState(tex->sampler_state));
}
else if (tex->camera && tex->camera[0]) {
/* VirtualCamera */
DRW_shgroup_uniform_texture_ex(
grp, tex->sampler_name, *tex->camera, eGPUSamplerState(tex->sampler_state));
}
}
GPUUniformBuf *ubo = GPU_material_uniform_buffer_get(material);

View File

@ -39,6 +39,7 @@ void ED_view3d_draw_offscreen(struct Depsgraph *depsgraph,
const char *viewname,
bool do_color_management,
bool restore_rv3d_mats,
bool is_virtual_camera,
struct GPUOffScreen *ofs,
struct GPUViewport *viewport);
/**

View File

@ -432,6 +432,20 @@ static void node_shader_buts_tex_environment(uiLayout *layout, bContext *C, Poin
node_buts_image_user(layout, C, &iuserptr, &imaptr, &iuserptr, false, true);
}
static void node_shader_buts_virtual_monitor(uiLayout *layout, bContext *C, PointerRNA *ptr)
{
uiTemplateID(layout,
C,
ptr,
"camera",
nullptr,
nullptr,
nullptr,
UI_TEMPLATE_ID_FILTER_ALL,
false,
nullptr);
}
static void node_shader_buts_tex_environment_ex(uiLayout *layout, bContext *C, PointerRNA *ptr)
{
PointerRNA iuserptr = RNA_pointer_get(ptr, "image_user");
@ -499,6 +513,9 @@ static void node_shader_set_butfunc(bNodeType *ntype)
ntype->draw_buttons = node_shader_buts_tex_environment;
ntype->draw_buttons_ex = node_shader_buts_tex_environment_ex;
break;
case SH_NODE_VIRTUAL_MONITOR:
ntype->draw_buttons = node_shader_buts_virtual_monitor;
break;
case SH_NODE_DISPLACEMENT:
case SH_NODE_VECTOR_DISPLACEMENT:
ntype->draw_buttons = node_shader_buts_displacement;

View File

@ -7,6 +7,7 @@
#include <cmath>
#include "BLI_float4x4.hh"
#include "BLI_jitter_2d.h"
#include "BLI_listbase.h"
#include "BLI_math.h"
@ -15,6 +16,7 @@
#include "BLI_string.h"
#include "BLI_string_utils.h"
#include "BLI_threads.h"
#include "BLI_vector.hh"
#include "BKE_armature.h"
#include "BKE_camera.h"
@ -26,6 +28,7 @@
#include "BKE_key.h"
#include "BKE_layer.h"
#include "BKE_main.h"
#include "BKE_node.h"
#include "BKE_object.h"
#include "BKE_paint.h"
#include "BKE_scene.h"
@ -40,6 +43,7 @@
#include "DNA_brush_types.h"
#include "DNA_camera_types.h"
#include "DNA_key_types.h"
#include "DNA_material_types.h"
#include "DNA_mesh_types.h"
#include "DNA_object_types.h"
#include "DNA_view3d_types.h"
@ -61,6 +65,7 @@
#include "GPU_batch.h"
#include "GPU_batch_presets.h"
#include "GPU_debug.h"
#include "GPU_framebuffer.h"
#include "GPU_immediate.h"
#include "GPU_immediate_util.h"
@ -87,6 +92,9 @@
#include "view3d_intern.h" /* own include */
using blender::float4;
using blender::float4x4;
using blender::int2;
using blender::Vector;
#define M_GOLDEN_RATIO_CONJUGATE 0.618033988749895f
@ -1535,14 +1543,120 @@ void view3d_draw_region_info(const bContext *C, ARegion *region)
/** \name Draw Viewport Contents
* \{ */
static void view3d_virtual_camera_update(
Scene *scene, Depsgraph *depsgraph, View3D *v3d, ARegion *region, Object *object)
{
BLI_assert(object->type == OB_CAMERA);
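/* Hard-coded half-HD resolution for the virtual monitor render target. */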
int2 resolution(1920 / 2, 1080 / 2);
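/* Temporarily swap in a local copy of the region view data and make this object the active
 * camera; both are restored at the end of this function. */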
RegionView3D *old_rv3d = static_cast<RegionView3D *>(region->regiondata);
RegionView3D rv3d;
memcpy(&rv3d, old_rv3d, sizeof(RegionView3D));
region->regiondata = &rv3d;
Object *old_camera = v3d->camera;
v3d->camera = object;
rv3d.persp = RV3D_CAMOB;
Camera *camera = static_cast<Camera *>(object->data);
if (camera->runtime.virtual_monitor_offscreen == nullptr) {
camera->runtime.virtual_monitor_offscreen = GPU_offscreen_create(
UNPACK2(resolution), true, GPU_RGBA16F, nullptr);
}
float4x4 winmat;
// TODO: Multi-view support?
CameraParams params;
BKE_camera_params_init(&params);
/* Fallback for non-camera objects. */
params.clip_start = v3d->clip_start;
params.clip_end = v3d->clip_end;
BKE_camera_params_from_object(&params, object);
BKE_camera_params_compute_viewplane(&params, UNPACK2(resolution), scene->r.xasp, scene->r.yasp);
BKE_camera_params_compute_matrix(&params);
copy_m4_m4(winmat.ptr(), params.winmat);
GPUOffScreen *offscreen = camera->runtime.virtual_monitor_offscreen;
GPU_offscreen_bind(offscreen, true);
ED_view3d_draw_offscreen(depsgraph,
scene,
OB_MATERIAL,
v3d,
region,
UNPACK2(resolution),
nullptr,
winmat.ptr(),
false,
true,
nullptr,
false,
true,
true,
offscreen,
nullptr);
GPU_offscreen_unbind(offscreen, true);
camera->runtime.offscreen_color_texture = GPU_offscreen_color_texture(
camera->runtime.virtual_monitor_offscreen);
v3d->camera = old_camera;
region->regiondata = old_rv3d;
}
static void view3d_draw_virtual_camera(Scene *scene,
Depsgraph *depsgraph,
View3D *v3d,
ARegion *region)
{
/* TODO: Bad call! */
Main *bmain = DEG_get_bmain(depsgraph);
/* Collect all cameras in the scene that are used inside a virtual monitor. This should be
 * optimized with a tagging system: there are far more materials than cameras in a typical scene.
 */
Vector<Object *> virtual_cameras;
LISTBASE_FOREACH (Material *, material, &bmain->materials) {
if (!material->nodetree) {
continue;
}
LISTBASE_FOREACH (bNode *, node, &material->nodetree->nodes) {
if (node->type != SH_NODE_VIRTUAL_MONITOR) {
continue;
}
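/* The Virtual Monitor node stores the referenced camera object in `node->id`. */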
Object *ob = static_cast<Object *>(static_cast<void *>(node->id));
if (ob == nullptr || ob->type != OB_CAMERA) {
continue;
}
virtual_cameras.append(ob);
}
}
if (virtual_cameras.is_empty()) {
/* No cameras are used as virtual monitors, so skip updating. */
return;
}
GPU_debug_group_begin("VirtualCameras");
for (Object *object : virtual_cameras) {
view3d_virtual_camera_update(scene, depsgraph, v3d, region, object);
}
GPU_debug_group_end();
}
static void view3d_draw_view(const bContext *C, ARegion *region)
{
Scene *scene = CTX_data_scene(C);
Depsgraph *depsgraph = CTX_data_expect_evaluated_depsgraph(C);
View3D *v3d = CTX_wm_view3d(C);
view3d_draw_virtual_camera(scene, depsgraph, v3d, region);
ED_view3d_draw_setup_view(CTX_wm_manager(C),
CTX_wm_window(C),
CTX_data_expect_evaluated_depsgraph(C),
CTX_data_scene(C),
depsgraph,
scene,
region,
CTX_wm_view3d(C),
v3d,
nullptr,
nullptr,
nullptr);
@ -1640,6 +1754,7 @@ void ED_view3d_draw_offscreen(Depsgraph *depsgraph,
const char *viewname,
const bool do_color_management,
const bool restore_rv3d_mats,
const bool is_virtual_camera,
GPUOffScreen *ofs,
GPUViewport *viewport)
{
@ -1715,6 +1830,7 @@ void ED_view3d_draw_offscreen(Depsgraph *depsgraph,
is_image_render,
draw_background,
do_color_management,
is_virtual_camera,
ofs,
viewport);
DRW_cache_free_old_subdiv();
@ -1830,6 +1946,8 @@ void ED_view3d_draw_offscreen_simple(Depsgraph *depsgraph,
/* Actually not used since we pass in the projection matrix. */
v3d.lens = 0;
view3d_draw_virtual_camera(scene, depsgraph, &v3d, &ar);
ED_view3d_draw_offscreen(depsgraph,
scene,
drawtype,
@ -1844,6 +1962,7 @@ void ED_view3d_draw_offscreen_simple(Depsgraph *depsgraph,
viewname,
do_color_management,
true,
false,
ofs,
viewport);
}
@ -1967,6 +2086,7 @@ ImBuf *ED_view3d_draw_offscreen_imbuf(Depsgraph *depsgraph,
viewname,
do_color_management,
restore_rv3d_mats,
false,
ofs,
nullptr);

View File

@ -451,6 +451,7 @@ set(GLSL_SRC
shaders/material/gpu_shader_material_vector_rotate.glsl
shaders/material/gpu_shader_material_velvet.glsl
shaders/material/gpu_shader_material_vertex_color.glsl
shaders/material/gpu_shader_material_virtual_camera.glsl
shaders/material/gpu_shader_material_volume_absorption.glsl
shaders/material/gpu_shader_material_volume_principled.glsl
shaders/material/gpu_shader_material_volume_scatter.glsl

View File

@ -35,6 +35,7 @@ struct Material;
struct Scene;
struct bNode;
struct bNodeTree;
struct Camera;
typedef struct GPUMaterial GPUMaterial;
typedef struct GPUNode GPUNode;
@ -82,6 +83,8 @@ typedef enum eGPUMaterialFlag {
GPU_MATFLAG_OBJECT_INFO = (1 << 10),
GPU_MATFLAG_AOV = (1 << 11),
GPU_MATFLAG_VIRTUAL_MONITOR = (1 << 12),
GPU_MATFLAG_BARYCENTRIC = (1 << 20),
/* Optimization to only add the branches of the principled shader that are necessary. */
@ -178,6 +181,9 @@ GPUNodeLink *GPU_image_sky(GPUMaterial *mat,
const float *pixels,
float *layer,
eGPUSamplerState sampler_state);
GPUNodeLink *GPU_image_camera(GPUMaterial *mat,
struct Camera *camera,
eGPUSamplerState sampler_state);
GPUNodeLink *GPU_color_band(GPUMaterial *mat, int size, float *pixels, float *row);
/**
@ -322,6 +328,7 @@ typedef struct GPUMaterialTexture {
bool iuser_available;
struct GPUTexture **colorband;
struct GPUTexture **sky;
struct GPUTexture **camera;
char sampler_name[32]; /* Name of sampler in GLSL. */
char tiled_mapping_name[32]; /* Name of tile mapping sampler in GLSL. */
int users;

View File

@ -400,6 +400,10 @@ void GPUCodegen::generate_resources()
const char *name = info.name_buffer.append_sampler_name(tex->sampler_name);
info.sampler(0, ImageType::FLOAT_2D_ARRAY, name, Frequency::BATCH);
}
else if (tex->camera && tex->camera[0]) {
const char *name = info.name_buffer.append_sampler_name(tex->sampler_name);
info.sampler(slot++, ImageType::FLOAT_2D, name, Frequency::BATCH);
}
else if (tex->tiled_mapping_name[0] != '\0') {
const char *name = info.name_buffer.append_sampler_name(tex->sampler_name);
info.sampler(slot++, ImageType::FLOAT_2D_ARRAY, name, Frequency::BATCH);

View File

@ -12,6 +12,7 @@
#include "MEM_guardedalloc.h"
#include "DNA_camera_types.h"
#include "DNA_node_types.h"
#include "BLI_ghash.h"
@ -19,6 +20,7 @@
#include "BLI_string.h"
#include "BLI_utildefines.h"
#include "GPU_framebuffer.h"
#include "GPU_texture.h"
#include "GPU_vertex_format.h"
@ -114,6 +116,7 @@ static void gpu_node_input_link(GPUNode *node, GPUNodeLink *link, const eGPUType
link->users++;
break;
case GPU_NODE_LINK_IMAGE:
case GPU_NODE_LINK_IMAGE_CAMERA:
case GPU_NODE_LINK_IMAGE_TILED:
case GPU_NODE_LINK_IMAGE_SKY:
case GPU_NODE_LINK_COLORBAND:
@ -476,6 +479,7 @@ static GPUMaterialTexture *gpu_node_graph_add_texture(GPUNodeGraph *graph,
ImageUser *iuser,
struct GPUTexture **colorband,
struct GPUTexture **sky,
struct GPUTexture **camera,
GPUNodeLinkType link_type,
eGPUSamplerState sampler_state)
{
@ -484,7 +488,7 @@ static GPUMaterialTexture *gpu_node_graph_add_texture(GPUNodeGraph *graph,
GPUMaterialTexture *tex = static_cast<GPUMaterialTexture *>(graph->textures.first);
for (; tex; tex = tex->next) {
if (tex->ima == ima && tex->colorband == colorband && tex->sky == sky &&
tex->sampler_state == sampler_state) {
tex->camera == camera && tex->sampler_state == sampler_state) {
break;
}
num_textures++;
@ -500,6 +504,7 @@ static GPUMaterialTexture *gpu_node_graph_add_texture(GPUNodeGraph *graph,
}
tex->colorband = colorband;
tex->sky = sky;
tex->camera = camera;
tex->sampler_state = sampler_state;
BLI_snprintf(tex->sampler_name, sizeof(tex->sampler_name), "samp%d", num_textures);
if (ELEM(link_type, GPU_NODE_LINK_IMAGE_TILED, GPU_NODE_LINK_IMAGE_TILED_MAPPING)) {
@ -631,7 +636,24 @@ GPUNodeLink *GPU_image(GPUMaterial *mat,
GPUNodeLink *link = gpu_node_link_create();
link->link_type = GPU_NODE_LINK_IMAGE;
link->texture = gpu_node_graph_add_texture(
graph, ima, iuser, nullptr, nullptr, link->link_type, sampler_state);
graph, ima, iuser, nullptr, nullptr, nullptr, link->link_type, sampler_state);
return link;
}
GPUNodeLink *GPU_image_camera(GPUMaterial *mat, Camera *camera, eGPUSamplerState sampler_state)
{
GPUNodeGraph *graph = gpu_material_node_graph(mat);
GPUNodeLink *link = gpu_node_link_create();
link->link_type = GPU_NODE_LINK_IMAGE_CAMERA;
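/* Reference the camera's runtime color-texture pointer; the texture itself is created and
 * filled when the viewport updates the virtual cameras. */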
link->texture = gpu_node_graph_add_texture(graph,
nullptr,
nullptr,
nullptr,
nullptr,
&camera->runtime.offscreen_color_texture,
link->link_type,
sampler_state);
GPU_material_flag_set(mat, GPU_MATFLAG_VIRTUAL_MONITOR);
return link;
}
@ -648,7 +670,7 @@ GPUNodeLink *GPU_image_sky(GPUMaterial *mat,
GPUNodeLink *link = gpu_node_link_create();
link->link_type = GPU_NODE_LINK_IMAGE_SKY;
link->texture = gpu_node_graph_add_texture(
graph, nullptr, nullptr, nullptr, sky, link->link_type, sampler_state);
graph, nullptr, nullptr, nullptr, sky, nullptr, link->link_type, sampler_state);
return link;
}
@ -661,7 +683,7 @@ GPUNodeLink *GPU_image_tiled(GPUMaterial *mat,
GPUNodeLink *link = gpu_node_link_create();
link->link_type = GPU_NODE_LINK_IMAGE_TILED;
link->texture = gpu_node_graph_add_texture(
graph, ima, iuser, nullptr, nullptr, link->link_type, sampler_state);
graph, ima, iuser, nullptr, nullptr, nullptr, link->link_type, sampler_state);
return link;
}
@ -671,7 +693,7 @@ GPUNodeLink *GPU_image_tiled_mapping(GPUMaterial *mat, Image *ima, ImageUser *iu
GPUNodeLink *link = gpu_node_link_create();
link->link_type = GPU_NODE_LINK_IMAGE_TILED_MAPPING;
link->texture = gpu_node_graph_add_texture(
graph, ima, iuser, nullptr, nullptr, link->link_type, GPU_SAMPLER_MAX);
graph, ima, iuser, nullptr, nullptr, nullptr, link->link_type, GPU_SAMPLER_MAX);
return link;
}
@ -684,7 +706,7 @@ GPUNodeLink *GPU_color_band(GPUMaterial *mat, int size, float *pixels, float *ro
GPUNodeLink *link = gpu_node_link_create();
link->link_type = GPU_NODE_LINK_COLORBAND;
link->texture = gpu_node_graph_add_texture(
graph, nullptr, nullptr, colorband, nullptr, link->link_type, GPU_SAMPLER_MAX);
graph, nullptr, nullptr, colorband, nullptr, nullptr, link->link_type, GPU_SAMPLER_MAX);
return link;
}

View File

@ -50,6 +50,7 @@ typedef enum {
GPU_NODE_LINK_IMAGE_TILED,
GPU_NODE_LINK_IMAGE_TILED_MAPPING,
GPU_NODE_LINK_IMAGE_SKY,
GPU_NODE_LINK_IMAGE_CAMERA,
GPU_NODE_LINK_OUTPUT,
GPU_NODE_LINK_UNIFORM,
GPU_NODE_LINK_DIFFERENTIATE_FLOAT_FN,

View File

@ -0,0 +1,11 @@
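/* Fallback when the Virtual Monitor node has no valid camera: output transparent black. */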
void node_virtual_camera_empty(vec3 co, out vec4 color, out float alpha)
{
color = vec4(0.0);
alpha = 0.0;
}
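/* Sample the virtual camera's offscreen color texture with the incoming coordinates. */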
void node_virtual_camera(vec3 co, sampler2D ima, out vec4 color, out float alpha)
{
color = texture(ima, co.xy);
alpha = color.a;
}

View File

@ -71,6 +71,10 @@ typedef struct Camera_Runtime {
float drw_depth[2];
float drw_focusmat[4][4];
float drw_normalmat[4][4];
struct GPUOffScreen *virtual_monitor_offscreen;
/* Local non-owning reference to the GPU texture; used to have a reliable pointer to the texture. */
struct GPUTexture *offscreen_color_texture;
} Camera_Runtime;
typedef struct Camera {

View File

@ -5920,6 +5920,22 @@ static void def_sh_tex_coord(StructRNA *srna)
RNA_def_property_update(prop, NC_NODE | NA_EDITED, "rna_Node_update");
}
static void def_sh_virtual_monitor(StructRNA *srna)
{
PropertyRNA *prop;
prop = RNA_def_property(srna, "camera", PROP_POINTER, PROP_NONE);
RNA_def_property_flag(prop, PROP_EDITABLE);
RNA_def_property_override_flag(prop, PROPOVERRIDE_OVERRIDABLE_LIBRARY);
RNA_def_property_pointer_sdna(prop, NULL, "id");
RNA_def_property_pointer_funcs(prop, NULL, NULL, NULL, "rna_Camera_object_poll");
RNA_def_property_struct_type(prop, "Object");
RNA_def_property_ui_text(prop, "Camera", "");
RNA_def_property_update(prop, NC_NODE | NA_EDITED, "rna_Node_update");
/* TODO: add a poll function that shows all cameras in the active scene and view layer,
 * excluding the scene camera. */
}
static void def_sh_vect_transform(StructRNA *srna)
{
static const EnumPropertyItem prop_vect_type_items[] = {

View File

@ -129,6 +129,8 @@ void register_node_type_sh_bump(void);
void register_node_type_sh_tex_ies(void);
void register_node_type_sh_tex_white_noise(void);
void register_node_type_sh_virtual_camera(void);
void register_node_type_sh_custom_group(bNodeType *ntype);
struct bNodeTreeExec *ntreeShaderBeginExecTree(struct bNodeTree *ntree);

View File

@ -123,6 +123,7 @@ DefNode(ShaderNode, SH_NODE_CURVE_FLOAT, def_float_curve, "CUR
DefNode(ShaderNode, SH_NODE_COMBINE_COLOR, def_sh_combsep_color, "COMBINE_COLOR", CombineColor, "Combine Color", "Create a color from individual components using multiple models")
DefNode(ShaderNode, SH_NODE_SEPARATE_COLOR, def_sh_combsep_color, "SEPARATE_COLOR", SeparateColor, "Separate Color", "Split a color into its individual components using multiple models")
DefNode(ShaderNode, SH_NODE_MIX, def_sh_mix, "MIX", Mix, "Mix", "Mix values by a factor")
DefNode(ShaderNode, SH_NODE_VIRTUAL_MONITOR, def_sh_virtual_monitor, "VIRTUAL_MONITOR", VirtualMonitor, "Virtual Monitor", "Use the feed from a virtual camera (has to be in the same scene and view layer)")
DefNode(CompositorNode, CMP_NODE_VIEWER, def_cmp_viewer, "VIEWER", Viewer, "Viewer", "" )
DefNode(CompositorNode, CMP_NODE_RGB, 0, "RGB", RGB, "RGB", "" )

View File

@ -113,6 +113,7 @@ set(SRC
nodes/node_shader_vector_rotate.cc
nodes/node_shader_vector_transform.cc
nodes/node_shader_vertex_color.cc
nodes/node_shader_virtual_monitor.cc
nodes/node_shader_volume_absorption.cc
nodes/node_shader_volume_info.cc
nodes/node_shader_volume_principled.cc

View File

@ -0,0 +1,52 @@
/* SPDX-License-Identifier: GPL-2.0-or-later
* Copyright 2005 Blender Foundation. All rights reserved. */
#include "node_shader_util.hh"
#include "DNA_camera_types.h"
#include "DEG_depsgraph_query.h"
namespace blender::nodes::node_shader_virtual_monitor_cc {
static void sh_node_virtual_monitor_declare(NodeDeclarationBuilder &b)
{
b.add_input<decl::Vector>(N_("Vector")).implicit_field(implicit_field_inputs::position);
b.add_output<decl::Color>(N_("Color")).no_muted_links();
b.add_output<decl::Float>(N_("Alpha")).no_muted_links();
}
static int node_shader_gpu_virtual_monitor(GPUMaterial *mat,
bNode *node,
bNodeExecData * /*execdata*/,
GPUNodeStack *in,
GPUNodeStack *out)
{
Object *object = (Object *)node->id;
if (object == nullptr || object->type != OB_CAMERA) {
return GPU_stack_link(mat, node, "node_virtual_camera_empty", in, out);
}
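/* Look up the original camera data-block; its runtime data holds the offscreen color texture. */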
Object *orig_object = DEG_get_original_object(object);
Camera *cam = static_cast<Camera *>(orig_object->data);
node_shader_gpu_default_tex_coord(mat, node, &in[0].link);
return GPU_stack_link(
mat, node, "node_virtual_camera", in, out, GPU_image_camera(mat, cam, GPU_SAMPLER_DEFAULT));
}
} // namespace blender::nodes::node_shader_virtual_monitor_cc
void register_node_type_sh_virtual_camera()
{
namespace file_ns = blender::nodes::node_shader_virtual_monitor_cc;
static bNodeType ntype;
sh_node_type_base(&ntype, SH_NODE_VIRTUAL_MONITOR, "Virtual Monitor", NODE_CLASS_INPUT);
ntype.declare = file_ns::sh_node_virtual_monitor_declare;
ntype.gpu_fn = file_ns::node_shader_gpu_virtual_monitor;
nodeRegisterType(&ntype);
}

View File

@ -398,6 +398,7 @@ static PyObject *pygpu_offscreen_draw_view3d(BPyGPUOffScreen *self, PyObject *ar
"",
do_color_management,
true,
false,
self->ofs,
self->viewport);