Blender V3.3
sync.cpp
1 /* SPDX-License-Identifier: Apache-2.0
2  * Copyright 2011-2022 Blender Foundation */
3 
4 #include "scene/background.h"
5 #include "scene/camera.h"
6 #include "scene/curves.h"
7 #include "scene/film.h"
8 #include "scene/integrator.h"
9 #include "scene/light.h"
10 #include "scene/mesh.h"
11 #include "scene/object.h"
12 #include "scene/procedural.h"
13 #include "scene/scene.h"
14 #include "scene/shader.h"
15 #include "scene/shader_graph.h"
16 #include "scene/shader_nodes.h"
17 
18 #include "device/device.h"
19 
20 #include "blender/device.h"
21 #include "blender/session.h"
22 #include "blender/sync.h"
23 #include "blender/util.h"
24 
25 #include "util/debug.h"
26 #include "util/foreach.h"
27 #include "util/hash.h"
28 #include "util/log.h"
29 #include "util/opengl.h"
30 #include "util/openimagedenoise.h"
31 
32 CCL_NAMESPACE_BEGIN
33 
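/* Pass names that start with this prefix are mapped to PASS_CRYPTOMATTE in get_blender_pass_type() below. */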
34 static const char *cryptomatte_prefix = "Crypto";
35 
36 /* Constructor */
37 
38 BlenderSync::BlenderSync(BL::RenderEngine &b_engine,
39  BL::BlendData &b_data,
40  BL::Scene &b_scene,
41  Scene *scene,
42  bool preview,
43  bool use_developer_ui,
44  Progress &progress)
45  : b_engine(b_engine),
46  b_data(b_data),
47  b_scene(b_scene),
48  shader_map(scene),
49  object_map(scene),
50  procedural_map(scene),
51  geometry_map(scene),
52  light_map(scene),
53  particle_system_map(scene),
54  world_map(NULL),
55  world_recalc(false),
56  scene(scene),
57  preview(preview),
58  experimental(false),
59  use_developer_ui(use_developer_ui),
60  dicing_rate(1.0f),
61  max_subdivisions(12),
62  progress(progress),
63  has_updates_(true)
64 {
65  PointerRNA cscene = RNA_pointer_get(&b_scene.ptr, "cycles");
66  dicing_rate = preview ? RNA_float_get(&cscene, "preview_dicing_rate") :
67  RNA_float_get(&cscene, "dicing_rate");
68  max_subdivisions = RNA_int_get(&cscene, "max_subdivisions");
69 }
70 
71 BlenderSync::~BlenderSync()
72 {
73 }
74 
75 void BlenderSync::reset(BL::BlendData &b_data, BL::Scene &b_scene)
76 {
77  /* Update data and scene pointers in case they change in session reset,
78  * for example after undo.
79  * Note that we do not modify the `has_updates_` flag here because the sync
80  * reset is also used during viewport navigation. */
81  this->b_data = b_data;
82  this->b_scene = b_scene;
83 }
84 
85 void BlenderSync::tag_update()
86 {
87  has_updates_ = true;
88 }
89 
90 /* Sync */
91 
92 void BlenderSync::sync_recalc(BL::Depsgraph &b_depsgraph, BL::SpaceView3D &b_v3d)
93 {
94  /* Sync recalc flags from Blender to Cycles. The actual update is done separately,
95  * so we can do it later on if doing it immediately is not suitable. */
96 
97  if (experimental) {
98  /* Mark all meshes as needing to be exported again if dicing changed. */
99  PointerRNA cscene = RNA_pointer_get(&b_scene.ptr, "cycles");
100  bool dicing_prop_changed = false;
101 
102  float updated_dicing_rate = preview ? RNA_float_get(&cscene, "preview_dicing_rate") :
103  RNA_float_get(&cscene, "dicing_rate");
104 
105  if (dicing_rate != updated_dicing_rate) {
106  dicing_rate = updated_dicing_rate;
107  dicing_prop_changed = true;
108  }
109 
110  int updated_max_subdivisions = RNA_int_get(&cscene, "max_subdivisions");
111 
112  if (max_subdivisions != updated_max_subdivisions) {
113  max_subdivisions = updated_max_subdivisions;
114  dicing_prop_changed = true;
115  }
116 
117  if (dicing_prop_changed) {
118  has_updates_ = true;
119 
120  for (const pair<const GeometryKey, Geometry *> &iter : geometry_map.key_to_scene_data()) {
121  Geometry *geom = iter.second;
122  if (geom->is_mesh()) {
123  Mesh *mesh = static_cast<Mesh *>(geom);
124  if (mesh->get_subdivision_type() != Mesh::SUBDIVISION_NONE) {
125  PointerRNA id_ptr;
126  RNA_id_pointer_create((::ID *)iter.first.id, &id_ptr);
127  geometry_map.set_recalc(BL::ID(id_ptr));
128  }
129  }
130  }
131  }
132  }
133 
134  /* Iterate over all IDs in this depsgraph. */
135  for (BL::DepsgraphUpdate &b_update : b_depsgraph.updates) {
136  /* TODO(sergey): Can do more selective filter here. For example, ignore changes made to
137  * screen datablock. Note that sync_data() needs to be called after object deletion, and
138  * currently this is ensured by the scene ID tagged for update, which sets the `has_updates_`
139  * flag. */
140  has_updates_ = true;
141 
142  BL::ID b_id(b_update.id());
143 
144  /* Material */
145  if (b_id.is_a(&RNA_Material)) {
146  BL::Material b_mat(b_id);
147  shader_map.set_recalc(b_mat);
148  }
149  /* Light */
150  else if (b_id.is_a(&RNA_Light)) {
151  BL::Light b_light(b_id);
152  shader_map.set_recalc(b_light);
153  }
154  /* Object */
155  else if (b_id.is_a(&RNA_Object)) {
156  BL::Object b_ob(b_id);
157  const bool can_have_geometry = object_can_have_geometry(b_ob);
158  const bool is_light = !can_have_geometry && object_is_light(b_ob);
159 
160  if (b_ob.is_instancer() && b_update.is_updated_shading()) {
161  /* Needed for e.g. object color updates on instancer. */
162  object_map.set_recalc(b_ob);
163  }
164 
165  if (can_have_geometry || is_light) {
166  const bool updated_geometry = b_update.is_updated_geometry();
167 
168  /* Geometry (mesh, hair, volume). */
169  if (can_have_geometry) {
170  if (b_update.is_updated_transform() || b_update.is_updated_shading()) {
171  object_map.set_recalc(b_ob);
172  }
173 
174  if (updated_geometry ||
175  (object_subdivision_type(b_ob, preview, experimental) != Mesh::SUBDIVISION_NONE)) {
176  BL::ID key = BKE_object_is_modified(b_ob) ? b_ob : b_ob.data();
177  geometry_map.set_recalc(key);
178 
179  /* Sync all contained geometry instances as well when the object changed. */
180  map<void *, set<BL::ID>>::const_iterator instance_geometries =
181  instance_geometries_by_object.find(b_ob.ptr.data);
182  if (instance_geometries != instance_geometries_by_object.end()) {
183  for (BL::ID geometry : instance_geometries->second) {
184  geometry_map.set_recalc(geometry);
185  }
186  }
187  }
188 
189  if (updated_geometry) {
190  BL::Object::particle_systems_iterator b_psys;
191  for (b_ob.particle_systems.begin(b_psys); b_psys != b_ob.particle_systems.end();
192  ++b_psys) {
193  particle_system_map.set_recalc(b_ob);
194  }
195  }
196  }
197  /* Light */
198  else if (is_light) {
199  if (b_update.is_updated_transform() || b_update.is_updated_shading()) {
200  object_map.set_recalc(b_ob);
201  light_map.set_recalc(b_ob);
202  }
203 
204  if (updated_geometry) {
205  light_map.set_recalc(b_ob);
206  }
207  }
208  }
209  }
210  /* Mesh */
211  else if (b_id.is_a(&RNA_Mesh)) {
212  BL::Mesh b_mesh(b_id);
213  geometry_map.set_recalc(b_mesh);
214  }
215  /* World */
216  else if (b_id.is_a(&RNA_World)) {
217  BL::World b_world(b_id);
218  if (world_map == b_world.ptr.data) {
219  world_recalc = true;
220  }
221  }
222  /* Volume */
223  else if (b_id.is_a(&RNA_Volume)) {
224  BL::Volume b_volume(b_id);
225  geometry_map.set_recalc(b_volume);
226  }
227  }
228 
229  if (b_v3d) {
230  BlenderViewportParameters new_viewport_parameters(b_v3d, use_developer_ui);
231 
232  if (viewport_parameters.shader_modified(new_viewport_parameters)) {
233  world_recalc = true;
234  has_updates_ = true;
235  }
236 
237  has_updates_ |= viewport_parameters.modified(new_viewport_parameters);
238  }
239 }
240 
241 void BlenderSync::sync_data(BL::RenderSettings &b_render,
242  BL::Depsgraph &b_depsgraph,
243  BL::SpaceView3D &b_v3d,
244  BL::Object &b_override,
245  int width,
246  int height,
247  void **python_thread_state)
248 {
249  /* For auto refresh images. */
250  ImageManager *image_manager = scene->image_manager;
251  const int frame = b_scene.frame_current();
252  const bool auto_refresh_update = image_manager->set_animation_frame_update(frame);
253 
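 /* Nothing changed since the last sync and no image needs an animation-frame refresh, so the
  * full sync can be skipped. */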
254  if (!has_updates_ && !auto_refresh_update) {
255  return;
256  }
257 
258  scoped_timer timer;
259 
260  BL::ViewLayer b_view_layer = b_depsgraph.view_layer_eval();
261 
262  /* TODO(sergey): This feels weak to pass view layer to the integrator, and even weaker to have an
263  * implicit check on whether it is a background render or not. What is the nicer thing here? */
264  const bool background = !b_v3d;
265 
266  sync_view_layer(b_view_layer);
267  sync_integrator(b_view_layer, background);
268  sync_film(b_view_layer, b_v3d);
269  sync_shaders(b_depsgraph, b_v3d, auto_refresh_update);
270  sync_images();
271 
272  geometry_synced.clear(); /* used for objects and motion sync */
273 
274  if (scene->need_motion() == Scene::MOTION_PASS || scene->need_motion() == Scene::MOTION_NONE ||
275  scene->camera->get_motion_position() == MOTION_POSITION_CENTER) {
276  sync_objects(b_depsgraph, b_v3d);
277  }
278  sync_motion(b_render, b_depsgraph, b_v3d, b_override, width, height, python_thread_state);
279 
280  geometry_synced.clear();
281 
282  /* Shader sync is done at the end, since object sync uses it.
283  * false = don't delete unused shaders, as deletion is not supported. */
284  shader_map.post_sync(false);
285 
286  free_data_after_sync(b_depsgraph);
287 
288  VLOG_INFO << "Total time spent synchronizing data: " << timer.get_time();
289 
290  has_updates_ = false;
291 }
292 
293 /* Integrator */
294 
295 void BlenderSync::sync_integrator(BL::ViewLayer &b_view_layer, bool background)
296 {
297  PointerRNA cscene = RNA_pointer_get(&b_scene.ptr, "cycles");
298 
299  experimental = (get_enum(cscene, "feature_set") != 0);
300 
301  Integrator *integrator = scene->integrator;
302 
303  integrator->set_min_bounce(get_int(cscene, "min_light_bounces"));
304  integrator->set_max_bounce(get_int(cscene, "max_bounces"));
305 
306  integrator->set_max_diffuse_bounce(get_int(cscene, "diffuse_bounces"));
307  integrator->set_max_glossy_bounce(get_int(cscene, "glossy_bounces"));
308  integrator->set_max_transmission_bounce(get_int(cscene, "transmission_bounces"));
309  integrator->set_max_volume_bounce(get_int(cscene, "volume_bounces"));
310 
311  integrator->set_transparent_min_bounce(get_int(cscene, "min_transparent_bounces"));
312  integrator->set_transparent_max_bounce(get_int(cscene, "transparent_max_bounces"));
313 
314  integrator->set_volume_max_steps(get_int(cscene, "volume_max_steps"));
315  float volume_step_rate = (preview) ? get_float(cscene, "volume_preview_step_rate") :
316  get_float(cscene, "volume_step_rate");
317  integrator->set_volume_step_rate(volume_step_rate);
318 
319  integrator->set_caustics_reflective(get_boolean(cscene, "caustics_reflective"));
320  integrator->set_caustics_refractive(get_boolean(cscene, "caustics_refractive"));
321  integrator->set_filter_glossy(get_float(cscene, "blur_glossy"));
322 
323  int seed = get_int(cscene, "seed");
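 /* With an animated seed, the frame number (and subframe, if any) is hashed together with the
  * user seed below, so the sampling noise pattern changes from frame to frame. */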
324  if (get_boolean(cscene, "use_animated_seed")) {
325  seed = hash_uint2(b_scene.frame_current(), get_int(cscene, "seed"));
326  if (b_scene.frame_subframe() != 0.0f) {
327  /* TODO(sergey): Ideally should be some sort of hash_merge,
328  * but this is good enough for now.
329  */
330  seed += hash_uint2((int)(b_scene.frame_subframe() * (float)INT_MAX),
331  get_int(cscene, "seed"));
332  }
333  }
334 
335  integrator->set_seed(seed);
336 
337  integrator->set_sample_clamp_direct(get_float(cscene, "sample_clamp_direct"));
338  integrator->set_sample_clamp_indirect(get_float(cscene, "sample_clamp_indirect"));
339  if (!preview) {
340  integrator->set_motion_blur(view_layer.use_motion_blur);
341  }
342 
343  integrator->set_light_sampling_threshold(get_float(cscene, "light_sampling_threshold"));
344 
345  SamplingPattern sampling_pattern = (SamplingPattern)get_enum(
346  cscene, "sampling_pattern", SAMPLING_NUM_PATTERNS, SAMPLING_PATTERN_SOBOL);
347  integrator->set_sampling_pattern(sampling_pattern);
348 
349  int samples = 1;
350  bool use_adaptive_sampling = false;
351  if (preview) {
352  samples = get_int(cscene, "preview_samples");
353  use_adaptive_sampling = RNA_boolean_get(&cscene, "use_preview_adaptive_sampling");
354  integrator->set_use_adaptive_sampling(use_adaptive_sampling);
355  integrator->set_adaptive_threshold(get_float(cscene, "preview_adaptive_threshold"));
356  integrator->set_adaptive_min_samples(get_int(cscene, "preview_adaptive_min_samples"));
357  }
358  else {
359  samples = get_int(cscene, "samples");
360  use_adaptive_sampling = RNA_boolean_get(&cscene, "use_adaptive_sampling");
361  integrator->set_use_adaptive_sampling(use_adaptive_sampling);
362  integrator->set_adaptive_threshold(get_float(cscene, "adaptive_threshold"));
363  integrator->set_adaptive_min_samples(get_int(cscene, "adaptive_min_samples"));
364  }
365 
366  float scrambling_distance = get_float(cscene, "scrambling_distance");
367  bool auto_scrambling_distance = get_boolean(cscene, "auto_scrambling_distance");
368  if (auto_scrambling_distance) {
369  if (samples == 0) {
370  /* If samples is 0, then viewport rendering is set to render infinitely. In that case we
371  * override the samples value with 4096 so the Automatic Scrambling Distance algorithm
372  * picks a Scrambling Distance value with a good balance of performance and correlation
373  * artifacts when rendering to high sample counts. */
374  samples = 4096;
375  }
376 
377  if (use_adaptive_sampling) {
378  /* If Adaptive Sampling is enabled, use "min_samples" in the Automatic Scrambling Distance
379  * algorithm to avoid artifacts common with Adaptive Sampling + Scrambling Distance. */
380  const AdaptiveSampling adaptive_sampling = integrator->get_adaptive_sampling();
381  samples = min(samples, adaptive_sampling.min_samples);
382  }
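 /* Scale so the effective scrambling distance shrinks roughly with the square root of the
  * sample count; e.g. at 4096 samples the user value is multiplied by 4 / sqrt(4096) = 0.0625. */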
383  scrambling_distance *= 4.0f / sqrtf(samples);
384  }
385 
386  /* Only use scrambling distance in the viewport if user wants to. */
387  bool preview_scrambling_distance = get_boolean(cscene, "preview_scrambling_distance");
388  if (preview && !preview_scrambling_distance) {
389  scrambling_distance = 1.0f;
390  }
391 
392  if (scrambling_distance != 1.0f) {
393  VLOG_INFO << "Using scrambling distance: " << scrambling_distance;
394  }
395  integrator->set_scrambling_distance(scrambling_distance);
396 
397  if (get_boolean(cscene, "use_fast_gi")) {
398  if (preview) {
399  integrator->set_ao_bounces(get_int(cscene, "ao_bounces"));
400  }
401  else {
402  integrator->set_ao_bounces(get_int(cscene, "ao_bounces_render"));
403  }
404  }
405  else {
406  integrator->set_ao_bounces(0);
407  }
408 
409 #ifdef WITH_CYCLES_DEBUG
410  DirectLightSamplingType direct_light_sampling_type = (DirectLightSamplingType)get_enum(
411  cscene, "direct_light_sampling_type", DIRECT_LIGHT_SAMPLING_NUM, DIRECT_LIGHT_SAMPLING_MIS);
412  integrator->set_direct_light_sampling_type(direct_light_sampling_type);
413 #endif
414 
415  DenoiseParams denoise_params = get_denoise_params(b_scene, b_view_layer, background);
416 
417  /* No denoising support for vertex color baking: vertices packed into the image
418  * buffer have no relation to their neighbors. */
419  if (scene->bake_manager->get_baking() &&
420  b_scene.render().bake().target() != BL::BakeSettings::target_IMAGE_TEXTURES) {
421  denoise_params.use = false;
422  }
423 
424  integrator->set_use_denoise(denoise_params.use);
425 
426  /* Only update denoiser parameters if the denoiser is actually used. This allows tweaking
427  * denoiser parameters before enabling it without resetting the render on every change. The
428  * downside is that the interface and the integrator are technically out of sync. */
429  if (denoise_params.use) {
430  integrator->set_denoiser_type(denoise_params.type);
431  integrator->set_denoise_start_sample(denoise_params.start_sample);
432  integrator->set_use_denoise_pass_albedo(denoise_params.use_pass_albedo);
433  integrator->set_use_denoise_pass_normal(denoise_params.use_pass_normal);
434  integrator->set_denoiser_prefilter(denoise_params.prefilter);
435  }
436 
437  /* UPDATE_NONE as we don't want to tag the integrator as modified (this was done by the
438  * set calls above), but we need to make sure that the dependent things are tagged. */
439  integrator->tag_update(scene, Integrator::UPDATE_NONE);
440 }
441 
442 /* Film */
443 
444 void BlenderSync::sync_film(BL::ViewLayer &b_view_layer, BL::SpaceView3D &b_v3d)
445 {
446  PointerRNA cscene = RNA_pointer_get(&b_scene.ptr, "cycles");
447  PointerRNA crl = RNA_pointer_get(&b_view_layer.ptr, "cycles");
448 
449  Film *film = scene->film;
450 
451  if (b_v3d) {
452  const BlenderViewportParameters new_viewport_parameters(b_v3d, use_developer_ui);
453  film->set_display_pass(new_viewport_parameters.display_pass);
454  film->set_show_active_pixels(new_viewport_parameters.show_active_pixels);
455  }
456 
457  film->set_exposure(get_float(cscene, "film_exposure"));
458  film->set_filter_type(
459  (FilterType)get_enum(cscene, "pixel_filter_type", FILTER_NUM_TYPES, FILTER_BLACKMAN_HARRIS));
460  float filter_width = (film->get_filter_type() == FILTER_BOX) ? 1.0f :
461  get_float(cscene, "filter_width");
462  film->set_filter_width(filter_width);
463 
464  if (b_scene.world()) {
465  BL::WorldMistSettings b_mist = b_scene.world().mist_settings();
466 
467  film->set_mist_start(b_mist.start());
468  film->set_mist_depth(b_mist.depth());
469 
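 /* Map Blender's mist falloff enum to the numeric falloff value used by the mist pass:
  * quadratic = 2.0, linear = 1.0, inverse quadratic = 0.5. */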
470  switch (b_mist.falloff()) {
471  case BL::WorldMistSettings::falloff_QUADRATIC:
472  film->set_mist_falloff(2.0f);
473  break;
474  case BL::WorldMistSettings::falloff_LINEAR:
475  film->set_mist_falloff(1.0f);
476  break;
477  case BL::WorldMistSettings::falloff_INVERSE_QUADRATIC:
478  film->set_mist_falloff(0.5f);
479  break;
480  }
481  }
482 
483  /* Blender viewport does not support proper shadow catcher compositing, so force an approximate
484  * mode to improve visual feedback. */
485  if (b_v3d) {
486  film->set_use_approximate_shadow_catcher(true);
487  }
488  else {
489  film->set_use_approximate_shadow_catcher(!get_boolean(crl, "use_pass_shadow_catcher"));
490  }
491 }
492 
493 /* Render Layer */
494 
496 {
497  view_layer.name = b_view_layer.name();
498 
499  /* Filter. */
500  view_layer.use_background_shader = b_view_layer.use_sky();
501  /* Always enable surfaces for baking, otherwise there is nothing to bake to. */
502  view_layer.use_surfaces = b_view_layer.use_solid() || scene->bake_manager->get_baking();
503  view_layer.use_hair = b_view_layer.use_strand();
504  view_layer.use_volumes = b_view_layer.use_volumes();
505  view_layer.use_motion_blur = b_view_layer.use_motion_blur() &&
506  b_scene.render().use_motion_blur();
507 
508  /* Material override. */
509  view_layer.material_override = b_view_layer.material_override();
510 
511  /* Sample override. */
512  PointerRNA cscene = RNA_pointer_get(&b_scene.ptr, "cycles");
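 /* Behavior of the "use_layer_samples" enum as used here: a value of 1 treats the per-layer
  * sample count as a bound, and any value other than 2 applies the per-layer sample count;
  * presumably the RNA items are 0 = use, 1 = bounded, 2 = ignore. */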
513  int use_layer_samples = get_enum(cscene, "use_layer_samples");
514 
515  view_layer.bound_samples = (use_layer_samples == 1);
516  view_layer.samples = 0;
517 
518  if (use_layer_samples != 2) {
519  int samples = b_view_layer.samples();
520  view_layer.samples = samples;
521  }
522 }
523 
524 /* Images */
525 void BlenderSync::sync_images()
526 {
527  /* Sync is a convention for this API, but currently it frees unused buffers. */
528 
529  const bool is_interface_locked = b_engine.render() && b_engine.render().use_lock_interface();
530  if (is_interface_locked == false && BlenderSession::headless == false) {
531  /* If the interface is not locked, it's possible the image is needed for
532  * the display.
533  */
534  return;
535  }
536  /* Free buffers used by images which are not needed for render. */
537  for (BL::Image &b_image : b_data.images) {
538  /* TODO(sergey): Consider making it a utility function to check
539  * whether an image is considered builtin.
540  */
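 /* Packed, generated, and movie images (and everything during preview renders) appear to go
  * through Cycles' builtin image path, so their Blender-side buffers are kept. */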
541  const bool is_builtin = b_image.packed_file() ||
542  b_image.source() == BL::Image::source_GENERATED ||
543  b_image.source() == BL::Image::source_MOVIE || b_engine.is_preview();
544  if (is_builtin == false) {
545  b_image.buffers_free();
546  }
547  /* TODO(sergey): Free builtin images not used by any shader. */
548  }
549 }
550 
551 /* Passes */
552 
553 static PassType get_blender_pass_type(BL::RenderPass &b_pass)
554 {
555  string name = b_pass.name();
556 #define MAP_PASS(passname, passtype) \
557  if (name == passname) { \
558  return passtype; \
559  } \
560  ((void)0)
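 /* Compares the Blender pass name against each known name and returns the matching Cycles pass
  * type; the trailing ((void)0) forces a semicolon after every MAP_PASS() use. */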
561 
562  /* NOTE: Keep in sync with defined names from DNA_scene_types.h */
563 
564  MAP_PASS("Combined", PASS_COMBINED);
565  MAP_PASS("Noisy Image", PASS_COMBINED);
566 
567  MAP_PASS("Depth", PASS_DEPTH);
568  MAP_PASS("Mist", PASS_MIST);
569  MAP_PASS("Position", PASS_POSITION);
570  MAP_PASS("Normal", PASS_NORMAL);
571  MAP_PASS("IndexOB", PASS_OBJECT_ID);
572  MAP_PASS("UV", PASS_UV);
573  MAP_PASS("Vector", PASS_MOTION);
574  MAP_PASS("IndexMA", PASS_MATERIAL_ID);
575 
576  MAP_PASS("DiffDir", PASS_DIFFUSE_DIRECT);
577  MAP_PASS("GlossDir", PASS_GLOSSY_DIRECT);
578  MAP_PASS("TransDir", PASS_TRANSMISSION_DIRECT);
579  MAP_PASS("VolumeDir", PASS_VOLUME_DIRECT);
580 
581  MAP_PASS("DiffInd", PASS_DIFFUSE_INDIRECT);
582  MAP_PASS("GlossInd", PASS_GLOSSY_INDIRECT);
583  MAP_PASS("TransInd", PASS_TRANSMISSION_INDIRECT);
584  MAP_PASS("VolumeInd", PASS_VOLUME_INDIRECT);
585 
586  MAP_PASS("DiffCol", PASS_DIFFUSE_COLOR);
587  MAP_PASS("GlossCol", PASS_GLOSSY_COLOR);
588  MAP_PASS("TransCol", PASS_TRANSMISSION_COLOR);
589 
590  MAP_PASS("Emit", PASS_EMISSION);
591  MAP_PASS("Env", PASS_BACKGROUND);
592  MAP_PASS("AO", PASS_AO);
593  MAP_PASS("Shadow", PASS_SHADOW);
594 
595  MAP_PASS("BakePrimitive", PASS_BAKE_PRIMITIVE);
596  MAP_PASS("BakeDifferential", PASS_BAKE_DIFFERENTIAL);
597 
598  MAP_PASS("Denoising Normal", PASS_DENOISING_NORMAL);
599  MAP_PASS("Denoising Albedo", PASS_DENOISING_ALBEDO);
600  MAP_PASS("Denoising Depth", PASS_DENOISING_DEPTH);
601 
602  MAP_PASS("Shadow Catcher", PASS_SHADOW_CATCHER);
603  MAP_PASS("Noisy Shadow Catcher", PASS_SHADOW_CATCHER);
604 
605  MAP_PASS("AdaptiveAuxBuffer", PASS_ADAPTIVE_AUX_BUFFER);
606  MAP_PASS("Debug Sample Count", PASS_SAMPLE_COUNT);
607 
609  return PASS_CRYPTOMATTE;
610  }
611 
612 #undef MAP_PASS
613 
614  return PASS_NONE;
615 }
616 
617 static Pass *pass_add(Scene *scene,
618  PassType type,
619  const char *name,
620  PassMode mode = PassMode::DENOISED)
621 {
622  Pass *pass = scene->create_node<Pass>();
623 
624  pass->set_type(type);
625  pass->set_name(ustring(name));
626  pass->set_mode(mode);
627 
628  return pass;
629 }
630 
631 void BlenderSync::sync_render_passes(BL::RenderLayer &b_rlay, BL::ViewLayer &b_view_layer)
632 {
633  PointerRNA cscene = RNA_pointer_get(&b_scene.ptr, "cycles");
634 
635  /* Delete all existing passes. */
636  set<Pass *> clear_passes(scene->passes.begin(), scene->passes.end());
637  scene->delete_nodes(clear_passes);
638 
639  /* Always add combined pass. */
640  pass_add(scene, PASS_COMBINED, "Combined");
641 
642  /* Blender built-in data and light passes. */
643  for (BL::RenderPass &b_pass : b_rlay.passes) {
644  const PassType pass_type = get_blender_pass_type(b_pass);
645 
646  if (pass_type == PASS_NONE) {
647  LOG(ERROR) << "Unknown pass " << b_pass.name();
648  continue;
649  }
650 
651  if (pass_type == PASS_MOTION &&
652  (b_view_layer.use_motion_blur() && b_scene.render().use_motion_blur())) {
653  continue;
654  }
655 
656  pass_add(scene, pass_type, b_pass.name().c_str());
657  }
658 
659  PointerRNA crl = RNA_pointer_get(&b_view_layer.ptr, "cycles");
660 
661  /* Debug passes. */
662  if (get_boolean(crl, "pass_debug_sample_count")) {
663  b_engine.add_pass("Debug Sample Count", 1, "X", b_view_layer.name().c_str());
664  pass_add(scene, PASS_SAMPLE_COUNT, "Debug Sample Count");
665  }
666 
667  /* Cycles specific passes. */
668  if (get_boolean(crl, "use_pass_volume_direct")) {
669  b_engine.add_pass("VolumeDir", 3, "RGB", b_view_layer.name().c_str());
670  pass_add(scene, PASS_VOLUME_DIRECT, "VolumeDir");
671  }
672  if (get_boolean(crl, "use_pass_volume_indirect")) {
673  b_engine.add_pass("VolumeInd", 3, "RGB", b_view_layer.name().c_str());
674  pass_add(scene, PASS_VOLUME_INDIRECT, "VolumeInd");
675  }
676  if (get_boolean(crl, "use_pass_shadow_catcher")) {
677  b_engine.add_pass("Shadow Catcher", 3, "RGB", b_view_layer.name().c_str());
678  pass_add(scene, PASS_SHADOW_CATCHER, "Shadow Catcher");
679  }
680 
681  /* Cryptomatte stores two ID/weight pairs per RGBA layer.
682  * User facing parameter is the number of pairs.
683  *
684  * NOTE: Name the channels lowercase rgba so that the compression-rules check in the OpenEXR DWA
685  * code uses lossless compression. Reportedly this naming is the only one which works well from
686  * the interoperability point of view. Using xyzw naming is not portable. */
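 /* For example, a user-facing depth of 6 ID/weight pairs becomes divide_up(6, 2) = 3 RGBA
  * layers; the pair count is capped at 16 (8 layers). */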
687  int crypto_depth = divide_up(min(16, b_view_layer.pass_cryptomatte_depth()), 2);
688  scene->film->set_cryptomatte_depth(crypto_depth);
689  CryptomatteType cryptomatte_passes = CRYPT_NONE;
690  if (b_view_layer.use_pass_cryptomatte_object()) {
691  for (int i = 0; i < crypto_depth; i++) {
692  string passname = cryptomatte_prefix + string_printf("Object%02d", i);
693  b_engine.add_pass(passname.c_str(), 4, "rgba", b_view_layer.name().c_str());
694  pass_add(scene, PASS_CRYPTOMATTE, passname.c_str());
695  }
696  cryptomatte_passes = (CryptomatteType)(cryptomatte_passes | CRYPT_OBJECT);
697  }
698  if (b_view_layer.use_pass_cryptomatte_material()) {
699  for (int i = 0; i < crypto_depth; i++) {
700  string passname = cryptomatte_prefix + string_printf("Material%02d", i);
701  b_engine.add_pass(passname.c_str(), 4, "rgba", b_view_layer.name().c_str());
702  pass_add(scene, PASS_CRYPTOMATTE, passname.c_str());
703  }
704  cryptomatte_passes = (CryptomatteType)(cryptomatte_passes | CRYPT_MATERIAL);
705  }
706  if (b_view_layer.use_pass_cryptomatte_asset()) {
707  for (int i = 0; i < crypto_depth; i++) {
708  string passname = cryptomatte_prefix + string_printf("Asset%02d", i);
709  b_engine.add_pass(passname.c_str(), 4, "rgba", b_view_layer.name().c_str());
710  pass_add(scene, PASS_CRYPTOMATTE, passname.c_str());
711  }
712  cryptomatte_passes = (CryptomatteType)(cryptomatte_passes | CRYPT_ASSET);
713  }
714  scene->film->set_cryptomatte_passes(cryptomatte_passes);
715 
716  /* Denoising passes. */
717  const bool use_denoising = get_boolean(cscene, "use_denoising") &&
718  get_boolean(crl, "use_denoising");
719  const bool store_denoising_passes = get_boolean(crl, "denoising_store_passes");
720  if (use_denoising) {
721  b_engine.add_pass("Noisy Image", 4, "RGBA", b_view_layer.name().c_str());
722  pass_add(scene, PASS_COMBINED, "Noisy Image", PassMode::NOISY);
723  if (get_boolean(crl, "use_pass_shadow_catcher")) {
724  b_engine.add_pass("Noisy Shadow Catcher", 3, "RGB", b_view_layer.name().c_str());
725  pass_add(scene, PASS_SHADOW_CATCHER, "Noisy Shadow Catcher", PassMode::NOISY);
726  }
727  }
728  if (store_denoising_passes) {
729  b_engine.add_pass("Denoising Normal", 3, "XYZ", b_view_layer.name().c_str());
730  pass_add(scene, PASS_DENOISING_NORMAL, "Denoising Normal", PassMode::NOISY);
731 
732  b_engine.add_pass("Denoising Albedo", 3, "RGB", b_view_layer.name().c_str());
733  pass_add(scene, PASS_DENOISING_ALBEDO, "Denoising Albedo", PassMode::NOISY);
734 
735  b_engine.add_pass("Denoising Depth", 1, "Z", b_view_layer.name().c_str());
736  pass_add(scene, PASS_DENOISING_DEPTH, "Denoising Depth", PassMode::NOISY);
737  }
738 
739  /* Custom AOV passes. */
740  BL::ViewLayer::aovs_iterator b_aov_iter;
741  for (b_view_layer.aovs.begin(b_aov_iter); b_aov_iter != b_view_layer.aovs.end(); ++b_aov_iter) {
742  BL::AOV b_aov(*b_aov_iter);
743  if (!b_aov.is_valid()) {
744  continue;
745  }
746 
747  string name = b_aov.name();
748  bool is_color = b_aov.type() == BL::AOV::type_COLOR;
749 
750  if (is_color) {
751  b_engine.add_pass(name.c_str(), 4, "RGBA", b_view_layer.name().c_str());
752  pass_add(scene, PASS_AOV_COLOR, name.c_str());
753  }
754  else {
755  b_engine.add_pass(name.c_str(), 1, "X", b_view_layer.name().c_str());
756  pass_add(scene, PASS_AOV_VALUE, name.c_str());
757  }
758  }
759 
760  /* Light Group passes. */
761  BL::ViewLayer::lightgroups_iterator b_lightgroup_iter;
762  for (b_view_layer.lightgroups.begin(b_lightgroup_iter);
763  b_lightgroup_iter != b_view_layer.lightgroups.end();
764  ++b_lightgroup_iter) {
765  BL::Lightgroup b_lightgroup(*b_lightgroup_iter);
766 
767  string name = string_printf("Combined_%s", b_lightgroup.name().c_str());
768 
769  b_engine.add_pass(name.c_str(), 3, "RGB", b_view_layer.name().c_str());
770  Pass *pass = pass_add(scene, PASS_COMBINED, name.c_str(), PassMode::NOISY);
771  pass->set_lightgroup(ustring(b_lightgroup.name()));
772  }
773 
774  scene->film->set_pass_alpha_threshold(b_view_layer.pass_alpha_threshold());
775 }
776 
777 void BlenderSync::free_data_after_sync(BL::Depsgraph &b_depsgraph)
778 {
779  /* When viewport display is not needed during render we can force some
780  * caches to be released from the Blender side in order to reduce the peak memory
781  * footprint during the synchronization process.
782  */
783 
784  const bool is_interface_locked = b_engine.render() && b_engine.render().use_lock_interface();
785  const bool is_persistent_data = b_engine.render() && b_engine.render().use_persistent_data();
786  const bool can_free_caches =
787  (BlenderSession::headless || is_interface_locked) &&
788  /* Baking re-uses the depsgraph multiple times, and clearing it crashes
789  * when reading un-evaluated mesh data which isn't aligned with the
790  * geometry we're baking, see T71012. */
791  !scene->bake_manager->get_baking() &&
792  /* Persistent data must maintain caches for performance and correctness. */
793  !is_persistent_data;
794 
795  if (!can_free_caches) {
796  return;
797  }
798  /* TODO(sergey): We can actually remove the whole dependency graph,
799  * but that will need some API support first.
800  */
801  for (BL::Object &b_ob : b_depsgraph.objects) {
802  b_ob.cache_release();
803  }
804 }
805 
806 /* Scene Parameters */
807 
808 SceneParams BlenderSync::get_scene_params(BL::Scene &b_scene, bool background)
809 {
810  SceneParams params;
811  PointerRNA cscene = RNA_pointer_get(&b_scene.ptr, "cycles");
812  const bool shadingsystem = RNA_boolean_get(&cscene, "shading_system");
813 
814  if (shadingsystem == 0)
815  params.shadingsystem = SHADINGSYSTEM_SVM;
816  else if (shadingsystem == 1)
817  params.shadingsystem = SHADINGSYSTEM_OSL;
818 
819  if (background || DebugFlags().viewport_static_bvh)
820  params.bvh_type = BVH_TYPE_STATIC;
821  else
822  params.bvh_type = BVH_TYPE_DYNAMIC;
823 
824  params.use_bvh_spatial_split = RNA_boolean_get(&cscene, "debug_use_spatial_splits");
825  params.use_bvh_compact_structure = RNA_boolean_get(&cscene, "debug_use_compact_bvh");
826  params.use_bvh_unaligned_nodes = RNA_boolean_get(&cscene, "debug_use_hair_bvh");
827  params.num_bvh_time_steps = RNA_int_get(&cscene, "debug_bvh_time_steps");
828 
829  PointerRNA csscene = RNA_pointer_get(&b_scene.ptr, "cycles_curves");
830  params.hair_subdivisions = get_int(csscene, "subdivisions");
831  params.hair_shape = (CurveShapeType)get_enum(
832  csscene, "shape", CURVE_NUM_SHAPE_TYPES, CURVE_THICK);
833 
834  int texture_limit;
835  if (background) {
836  texture_limit = RNA_enum_get(&cscene, "texture_limit_render");
837  }
838  else {
839  texture_limit = RNA_enum_get(&cscene, "texture_limit");
840  }
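 /* When Simplify is enabled, a texture-limit enum value n > 0 maps to a resolution cap of
  * 1 << (n + 6) pixels, i.e. 128, 256, 512, ...; a value of 0 disables the limit. */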
841  if (texture_limit > 0 && b_scene.render().use_simplify()) {
842  params.texture_limit = 1 << (texture_limit + 6);
843  }
844  else {
845  params.texture_limit = 0;
846  }
847 
848  params.bvh_layout = DebugFlags().cpu.bvh_layout;
849 
850  params.background = background;
851 
852  return params;
853 }
854 
855 /* Session Parameters */
856 
857 bool BlenderSync::get_session_pause(BL::Scene &b_scene, bool background)
858 {
859  PointerRNA cscene = RNA_pointer_get(&b_scene.ptr, "cycles");
860  return (background) ? false : get_boolean(cscene, "preview_pause");
861 }
862 
863 SessionParams BlenderSync::get_session_params(BL::RenderEngine &b_engine,
864  BL::Preferences &b_preferences,
865  BL::Scene &b_scene,
866  bool background)
867 {
868  SessionParams params;
869  PointerRNA cscene = RNA_pointer_get(&b_scene.ptr, "cycles");
870 
871  if (background && !b_engine.is_preview()) {
872  /* Viewport and preview renders do not require a temp directory and request session
873  * parameters more often than the background render does.
874  * Optimize RNA-C++ usage and memory allocation a bit by skipping the string access, which we
875  * know is not needed for viewport renders. */
876  params.temp_dir = b_engine.temporary_directory();
877  }
878 
879  /* feature set */
880  params.experimental = (get_enum(cscene, "feature_set") != 0);
881 
882  /* Headless and background rendering. */
883  params.headless = BlenderSession::headless;
884  params.background = background;
885 
886  /* Device */
887  params.threads = blender_device_threads(b_scene);
888  params.device = blender_device_info(b_preferences, b_scene, params.background);
889 
890  /* samples */
891  int samples = get_int(cscene, "samples");
892  int preview_samples = get_int(cscene, "preview_samples");
893  int sample_offset = get_int(cscene, "sample_offset");
894 
895  if (background) {
896  params.samples = samples;
897  params.sample_offset = sample_offset;
898  }
899  else {
900  params.samples = preview_samples;
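 /* A preview sample count of 0 means the viewport renders indefinitely, so treat it as
  * unlimited. */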
901  if (params.samples == 0) {
902  params.samples = INT_MAX;
903  }
904  params.sample_offset = 0;
905  }
906 
907  /* Clamp sample offset. */
908  params.sample_offset = clamp(params.sample_offset, 0, Integrator::MAX_SAMPLES);
909 
910  /* Clamp samples. */
911  params.samples = clamp(params.samples, 0, Integrator::MAX_SAMPLES - params.sample_offset);
912 
913  /* Viewport Performance */
914  params.pixel_size = b_engine.get_preview_pixel_size(b_scene);
915 
916  if (background) {
917  params.pixel_size = 1;
918  }
919 
920  /* shading system - scene level needs full refresh */
921  const bool shadingsystem = RNA_boolean_get(&cscene, "shading_system");
922 
923  if (shadingsystem == 0)
924  params.shadingsystem = SHADINGSYSTEM_SVM;
925  else if (shadingsystem == 1)
926  params.shadingsystem = SHADINGSYSTEM_OSL;
927 
928  /* Time limit. */
929  if (background) {
930  params.time_limit = (double)get_float(cscene, "time_limit");
931  }
932  else {
933  /* For the viewport it makes more sense to think in terms of the noise floor, which is
934  * usually higher than the acceptable level for the final frame. */
935  /* TODO: It might be useful to support time limit in the viewport as well, but needs some
936  * extra thoughts and input. */
937  params.time_limit = 0.0;
938  }
939 
940  /* Profiling. */
941  params.use_profiling = params.device.has_profiling && !b_engine.is_preview() && background &&
942  BlenderSession::print_render_stats;
943 
944  if (background) {
945  params.use_auto_tile = RNA_boolean_get(&cscene, "use_auto_tile");
946  params.tile_size = max(get_int(cscene, "tile_size"), 8);
947  }
948  else {
949  params.use_auto_tile = false;
950  }
951 
952  return params;
953 }
954 
955 DenoiseParams BlenderSync::get_denoise_params(BL::Scene &b_scene,
956  BL::ViewLayer &b_view_layer,
957  bool background)
958 {
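 /* Local mirror of the denoising input-passes RNA enum: the selected value decides which
  * auxiliary passes (albedo, normal) are fed to the denoiser in the switch further below. */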
959  enum DenoiserInput {
960  DENOISER_INPUT_RGB = 1,
961  DENOISER_INPUT_RGB_ALBEDO = 2,
962  DENOISER_INPUT_RGB_ALBEDO_NORMAL = 3,
963 
964  DENOISER_INPUT_NUM,
965  };
966 
967  DenoiseParams denoising;
968  PointerRNA cscene = RNA_pointer_get(&b_scene.ptr, "cycles");
969 
970  int input_passes = -1;
971 
972  if (background) {
973  /* Final Render Denoising */
974  denoising.use = get_boolean(cscene, "use_denoising");
975  denoising.type = (DenoiserType)get_enum(cscene, "denoiser", DENOISER_NUM, DENOISER_NONE);
976  denoising.prefilter = (DenoiserPrefilter)get_enum(
977  cscene, "denoising_prefilter", DENOISER_PREFILTER_NUM, DENOISER_PREFILTER_NONE);
978 
979  input_passes = (DenoiserInput)get_enum(
980  cscene, "denoising_input_passes", DENOISER_INPUT_NUM, DENOISER_INPUT_RGB_ALBEDO_NORMAL);
981 
982  if (b_view_layer) {
983  PointerRNA clayer = RNA_pointer_get(&b_view_layer.ptr, "cycles");
984  if (!get_boolean(clayer, "use_denoising")) {
985  denoising.use = false;
986  }
987  }
988  }
989  else {
990  /* Viewport Denoising */
991  denoising.use = get_boolean(cscene, "use_preview_denoising");
992  denoising.type = (DenoiserType)get_enum(
993  cscene, "preview_denoiser", DENOISER_NUM, DENOISER_NONE);
994  denoising.prefilter = (DenoiserPrefilter)get_enum(
995  cscene, "preview_denoising_prefilter", DENOISER_PREFILTER_NUM, DENOISER_PREFILTER_FAST);
996  denoising.start_sample = get_int(cscene, "preview_denoising_start_sample");
997 
998  input_passes = (DenoiserInput)get_enum(
999  cscene, "preview_denoising_input_passes", DENOISER_INPUT_NUM, DENOISER_INPUT_RGB_ALBEDO);
1000 
1001  /* Auto select fastest denoiser. */
1002  if (denoising.type == DENOISER_NONE) {
1003  if (!Device::available_devices(DEVICE_MASK_OPTIX).empty()) {
1004  denoising.type = DENOISER_OPTIX;
1005  }
1006  else if (openimagedenoise_supported()) {
1007  denoising.type = DENOISER_OPENIMAGEDENOISE;
1008  }
1009  else {
1010  denoising.use = false;
1011  }
1012  }
1013  }
1014 
1015  switch (input_passes) {
1016  case DENOISER_INPUT_RGB:
1017  denoising.use_pass_albedo = false;
1018  denoising.use_pass_normal = false;
1019  break;
1020 
1021  case DENOISER_INPUT_RGB_ALBEDO:
1022  denoising.use_pass_albedo = true;
1023  denoising.use_pass_normal = false;
1024  break;
1025 
1026  case DENOISER_INPUT_RGB_ALBEDO_NORMAL:
1027  denoising.use_pass_albedo = true;
1028  denoising.use_pass_normal = true;
1029  break;
1030 
1031  default:
1032  LOG(ERROR) << "Unhandled input passes enum " << input_passes;
1033  break;
1034  }
1035 
1036  return denoising;
1037 }
1038 
1039 CCL_NAMESPACE_END