static void blender_camera_border(BlenderCamera *bcam,
                                  BL::RenderEngine b_engine,
                                  BL::RenderSettings b_render,
                                  BL::Scene b_scene,
                                  BL::SpaceView3D b_v3d,
                                  BL::RegionView3D b_rv3d,
                                  int width, int height)
{
  bool is_camera_view;

  /* camera view? */
  is_camera_view = b_rv3d.view_perspective() == BL::RegionView3D::view_perspective_CAMERA;

  if(!is_camera_view) {
    /* for non-camera view check whether render border is enabled for viewport
     * and if so use border from 3d viewport
     * assume viewport has got correctly clamped border already
     */
    if(b_v3d.use_render_border()) {
      bcam->border.left = b_v3d.render_border_min_x();
      bcam->border.right = b_v3d.render_border_max_x();
      bcam->border.bottom = b_v3d.render_border_min_y();
      bcam->border.top = b_v3d.render_border_max_y();
    }
    return;
  }

  BL::Object b_ob = (b_v3d.lock_camera_and_layers())? b_scene.camera(): b_v3d.camera();

  if(!b_ob)
    return;

  /* Determine camera border inside the viewport. */
  BoundBox2D full_border;
  blender_camera_border_subset(b_engine, b_render, b_scene, b_v3d, b_rv3d, b_ob,
                               width, height,
                               full_border,
                               &bcam->viewport_camera_border);

  if(!b_render.use_border()) {
    return;
  }

  bcam->border.left = b_render.border_min_x();
  bcam->border.right = b_render.border_max_x();
  bcam->border.bottom = b_render.border_min_y();
  bcam->border.top = b_render.border_max_y();

  /* Determine viewport subset matching camera border. */
  blender_camera_border_subset(b_engine, b_render, b_scene, b_v3d, b_rv3d, b_ob,
                               width, height,
                               bcam->border,
                               &bcam->border);
  bcam->border.clamp();
}
static void blender_camera_init(BlenderCamera *bcam,
                                BL::RenderSettings& b_render)
{
  memset(bcam, 0, sizeof(BlenderCamera));

  bcam->type = CAMERA_PERSPECTIVE;
  bcam->zoom = 1.0f;
  bcam->pixelaspect = make_float2(1.0f, 1.0f);
  bcam->sensor_width = 32.0f;
  bcam->sensor_height = 18.0f;
  bcam->sensor_fit = BlenderCamera::AUTO;
  bcam->shuttertime = 1.0f;
  bcam->motion_position = Camera::MOTION_POSITION_CENTER;
  bcam->rolling_shutter_type = Camera::ROLLING_SHUTTER_NONE;
  bcam->rolling_shutter_duration = 0.1f;
  bcam->border.right = 1.0f;
  bcam->border.top = 1.0f;
  bcam->pano_viewplane.right = 1.0f;
  bcam->pano_viewplane.top = 1.0f;
  bcam->viewport_camera_border.right = 1.0f;
  bcam->viewport_camera_border.top = 1.0f;

  /* render resolution */
  bcam->full_width = render_resolution_x(b_render);
  bcam->full_height = render_resolution_y(b_render);

  /* pixel aspect */
  bcam->pixelaspect.x = b_render.pixel_aspect_x();
  bcam->pixelaspect.y = b_render.pixel_aspect_y();
}
void BlenderSync::sync_camera(BL::Object b_override, int width, int height)
{
  BlenderCamera bcam;
  blender_camera_init(&bcam, b_scene);

  /* pixel aspect */
  BL::RenderSettings r = b_scene.render();
  bcam.pixelaspect.x = r.pixel_aspect_x();
  bcam.pixelaspect.y = r.pixel_aspect_y();
  bcam.shuttertime = r.motion_blur_shutter();

  /* border */
  if(r.use_border()) {
    bcam.border.left = r.border_min_x();
    bcam.border.right = r.border_max_x();
    bcam.border.bottom = r.border_min_y();
    bcam.border.top = r.border_max_y();
  }

  /* camera object */
  BL::Object b_ob = b_scene.camera();

  if(b_override)
    b_ob = b_override;

  if(b_ob) {
    blender_camera_from_object(&bcam, b_ob);
    bcam.matrix = get_transform(b_ob.matrix_world());
  }

  /* sync */
  Camera *cam = scene->camera;
  blender_camera_sync(cam, &bcam, width, height);
}
void BlenderSync::sync_camera(BL::RenderSettings b_render,
                              BL::Object b_override,
                              int width, int height)
{
  BlenderCamera bcam;
  blender_camera_init(&bcam, b_render);

  /* pixel aspect */
  bcam.pixelaspect.x = b_render.pixel_aspect_x();
  bcam.pixelaspect.y = b_render.pixel_aspect_y();
  bcam.shuttertime = b_render.motion_blur_shutter();

  /* border */
  if(b_render.use_border()) {
    bcam.border.left = b_render.border_min_x();
    bcam.border.right = b_render.border_max_x();
    bcam.border.bottom = b_render.border_min_y();
    bcam.border.top = b_render.border_max_y();
  }

  /* camera object */
  BL::Object b_ob = b_scene.camera();

  if(b_override)
    b_ob = b_override;

  if(b_ob) {
    BL::Array<float, 16> b_ob_matrix;
    blender_camera_from_object(&bcam, b_engine, b_ob);
    b_engine.camera_model_matrix(b_ob, b_ob_matrix);
    bcam.matrix = get_transform(b_ob_matrix);
  }

  /* sync */
  Camera *cam = scene->camera;
  blender_camera_sync(cam, &bcam, width, height);
}
void BlenderSync::sync_camera(BL::RenderSettings& b_render,
                              BL::Object& b_override,
                              int width, int height,
                              const char *viewname)
{
  BlenderCamera bcam;
  blender_camera_init(&bcam, b_render);

  /* pixel aspect */
  bcam.pixelaspect.x = b_render.pixel_aspect_x();
  bcam.pixelaspect.y = b_render.pixel_aspect_y();
  bcam.shuttertime = b_render.motion_blur_shutter();

  BL::CurveMapping b_shutter_curve(b_render.motion_blur_shutter_curve());
  curvemapping_to_array(b_shutter_curve, bcam.shutter_curve, RAMP_TABLE_SIZE);

  PointerRNA cscene = RNA_pointer_get(&b_scene.ptr, "cycles");
  bcam.motion_position = (Camera::MotionPosition)get_enum(cscene,
                                                          "motion_blur_position",
                                                          Camera::MOTION_NUM_POSITIONS,
                                                          Camera::MOTION_POSITION_CENTER);
  bcam.rolling_shutter_type = (Camera::RollingShutterType)get_enum(cscene,
                                                                   "rolling_shutter_type",
                                                                   Camera::ROLLING_SHUTTER_NUM_TYPES,
                                                                   Camera::ROLLING_SHUTTER_NONE);
  bcam.rolling_shutter_duration = RNA_float_get(&cscene, "rolling_shutter_duration");

  /* border */
  if(b_render.use_border()) {
    bcam.border.left = b_render.border_min_x();
    bcam.border.right = b_render.border_max_x();
    bcam.border.bottom = b_render.border_min_y();
    bcam.border.top = b_render.border_max_y();
  }

  /* camera object */
  BL::Object b_ob = b_scene.camera();

  if(b_override)
    b_ob = b_override;

  if(b_ob) {
    BL::Array<float, 16> b_ob_matrix;
    blender_camera_from_object(&bcam, b_engine, b_ob);
    b_engine.camera_model_matrix(b_ob, bcam.use_spherical_stereo, b_ob_matrix);
    bcam.matrix = get_transform(b_ob_matrix);
  }

  /* sync */
  Camera *cam = scene->camera;
  blender_camera_sync(cam, &bcam, width, height, viewname);
}
//////////////////////////////////////////////////////////////////////////////////////////////////
// Create the Blender session and all Octane session data structures
//////////////////////////////////////////////////////////////////////////////////////////////////
void BlenderSession::create_session()
{
  SessionParams session_params = BlenderSync::get_session_params(b_engine, b_userpref, b_scene, interactive);
  session_params.width = width;
  session_params.height = height;

  BL::RenderSettings r = b_scene.render();
  motion_blur = r.use_motion_blur();
  shuttertime = r.motion_blur_shutter();
  mb_samples = r.motion_blur_samples();
  mb_cur_sample = 0;
  mb_sample_in_work = 0;

  PointerRNA oct_scene = RNA_pointer_get(&b_scene.ptr, "octane");
  mb_type = static_cast<MotionBlurType>(RNA_enum_get(&oct_scene, "mb_type"));
  mb_direction = static_cast<MotionBlurDirection>(RNA_enum_get(&oct_scene, "mb_direction"));
  mb_frame_time_sampling = motion_blur && mb_type == INTERNAL ? 1.0f / session_params.fps : -1.0f;

  // Reset status/progress
  last_status = "";
  last_progress = -1.0f;

  // Create session
  string cur_path = blender_absolute_path(b_data, b_scene, b_scene.render().filepath().c_str());
  cur_path += "/alembic_export.abc";
  session = new Session(session_params, cur_path.c_str());
  session->set_blender_session(this);
  session->set_pause(BlenderSync::get_session_pause_state(b_scene, interactive));

  // Create scene
  scene = new Scene(session, interactive || !b_engine.is_animation() ? true : (b_scene.frame_current() == b_scene.frame_start()));
  session->scene = scene;
  scene->server = session->server;

  // Create sync
  sync = new BlenderSync(b_engine, b_data, b_scene, scene, interactive, session->progress);

  if(b_rv3d)
    sync->sync_view(b_v3d, b_rv3d, width, height);
  else
    sync->sync_camera(b_engine.camera_override(), width, height);

  // Set buffer parameters
  BufferParams buffer_params = BlenderSync::get_display_buffer_params(scene->camera, width, height);

  if(interactive || !b_engine.is_animation() || b_scene.frame_current() == b_scene.frame_start())
    session->reset(buffer_params, mb_frame_time_sampling);
} //create_session()
BufferParams BlenderSync::get_buffer_params(BL::RenderSettings b_render,
                                            BL::SpaceView3D b_v3d,
                                            BL::RegionView3D b_rv3d,
                                            Camera *cam,
                                            int width, int height)
{
  BufferParams params;
  bool use_border = false;

  params.full_width = width;
  params.full_height = height;

  if(b_v3d && b_rv3d && b_rv3d.view_perspective() != BL::RegionView3D::view_perspective_CAMERA)
    use_border = b_v3d.use_render_border();
  else
    use_border = b_render.use_border();

  if(use_border) {
    /* border render */
    /* the viewport may offset the border outside the view */
    BoundBox2D border = cam->border.clamp();
    params.full_x = (int)(border.left * (float)width);
    params.full_y = (int)(border.bottom * (float)height);
    params.width = (int)(border.right * (float)width) - params.full_x;
    params.height = (int)(border.top * (float)height) - params.full_y;

    /* survive in case border goes out of view or becomes too small */
    params.width = max(params.width, 1);
    params.height = max(params.height, 1);
  }
  else {
    params.width = width;
    params.height = height;
  }

  return params;
}
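The border arithmetic in get_buffer_params() is easy to sanity-check in isolation. Below is a minimal standalone sketch of just that arithmetic in plain C++; NormalizedBorder and PixelRect are hypothetical stand-ins for BoundBox2D and the relevant BufferParams fields, assuming border fractions already clamped to [0, 1] and a bottom-left origin, as in the function above.

#include <algorithm>
#include <cstdio>

// Hypothetical stand-ins for BoundBox2D and the BufferParams fields used above.
struct NormalizedBorder { float left, right, bottom, top; };  // clamped to [0, 1]
struct PixelRect { int full_x, full_y, width, height; };

// Mirrors the border branch of get_buffer_params(): scale the fractions by the
// full resolution and keep the result at least 1x1 pixels.
static PixelRect border_to_pixels(const NormalizedBorder& b, int width, int height)
{
  PixelRect r;
  r.full_x = (int)(b.left * (float)width);
  r.full_y = (int)(b.bottom * (float)height);
  r.width = std::max((int)(b.right * (float)width) - r.full_x, 1);
  r.height = std::max((int)(b.top * (float)height) - r.full_y, 1);
  return r;
}

int main()
{
  // A border covering the central quarter of a 1920x1080 frame.
  PixelRect r = border_to_pixels({0.25f, 0.75f, 0.25f, 0.75f}, 1920, 1080);
  printf("%d %d %d %d\n", r.full_x, r.full_y, r.width, r.height);  // 480 270 960 540
  return 0;
}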
static void blender_camera_border(BlenderCamera *bcam,
                                  BL::RenderSettings b_render,
                                  BL::Scene b_scene,
                                  BL::SpaceView3D b_v3d,
                                  BL::RegionView3D b_rv3d,
                                  int width, int height)
{
  bool is_camera_view;

  /* camera view? */
  is_camera_view = b_rv3d.view_perspective() == BL::RegionView3D::view_perspective_CAMERA;

  if(!is_camera_view) {
    /* for non-camera view check whether render border is enabled for viewport
     * and if so use border from 3d viewport
     * assume viewport has got correctly clamped border already
     */
    if(b_v3d.use_render_border()) {
      bcam->border.left = b_v3d.render_border_min_x();
      bcam->border.right = b_v3d.render_border_max_x();
      bcam->border.bottom = b_v3d.render_border_min_y();
      bcam->border.top = b_v3d.render_border_max_y();
      return;
    }
  }
  else if(!b_render.use_border())
    return;

  BL::Object b_ob = (b_v3d.lock_camera_and_layers())? b_scene.camera(): b_v3d.camera();

  if(!b_ob)
    return;

  bcam->border.left = b_render.border_min_x();
  bcam->border.right = b_render.border_max_x();
  bcam->border.bottom = b_render.border_min_y();
  bcam->border.top = b_render.border_max_y();

  /* determine camera viewport subset */
  BoundBox2D view_box, cam_box;
  blender_camera_view_subset(b_render, b_scene, b_ob, b_v3d, b_rv3d, width, height,
                             &view_box, &cam_box);

  /* determine viewport subset matching camera border */
  cam_box = cam_box.make_relative_to(view_box);
  bcam->border = cam_box.subset(bcam->border).clamp();
}
CCL_NAMESPACE_BEGIN

BlenderSession::BlenderSession(BL::RenderEngine b_engine_,
                               BL::UserPreferences b_userpref_,
                               BL::BlendData b_data_,
                               BL::Scene b_scene_)
: b_engine(b_engine_), b_userpref(b_userpref_), b_data(b_data_), b_scene(b_scene_),
  b_v3d(PointerRNA_NULL), b_rv3d(PointerRNA_NULL), b_rr(PointerRNA_NULL), b_rlay(PointerRNA_NULL)
{
  /* offline render */
  BL::RenderSettings r = b_scene.render();

  width = (int)(r.resolution_x()*r.resolution_percentage()/100);
  height = (int)(r.resolution_y()*r.resolution_percentage()/100);

  background = true;
  last_redraw_time = 0.0f;

  create_session();
}
static void blender_camera_init(BlenderCamera *bcam, BL::Scene b_scene)
{
  memset(bcam, 0, sizeof(BlenderCamera));

  bcam->type = CAMERA_PERSPECTIVE;
  bcam->zoom = 1.0f;
  bcam->pixelaspect = make_float2(1.0f, 1.0f);
  bcam->sensor_width = 32.0f;
  bcam->sensor_height = 18.0f;
  bcam->sensor_fit = BlenderCamera::AUTO;
  bcam->shuttertime = 1.0f;
  bcam->border.right = 1.0f;
  bcam->border.top = 1.0f;
  bcam->pano_viewplane.right = 1.0f;
  bcam->pano_viewplane.top = 1.0f;

  /* render resolution */
  BL::RenderSettings r = b_scene.render();
  bcam->full_width = (int)(r.resolution_x()*r.resolution_percentage()/100);
  bcam->full_height = (int)(r.resolution_y()*r.resolution_percentage()/100);
}
//////////////////////////////////////////////////////////////////////////////////////////////////
// Sync rendered Camera from blender scene to Octane data
//////////////////////////////////////////////////////////////////////////////////////////////////
void BlenderSync::sync_camera(BL::Object b_override, int width, int height)
{
  Camera* cam = scene->camera;
  Camera prevcam = *cam;

  BL::RenderSettings r = b_scene.render();
  cam->pixelaspect.x = r.pixel_aspect_x();
  cam->pixelaspect.y = r.pixel_aspect_y();

  cam->use_border = r.use_border();
  if(cam->use_border) {
    /* border render */
    cam->border.x = (uint32_t)(r.border_min_x() * (float)width);
    cam->border.y = (uint32_t)((1.0f - r.border_max_y()) * (float)height);
    cam->border.z = (uint32_t)(r.border_max_x() * (float)width);
    cam->border.w = (uint32_t)((1.0f - r.border_min_y()) * (float)height);
  }
  else {
    cam->border.x = 0;
    cam->border.y = 0;
    cam->border.z = 0;
    cam->border.w = 0;
  }
  if(cam->modified(prevcam))
    cam->tag_update();

  BL::Object b_ob = b_scene.camera();
  if(b_override)
    b_ob = b_override;

  if(b_ob) {
    cam->matrix = scene->matrix * get_transform(b_ob.matrix_world());
    float2 offset = {0};
    cam->zoom = 1.0f;
    load_camera_from_object(cam, b_ob, width, height, offset);
    cam->is_hidden = (scene->use_viewport_hide ? b_ob.hide() : b_ob.hide_render());
  }
  else
    cam->is_hidden = true;

  if(cam->modified(prevcam))
    cam->tag_update();
} //sync_camera()
void BlenderSync::sync_camera(BL::RenderSettings& b_render,
                              BL::Object& b_override,
                              int width, int height)
{
  BlenderCamera bcam;
  blender_camera_init(&bcam, b_render);

  /* pixel aspect */
  bcam.pixelaspect.x = b_render.pixel_aspect_x();
  bcam.pixelaspect.y = b_render.pixel_aspect_y();
  bcam.shuttertime = b_render.motion_blur_shutter();

  BL::CurveMapping b_shutter_curve(b_render.motion_blur_shutter_curve());
  curvemapping_to_array(b_shutter_curve, bcam.shutter_curve, RAMP_TABLE_SIZE);

  PointerRNA cscene = RNA_pointer_get(&b_scene.ptr, "cycles");

  switch(RNA_enum_get(&cscene, "motion_blur_position")) {
    case 0:
      bcam.motion_position = Camera::MOTION_POSITION_START;
      break;
    case 1:
      bcam.motion_position = Camera::MOTION_POSITION_CENTER;
      break;
    case 2:
      bcam.motion_position = Camera::MOTION_POSITION_END;
      break;
    default:
      bcam.motion_position = Camera::MOTION_POSITION_CENTER;
      break;
  }

  switch(RNA_enum_get(&cscene, "rolling_shutter_type")) {
    case 0:
      bcam.rolling_shutter_type = Camera::ROLLING_SHUTTER_NONE;
      break;
    case 1:
      bcam.rolling_shutter_type = Camera::ROLLING_SHUTTER_TOP;
      break;
    default:
      bcam.rolling_shutter_type = Camera::ROLLING_SHUTTER_NONE;
      break;
  }
  bcam.rolling_shutter_duration = RNA_float_get(&cscene, "rolling_shutter_duration");

  /* border */
  if(b_render.use_border()) {
    bcam.border.left = b_render.border_min_x();
    bcam.border.right = b_render.border_max_x();
    bcam.border.bottom = b_render.border_min_y();
    bcam.border.top = b_render.border_max_y();
  }

  /* camera object */
  BL::Object b_ob = b_scene.camera();

  if(b_override)
    b_ob = b_override;

  if(b_ob) {
    BL::Array<float, 16> b_ob_matrix;
    blender_camera_from_object(&bcam, b_engine, b_ob);
    b_engine.camera_model_matrix(b_ob, b_ob_matrix);
    bcam.matrix = get_transform(b_ob_matrix);
  }

  /* sync */
  Camera *cam = scene->camera;
  blender_camera_sync(cam, &bcam, width, height);
}
//////////////////////////////////////////////////////////////////////////////////////////////////
// Sync rendered View from blender scene to Octane camera data
//////////////////////////////////////////////////////////////////////////////////////////////////
void BlenderSync::get_camera_border(Camera *cam, BL::SpaceView3D b_v3d, BL::RegionView3D b_rv3d, int width, int height)
{
  BL::RenderSettings r = b_scene.render();

  /* camera view? */
  if(b_rv3d.view_perspective() != BL::RegionView3D::view_perspective_CAMERA) {
    /* for non-camera view check whether render border is enabled for viewport
     * and if so use border from 3d viewport
     * assume viewport has got correctly clamped border already */
    cam->use_border = b_v3d.use_render_border();
    if(cam->use_border) {
      cam->border.x = (uint32_t)(b_v3d.render_border_min_x() * (float)width);
      cam->border.y = (uint32_t)((1.0f - b_v3d.render_border_max_y()) * (float)height);
      cam->border.z = (uint32_t)(b_v3d.render_border_max_x() * (float)width);
      cam->border.w = (uint32_t)((1.0f - b_v3d.render_border_min_y()) * (float)height);
      return;
    }
  }
  else {
    cam->use_border = r.use_border();
    if(!cam->use_border)
      return;

    BL::Object b_ob = (b_v3d.lock_camera_and_layers()) ? b_scene.camera() : b_v3d.camera();
    if(!b_ob)
      return;

    float aspectratio, xaspect, yaspect;
    bool horizontal_fit;

    // Get View plane
    float xratio = (float)width * cam->pixelaspect.x;
    float yratio = (float)height * cam->pixelaspect.y;

    if(cam->sensor_fit == Camera::AUTO)
      horizontal_fit = (xratio > yratio);
    else if(cam->sensor_fit == Camera::HORIZONTAL)
      horizontal_fit = true;
    else
      horizontal_fit = false;

    if(horizontal_fit) {
      aspectratio = xratio / yratio;
      xaspect = aspectratio;
      yaspect = 1.0f;
    }
    else {
      aspectratio = yratio / xratio;
      xaspect = 1.0f;
      yaspect = aspectratio;
    }

    BoundBox2D view_box(-xaspect, xaspect, -yaspect, yaspect);
    view_box = view_box * cam->zoom;

    //float view_dx = 2.0f * (aspectratio * cam->lens_shift_x + cam->offset_x * xaspect * 2.0f);
    //float view_dy = 2.0f * (aspectratio * cam->lens_shift_y + cam->offset_y * yaspect * 2.0f);
    //view_box.left += view_dx;
    //view_box.right += view_dx;
    //view_box.bottom += view_dy;
    //view_box.top += view_dy;
    view_box = view_box / aspectratio;

    // Get camera plane
    BL::ID b_ob_data = b_ob.data();
    BL::Camera b_camera(b_ob_data);

    xratio = (float)r.resolution_x() * r.resolution_percentage() / 100;
    yratio = (float)r.resolution_y() * r.resolution_percentage() / 100;

    if(b_camera.sensor_fit() == BL::Camera::sensor_fit_AUTO)
      horizontal_fit = (xratio > yratio);
    else if(b_camera.sensor_fit() == BL::Camera::sensor_fit_HORIZONTAL)
      horizontal_fit = true;
    else
      horizontal_fit = false;

    if(horizontal_fit) {
      aspectratio = xratio / yratio;
      xaspect = aspectratio;
      yaspect = 1.0f;
    }
    else {
      aspectratio = yratio / xratio;
      xaspect = 1.0f;
      yaspect = aspectratio;
    }

    BoundBox2D cam_box(-xaspect, xaspect, -yaspect, yaspect);

    //float cam_dx = 2.0f * aspectratio * b_camera.shift_x();
    //float cam_dy = 2.0f * aspectratio * b_camera.shift_y();
    //cam_box.left += cam_dx;
    //cam_box.right += cam_dx;
    //cam_box.bottom += cam_dy;
    //cam_box.top += cam_dy;
    cam_box = cam_box / aspectratio;

    // Get render region
    cam_box = cam_box.make_relative_to(view_box);
    BoundBox2D orig_border(r.border_min_x(), r.border_max_x(), r.border_min_y(), r.border_max_y());
    BoundBox2D border = cam_box.subset(orig_border).clamp();

    cam->border.x = (uint32_t)(border.left * (float)width);
    cam->border.y = (uint32_t)((1.0f - border.top) * (float)height);
    cam->border.z = (uint32_t)(border.right * (float)width);
    cam->border.w = (uint32_t)((1.0f - border.bottom) * (float)height);
  }
} //get_camera_border()
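Note that, unlike the Cycles-style code above which keeps the border in normalized bottom-left coordinates, this function packs the result into pixel coordinates with the vertical axis flipped (x/y/z/w = left/top/right/bottom in a y-down frame). A minimal standalone sketch of just that mapping, in plain C++ with hypothetical stand-in structs, assuming an already clamped [0, 1] border:

#include <cstdint>
#include <cstdio>

// Hypothetical stand-ins: a clamped [0,1] border with a bottom-left origin, and
// the packed pixel rect used above (x/y/z/w = left/top/right/bottom, y-down).
struct NormalizedBorder { float left, right, bottom, top; };
struct PixelBorder { uint32_t x, y, z, w; };

// Mirrors the cam->border assignments in get_camera_border(): the vertical
// coordinates are flipped because the output rect is measured from the top.
static PixelBorder flip_border_to_pixels(const NormalizedBorder& b, int width, int height)
{
  PixelBorder p;
  p.x = (uint32_t)(b.left * (float)width);
  p.y = (uint32_t)((1.0f - b.top) * (float)height);
  p.z = (uint32_t)(b.right * (float)width);
  p.w = (uint32_t)((1.0f - b.bottom) * (float)height);
  return p;
}

int main()
{
  // The lower-left quarter of a 1920x1080 frame maps to the bottom half of a y-down rect.
  PixelBorder p = flip_border_to_pixels({0.0f, 0.5f, 0.0f, 0.5f}, 1920, 1080);
  printf("%u %u %u %u\n", p.x, p.y, p.z, p.w);  // 0 540 960 1080
  return 0;
}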
//////////////////////////////////////////////////////////////////////////////////////////////////
// Get additional Octane scene settings.
//////////////////////////////////////////////////////////////////////////////////////////////////
void BlenderSync::sync_kernel()
{
  PointerRNA oct_scene = RNA_pointer_get(&b_scene.ptr, "octane");

  Kernel *kernel = scene->kernel;
  Kernel prevkernel = *kernel;

  BL::RenderSettings r = b_scene.render();
  if(r.use_motion_blur()) {
    float fps = (float)b_scene.render().fps() / b_scene.render().fps_base();
    float shuttertime = r.motion_blur_shutter();
    BlenderSession::MotionBlurType mb_type = static_cast<BlenderSession::MotionBlurType>(RNA_enum_get(&oct_scene, "mb_type"));
    float mb_frame_time_sampling = mb_type == BlenderSession::INTERNAL ? 1.0f / fps : 0.0f;
    kernel->oct_node->fShutterTime = mb_frame_time_sampling != 0.0f ? shuttertime / mb_frame_time_sampling : 0.0f;

    BlenderSession::MotionBlurDirection mb_direction = static_cast<BlenderSession::MotionBlurDirection>(RNA_enum_get(&oct_scene, "mb_direction"));
    switch(mb_direction) {
      case BlenderSession::BEFORE:
        kernel->oct_node->mbAlignment = ::OctaneEngine::Kernel::BEFORE;
        break;
      case BlenderSession::AFTER:
        kernel->oct_node->mbAlignment = ::OctaneEngine::Kernel::AFTER;
        break;
      case BlenderSession::SYMMETRIC:
        kernel->oct_node->mbAlignment = ::OctaneEngine::Kernel::SYMMETRIC;
        break;
      default:
        break;
    }
  }
  else
    kernel->oct_node->fShutterTime = 0.0f;

  kernel->oct_node->type = static_cast< ::OctaneEngine::Kernel::KernelType>(RNA_enum_get(&oct_scene, "kernel_type"));
  kernel->oct_node->infoChannelType = channel_translator[RNA_enum_get(&oct_scene, "info_channel_type")];

  ::Octane::RenderPassId cur_pass_type = Passes::pass_type_translator[RNA_enum_get(&oct_scene, "cur_pass_type")];
  if(cur_pass_type == ::Octane::RenderPassId::RENDER_PASS_BEAUTY) {
    kernel->oct_node->iMaxSamples = interactive ? get_int(oct_scene, "max_preview_samples") : get_int(oct_scene, "max_samples");
    //kernel->oct_node->iMaxPreviewSamples = get_int(oct_scene, "max_preview_samples");
    //if(kernel->oct_node->iMaxPreviewSamples == 0) kernel->oct_node->iMaxPreviewSamples = 16000;
  }
  else if(cur_pass_type == ::Octane::RenderPassId::RENDER_PASS_AMBIENT_OCCLUSION) {
    kernel->oct_node->iMaxSamples = get_int(oct_scene, "pass_ao_max_samples");
    //kernel->oct_node->iMaxPreviewSamples = kernel->oct_node->iMaxSamples;
  }
  else {
    kernel->oct_node->iMaxSamples = get_int(oct_scene, "pass_max_samples");
    //kernel->oct_node->iMaxPreviewSamples = kernel->oct_node->iMaxSamples;
  }
  if(scene->session->b_session && scene->session->b_session->motion_blur
     && scene->session->b_session->mb_type == BlenderSession::SUBFRAME
     && scene->session->b_session->mb_samples > 1)
    kernel->oct_node->iMaxSamples = kernel->oct_node->iMaxSamples / scene->session->b_session->mb_samples;
  if(kernel->oct_node->iMaxSamples < 1)
    kernel->oct_node->iMaxSamples = 1;

  kernel->oct_node->fFilterSize = get_float(oct_scene, "filter_size");
  kernel->oct_node->fRayEpsilon = get_float(oct_scene, "ray_epsilon");
  kernel->oct_node->bAlphaChannel = get_boolean(oct_scene, "alpha_channel");
  kernel->oct_node->bAlphaShadows = get_boolean(oct_scene, "alpha_shadows");
  kernel->oct_node->bBumpNormalMapping = get_boolean(oct_scene, "bump_normal_mapping");
  kernel->oct_node->bBkFaceHighlight = get_boolean(oct_scene, "wf_bkface_hl");
  kernel->oct_node->fPathTermPower = get_float(oct_scene, "path_term_power");
  kernel->oct_node->bKeepEnvironment = get_boolean(oct_scene, "keep_environment");
  kernel->oct_node->fCausticBlur = get_float(oct_scene, "caustic_blur");
  kernel->oct_node->iMaxDiffuseDepth = get_int(oct_scene, "max_diffuse_depth");
  kernel->oct_node->iMaxGlossyDepth = get_int(oct_scene, "max_glossy_depth");
  kernel->oct_node->fCoherentRatio = get_float(oct_scene, "coherent_ratio");
  kernel->oct_node->bStaticNoise = get_boolean(oct_scene, "static_noise");
  kernel->oct_node->iSpecularDepth = get_int(oct_scene, "specular_depth");
  kernel->oct_node->iGlossyDepth = get_int(oct_scene, "glossy_depth");
  kernel->oct_node->fAODist = get_float(oct_scene, "ao_dist");
  kernel->oct_node->GIMode = static_cast< ::OctaneEngine::Kernel::DirectLightMode>(RNA_enum_get(&oct_scene, "gi_mode"));
  kernel->oct_node->iDiffuseDepth = get_int(oct_scene, "diffuse_depth");
  kernel->oct_node->sAoTexture = get_string(oct_scene, "ao_texture");
  kernel->oct_node->fExploration = get_float(oct_scene, "exploration");
  kernel->oct_node->fGIClamp = get_float(oct_scene, "gi_clamp");
  kernel->oct_node->fDLImportance = get_float(oct_scene, "direct_light_importance");
  kernel->oct_node->iMaxRejects = get_int(oct_scene, "max_rejects");
  kernel->oct_node->iParallelism = get_int(oct_scene, "parallelism");
  kernel->oct_node->fZdepthMax = get_float(oct_scene, "zdepth_max");
  kernel->oct_node->fUVMax = get_float(oct_scene, "uv_max");
  kernel->oct_node->iSamplingMode = RNA_enum_get(&oct_scene, "sampling_mode");
  kernel->oct_node->fMaxSpeed = get_float(oct_scene, "max_speed");
  kernel->oct_node->bLayersEnable = get_boolean(oct_scene, "layers_enable");
  kernel->oct_node->iLayersCurrent = get_int(oct_scene, "layers_current");
  kernel->oct_node->bLayersInvert = get_boolean(oct_scene, "layers_invert");
  kernel->oct_node->layersMode = static_cast< ::OctaneEngine::Kernel::LayersMode>(RNA_enum_get(&oct_scene, "layers_mode"));
  kernel->oct_node->iParallelSamples = get_int(oct_scene, "parallel_samples");
  kernel->oct_node->iMaxTileSamples = get_int(oct_scene, "max_tile_samples");
  kernel->oct_node->bMinimizeNetTraffic = get_boolean(oct_scene, "minimize_net_traffic");
  kernel->oct_node->bDeepImageEnable = get_boolean(oct_scene, "deep_image");
  kernel->oct_node->iMaxDepthSamples = get_int(oct_scene, "max_depth_samples");
  kernel->oct_node->fDepthTolerance = get_float(oct_scene, "depth_tolerance");
  kernel->oct_node->iWorkChunkSize = get_int(oct_scene, "work_chunk_size");
  kernel->oct_node->bAoAlphaShadows = get_boolean(oct_scene, "ao_alpha_shadows");
  kernel->oct_node->fOpacityThreshold = get_float(oct_scene, "opacity_threshold");

  if(kernel->modified(prevkernel))
    kernel->tag_update();

  // GPUs
  int iValues[8] = {0, 0, 0, 0, 0, 0, 0, 0};
  RNA_boolean_get_array(&oct_scene, "devices", iValues);
  kernel->uiGPUs = 0;
  for(int i = 0; i < 8; ++i)
    if(iValues[i])
      kernel->uiGPUs |= 0x01 << i;

  if(kernel->uiGPUs != prevkernel.uiGPUs)
    kernel->tag_updateGPUs();
} //sync_kernel()
void BlenderSession::render()
{
  /* get buffer parameters */
  SessionParams session_params = BlenderSync::get_session_params(b_userpref, b_scene, background);
  BufferParams buffer_params = BlenderSync::get_buffer_params(b_scene, b_rv3d, width, height);

  int w = buffer_params.width, h = buffer_params.height;

  /* create render result */
  RenderResult *rrp = RE_engine_begin_result((RenderEngine*)b_engine.ptr.data, 0, 0, w, h);
  PointerRNA rrptr;
  RNA_pointer_create(NULL, &RNA_RenderResult, rrp, &rrptr);
  b_rr = BL::RenderResult(rrptr);

  BL::RenderSettings r = b_scene.render();
  BL::RenderResult::layers_iterator b_iter;
  BL::RenderLayers b_rr_layers(r.ptr);

  int active = 0;

  /* render each layer */
  for(b_rr.layers.begin(b_iter); b_iter != b_rr.layers.end(); ++b_iter, ++active) {
    /* single layer render */
    if(r.use_single_layer())
      active = b_rr_layers.active_index();

    /* set layer */
    b_rlay = *b_iter;

    /* add passes */
    vector<Pass> passes;
    Pass::add(PASS_COMBINED, passes);

    if(session_params.device.advanced_shading) {
      BL::RenderLayer::passes_iterator b_pass_iter;

      for(b_rlay.passes.begin(b_pass_iter); b_pass_iter != b_rlay.passes.end(); ++b_pass_iter) {
        BL::RenderPass b_pass(*b_pass_iter);
        PassType pass_type = get_pass_type(b_pass);

        if(pass_type != PASS_NONE)
          Pass::add(pass_type, passes);
      }
    }

    buffer_params.passes = passes;
    scene->film->passes = passes;
    scene->film->tag_update(scene);

    /* update session */
    session->reset(buffer_params, session_params.samples);

    /* update scene */
    sync->sync_data(b_v3d, active);

    /* render */
    session->start();
    session->wait();

    if(session->progress.get_cancel())
      break;

    /* write result */
    write_render_result();
  }

  /* delete render result */
  RE_engine_end_result((RenderEngine*)b_engine.ptr.data, (RenderResult*)b_rr.ptr.data);
}