//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Fill the Octane Camera properties from Blender data
//
// Reads the Blender camera datablock attached to b_ob (and its "octane" RNA
// property group) and fills the corresponding fields of cam. Only camera
// objects are handled; lamps are a TODO (see the else branch at the bottom).
//
// cam            - Octane camera to fill; cam->matrix must already hold the
//                  camera's world transform (eye/look_at/up are derived from it)
// b_ob           - Blender object expected to carry a Camera datablock
// width, height  - render/viewport resolution in pixels, used for the
//                  ortho-scale and sensor-size helpers below
// offset         - extra image-plane offset; stored scaled by 2/zoom
// skip_panorama  - when true, a panoramic Blender camera is synced as a
//                  perspective camera instead
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
void BlenderSync::load_camera_from_object(Camera* cam, BL::Object b_ob, int width, int height, float2& offset, bool skip_panorama) {
    BL::ID b_ob_data = b_ob.data();

    if(b_ob_data.is_a(&RNA_Camera)) {
        BL::Camera b_camera(b_ob_data);
        PointerRNA oct_camera = RNA_pointer_get(&b_camera.ptr, "octane");

        // Map Blender's camera type onto the Octane type + ortho flag.
        // Note: ORTHO still uses CAMERA_PERSPECTIVE with cam->ortho = true.
        switch(b_camera.type()) {
            case BL::Camera::type_ORTHO:
                cam->type = CAMERA_PERSPECTIVE;
                cam->ortho = true;
                break;
            case BL::Camera::type_PANO:
                if(!skip_panorama)
                    cam->type = CAMERA_PANORAMA;
                else
                    cam->type = CAMERA_PERSPECTIVE;
                cam->ortho = false;
                break;
            case BL::Camera::type_PERSP:
            default:
                cam->type = CAMERA_PERSPECTIVE;
                cam->ortho = false;
                break;
        }

        cam->near_clip_depth = b_camera.clip_start();
        cam->far_clip_depth = b_camera.clip_end();

        cam->set_focal_depth(b_ob, b_camera);

        // NOTE(review): get_cam_settings presumably populates cam->zoom (among
        // other Octane properties) from oct_camera — confirm; cam->zoom is
        // read by every division below, so this call must stay before them.
        get_cam_settings(cam, oct_camera);

        // Lens shift and offset are stored pre-divided/scaled by the zoom.
        cam->lens_shift_x = b_camera.shift_x() / cam->zoom;
        cam->lens_shift_y = b_camera.shift_y() / cam->zoom;

        cam->sensorwidth = b_camera.sensor_width();
        cam->sensorheight = b_camera.sensor_height();

        cam->offset_x = offset.x * 2.0f / cam->zoom;
        cam->offset_y = offset.y * 2.0f / cam->zoom;

        if(b_camera.sensor_fit() == BL::Camera::sensor_fit_AUTO)
            cam->sensor_fit = Camera::AUTO;
        else if(b_camera.sensor_fit() == BL::Camera::sensor_fit_HORIZONTAL)
            cam->sensor_fit = Camera::HORIZONTAL;
        else
            cam->sensor_fit = Camera::VERTICAL;

        if(cam->ortho) {
            // Orthographic: "fov" carries the ortho scale instead of an angle.
            float ortho_scale;
            get_camera_ortho_scale(cam, b_camera, width, height, &ortho_scale);
            cam->fov = ortho_scale * cam->zoom;
        }
        else {
            // Perspective: FOV in degrees from sensor size, zoom and focal length.
            float sensor_size;
            get_camera_sensor_size(cam, width, height, &sensor_size);
            cam->fov = 2.0f * atanf((0.5f * sensor_size * cam->zoom) / b_camera.lens()) *180.0f / M_PI_F;
        }

        // Position: eye point is the translation column of the world matrix;
        // look_at is one unit along the camera's -Z axis, up is its +Y axis.
        cam->eye_point.x = cam->matrix.x.w;
        cam->eye_point.y = cam->matrix.y.w;
        cam->eye_point.z = cam->matrix.z.w;

        float3 dir = transform_direction(&cam->matrix, make_float3(0.0f, 0.0f, -1.0f));
        cam->look_at.x = cam->eye_point.x + dir.x;
        cam->look_at.y = cam->eye_point.y + dir.y;
        cam->look_at.z = cam->eye_point.z + dir.z;

        cam->up = normalize(transform_direction(&cam->matrix, make_float3(0.0f, 1.0f, 0.0f)));
    }
    else {
        //TODO: Implement it for Lamp
    }
} //camera_from_object()
static void blender_camera_from_object(BlenderCamera *bcam, BL::RenderEngine& b_engine, BL::Object& b_ob, bool skip_panorama = false) { BL::ID b_ob_data = b_ob.data(); if(b_ob_data.is_a(&RNA_Camera)) { BL::Camera b_camera(b_ob_data); PointerRNA ccamera = RNA_pointer_get(&b_camera.ptr, "cycles"); bcam->nearclip = b_camera.clip_start(); bcam->farclip = b_camera.clip_end(); switch(b_camera.type()) { case BL::Camera::type_ORTHO: bcam->type = CAMERA_ORTHOGRAPHIC; break; case BL::Camera::type_PANO: if(!skip_panorama) bcam->type = CAMERA_PANORAMA; else bcam->type = CAMERA_PERSPECTIVE; break; case BL::Camera::type_PERSP: default: bcam->type = CAMERA_PERSPECTIVE; break; } switch(RNA_enum_get(&ccamera, "panorama_type")) { case 1: bcam->panorama_type = PANORAMA_FISHEYE_EQUIDISTANT; break; case 2: bcam->panorama_type = PANORAMA_FISHEYE_EQUISOLID; break; case 3: bcam->panorama_type = PANORAMA_MIRRORBALL; break; case 0: default: bcam->panorama_type = PANORAMA_EQUIRECTANGULAR; break; } bcam->fisheye_fov = RNA_float_get(&ccamera, "fisheye_fov"); bcam->fisheye_lens = RNA_float_get(&ccamera, "fisheye_lens"); bcam->latitude_min = RNA_float_get(&ccamera, "latitude_min"); bcam->latitude_max = RNA_float_get(&ccamera, "latitude_max"); bcam->longitude_min = RNA_float_get(&ccamera, "longitude_min"); bcam->longitude_max = RNA_float_get(&ccamera, "longitude_max"); bcam->ortho_scale = b_camera.ortho_scale(); bcam->lens = b_camera.lens(); /* allow f/stop number to change aperture_size but still * give manual control over aperture radius */ int aperture_type = RNA_enum_get(&ccamera, "aperture_type"); if(aperture_type == 1) { float fstop = RNA_float_get(&ccamera, "aperture_fstop"); fstop = max(fstop, 1e-5f); if(bcam->type == CAMERA_ORTHOGRAPHIC) bcam->aperturesize = 1.0f/(2.0f*fstop); else bcam->aperturesize = (bcam->lens*1e-3f)/(2.0f*fstop); } else bcam->aperturesize = RNA_float_get(&ccamera, "aperture_size"); bcam->apertureblades = RNA_int_get(&ccamera, "aperture_blades"); 
bcam->aperturerotation = RNA_float_get(&ccamera, "aperture_rotation"); bcam->focaldistance = blender_camera_focal_distance(b_engine, b_ob, b_camera); bcam->aperture_ratio = RNA_float_get(&ccamera, "aperture_ratio"); bcam->shift.x = b_engine.camera_shift_x(b_ob); bcam->shift.y = b_camera.shift_y(); bcam->sensor_width = b_camera.sensor_width(); bcam->sensor_height = b_camera.sensor_height(); if(b_camera.sensor_fit() == BL::Camera::sensor_fit_AUTO) bcam->sensor_fit = BlenderCamera::AUTO; else if(b_camera.sensor_fit() == BL::Camera::sensor_fit_HORIZONTAL) bcam->sensor_fit = BlenderCamera::HORIZONTAL; else bcam->sensor_fit = BlenderCamera::VERTICAL; } else { /* from lamp not implemented yet */ } }
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Sync rendered View from blender scene to Octane camera data
//
// Computes the render-border rectangle in pixel coordinates and stores it in
// cam->border as (x, y, z, w) = (min_x, top, max_x, bottom); the Y values are
// flipped (1 - v) so the origin is at the top of the image. Leaves
// cam->use_border false (and the border untouched) when no border applies.
//
// cam           - camera to receive use_border and the border rectangle
// b_v3d, b_rv3d - the 3D viewport and its region view being synced
// width, height - viewport size in pixels
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
void BlenderSync::get_camera_border(Camera *cam, BL::SpaceView3D b_v3d, BL::RegionView3D b_rv3d, int width, int height) {
    BL::RenderSettings r = b_scene.render();

    /* camera view? */
    if(b_rv3d.view_perspective() != BL::RegionView3D::view_perspective_CAMERA) {
        /* for non-camera view check whether render border is enabled for viewport
         * and if so use border from 3d viewport
         * assume viewport has got correctly clamped border already */
        cam->use_border = b_v3d.use_render_border();
        if(cam->use_border) {
            // Viewport border is already normalized [0,1]; just scale to pixels.
            cam->border.x = (uint32_t)(b_v3d.render_border_min_x() * (float)width);
            cam->border.y = (uint32_t)((1.0f - b_v3d.render_border_max_y()) * (float)height);
            cam->border.z = (uint32_t)(b_v3d.render_border_max_x() * (float)width);
            cam->border.w = (uint32_t)((1.0f - b_v3d.render_border_min_y()) * (float)height);
            return;
        }
    }
    else {
        // Looking through the camera: use the render settings' border, mapped
        // from camera-plane coordinates into viewport coordinates.
        cam->use_border = r.use_border();
        if(!cam->use_border) return;

        // Locked viewports follow the scene camera; otherwise the viewport
        // may have its own local camera.
        BL::Object b_ob = (b_v3d.lock_camera_and_layers()) ? b_scene.camera() : b_v3d.camera();
        if(!b_ob) return;

        float aspectratio, xaspect, yaspect;
        bool horizontal_fit;

        // Get View plane: aspect box of the viewport, honoring pixel aspect
        // and the camera's sensor-fit mode.
        float xratio = (float)width * cam->pixelaspect.x;
        float yratio = (float)height * cam->pixelaspect.y;

        if(cam->sensor_fit == Camera::AUTO)
            horizontal_fit = (xratio > yratio);
        else if(cam->sensor_fit == Camera::HORIZONTAL)
            horizontal_fit = true;
        else
            horizontal_fit = false;

        if(horizontal_fit) {
            aspectratio = xratio / yratio;
            xaspect = aspectratio;
            yaspect = 1.0f;
        }
        else {
            aspectratio = yratio / xratio;
            xaspect = 1.0f;
            yaspect = aspectratio;
        }

        BoundBox2D view_box(-xaspect, xaspect, -yaspect, yaspect);

        view_box = view_box * cam->zoom;

        // Lens-shift / offset panning of the view box is intentionally
        // disabled here (kept for reference):
        //float view_dx = 2.0f * (aspectratio * cam->lens_shift_x + cam->offset_x * xaspect * 2.0f);
        //float view_dy = 2.0f * (aspectratio * cam->lens_shift_y + cam->offset_y * yaspect * 2.0f);
        //view_box.left += view_dx;
        //view_box.right += view_dx;
        //view_box.bottom += view_dy;
        //view_box.top += view_dy;
        view_box = view_box / aspectratio;

        // Get camera plane: same construction, but for the camera's own
        // render resolution and its sensor-fit mode.
        BL::ID b_ob_data = b_ob.data();
        BL::Camera b_camera(b_ob_data);

        xratio = (float)r.resolution_x() * r.resolution_percentage() / 100;
        yratio = (float)r.resolution_y() * r.resolution_percentage() / 100;

        if(b_camera.sensor_fit() == BL::Camera::sensor_fit_AUTO)
            horizontal_fit = (xratio > yratio);
        else if(b_camera.sensor_fit() == BL::Camera::sensor_fit_HORIZONTAL)
            horizontal_fit = true;
        else
            horizontal_fit = false;

        if(horizontal_fit) {
            aspectratio = xratio / yratio;
            xaspect = aspectratio;
            yaspect = 1.0f;
        }
        else {
            aspectratio = yratio / xratio;
            xaspect = 1.0f;
            yaspect = aspectratio;
        }

        BoundBox2D cam_box(-xaspect, xaspect, -yaspect, yaspect);

        // Camera lens-shift panning likewise disabled (kept for reference):
        //float cam_dx = 2.0f * aspectratio * b_camera.shift_x();
        //float cam_dy = 2.0f * aspectratio * b_camera.shift_y();
        //cam_box.left += cam_dx;
        //cam_box.right += cam_dx;
        //cam_box.bottom += cam_dy;
        //cam_box.top += cam_dy;
        cam_box = cam_box / aspectratio;

        // Get render region: express the camera box relative to the view box,
        // cut out the render border inside it, clamp to [0,1], and convert to
        // pixel coordinates (Y flipped as described above).
        cam_box = cam_box.make_relative_to(view_box);
        BoundBox2D orig_border(r.border_min_x(), r.border_max_x(), r.border_min_y(), r.border_max_y());
        BoundBox2D border = cam_box.subset(orig_border).clamp();

        cam->border.x = (uint32_t)(border.left * (float)width);
        cam->border.y = (uint32_t)((1.0f - border.top) * (float)height);
        cam->border.z = (uint32_t)(border.right * (float)width);
        cam->border.w = (uint32_t)((1.0f - border.bottom) * (float)height);
    }
} //get_camera_border()