BlenderSession::BlenderSession(BL::RenderEngine b_engine_, BL::UserPreferences b_userpref_,
	BL::BlendData b_data_, BL::Scene b_scene_)
: b_engine(b_engine_), b_userpref(b_userpref_), b_data(b_data_), b_render(b_engine_.render()), b_scene(b_scene_),
  b_v3d(PointerRNA_NULL), b_rv3d(PointerRNA_NULL), python_thread_state(NULL)
{
	/* offline render: no viewport, so take the resolution from the render settings */

	width = render_resolution_x(b_render);
	height = render_resolution_y(b_render);

	background = true;
	last_redraw_time = 0.0;
	start_resize_time = 0.0;
}
static float blender_camera_focal_distance(BL::RenderEngine b_engine, BL::Object b_ob, BL::Camera b_camera)
{
	BL::Object b_dof_object = b_camera.dof_object();

	if(!b_dof_object)
		return b_camera.dof_distance();
	
	/* for dof object, return distance along camera Z direction */
	BL::Array<float, 16> b_ob_matrix;
	b_engine.camera_model_matrix(b_ob, b_ob_matrix);
	Transform obmat = get_transform(b_ob_matrix);
	Transform dofmat = get_transform(b_dof_object.matrix_world());
	/* express the dof object in camera space; its z translation is the focal distance */
	Transform mat = transform_inverse(obmat) * dofmat;

	return fabsf(transform_get_column(&mat, 3).z);
}

/* Example #3 */
static float blender_camera_focal_distance(BL::RenderEngine b_engine, BL::Object b_ob, BL::Camera b_camera)
{
	BL::Object b_dof_object = b_camera.dof_object();

	if(!b_dof_object)
		return b_camera.dof_distance();
	
	/* for dof object, return distance along camera Z direction */
	BL::Array<float, 16> b_ob_matrix;
	b_engine.camera_model_matrix(b_ob, b_ob_matrix);
	Transform obmat = get_transform(b_ob_matrix);
	Transform dofmat = get_transform(b_dof_object.matrix_world());
	/* distance is the offset between camera and dof object projected onto the view direction */
	float3 view_dir = normalize(transform_get_column(&obmat, 2));
	float3 dof_dir = transform_get_column(&obmat, 3) - transform_get_column(&dofmat, 3);
	return fabsf(dot(view_dir, dof_dir));
}
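
/* Standalone illustration (not from the Cycles sources): the focal distance is the length
 * of the camera-to-DOF-target offset projected onto the camera's local Z axis, which is
 * what the version above computes with float3/Transform helpers. Vec3, dot() and
 * focal_distance() below are hypothetical minimal stand-ins used only for this sketch. */
#include <cmath>
#include <cstdio>

struct Vec3 { float x, y, z; };

static float dot(const Vec3 &a, const Vec3 &b)
{
	return a.x*b.x + a.y*b.y + a.z*b.z;
}

/* |(camera position - dof target position) . view direction|, view_dir assumed normalized */
static float focal_distance(const Vec3 &cam_pos, const Vec3 &view_dir, const Vec3 &dof_pos)
{
	Vec3 offset = {cam_pos.x - dof_pos.x, cam_pos.y - dof_pos.y, cam_pos.z - dof_pos.z};
	return std::fabs(dot(view_dir, offset));
}

int main()
{
	Vec3 cam_pos = {0.0f, 0.0f, 0.0f};
	Vec3 view_dir = {0.0f, 0.0f, 1.0f};   /* camera's local Z axis in world space */
	Vec3 dof_pos = {1.0f, 0.0f, -5.0f};   /* lateral offset does not change the result */

	printf("focal distance: %f\n", focal_distance(cam_pos, view_dir, dof_pos));  /* 5.0 */
	return 0;
}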
static void blender_camera_from_object(BlenderCamera *bcam,
                                       BL::RenderEngine& b_engine,
                                       BL::Object& b_ob,
                                       bool skip_panorama = false)
{
	BL::ID b_ob_data = b_ob.data();

	if(b_ob_data.is_a(&RNA_Camera)) {
		BL::Camera b_camera(b_ob_data);
		PointerRNA ccamera = RNA_pointer_get(&b_camera.ptr, "cycles");

		bcam->nearclip = b_camera.clip_start();
		bcam->farclip = b_camera.clip_end();

		switch(b_camera.type()) {
			case BL::Camera::type_ORTHO:
				bcam->type = CAMERA_ORTHOGRAPHIC;
				break;
			case BL::Camera::type_PANO:
				if(!skip_panorama)
					bcam->type = CAMERA_PANORAMA;
				else
					bcam->type = CAMERA_PERSPECTIVE;
				break;
			case BL::Camera::type_PERSP:
			default:
				bcam->type = CAMERA_PERSPECTIVE;
				break;
		}

		/* map the panorama_type value from the Cycles camera settings to the internal enum */
		switch(RNA_enum_get(&ccamera, "panorama_type")) {
			case 1:
				bcam->panorama_type = PANORAMA_FISHEYE_EQUIDISTANT;
				break;
			case 2:
				bcam->panorama_type = PANORAMA_FISHEYE_EQUISOLID;
				break;
			case 3:
				bcam->panorama_type = PANORAMA_MIRRORBALL;
				break;
			case 0:
			default:
				bcam->panorama_type = PANORAMA_EQUIRECTANGULAR;
				break;
		}

		bcam->fisheye_fov = RNA_float_get(&ccamera, "fisheye_fov");
		bcam->fisheye_lens = RNA_float_get(&ccamera, "fisheye_lens");
		bcam->latitude_min = RNA_float_get(&ccamera, "latitude_min");
		bcam->latitude_max = RNA_float_get(&ccamera, "latitude_max");
		bcam->longitude_min = RNA_float_get(&ccamera, "longitude_min");
		bcam->longitude_max = RNA_float_get(&ccamera, "longitude_max");

		bcam->ortho_scale = b_camera.ortho_scale();

		bcam->lens = b_camera.lens();

		/* allow the f-stop number to drive aperture_size, while still
		 * giving manual control over the aperture radius
		 * (see the standalone sketch after this function) */
		int aperture_type = RNA_enum_get(&ccamera, "aperture_type");

		if(aperture_type == 1) {
			float fstop = RNA_float_get(&ccamera, "aperture_fstop");
			fstop = max(fstop, 1e-5f);

			if(bcam->type == CAMERA_ORTHOGRAPHIC)
				bcam->aperturesize = 1.0f/(2.0f*fstop);
			else
				bcam->aperturesize = (bcam->lens*1e-3f)/(2.0f*fstop);
		}
		else
			bcam->aperturesize = RNA_float_get(&ccamera, "aperture_size");

		bcam->apertureblades = RNA_int_get(&ccamera, "aperture_blades");
		bcam->aperturerotation = RNA_float_get(&ccamera, "aperture_rotation");
		bcam->focaldistance = blender_camera_focal_distance(b_engine, b_ob, b_camera);
		bcam->aperture_ratio = RNA_float_get(&ccamera, "aperture_ratio");

		bcam->shift.x = b_engine.camera_shift_x(b_ob);
		bcam->shift.y = b_camera.shift_y();

		bcam->sensor_width = b_camera.sensor_width();
		bcam->sensor_height = b_camera.sensor_height();

		if(b_camera.sensor_fit() == BL::Camera::sensor_fit_AUTO)
			bcam->sensor_fit = BlenderCamera::AUTO;
		else if(b_camera.sensor_fit() == BL::Camera::sensor_fit_HORIZONTAL)
			bcam->sensor_fit = BlenderCamera::HORIZONTAL;
		else
			bcam->sensor_fit = BlenderCamera::VERTICAL;
	}
	else {
		/* from lamp not implemented yet */
	}
}
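
/* Side note on the aperture math in the f-stop branch above (a sketch, not Cycles code):
 * with the f-stop aperture type, the aperture radius is lens/(2*N) with the focal length
 * converted from millimetres to metres, or 1/(2*N) for orthographic cameras.
 * aperture_radius_from_fstop() is a hypothetical name used only for this illustration. */
#include <algorithm>
#include <cstdio>

static float aperture_radius_from_fstop(float fstop, float lens_mm, bool orthographic)
{
	fstop = std::max(fstop, 1e-5f);  /* clamp, as the original does, to avoid dividing by zero */

	if(orthographic)
		return 1.0f/(2.0f*fstop);

	return (lens_mm*1e-3f)/(2.0f*fstop);
}

int main()
{
	/* a 50mm lens at f/2.8 gives an aperture radius of roughly 0.0089m (~8.9mm) */
	printf("%f\n", aperture_radius_from_fstop(2.8f, 50.0f, false));
	return 0;
}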
static void end_render_result(BL::RenderEngine b_engine, BL::RenderResult b_rr, bool cancel, bool do_merge_results)
{
	b_engine.end_result(b_rr, (int)cancel, (int)do_merge_results);
}
static BL::RenderResult begin_render_result(BL::RenderEngine b_engine, int x, int y, int w, int h, const char *layername, const char *viewname)
{
	return b_engine.begin_result(x, y, w, h, layername, viewname);
}

/* Example #7 */
static void end_render_result(BL::RenderEngine b_engine, BL::RenderResult b_rr, bool cancel = false)
{
	b_engine.end_result(b_rr, (int)cancel);
}