Example #1
File: film.cpp  Project: mdtrooper/goxel
static vector<float> filter_table(FilterType type, float width)
{
	vector<float> filter_table(FILTER_TABLE_SIZE);
	float (*filter_func)(float, float) = NULL;

	switch(type) {
		case FILTER_BOX:
			filter_func = filter_func_box;
			break;
		case FILTER_GAUSSIAN:
			filter_func = filter_func_gaussian;
			width *= 3.0f;
			break;
		case FILTER_BLACKMAN_HARRIS:
			filter_func = filter_func_blackman_harris;
			width *= 2.0f;
			break;
		default:
			assert(0);
	}

	/* Create importance sampling table. */

	/* TODO(sergey): With an even filter table size we can not really make a
	 * nice symmetric importance map without sampling the full range (meaning,
	 * we would need to sample the full filter range and not use the
	 * make_symmetric argument).
	 *
	 * The current code matches the initial filter table code exactly, but we
	 * should consider either making FILTER_TABLE_SIZE an odd value or sampling
	 * the full filter.
	 */

	util_cdf_inverted(FILTER_TABLE_SIZE,
	                  0.0f,
	                  width * 0.5f,
	                  function_bind(filter_func, _1, width),
	                  true,
	                  filter_table);

	return filter_table;
}
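
The call to util_cdf_inverted builds the importance-sampling table: the filter function is integrated over [0, width * 0.5] into a CDF, normalized, and then inverted so that a uniform random number maps to a filter-weighted offset. Below is a minimal standalone sketch of that idea, not the actual Cycles utility; cdf_inverted, gaussian_filter, and the piecewise-constant inversion are illustrative assumptions.

#include <cmath>
#include <vector>

/* Unnormalized Gaussian falloff used as an illustrative filter function.
 * Any constant factor cancels when the CDF is normalized below. */
static float gaussian_filter(float x, float width)
{
	return expf(-2.0f * x * x / (width * width));
}

/* Conceptual sketch of building an inverted CDF of a 1D filter function over
 * [from, to]. This is NOT the real util_cdf_inverted; the name, the
 * piecewise-constant inversion and the lack of make_symmetric are simplifications. */
static std::vector<float> cdf_inverted(int resolution, float from, float to,
                                       float (*func)(float, float), float width)
{
	std::vector<float> cdf(resolution);
	const float range = to - from;

	/* Accumulate the unnormalized CDF by sampling the function. */
	float sum = 0.0f;
	for(int i = 0; i < resolution; i++) {
		float x = from + range * (i + 0.5f) / resolution;
		sum += func(x, width);
		cdf[i] = sum;
	}
	for(int i = 0; i < resolution; i++)
		cdf[i] /= sum;  /* normalize so the last entry is 1 */

	/* Invert: for each uniform value u in [0, 1], find x with CDF(x) >= u. */
	std::vector<float> inv_cdf(resolution);
	int j = 0;
	for(int i = 0; i < resolution; i++) {
		float u = (float)i / (float)(resolution - 1);
		while(j < resolution - 1 && cdf[j] < u)
			j++;
		inv_cdf[i] = from + range * (float)(j + 1) / (float)resolution;
	}
	return inv_cdf;
}

A table analogous to the Gaussian branch above would then be built with something like cdf_inverted(FILTER_TABLE_SIZE, 0.0f, width * 0.5f, gaussian_filter, width); the real utility additionally takes the make_symmetric flag, which I take to mirror the result around the filter center, as the TODO comment discusses.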
Example #2
void Camera::device_update(Device *device, DeviceScene *dscene, Scene *scene)
{
	Scene::MotionType need_motion = scene->need_motion(device->info.advanced_shading);

	update();

	if(previous_need_motion != need_motion) {
		/* Scene's motion model could have been changed since the previous
		 * device camera update. This could happen, for example, when one
		 * render layer has a motion pass and another does not. */
		need_device_update = true;
	}

	if(!need_device_update)
		return;
	
	KernelCamera *kcam = &dscene->data.cam;

	/* store matrices */
	kcam->screentoworld = screentoworld;
	kcam->rastertoworld = rastertoworld;
	kcam->rastertocamera = rastertocamera;
	kcam->cameratoworld = cameratoworld;
	kcam->worldtocamera = worldtocamera;
	kcam->worldtoscreen = worldtoscreen;
	kcam->worldtoraster = worldtoraster;
	kcam->worldtondc = worldtondc;

	/* camera motion */
	kcam->have_motion = 0;
	kcam->have_perspective_motion = 0;

	if(need_motion == Scene::MOTION_PASS) {
		/* TODO(sergey): Support perspective (zoom, fov) motion. */
		if(type == CAMERA_PANORAMA) {
			if(use_motion) {
				kcam->motion.pre = transform_inverse(motion.pre);
				kcam->motion.post = transform_inverse(motion.post);
			}
			else {
				kcam->motion.pre = kcam->worldtocamera;
				kcam->motion.post = kcam->worldtocamera;
			}
		}
		else {
			if(use_motion) {
				kcam->motion.pre = cameratoraster * transform_inverse(motion.pre);
				kcam->motion.post = cameratoraster * transform_inverse(motion.post);
			}
			else {
				kcam->motion.pre = worldtoraster;
				kcam->motion.post = worldtoraster;
			}
		}
	}
#ifdef __CAMERA_MOTION__
	else if(need_motion == Scene::MOTION_BLUR) {
		if(use_motion) {
			transform_motion_decompose((DecompMotionTransform*)&kcam->motion, &motion, &matrix);
			kcam->have_motion = 1;
		}
		if(use_perspective_motion) {
			kcam->perspective_motion = perspective_motion;
			kcam->have_perspective_motion = 1;
		}
	}
#endif

	/* depth of field */
	kcam->aperturesize = aperturesize;
	kcam->focaldistance = focaldistance;
	kcam->blades = (blades < 3)? 0.0f: blades;
	kcam->bladesrotation = bladesrotation;

	/* motion blur */
#ifdef __CAMERA_MOTION__
	kcam->shuttertime = (need_motion == Scene::MOTION_BLUR) ? shuttertime: -1.0f;

	if(need_motion == Scene::MOTION_BLUR) {
		vector<float> shutter_table;
		util_cdf_inverted(SHUTTER_TABLE_SIZE,
		                  0.0f,
		                  1.0f,
		                  function_bind(shutter_curve_eval, _1, shutter_curve),
		                  false,
		                  shutter_table);
		shutter_table_offset = scene->lookup_tables->add_table(dscene,
		                                                       shutter_table);
		kcam->shutter_table_offset = (int)shutter_table_offset;
	}
	else if(shutter_table_offset != TABLE_OFFSET_INVALID) {
		scene->lookup_tables->remove_table(shutter_table_offset);
		shutter_table_offset = TABLE_OFFSET_INVALID;
	}
#else
	kcam->shuttertime = -1.0f;
#endif

	/* type */
	kcam->type = type;

	/* anamorphic lens bokeh */
	kcam->inv_aperture_ratio = 1.0f / aperture_ratio;

	/* panorama */
	kcam->panorama_type = panorama_type;
	kcam->fisheye_fov = fisheye_fov;
	kcam->fisheye_lens = fisheye_lens;
	kcam->equirectangular_range = make_float4(longitude_min - longitude_max, -longitude_min,
	                                          latitude_min - latitude_max, -latitude_min + M_PI_2_F);

	/* sensor size */
	kcam->sensorwidth = sensorwidth;
	kcam->sensorheight = sensorheight;

	/* render size */
	kcam->width = width;
	kcam->height = height;
	kcam->resolution = resolution;

	/* store differentials */
	kcam->dx = float3_to_float4(dx);
	kcam->dy = float3_to_float4(dy);

	/* clipping */
	kcam->nearclip = nearclip;
	kcam->cliplength = (farclip == FLT_MAX)? FLT_MAX: farclip - nearclip;

	/* Camera in volume. */
	kcam->is_inside_volume = 0;

	/* Rolling shutter effect */
	kcam->rolling_shutter_type = rolling_shutter_type;
	kcam->rolling_shutter_duration = rolling_shutter_duration;

	previous_need_motion = need_motion;
}
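
The shutter table built above is the same inverted-CDF idea on the sampling side: at render time a uniform random number is mapped through the table to a time in [0, 1] distributed according to shutter_curve_eval, so more samples fall where the shutter is more open. A minimal sketch of that lookup, assuming the table is a plain float array; sample_from_table and the linear interpolation here are illustrative, not the actual Cycles kernel lookup-table code.

#include <vector>

/* Illustrative sketch: map a uniform random number in [0, 1] through an
 * inverted-CDF table with linear interpolation between neighboring entries.
 * sample_from_table is a hypothetical helper, not the real kernel lookup. */
static float sample_from_table(const std::vector<float> &inv_cdf, float randu)
{
	float x = randu * (float)(inv_cdf.size() - 1);
	int i = (int)x;
	int j = (i + 1 < (int)inv_cdf.size()) ? i + 1 : i;
	float t = x - (float)i;
	return inv_cdf[i] * (1.0f - t) + inv_cdf[j] * t;
}

With the filter table from Example #1 the same kind of lookup would yield a pixel-filter offset; with the shutter table it would yield a shutter time for motion blur sampling.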