Example No. 1
std::unique_ptr<unsigned char[]> getRaster() {
  const unsigned int row_bytes = scene.cam.viewportWidth * 3;
  const unsigned int buf_len = scene.cam.viewportHeight * row_bytes;
  auto buf = std::unique_ptr<unsigned char[]>(new unsigned char[buf_len]);
  const double multFactor = 1.0 / NUM_AA_SUBPIXELS;
  const Eigen::Vector4d maxrgb(255, 255, 255, 0);
  tbb::parallel_for(
      tbb::blocked_range2d<int, int>(0, scene.cam.viewportHeight, 0,
                                     scene.cam.viewportWidth),
      [&](tbb::blocked_range2d<int, int> &r) {

        // Support for AA sampler
        std::vector<Ray, Eigen::aligned_allocator<Ray>> rays(
            NUM_AA_SUBPIXELS, Ray(Eigen::Vector4d(0, 0, 0, 1),
                                  Eigen::Vector4d(1, 0, 0, 0), ID_AIR));
        RandomAASampler sampler(scene.cam);

        for (int row = r.rows().begin(); row != r.rows().end(); ++row) {
          for (int col = r.cols().begin(); col != r.cols().end(); ++col) {
            Camera cam(scene.cam);
            Eigen::Vector4d color = Eigen::Vector4d::Zero();
            // materials stack for refraction
            std::array<int, MAX_DEPTH + 1> objStack;
            if (!aa) {
              Ray ray = cam.constructRayThroughPixel(col, row);
              objStack.fill(ID_AIR);
              color = getColor(ray, 0, 0, objStack);
            } else {
              sampler.constructRaysThroughPixel(col, row, rays);
              for (int i = 0; i < NUM_AA_SUBPIXELS; ++i) {
                objStack.fill(ID_AIR);
                color += getColor(rays[i], 0, 0, objStack);
              }
              color *= multFactor;
            }

            // Convert to 8-bit color and clamp to [0, 255]
            color = 255 * color;
            color = color.cwiseMin(maxrgb);
            buf[row * row_bytes + (col * 3)] = static_cast<unsigned char>(color[0]);
            buf[row * row_bytes + (col * 3) + 1] = static_cast<unsigned char>(color[1]);
            buf[row * row_bytes + (col * 3) + 2] = static_cast<unsigned char>(color[2]);
          }
        }
      });
  return buf;
}
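For reference, the buffer returned by getRaster() above is a tightly packed 8-bit RGB raster, 3 bytes per pixel in row-major order. Below is a minimal sketch of writing such a buffer to a binary PPM file; writePPM is an illustrative helper, not part of the original code, and width/height must match the viewport dimensions used to fill the buffer.

#include <cstddef>
#include <cstdio>

// Write a packed 8-bit RGB buffer to a binary PPM (P6) file.
// Returns false if the file could not be opened.
bool writePPM(const char *path, const unsigned char *rgb,
              unsigned int width, unsigned int height) {
  std::FILE *f = std::fopen(path, "wb");
  if (!f) return false;
  std::fprintf(f, "P6\n%u %u\n255\n", width, height);
  std::fwrite(rgb, 1, static_cast<std::size_t>(width) * height * 3, f);
  std::fclose(f);
  return true;
}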
Example No. 2
fvec scene::get_intersection_color(
		const priority_queue<ray_intersection> &pq ) const {

	// no intersection found, report back the default background color
	if ( pq.empty() ) {
		return BACKGROUND;
	}

	priority_queue<ray_intersection> rays( pq );

	fvec surface_color;
	surface_color.zeros( 4 );

	// get the closest intersected object
	ray_intersection r = rays.top();
	surface_color += (get_surface_color( r, 1 ));

	return surface_color;
}
Example No. 3
void test_sample_primary_rays(bool use_gpu) {
    // Set up a perspective camera with a 1x1 pixel image,
    // an identity camera-to-world matrix, and a 45 degree fov.
    auto pos = Vector3f{0, 0, 0};
    auto look = Vector3f{0, 0, 1};
    auto up = Vector3f{0, 1, 0};
    Matrix3x3f n2c = Matrix3x3f::identity();
    Matrix3x3f c2n = Matrix3x3f::identity();
    Camera camera{1, 1,
        &pos[0],
        &look[0],
        &up[0],
        &n2c.data[0][0],
        &c2n.data[0][0],
        1e-2f,
        false};
    parallel_init();

    // Sample from the center of the pixel
    Buffer<CameraSample> samples(use_gpu, 1);
    samples[0].xy = Vector2{0.5f, 0.5f};
    Buffer<Ray> rays(use_gpu, 1);
    Buffer<RayDifferential> ray_differentials(use_gpu, 1);
    sample_primary_rays(camera,
                        samples.view(0, 1),
                        rays.view(0, 1),
                        ray_differentials.view(0, 1),
                        use_gpu);
    cuda_synchronize();

    equal_or_error<Real>(__FILE__, __LINE__, rays[0].org, Vector3{0, 0, 0});
    equal_or_error<Real>(__FILE__, __LINE__, rays[0].dir, Vector3{0, 0, 1});
    // TODO: test ray differentials

    parallel_cleanup();
}
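For intuition, the expected values in the test above follow directly from pinhole-camera geometry: a sample at (0.5, 0.5) on a 1x1 image hits the center of the single pixel, which lies on the optical axis, so the primary ray starts at the camera position and points along the look direction. A minimal standalone sketch of that mapping (illustrative only, not redner's actual sample_primary_rays implementation):

#include <cmath>

struct Dir3 { float x, y, z; };

// Map a sample in [0,1]^2 on a square image to a camera-space ray direction
// for a pinhole camera with the given vertical field of view. The pixel
// center (0.5, 0.5) maps to screen coordinates (0, 0), i.e. straight down
// the +z axis.
Dir3 primary_dir(float sx, float sy, float fov_radians) {
    float tan_half = std::tan(fov_radians * 0.5f);
    float x = (2.0f * sx - 1.0f) * tan_half;
    float y = (1.0f - 2.0f * sy) * tan_half;
    float len = std::sqrt(x * x + y * y + 1.0f);
    return Dir3{x / len, y / len, 1.0f / len};
}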
Example No. 4
int main(int argc, char ** argv)
{
  if (argc < 2) {
    std::cout << "Usage: " << argv[0] << " <no. of cones> [<direction-x> <direction-y>]" << std::endl;
    return 1;
  }

  unsigned int k = atoi(argv[1]);
  if (k<2) {
    std::cout << "The number of cones should be larger than 1!" << std::endl;
    return 1;
  }

  Direction_2 initial_direction;
  if (argc == 2)
    initial_direction = Direction_2(1, 0);  // default initial_direction
  else if (argc == 4)
    initial_direction = Direction_2(atof(argv[2]), atof(argv[3]));
  else {
    std::cout << "Usage: " << argv[0] << " <no. of cones> [<direction-x> <direction-y>]" << std::endl;
    return 1;
  }

  // construct the functor
  CGAL::Compute_cone_boundaries_2<Kernel> cones;
  // create the vector rays to store the results
  std::vector<Direction_2> rays(k);
  // compute the cone boundaries and store them in rays
  cones(k, initial_direction, rays.begin());

  // display the computed cone boundary rays, starting from the initial direction, in counterclockwise order
  for (unsigned int i=0; i<k; i++)
    std::cout << "Ray " << i << ": " << rays[i] << std::endl;

  return 0;
}
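The example above relies on typedefs for Kernel and Direction_2 that are not shown in the snippet. A plausible setup is sketched below; the kernel choice is an assumption (with an inexact-constructions kernel the cone boundaries are computed approximately), and only the Compute_cone_boundaries_2 header is required by the functor itself.

#include <CGAL/Exact_predicates_inexact_constructions_kernel.h>
#include <CGAL/Compute_cone_boundaries_2.h>

// Assumed kernel and the derived direction type used by the example.
typedef CGAL::Exact_predicates_inexact_constructions_kernel Kernel;
typedef Kernel::Direction_2                                  Direction_2;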
Example No. 5
void c_sampler_renderer::render_scene(scene_ptr scene)
{ 
	//////////////////////////////////////////////////////////////////////////
	// Allocate and initialize sample 
	//////////////////////////////////////////////////////////////////////////
	sample_ptr origin_sample = sample_ptr(new c_sample(m_sampler, m_surface_integrator, m_volume_integrator, scene));

	c_rng rng(2047); 

	int num_pixel_samples = 0; 
	int max_samples = m_sampler->get_max_num_samples(); 
	
	ray_array_ptr rays(new c_ray[max_samples]);
	spectrum_array_ptr ls(new c_spectrum[max_samples]); 
	spectrum_array_ptr ts(new c_spectrum[max_samples]); 
	isect_array_ptr isects(new c_intersection[max_samples]); 
	samples_array_ptr samples_array = origin_sample->duplicate(max_samples); 

	// Keep fetching batches of pixel samples until the sampler is exhausted
	while ((num_pixel_samples = m_sampler->get_current_pixel_samples(samples_array, rng)) > 0)
	{
		for (int j = 0; j < num_pixel_samples; ++j)
		{
			m_camera->generate_ray(samples_array[j], &rays[j]); 

			ls[j] = render_ray(scene, rays[j], &samples_array[j], rng, &isects[j], &ts[j]); 

			assert(!ls[j].has_nan());
	
			m_camera->get_render_target()->add_sample(samples_array[j], ls[j]); 
		}
		
		m_report->update();
	}

	m_display->update_display(m_camera->get_render_target()); 
}
Example No. 6
void VisIntegrator::integrate()
{
	const size_t samp_dim = 8;

	RNG rng;
	ImageSampler image_sampler(spp, image->width, image->height);

	// Sample array
	Array<float> samps;
	samps.resize(RAYS_AT_A_TIME * samp_dim);

	// Sample pixel coordinate array
	Array<uint16_t> coords;
	coords.resize(RAYS_AT_A_TIME * 2);

	// Light path array
	Array<VisPath> paths;
	paths.resize(RAYS_AT_A_TIME);

	// Ray and Intersection arrays
	Array<Ray> rays(RAYS_AT_A_TIME);
	Array<Intersection> intersections(RAYS_AT_A_TIME);

	// ids corresponding to the rays
	Array<uint32_t> ids(RAYS_AT_A_TIME);

	bool last = false;
	while (true) {
		// Generate a bunch of samples
		std::cout << "\t--------\n\tGenerating samples" << std::endl;
		std::cout.flush();
		for (int i = 0; i < RAYS_AT_A_TIME; i++) {
			if (!image_sampler.get_next_sample(samp_dim, &(samps[i*samp_dim]), &(coords[i*2]))) {
				samps.resize(i*samp_dim);
				paths.resize(i);
				last = true;
				break;
			} else {
				paths[i].done = false;
			}
		}
		uint32_t ssize = samps.size() / samp_dim;


		// Size the ray buffer appropriately
		rays.resize(ssize);

		// Generate a bunch of camera rays
		std::cout << "\tGenerating camera rays" << std::endl;
		std::cout.flush();
		for (uint32_t i = 0; i < ssize; i++) {
			float rx = (samps[i*samp_dim] - 0.5) * (image->max_x - image->min_x);
			float ry = (0.5 - samps[i*samp_dim+1]) * (image->max_y - image->min_y);
			float dx = (image->max_x - image->min_x) / image->width;
			float dy = (image->max_y - image->min_y) / image->height;
			rays[i] = scene->camera->generate_ray(rx, ry, dx, dy, samps[i*samp_dim+4], samps[i*samp_dim+2], samps[i*samp_dim+3]);
			rays[i].finalize();
			ids[i] = i;
		}


		// Trace the camera rays
		std::cout << "\tTracing camera rays" << std::endl;
		std::cout.flush();
		tracer->trace(rays, &intersections);


		// Update paths
		std::cout << "\tUpdating paths" << std::endl;
		std::cout.flush();
		uint32_t rsize = rays.size();
		for (uint32_t i = 0; i < rsize; i++) {
			if (intersections[i].hit) {
				// Ray hit something!  Store intersection data
				paths[ids[i]].done = true;
				paths[ids[i]].col = intersections[i].col;
			} else {
				// Ray didn't hit anything: mark the path done with a black background
				paths[ids[i]].done = true;
				paths[ids[i]].col = Color(0.0, 0.0, 0.0);
			}
		}


		// Accumulate the samples
		std::cout << "\tAccumulating samples" << std::endl;
		std::cout.flush();
		for (size_t i = 0; i < ssize; i++) {
			image->add_sample(paths[i].col, coords[i*2], coords[i*2+1]);
		}

		// Print percentage complete
		static int32_t last_perc = -1;
		int32_t perc = image_sampler.percentage() * 100;
		if (perc > last_perc) {
			std::cout << perc << "%" << std::endl;
			last_perc = perc;
		}

		if (callback)
			callback();

		if (last)
			break;
	}
}

void DirectLightingIntegrator::integrate()
{
	const size_t samp_dim = 8;

	RNG rng;
	ImageSampler image_sampler(spp, image->width, image->height);

	// Sample array
	Array<float> samps(RAYS_AT_A_TIME * samp_dim);

	// Sample pixel coordinate array
	Array<uint16_t> coords(RAYS_AT_A_TIME * 2);

	// Light path array
	Array<DLPath> paths(RAYS_AT_A_TIME);

	// Ray and Intersection arrays
	Array<Ray> rays(RAYS_AT_A_TIME);
	Array<Intersection> intersections(RAYS_AT_A_TIME);

	// ids corresponding to the rays
	Array<uint32_t> ids(RAYS_AT_A_TIME);

	bool last = false;
	while (true) {
		// Generate a bunch of samples
		std::cout << "\t--------\n\tGenerating samples" << std::endl;
		std::cout.flush();
		for (int i = 0; i < RAYS_AT_A_TIME; i++) {
			if (!image_sampler.get_next_sample(samp_dim, &(samps[i*samp_dim]), &(coords[i*2]))) {
				samps.resize(i*samp_dim);
				paths.resize(i);
				last = true;
				break;
			} else {
				paths[i].done = false;
			}
		}
		uint32_t ssize = samps.size() / samp_dim;


		// Size the ray buffer appropriately
		rays.resize(ssize);


		// Generate a bunch of camera rays
		std::cout << "\tGenerating camera rays" << std::endl;
		std::cout.flush();
		for (uint32_t i = 0; i < ssize; i++) {
			float rx = (samps[i*samp_dim] - 0.5) * (image->max_x - image->min_x);
			float ry = (0.5 - samps[i*samp_dim+1]) * (image->max_y - image->min_y);
			float dx = (image->max_x - image->min_x) / image->width;
			float dy = (image->max_y - image->min_y) / image->height;
			rays[i] = scene->camera->generate_ray(rx, ry, dx, dy, samps[i*samp_dim+4], samps[i*samp_dim+2], samps[i*samp_dim+3]);
			rays[i].finalize();
			ids[i] = i;
		}


		// Trace the camera rays
		std::cout << "\tTracing camera rays" << std::endl;
		std::cout.flush();
		tracer->trace(rays, &intersections);


		// Update paths
		std::cout << "\tUpdating paths" << std::endl;
		std::cout.flush();
		uint32_t rsize = rays.size();
		for (uint32_t i = 0; i < rsize; i++) {
			if (intersections[i].hit) {
				// Ray hit something!  Store intersection data
				paths[ids[i]].inter = intersections[i];
			} else {
				// Ray didn't hit anything: mark the path done with a black background
				paths[ids[i]].done = true;
				paths[ids[i]].col = Color(0.0, 0.0, 0.0);
			}
		}


		// Generate a bunch of shadow rays
		std::cout << "\tGenerating shadow rays" << std::endl;
		std::cout.flush();
		uint32_t sri = 0; // Shadow ray index
		for (uint32_t i = 0; i < ssize; i++) {
			if (!paths[i].done) {
				// Select a light and store the normalization factor for its output
				Light *lighty = scene->finite_lights[(uint32_t)(samps[i*samp_dim+5] * scene->finite_lights.size()) % scene->finite_lights.size()];

				// Sample the light source
				Vec3 ld;
				paths[i].lcol = lighty->sample(intersections[i].p, samps[i*samp_dim+6], samps[i*samp_dim+7], rays[i].time, &ld)
				                * (float)(scene->finite_lights.size());

				// Create a shadow ray for this path
				float d = ld.length();
				ld.normalize();
				rays[sri].o = paths[i].inter.p + paths[i].inter.offset;
				rays[sri].d = ld;
				rays[sri].time = samps[i*samp_dim+4];
				rays[sri].is_shadow_ray = true;
				rays[sri].ow = paths[i].inter.owp();
				rays[sri].dw = 0.0f;
				//rays[sri].has_differentials = false;
				rays[sri].max_t = d;
				rays[sri].finalize();

				ids[sri] = i;

				// Increment shadow ray index
				sri++;
			}
		}
		rays.resize(sri);


		// Trace the shadow rays
		std::cout << "\tTracing shadow rays" << std::endl;
		std::cout.flush();
		tracer->trace(rays, &intersections);


		// Calculate sample colors
		std::cout << "\tCalculating sample colors" << std::endl;
		std::cout.flush();
		rsize = rays.size();
		for (uint32_t i = 0; i < rsize; i++) {
			uint32_t id = ids[i];
			if (intersections[i].hit) {
				// Sample was shadowed
				paths[id].done = true;
				paths[id].col = Color(0.0, 0.0, 0.0);
			} else {
				// Sample was lit
				paths[id].inter.n.normalize();
				float lambert = dot(rays[i].d, paths[id].inter.n);
				if (lambert < 0.0) lambert = 0.0;

				paths[id].col = paths[id].lcol * lambert;
			}
		}


		// Print percentage complete
		static int32_t last_perc = -1;
		int32_t perc = image_sampler.percentage() * 100;
		if (perc > last_perc) {
			std::cout << perc << "%" << std::endl;
			last_perc = perc;
		}

		if (!Config::no_output) {
			// Accumulate the samples
			std::cout << "\tAccumulating samples" << std::endl;
			std::cout.flush();
			for (size_t i = 0; i < ssize; i++) {
				image->add_sample(paths[i].col, coords[i*2], coords[i*2+1]);
			}

			if (callback)
				callback();
		}

		if (last)
			break;
	}
}
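The shadow-ray pass in DirectLightingIntegrator::integrate() above uses a simple uniform light-selection estimator: one of the scene's finite lights is picked with probability 1/N (via samps[i*samp_dim+5]) and its sampled contribution is scaled by N, which keeps the direct-lighting estimate unbiased. A minimal sketch of that selection step, using the snippet's own Light, Color, and Vec3 types (the helper itself is illustrative, not part of the original code):

#include <algorithm>
#include <cstddef>
#include <vector>

// Pick a light uniformly with the random number u in [0, 1) and return its
// sampled contribution scaled by the number of lights, compensating for the
// 1/N selection probability.
Color sample_one_light(const std::vector<Light *> &lights, float u,
                       const Vec3 &p, float u1, float u2, float time,
                       Vec3 *light_dir)
{
	if (lights.empty())
		return Color(0.0, 0.0, 0.0);
	const size_t n = lights.size();
	const size_t idx = std::min(static_cast<size_t>(u * n), n - 1);
	return lights[idx]->sample(p, u1, u2, time, light_dir) * static_cast<float>(n);
}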