Example #1
// TODO: implement adaptive supersampling
RGBColour World::colourForPixelAt(int i, int j) {
	double d = viewport.getViewingDistance();

	if (ss_level == 1) {
		// no super-sampling
		double uValue = viewport.uAmount(i, 1, 0, false);
		double vValue = viewport.vAmount(j, 1, 0, false);
		vec3 direction = wAxis.scaled(-d) + uAxis.scaled(uValue) + vAxis.scaled(vValue);
		Ray theRay(cameraPosition, direction);

		return RGBColour(traceRay(theRay, 0.0, 0));
	}

	// 0.01 => x16, 1.2 => x64
	double thresholds[2] = {0.01, 1.2}; 


	double var = 0.0;
	int lvl_log = 2;

	RGBVec pixelColour;

	while (lvl_log <= ss_level) {
		pixelColour = RGBVec(0.0,0.0,0.0);

		int lvl = int_pow(2, lvl_log - 1); 

		double scale_factor = 1.0 / static_cast<double>(lvl*lvl);

		vec3 sum_x_sq(0.0,0.0,0.0);

		for (int a = 0; a < lvl; a++) {
			for (int b = 0; b < lvl; b++) {
				double uValue = viewport.uAmount(i, ss_level, a, lvl_log > 2);
				double vValue = viewport.vAmount(j, ss_level, b, lvl_log > 2);
				vec3 direction = wAxis.scaled(-d) + uAxis.scaled(uValue) + vAxis.scaled(vValue);	
				Ray theRay(cameraPosition, direction);
				RGBVec sample = traceRay(theRay, 0.0, 0).scaled(scale_factor);
				pixelColour += sample;
				if (ss_level > 2) sum_x_sq += sample.getVector().pointwise(sample.getVector());
			}
		}		

		if (lvl_log == 2 && ss_level > 2) {
			vec3 varvec = sum_x_sq.scaled(scale_factor) - pixelColour.getVector().pointwise(pixelColour.getVector());
			var = varvec.magnitude();
			if (i % 100 == 0 && j % 100 == 0) std::cout << "var: " << var << std::endl;
		}

		if (var < thresholds[lvl_log - 2] || lvl_log == ss_level) break;
		lvl_log++;
	}

	if (lvl_log == 2) renderStats.ss_x4++;
	else if (lvl_log == 3) renderStats.ss_x16++;
	else if (lvl_log == 4) renderStats.ss_x64++;

	return RGBColour(pixelColour);
}	
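The snippet above relies on an int_pow helper that is not shown; a minimal sketch, assuming it is plain integer exponentiation by repeated multiplication:

// Hypothetical helper assumed by the example above: integer exponentiation by
// repeated multiplication (it is only ever called here with small exponents).
static int int_pow(int base, int exp) {
	int result = 1;
	for (int e = 0; e < exp; e++)
		result *= base;
	return result;
}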
Example #2
File: main.cpp Project: Jordangc/Project
void slaveFunc () {
	int nodeRank;
	MPI_Comm_rank(MPI_COMM_WORLD, &nodeRank);  
	
	int treeSize;
	MPI_Bcast (&treeSize, 1, MPI_INT, MASTER, MPI_COMM_WORLD);

	char * treeBuffer = new char[treeSize];
	MPI_Bcast (treeBuffer,  treeSize, MPI_BYTE, MASTER, MPI_COMM_WORLD);
	
	Octree t;
	t.readSerializedData(treeBuffer, treeSize);
	
	MPI_Status status;
	int chunkSize;
	MPI_Recv (&chunkSize, 1, MPI_INT, MASTER, HEADER, MPI_COMM_WORLD, &status);

	cout << "chunk size" << chunkSize << endl;
	RayPixel * chunk = new RayPixel[chunkSize];
	MPI_Recv (chunk, chunkSize * sizeof(RayPixel), MPI_BYTE, MASTER, RAY_ARRAY, MPI_COMM_WORLD, &status);

	TracePixel * traceChunk = new TracePixel[chunkSize];

	for (int i = 0; i < chunkSize; i++) {
		traceChunk[i] = traceRay(chunk[i], t);
	}
	cout << "done tracing client pack" << endl;
	
	// wait for the non-blocking sends to complete before freeing the buffers
	MPI_Request requests[2];
	MPI_Isend (&chunkSize, 1, MPI_INT, MASTER, HEADER, MPI_COMM_WORLD, &requests[0]);
	MPI_Isend (traceChunk, chunkSize * sizeof(TracePixel), MPI_BYTE, MASTER, NODE_ARRAY, MPI_COMM_WORLD, &requests[1]);
	MPI_Waitall (2, requests, MPI_STATUSES_IGNORE);

	delete[] treeBuffer;
	delete[] chunk;
	delete[] traceChunk;
}
/*
* private:
*/
void RayTracer::render() {
  int imageWidth = mScene->getCamera()->getImageWidth();
  int imageHeight = mScene->getCamera()->getImageHeight();
  CameraPointer camera = mScene->getCamera();
  unsigned *renderedImageData = reinterpret_cast< unsigned* >(mRenderedImage.bits());

  for (int y = 0; y < imageHeight; ++y) {
    for (int x = 0; x < imageWidth; ++x) {
      RayIntersection intersection;

      Ray ray = camera->emitRay(x, y);
      Color pixelColor = traceRay(ray, 
                                  0,      // initial recursion depth is 0
                                  false,  // ray emitted from camera is not reflected
                                  1.0,    // air refraction coefficient
                                  1.0,    // initial reflection
                                  intersection);
      
      unsigned char redComponent   = static_cast<unsigned char>(std::min<unsigned>(pixelColor.r * 255, 255));
      unsigned char greenComponent = static_cast<unsigned char>(std::min<unsigned>(pixelColor.g * 255, 255)); 
      unsigned char blueComponent  = static_cast<unsigned char>(std::min<unsigned>(pixelColor.b * 255, 255));

      int index = y * imageWidth + x;
      *(renderedImageData + index) = RGBA(redComponent, greenComponent, blueComponent, 255);
    }
  }
}
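The RGBA() call above packs the four 8-bit channels into a single 32-bit value; a minimal sketch, assuming an ARGB byte order (the real order depends on the image class behind mRenderedImage):

inline unsigned RGBA(unsigned char r, unsigned char g, unsigned char b, unsigned char a) {
  // pack alpha, red, green, blue into one 32-bit word (assumed channel order)
  return (static_cast<unsigned>(a) << 24) |
         (static_cast<unsigned>(r) << 16) |
         (static_cast<unsigned>(g) << 8)  |
          static_cast<unsigned>(b);
}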
Example #4
const ColorRGB Scene3D::traceRay(const Ray& ray, int depth) const
{

	float closest_t_value = NO_INTERSECT;
	const SceneObject* closest_object = findClosest(ray, closest_t_value);

	if (closest_object == 0)
		return ColorRGB(0,0,0);

	ColorRGB retColor(0,0,0);


	for (int i = 0; i < lights.size(); i++)
	{
		Vector3D normL = ((*lights[i]).get_position()-ray.getPointAt(closest_t_value)).normalize();
		Vector3D normN = (*closest_object).surface_normal(ray.getPointAt(closest_t_value));

		retColor += (*lights[i]).get_color()*(*closest_object).get_color()*std::max((normL*normN),float(0));
	}

	// the reflected contribution does not depend on any individual light,
	// so trace it once per intersection rather than once per light
	if (depth < 6 && (*closest_object).get_reflectivity() > 0)
	{
		Ray reflected_ray = ray.reflect(ray.getPointAt(closest_t_value), (*closest_object).surface_normal(ray.getPointAt(closest_t_value)));
		ColorRGB reflection_color = traceRay(reflected_ray, depth+1);
		retColor += (*closest_object).get_reflectivity()*reflection_color;
	}


	return retColor;
}
Example #5
void CPURenderer::render() {
	cout << "rendering frame " << step << endl;
	auto camera = scene->camera;

	// produce ray samples and trace the samples and accumulate path traced image
	for (int y = 0; y < config.h; y++) {
		for (int x = 0; x < config.w; x++) {
			Ray r = camera.sampleRay(x, y);

			glm::vec3 color = traceRay(r);

			rawimg->pixel(x, y) += color;
		}
	}

	// tone-mapping to produce image for display
	// linear mapping
	for (int y = 0; y < config.h; y++) {
		for (int x = 0; x < config.w; x++) {
			img->pixel(x, y).r = rawimg->pixel(x, y).r * 255.0;
			img->pixel(x, y).g = rawimg->pixel(x, y).g * 255.0;
			img->pixel(x, y).b = rawimg->pixel(x, y).b * 255.0;
			img->pixel(x, y).a = 255;
		}
	}
	++step;
}
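The linear mapping above will overflow the 8-bit channels whenever the accumulated radiance exceeds 1; a minimal clamped variant, assuming the same rawimg/img pixel interfaces and glm types used in the snippet:

	// clamp each channel to [0, 1] before scaling into the 8-bit image
	for (int y = 0; y < config.h; y++) {
		for (int x = 0; x < config.w; x++) {
			glm::vec3 c = glm::clamp(rawimg->pixel(x, y), 0.0f, 1.0f);
			img->pixel(x, y).r = c.r * 255.0f;
			img->pixel(x, y).g = c.g * 255.0f;
			img->pixel(x, y).b = c.b * 255.0f;
			img->pixel(x, y).a = 255;
		}
	}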
Example #6
File: main.cpp Project: axnjaxn/ReiTrei
void traceAt_AA(const Scene& scene, Texture& screen, int r, int c) {
  if (settings.coherence) randomizer.reseed();

  Vect4 O, D, color;
  float rmag, rth, rx, ry;
  Camera camera;
  for (int i = 0; i < settings.nsamples; i++) {    
    if (settings.dof_range > 0.0) {
      rmag = randomizer.uniform() * settings.dof_range; 
      rth = randomizer.uniform() * 2 * PI;
      rx = rmag * cos(rth);
      ry = rmag * sin(rth);
    }
    else {rx = ry = 0.0;}

    camera = scene.camera;
    camera.xrotate(rx);
    camera.yrotate(ry);
    
    // use the jittered camera copy (not scene.camera) so the depth-of-field rotation takes effect
    O = camera.getOrigin();
    for (Real r1 = -0.5; r1 <= 0.5; r1 += 0.5)
      for (Real c1 = -0.5; c1 <= 0.5; c1 += 0.5) {
        D = camera.getDirection(r + r1, c + c1);
        color += traceRay(scene, O, D) / 9.0;  // 3x3 sub-pixel grid, hence / 9.0
      }
  }
      
  screen.setColor(r, c, color / settings.nsamples);
}
Example #7
bool RayTracer::render () {
    Ray3D ray;

    ray.setOrigin (Vector3 (0, 0, 5));

    float sigmaX = static_cast<float> (_left);
    float sigmaY = static_cast<float> (_top);

    for (int curScanLine = 0; curScanLine < _bitmap.getHeight (); curScanLine++) {
        for (int x = 0; x < _bitmap.getWidth (); x++) {
            ray.setDirection (Vector3 (sigmaX, sigmaY, 0) - ray.getOrigin ());	//no need to normalize - Ray3D does this automatically for us!

            Color pixelColor = traceRay (ray, 999999999.0f);

            if (_bitmap.putPixel (x, curScanLine, pixelColor) != E_SUCCESS) {
                return (_done = false);
            }

            sigmaX += _deltaX;
        }

        sigmaX = static_cast<float> (_left);
        sigmaY += _deltaY;
    }

    _done = true;

    return _done;
}
void Tracer::traceScene(const float width, const float height) {
    Vec3 *image = new Vec3[(int) width * (int) height];
    double fov = 60.0, asp = width / (double) height;
    double ang = tan(M_PI * 0.5 * fov / 180.);

    // parallelize the outer loop only; a nested "parallel for" is ignored by
    // default and just adds scheduling overhead
    #pragma omp parallel for
    for (int j = 0; j < (int) height; j++) {
        for (int i = 0; i < (int) width; i++) {
            double x = (2 * ((i + 0.5) / width) - 1) * ang * asp;
            double y = (1 - 2 * ((j + 0.5) / height)) * ang;
            Vec3 dir{x, y, -1};
            dir = dir.norm();
            image[j * (int) width + i] = traceRay(Ray{dir, Vec3{0.0, 0.0, 0.0}}, 0);
        }
    }

    std::ofstream ofs("./testscene.ppm", std::ios::out | std::ios::binary);
    ofs << "P6\n" << width << " " << height << "\n255\n";
    for (unsigned i = 0; i < width * height; ++i) {
        ofs << (unsigned char) (std::min(1.0f, (float) image[i].x) * 255) <<
            (unsigned char) (std::min(1.0f, (float) image[i].y) * 255) <<
            (unsigned char) (std::min(1.0f, (float) image[i].z) * 255);
    }
    ofs.close();
    delete[] image;
}
Example #9
RGB Raytracer::indirectRadiance(const TraceResult &r, int depth)
{
    RGB indirect_color(0);
    glm::vec3 point = r.biasedIntersectionPoint();


    int numIndirectRays = 1;
    for (int i = 0; i < numIndirectRays; i++)
    {
        float survive = 1.0f;
        if(depth > 0)
        {
            float inverse_pdf = 0;
            glm::vec3 rand_direction = uniformImportanceSampling(r.p->surfaceNormal(), inverse_pdf);
            Ray indirect_ray(point, point + rand_direction);
            RGB per_ray_color; 
            traceRay(indirect_ray, per_ray_color, depth - 1);
            indirect_color +=  survive * per_ray_color * r.p->BRDF(r.intersection) * r.p->geometricTerm(rand_direction) * 2.0f;
        }
        
    }
    return indirect_color / (float) numIndirectRays;

}
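The uniformImportanceSampling helper called above is not shown; a minimal sketch of what such a function could look like, assuming glm, a rand01() uniform [0,1) generator, and uniform sampling over the hemisphere about the surface normal (so the inverse pdf is 2*pi):

glm::vec3 uniformImportanceSampling(const glm::vec3 &normal, float &inverse_pdf)
{
    // pick cos(theta) and phi uniformly, giving a uniform distribution over the hemisphere
    float z = rand01();                                   // cos(theta) in [0,1)
    float r = sqrtf(fmaxf(0.0f, 1.0f - z * z));
    float phi = 2.0f * 3.1415926535f * rand01();
    glm::vec3 local(r * cosf(phi), r * sinf(phi), z);

    // build an orthonormal basis around the normal
    glm::vec3 up = fabsf(normal.z) < 0.999f ? glm::vec3(0, 0, 1) : glm::vec3(1, 0, 0);
    glm::vec3 tangent = glm::normalize(glm::cross(up, normal));
    glm::vec3 bitangent = glm::cross(normal, tangent);

    inverse_pdf = 2.0f * 3.1415926535f;                   // uniform hemisphere pdf = 1 / (2*pi)
    return glm::normalize(local.x * tangent + local.y * bitangent + local.z * normal);
}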
Example #10
void OpenMPDevice::renderTask(int index, renderinfo *info) { 
    //printf("",m_pHeader[1]);
    
    rect window = m_tasks[index];
    int2 start = window.getOrigin();
    int2 size = window.getSize();
    
	int2 end = start+size;
    
    //printf("start %d %d end %d %d\n", start.getX(), start.getY(), end.getX(), end.getY());

    // parallelize only the outer loops; OpenMP ignores a nested "parallel for"
    // by default, so the inner pragmas only added overhead
    #pragma omp parallel for
    for(int y = start[1]/RAY_BUNDLE_WINDOW_SIZE; y < end[1]/RAY_BUNDLE_WINDOW_SIZE; y++) {
        for(int x = start[0]/RAY_BUNDLE_WINDOW_SIZE; x < end[0]/RAY_BUNDLE_WINDOW_SIZE; x++) {
            traceRayBundle(x, y, RAY_BUNDLE_WINDOW_SIZE, info);
            //printf("done %d %d\n", x, y);
        }
    }
    
	#pragma omp parallel for
	for(int y = start[1]; y < end[1]; y++) {
		for(int x = start[0]; x < end[0]; x++) {
			traceRay(x, y, info);
		}
	}
}
Example #11
vec3f RayTracer::trace( Scene *scene, double x, double y, isect& i )
{
    ray r( vec3f(0,0,0), vec3f(0,0,0) );
    scene->getCamera()->rayThrough( x,y,r );
	// Determine whether the ray starts in the air or inside an object
	vector<const SceneObject*> stack;
	n_ray += vec3f(0.02, 0.02, 0.02);
	return traceRay( scene, r, vec3f(1.0,1.0,1.0), depth, i, stack).clamp();
}
Example #12
// Trace a top-level ray through normalized window coordinates (x,y)
// through the projection plane, and out into the scene.  All we do is
// enter the main ray-tracing method, getting things started by plugging
// in an initial ray weight of (1.0,1.0,1.0) and an initial recursion depth of 0.
vec3f RayTracer::trace( Scene *scene, double x, double y )
{
    ray r( vec3f(0,0,0), vec3f(0,0,0) );
    scene->getCamera()->rayThrough( x,y,r );
	material_stack.clear();
	const Material air;
	material_stack.push_back(&air);
	return traceRay( scene, r, vec3f(1.0,1.0,1.0), 0 ).clamp();
}
Example #13
// Trace a top-level ray through normalized window coordinates (x,y)
// through the projection plane, and out into the scene.  All we do is
// enter the main ray-tracing method, getting things started by plugging
// in an initial ray weight of (1.0,1.0,1.0) and an initial recursion depth of 0.
vec3f RayTracer::trace( Scene *scene, double x, double y )
{
    ray r( vec3f(0,0,0), vec3f(0,0,0) );
    scene->getCamera()->rayThrough( x,y,r );
	mediaHistory.clear();

	return traceRay( scene, r, vec3f(1.0,1.0,1.0), 0 ).clamp();
}
Example #14
void Scene3D::render(const Camera& camera, const int imgSize, std::ostream& os)
{
	
	os << "P3 " << imgSize << " " << imgSize << " " << 255 << "\n";
	
	for (int y = 0; y < imgSize; y++)
	{
		for (int x = 0; x < imgSize; x++)
		{
			ColorRGB pixelColor = traceRay(camera.getRayForPixel(x, y, imgSize));
			pixelColor*=255;
			pixelColor.clamp_output(0, 255, os);
			os << "\n";
		}
	}
	
}
Example #15
Color traceRay(Ray r, vector<Shape*> shapes, vector<Lighting*> lights, float tmin, float tmax, float depth, float max_depth) {
    
    if (depth > max_depth) {return Color(0.0, 0.0, 0.0);}
    IntersectRecord record;
    bool is_hit = trace(record, r, shapes, tmin, tmax);
    if (is_hit) {
        Color radiance = record.material -> ambientResponse(record.intersection, record.uv);
        //calculate lighting / shadows
        for (int k = 0; k < lights.size(); k++) {
            Vector3 to_light = lights[k] -> getLightVector(record);
            //check for shadow / object blocking light
            if (!shadowTrace(Ray(record.intersection, to_light), shapes, tmin, tmax)) {
                radiance += lights[k] -> getIntensity() * record.material -> emittedRadiance(record.uvw, to_light, -r.direction());
            }
        }
        
        
        if (record.material -> isReflective()) {
            Vector3 reflect_dir = record.material -> getReflectionDirection(record.uvw, r.direction());
            Ray reflect_ray = Ray(record.intersection, reflect_dir);
            Color reflectance = traceRay(reflect_ray, shapes, lights, tmin, tmax, depth + 1, max_depth);
            radiance += reflectance * record.material -> reflectiveResponse(depth);
            //printf("%f %f %f", reflectance.getRed(), reflectance.getGreen(), reflectance.getBlue());
        }
        
        
        /*
        if (record.material -> isTransmissive()) {
            Vector3 trans_dir; float fresnel_scale; Color extinction;
            if (record.material -> getTransmissionDirection(record.uvw, r.direction(), extinction, fresnel_scale, trans_dir)) {
                Ray refract_ray = Ray(record.intersection, trans_dir);
                Color refraction = traceRay(refract_ray, shapes, lights, tmin, tmax, depth + 1, max_depth);
                radiance += refraction * fresnel_scale * extinction;
                //radiance += refraction * record.material -> transmissiveResponse(depth);
                //printf("%f %f %f", refraction.getRed(), refraction.getGreen(), refraction.getBlue());
            }
        }
        */
        
        return radiance;
         
    }

    return Color(0, 0, 0);
}
Example #16
void Raytracer::lightPixel(int u, int v)
{
    RGB color;
    Ray r;

    int numViewRays = 50;
    for(int i = 0; i < numViewRays ; i++)
    {
        RGB view_ray(0);
        camera->genViewingRay(u, v, r);
        traceRay(r, view_ray, 2);
        color += view_ray;
    }

    color /= numViewRays;

    img.setPixelColor(u, v , color);
}
Example #17
glm::vec3 CRayTracer::sampleRandom(unsigned int a, unsigned int b, SRay &ray,
	float zw, const CScene &scene) const {
	// Start color of a pixel is black
	glm::vec3 color(0.f);

	// Random sampling
	for (unsigned int p = 0; p < m_viewPlane.samples * m_viewPlane.samples; ++p)
	{
		float x = m_viewPlane.pSize * (b - 0.5f * m_viewPlane.hRes + rand_float());
		float y = m_viewPlane.pSize * (a - 0.5f * m_viewPlane.vRes + rand_float());
		ray.origin = glm::vec3(x, y, zw);

		// Accumulate color
		color += traceRay(ray, scene);
	}
	color /= m_viewPlane.samples * m_viewPlane.samples;
	return color;
}
Example #18
Image draw(float width, float height) {
    IntersectRecord record;
    vector<Shape*> shapes = makeScene();
    vector<Lighting*> lights = makeLighting();
    
    Image im(width, height);
    Camera cam(Vector3(0, 0, 0), Vector3(0, 0, -1), Vector3(0, 1, 0), 0.0, -2.0, 2.0, -2.0, 2.0, 3, width, height);
    
    //for each pixel
    for (int i = 0; i < width; i++) {
        for (int j = 0; j < height; j++) {
            Ray r = cam.getRay(i, j, 0, 0);
            Color radiance = traceRay(r, shapes, lights, 0.0001f, 100000.0f, 0, 5);
            im.set(i, j, radiance);
        }
    }
    return im;
}
Example #19
void DisplayClass::doRayTrace() {
	graph->rootNode->computeAllInverses();
	unsigned int width = static_cast<int>(*resoX);
	unsigned int height = static_cast<int>(*resoY);
	glm::vec3 A = glm::cross(rayCamera->center, rayCamera->up);
	glm::vec3 B = glm::cross(A, rayCamera->center);
	glm::vec3 M = rayCamera->center + rayCamera->eye;
	glm::vec3 V = (B * glm::length(rayCamera->center) * tan(glm::radians(rayCamera->fovy)))/ glm::length(B);
	//float fovx = atan(length(V) * (*WriteBMP::resoX/ *WriteBMP::resoY)/length(M));
	glm::vec3 H = (*resoX/ *resoY) * V;
	H.x = H.y;
	H.y = 0.0f;
	H.z = 0.0f;
	BMP output;
	output.SetSize(width, height);
	output.SetBitDepth(24);
	glm::vec3 P;
	glm::vec3 D;
	glm::vec3 E(rayCamera->eye.x, rayCamera->eye.y, rayCamera->eye.z);
	glm::vec3 color(0.0f, 0.0f, 0.0f);
	std::cout << "Beginning raytrace" << std::endl;
	for(unsigned int x = 0; x < width; x++) {
		for(unsigned int y = 0; y < height; y++) {
			// stack-allocated colour and eye avoid the per-pixel new/delete of the original
			color = glm::vec3(0.0f, 0.0f, 0.0f);
			E = glm::vec3(rayCamera->eye.x, rayCamera->eye.y, rayCamera->eye.z);
			P = DisplayClass::mapPoint(x, height - y - 1, M, H, V);
			D = glm::normalize(P - E);
			traceRay(&color, 0, E, D, 1.0f, 1.0f, *rayLightCol->at(0));
			output(x, y)->Red = 255 * color.x;
			output(x, y)->Green = 255 * color.y;
			output(x, y)->Blue = 255 * color.z;
		}
		if (x % 10 == 0) {
			std::cout << "finished vertical line: " << x << std::endl;
		}
	}
	std::cout << "Finished raytrace!" << std::endl;
	output.WriteToFile(rayOutputFile->c_str());
}
Example #20
void drawScene () {
  int i,j;

  /* declare data structures on stack to avoid dynamic allocation */
  point worldPix;  /* current pixel in world coordinates */
  point direction; 
  ray r;
  color c;

  /* initialize */
  worldPix.w = 1.0;
  worldPix.z = -pnear;

  r.start = &worldPix;
  r.end = &direction;


  /* trace a ray for every pixel */
  for (i=0; i<width; i++) {
    for (j=0; j<height; j++) {

      /* find position of pixel in world coordinates */
      /* y position = (pixel height/middle) scaled to world coords */ 
      worldPix.y = (j-(height/2))*imageWidth/width;
      /* x position = (pixel width/middle) scaled to world coords */ 
      worldPix.x = (i-(width/2))*imageWidth/width;

      /* find direction */
      /* note: direction vector is NOT NORMALIZED */
      calculateDirection(viewpoint,&worldPix,&direction);
   	
      /* trace the ray! */
      c.r = 0.0;
      c.g = 0.0;
      c.b = 0.0;

      traceRay(&r,&c,depth);
      /* write the pixel! */
      drawPixel(i,j,c.r,c.g,c.b);
    }
  }
}
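calculateDirection is not shown above; a minimal sketch consistent with how it is called (viewpoint appears to already be a point*), producing the unnormalized vector from the viewpoint to the pixel, as the comment in drawScene promises:

/* hypothetical helper: direction = to - from, deliberately left unnormalized */
void calculateDirection(point* from, point* to, point* dir) {
  dir->x = to->x - from->x;
  dir->y = to->y - from->y;
  dir->z = to->z - from->z;
  dir->w = 0.0;  /* a direction, not a position */
}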
Example #21
File: Scene.cpp Project: keishi/PixelKit
 void Scene::raytrace(CameraRef camera, Rect rect, RenderOption option, ImageRef image)
 {
     ASSERT(image->width() == rect.size.width && image->height() == rect.size.height);
     for (unsigned int y = 0; y < rect.size.height; y++) {
         for (unsigned int x = 0; x < rect.size.width; x++) {
             float xPos = (float)(x + rect.origin.x) / (float) camera->width() - 0.5f;
             float yPos = (float)(y + rect.origin.y) / (float) camera->height() - 0.5f;
             Ray viewRay = camera->viewRay(xPos, yPos);
             HitInfo hitInfo;
             Color color;
             
             if (traceRay(viewRay, option, 1.0f, 0, &color, &hitInfo)) {
                 image->setPixelColor(x, y, color);
             } else {
                 // set background color
                 //image->setPixelColor(x, y, hitInfo.material->color());
             }
             
         }
     }
 }
Example #22
glm::vec3 CRayTracer::sampleRegular(unsigned int a, unsigned int b, SRay &ray,
	float zw, const CScene &scene) const {
	// Start color of a pixel is black
	glm::vec3 color(0.f);

	// Regular sampling
	for (unsigned int py = 0; py < m_viewPlane.samples; ++py) {
		for (unsigned int px = 0; px < m_viewPlane.samples; ++px) {
			float x = m_viewPlane.pSize * (b - 0.5f * m_viewPlane.hRes +
				(px + 0.5f) / m_viewPlane.samples);
			float y = m_viewPlane.pSize * (a - 0.5f * m_viewPlane.vRes +
				(py + 0.5f) / m_viewPlane.samples);
			ray.origin = glm::vec3(x, y, zw);

			// Accumulate color
			color += traceRay(ray, scene);
		}
	}
	color /= m_viewPlane.samples * m_viewPlane.samples;
	return color;
}
Example #23
RGBVec World::traceRay(const Ray &ray, double t_min, int depth) {
	IntersectionDatum idat = testIntersection(ray, t_min, scenery);
	
	if (!idat.intersected)
		return bg_colour; 

	RGBVec result_vec;

	ShadableObject *obj = static_cast<ShadableObject *>(idat.intersectedObj);
	double t = idat.coefficient;	

	vec3 p = ray.intersectionPoint(t);
	vec3 n = obj->surfaceNormal(p);

	// compute lighting/shading
	for (std::vector<Light>::iterator lptr = lighting.begin(); lptr != lighting.end(); lptr++) {
		vec3 v = (ray.origin - lptr->pos).normalised();
		vec3 l = (lptr->pos - p).normalised();

		Ray shadowRay(p,l);

		// if we're not in shadow w.r.t this light
		if (!shadows_enabled || !traceShadowRay(shadowRay, scenery)) {
			result_vec += obj->material.shade(*lptr, n, v, l); 
		}
	}

	// the reflected contribution is independent of the individual lights,
	// so trace it once per intersection rather than once per light
	if (reflections_enabled && obj->material.reflective && depth < MAX_TRACE_DEPTH) {
		vec3 d = ray.direction;
		Ray reflected(p, d - n.scaled(2 * d.dot(n)));
		RGBVec reflectedColour = traceRay(reflected, REFLECTION_EPS, depth + 1);
		if (!(reflectedColour.r() == 0.0 && reflectedColour.g() == 0.0 && reflectedColour.b() == 0.0)) {
			RGBVec k_m = obj->material.specular_colour;
			result_vec += reflectedColour.multiplyColour(k_m);
		}
	}
	
	return result_vec; 
}	
Example #24
void RayThread::run()
{
	m_bIsRunning = true;
	/* start ray tracing */
	for (int i = m_nNum; i < m_pCommonData->m_nSceneHeight; i += m_nStride)
	{
		for (int j = 0; j < m_pCommonData->m_nSceneWidth; ++j)
		{
			RayPixel &tempPixel = m_pCommonData->scene[i][j];
			RayVector direction((double)j - m_pCommonData->m_nSceneWidth / 2 - eyePoint.x(),
								(double)(m_pCommonData->m_nSceneHeight) / 2 - i - eyePoint.y(),
								-scrPlane * 1.4 / 0.6);
			RayColor color;
			traceRay(eyePoint, direction, 0, color, false);
			color.normalize();
			tempPixel.setR(color.r());
			tempPixel.setG(color.g());
			tempPixel.setB(color.b());
		}
		m_pCommonData->m_nFinishHeight++;
	}
	m_bIsRunning = false;
}
Example #25
void SimpleRayTracer::renderScene() const {

	ViewPlane vp = scene.viewPlane;

	Ray ray;

	ray.direction = Vector(0, 0, -1);

	TGASupport tgaSupport("/Users/ivantod/test.tga");
	tgaSupport.prepareHeader(vp.hres, vp.vres);

	// rows run over the vertical resolution, columns over the horizontal one
	for (int row=0; row < vp.vres; row++) {
		for (int col=0; col < vp.hres; col++) {
			double x = vp.pixelSize * (col - 0.5 * (vp.hres - 1.0));
			double y = vp.pixelSize * (row - 0.5 * (vp.vres - 1.0));

			ray.origin = Point(x, y, vp.zw);
			Colour pixelColour = traceRay(ray);

			tgaSupport.writePixel(pixelColour);
		}
	}
}
void MyScene::raytrace(int w, int h, unsigned char* pixels) {
	resize(w, h);

	for (int y=0; y < h; y++) {
		if (bDoRender == false)
			break;

		for (int x = 0; x < w; x++) {
			// determine the color of the pixel (x,y) by raytracing

			// form the ray
			Point3 pixel (x, y, -1.0);
			pixel[0] = -1.0 + 2.0 * pixel[0] / (camera.getWidth() - 1);
			pixel[1] = -1.0 + 2.0 * pixel[1] / (camera.getHeight() - 1);
			pixel = camera.getCameraToWorld() * pixel;
			Vector3 dir = pixel - camera.getEye();
			Point3 o = camera.getEye();

			// trace the ray
			Color c = traceRay(o, dir,RECURSIVE_LIMIT);

			// clamp and store the color value
			c[0] = (c[0] > 0.0) ? ((c[0] < 1.0) ? c[0] : 1.0) : 0.0;
			c[1] = (c[1] > 0.0) ? ((c[1] < 1.0) ? c[1] : 1.0) : 0.0;
			c[2] = (c[2] > 0.0) ? ((c[2] < 1.0) ? c[2] : 1.0) : 0.0;
			*pixels++ = (unsigned char) (c[0] * 255.0);
			*pixels++ = (unsigned char) (c[1] * 255.0);
			*pixels++ = (unsigned char) (c[2] * 255.0);
		}

		progress = (double) y / (double) h;
		Fl::check();
	}

	progress = 1.0;
}
Example #27
File: Scene.cpp Project: keishi/PixelKit
    bool Scene::traceRay(Ray& ray, RenderOption option, float refractiveIndex, int level, Color* acc, HitInfo* hitInfo)
    {
        if (level > kMaxTraceLevel) {
            return false;
        }
        *acc = kColorBlack;
        
        bool hit = m_rootGroup->hit(ray, hitInfo);
        if (hit) {
            Vector3 hitPosition = hitInfo->position;
            Vector3 hitNormal = hitInfo->normal;
            MaterialRef hitMaterial = hitInfo->material;
            
            if (option & RenderMaterialAmbience) {
                Color materialColor;
                if (hitMaterial->texture()) {
                    materialColor = hitMaterial->texture()->texelAt(hitInfo->uv);
                } else {
                    materialColor = hitMaterial->color();
                }

                *acc += hitMaterial->ambient() * materialColor * (1.0f - hitMaterial->transparency());
            }
            
            for (unsigned int i = 0; i < m_lights.size(); i++) {
                LightRef light = m_lights[i];
                *acc += light->luminanceOfMaterialAt(*hitInfo, this, option) * (1.0f - hitMaterial->transparency());
                
                if (option & RenderReflection) {
                    if (hitMaterial->reflectivity() > 0.0f && level < kMaxTraceLevel) {
                        float reflection = 2.0 * ray.direction().dot(hitNormal);
                        Vector3 reflectionDirection = (ray.direction() - (hitNormal * reflection)).normalize();
                        Ray reflectedRay(hitPosition, reflectionDirection);
                        
                        HitInfo reflectionInfo;
                        Color reflectedColor = kColorBlack;
                        if (traceRay(reflectedRay, option, refractiveIndex, level + 1, &reflectedColor, &reflectionInfo)) {
                            reflectedColor = reflectedColor * hitMaterial->reflectivity();
                            *acc += reflectedColor * (1.0f - hitMaterial->transparency());
                        }
                    }
                }
                
                if (option & RenderTransparency) {
                    if (hitMaterial->transparency() > 0.0f && level < kMaxTraceLevel) {
                        float n = refractiveIndex / hitMaterial->refractiveIndex();
                        float cosA = -ray.direction().dot(hitNormal);
                        float cosB = sqrtf(1.0f - n * n * (1 - cosA * cosA));
                        if (cosB > 0.0f) {
                            Vector3 refracDirection = (n * ray.direction()) + (n * cosA - cosB) * hitNormal;
                            
                            Vector3 refracRayOrigin = hitPosition + refracDirection * EPSILON;
                            Ray refracRay(refracRayOrigin, refracDirection);
                            
                            HitInfo refracInfo;
                            Color refracColor = kColorBlack;
                            if (traceRay(refracRay, option, refractiveIndex, level + 1, &refracColor, &refracInfo)) {
                                refracColor = refracColor * hitMaterial->transparency();
                                Color absorbance = hitMaterial->color() * 0.15f * -refracInfo.distance;
                                Color transparency = Color(expf(absorbance.red), expf(absorbance.green), expf(absorbance.blue));
                                refracColor = refracColor * transparency;
                                *acc += refracColor;
                            }
                        }
                    }
                }
            }
        }
        
        
        // report whether anything was hit, so the caller can fall back to the background colour
        return hit;
    }
Example #28
	Vector3d traceRay(Ray* ray, vector<SceneObject*>* objects, vector<SceneObject*>* lights, int remainingDepth) {
		Vector3d backgroundColour(0, 0, 0);
		Vector3d ambientLight(25, 25, 25);

		if (remainingDepth <= 0)
			return backgroundColour;

		vector<Intersection*>* intersections = getIntersections(objects, ray, NULL, false, (remainingDepth < 2));
		Intersection* closestIntersection = getClosestIntersection(ray->origin, intersections);
		Vector3d closestOrigin;
		Vector3d closestDirection;
		SceneObject* closestObject;

		if (closestIntersection != NULL) {
			closestOrigin = *(closestIntersection->origin);
			closestDirection = *(closestIntersection->direction);
			closestObject = closestIntersection->object;
		}

		freeIntersections(intersections);
		intersections->clear();
		delete intersections;

		if (closestIntersection != NULL) {
			Vector3d fullLightColour = ambientLight;
			Vector3d surfaceColour = *(closestIntersection->object->colour);

			//for each light, add it to the full light on this point (if not blocked)
			for (unsigned int lightNum = 0; lightNum < lights->size(); lightNum++) {
				SceneObject* light = (*lights)[lightNum];
				Vector3d toLight = *(light->position) - closestOrigin;
				Vector3d toLightNormalized = toLight.normalized();
				double dot = toLightNormalized.dot(closestDirection);

				if (dot > 0) {
					// build the shadow ray on the stack; the original leaked a new'd Ray here
					Ray shadowRay(&closestOrigin, &toLightNormalized);
					intersections = getIntersections(objects, &shadowRay, closestObject, true, false);
					bool inLight = false;
					if (intersections->size() == 0) {
						inLight = true;
					}
					else {
						Intersection* intersection = (*intersections)[0];
						Vector3d intersectionOrigin = Vector3d(*(intersection->origin));
						Vector3d intersectionDirection = Vector3d(*(intersection->direction));
						SceneObject* obj = intersection->object;

						if ((intersectionOrigin - closestOrigin).norm() > toLight.norm()) {
							inLight = true;
						}
					}

					if (inLight) {
						fullLightColour += *(light->colour) * dot;
					}

					freeIntersections(intersections);
					intersections->clear();
					delete intersections;
				}
			}

			double reflectivity = closestObject->reflectivity;
			if (reflectivity > 0) {
				Vector3d rayDirection = *(ray->direction);
				Vector3d normal = closestDirection;
				Vector3d reflectedDirection = rayDirection - ((2 * (normal.dot(rayDirection))) * normal);
				Ray* reflectedRay = new Ray(&closestOrigin, &reflectedDirection);

				Vector3d reflectionColour = traceRay(reflectedRay, objects, lights, remainingDepth - 1);
				delete reflectedRay;

				surfaceColour *= 1 - reflectivity;
				surfaceColour += reflectionColour * reflectivity;
			}

			Vector3d endColour = surfaceColour;
			endColour[0] *= fullLightColour[0] / 255;
			endColour[1] *= fullLightColour[1] / 255;
			endColour[2] *= fullLightColour[2] / 255;

			return endColour;
		}
		else {
			return backgroundColour;
		}
	}
Example #29
	void CMPIRayTracer::renderScheme1(ICamera* camera, const CScene* scene, unsigned int maxDepth)
	{
		if (!camera || !scene)
			return;

		IImage* pImageBuffer = camera->GetImageBuffer();
		mpScene = scene;

		unsigned int width = pImageBuffer->GetWidth();
		unsigned int height = pImageBuffer->GetHeight();

		CRay ray;
		CColor finalColor;

		MPI_Init(&mCommandLineParamsCount, &mCommandLineParams);

		int processId = 0;
		int processCount = 0;

		MPI_Comm_rank(MPI_COMM_WORLD, &processId);
		MPI_Comm_size(MPI_COMM_WORLD, &processCount);

		unsigned int totalSize = width * height;
		unsigned int size = totalSize / processCount;
		unsigned int bufIdx = 0;
		unsigned int x, y;
		unsigned int upperBorder = 0;
		unsigned char* pBuffer = nullptr;
		unsigned char* pResultBuffer = new unsigned char[totalSize * 3];

		int* pBlocksLengths = new int[processCount];
		int* pBlocksOffsets = new int[processCount];

		for (int i = 0; i < processCount; i++)
		{
			pBlocksLengths[i] = size * 3;
			pBlocksOffsets[i] = i * size * 3;
		}

		pBlocksLengths[processCount - 1] = (totalSize + (1 - processCount) * size) * 3;

		pBuffer = new unsigned char[pBlocksLengths[processId]];	// the block length already includes 3 bytes per pixel

		F timeOfStart = MPI_Wtime();

		if (totalSize - size * processCount > 0)
		{
			upperBorder = (pBlocksOffsets[processId] + pBlocksLengths[processId]) / 3;

			for (unsigned int i = pBlocksOffsets[processId] / 3; i < upperBorder; i++)
			{
				x = i % width;
				y = int(i / width);

				ray = camera->ComputeRay(x, y);

				finalColor = traceRay(ray, 0).ToRange();

				pBuffer[bufIdx] = (unsigned char)(finalColor.r * 255);
				pBuffer[bufIdx + 1] = (unsigned char)(finalColor.g * 255);
				pBuffer[bufIdx + 2] = (unsigned char)(finalColor.b * 255);

				bufIdx += 3;
			}

			MPI_Gatherv(pBuffer, pBlocksLengths[processId], MPI_UNSIGNED_CHAR, pResultBuffer, pBlocksLengths,
				pBlocksOffsets, MPI_UNSIGNED_CHAR, 0, MPI_COMM_WORLD);
		}
Example #30
vec3f RayTracer::traceRay( Scene *scene, const ray& r, 
	const vec3f& thresh, int depth, isect& i, vector<const SceneObject*>& stack )
{
	if( depth>=0
		&& thresh[0] > threshold - RAY_EPSILON && thresh[1] > threshold - RAY_EPSILON && thresh[2] > threshold - RAY_EPSILON
		&& scene->intersect( r, i ) ) {
		// YOUR CODE HERE

		// An intersection occured!  We've got work to do.  For now,
		// this code gets the material for the surface that was intersected,
		// and asks that material to provide a color for the ray.  

		// This is a great place to insert code for recursive ray tracing.
		// Instead of just returning the result of shade(), add some
		// more steps: add in the contributions from reflected and refracted
		// rays.
		
		const Material& m = i.getMaterial();
		vec3f color = m.shade(scene, r, i);
		//calculate the reflected ray
		vec3f d = r.getDirection();
		vec3f position = r.at(i.t);
		vec3f direction = d - 2 * i.N * d.dot(i.N);
		ray newray(position, direction);
		if(!m.kr.iszero()) {
			vec3f reflect = m.kr.multiply(traceRay(scene, newray, thresh.multiply(m.kr), depth-1, stack).clamp());
			color += reflect;
		}

		//calculate the refracted ray
		double ref_ratio;
		double sin_ang = d.cross(i.N).length();
		vec3f N = i.N;
		//Decide going in or out
		const SceneObject *mi = NULL, *mt = NULL;
		int stack_idx = -1;
		vector<const SceneObject*>::reverse_iterator itr;
		//1 use the normal to decide whether to go in or out
		//0: travel through, 1: in, 2: out
		char travel = 0;
		if(i.N.dot(d) <= -RAY_EPSILON) {
			//from outer surface in
			//test whether the object has two face
			ray test_ray(r.at(i.t) + d * 2 * RAY_EPSILON, -d);
			isect test_i;
			if(i.obj->intersect(test_ray, test_i) && test_i.N.dot(N) > -RAY_EPSILON) {
				//has interior
				travel = 1;
			}
		}
		else {
			travel = 2;
		}

		if(travel == 1) {
			if(!stack.empty()) {
				mi = stack.back();
			}
			mt = i.obj;
			stack.push_back(mt);
		}
		else if(travel == 2) {
			//if it is in our stack, then we must pop it
			for(itr = stack.rbegin(); itr != stack.rend(); ++itr) {
				if(*itr == i.obj) {
					mi = *itr;
					vector<const SceneObject*>::iterator ii = itr.base() - 1;
					stack_idx = ii - stack.begin();
					stack.erase(ii);
					break;
				}
			}
			if(!stack.empty()) {
				mt = stack.back();
			}
		}

		if(N.dot(d) >= RAY_EPSILON) {
			N = -N;
		}
		
		ref_ratio = (mi?(mi->getMaterial().index):1.0) / (mt?(mt->getMaterial().index):1.0);

		if(!m.kt.iszero() && (ref_ratio < 1.0 + RAY_EPSILON || sin_ang < 1.0 / ref_ratio + RAY_EPSILON)) {
			//No total internal reflection
			//We do refraction now
			double c = N.dot(-d);
			direction = (ref_ratio * c - sqrt(1 - ref_ratio * ref_ratio * (1 - c * c))) * N + ref_ratio * d;
			newray = ray(position, direction);
			vec3f refraction = m.kt.multiply(traceRay(scene, newray, thresh.multiply(m.kt), depth-1, stack).clamp());
			color += refraction;
		}

		if(travel == 1) {
			stack.pop_back();
		}
		else if(travel == 2) {
			if(mi) {
				stack.insert(stack.begin() + stack_idx, mi);
			}
		}

		return color;

	} else {
		// No intersection.  This ray travels to infinity, so we color
		// it according to the background color, which in this (simple) case
		// is just black.
		if(m_bBackground && bg) {
			double u, v;
			angleToSphere(r.getDirection(), u, v);
			//Scale to [0, 1];
			u /= 2 * M_PI;
			v /= M_PI;
			int tx = int(u * bg_width), ty = bg_height - int(v * bg_height);
			return vec3f(bg[3 * (ty * bg_width + tx)] / 255.0, bg[3 * (ty * bg_width + tx) + 1] / 255.0, bg[3 * (ty * bg_width + tx) + 2] / 255.0);
		}
		else {
			return vec3f( 0.0, 0.0, 0.0 );
		}
	}
}
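The background lookup above assumes an angleToSphere helper that maps a direction to spherical angles with u in [0, 2*pi) and v in [0, pi], which the code then scales to [0, 1]; a minimal sketch, where treating the y axis as "up" is an assumption:

void angleToSphere(const vec3f& dir, double& u, double& v)
{
	// assuming dir is already normalized, as ray directions are in this tracer
	u = atan2(dir[2], dir[0]) + M_PI;                      // azimuth in [0, 2*pi)
	double cy = std::max(-1.0, std::min(1.0, (double)dir[1]));
	v = acos(cy);                                          // polar angle in [0, pi]
}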