Example #1
Color Scene::lanceRayon(const Rayon& ray, int iteration) const
{
    this->addObjectsTabToBinder();
    Color result(0.0,0.0,0.0);
    float minDist(1000.0) ;
    float dist (0.0) ;
    Hit hit ;

    if (binder->intersect(ray,0.0,100.0,hit))
    {
        //result = hit.getObject()->getOptic()->getColor(hit.getU(),hit.getV());
        //result = hit.getObject()->getOptic()->getColor(0.5,0.5);


        Color coulObj(hit.getObject()->getOptic()->getColor(hit.getU(),hit.getV()));
        // for each light source
        for (std::vector<std::shared_ptr<LightSource>>::const_iterator it = lightsTab.begin(); it != lightsTab.end(); ++it)
        {
            // d = distance from the intersection point to the light source
            Vector directi(it->get()->getOrigin()-hit.getImpactPoint());
            float distInterSource = directi.getNorm();
            directi.normalize();
            // shadow ray: origin = intersection point, direction toward the source, color does not matter
            Color c(0.0,0.0,0.0);
            Color resultNorm(0.0,0.0,0.0);
            Rayon ray(hit.getImpactPoint(),directi,c);

            if (! binder->intersect(ray, 0, distInterSource))
            {
                Color diff(it->get()->getColor()*coulObj*(dotProduct(hit.getNormal(),ray.getDirect())));
                Vector moinsV(-directi.getX(),-directi.getY(),-directi.getZ());
                Vector miroirV(moinsV + hit.getNormal()*(2*(dotProduct(directi,hit.getNormal()))));
                // miroirV = V mirrored about the normal N
                // spec = specColor(obj) * (clampToZero(RayS . miroirV))^n * lightColor(source)
                Color spec(it->get()->getColor()*coulObj*dotProduct(ray.getDirect(),miroirV));
                resultNorm = diff + spec;
                if (iteration < 2)
                {
                    // Res2 = contribution of the reflected ray
                    Rayon reflected(hit.getImpactPoint(),miroirV,c);
                    Color reflectedColor(0.0,0.0,0.0);
                    reflectedColor = this->lanceRayon(reflected,iteration+1);
                    // result = weight1*Res + weight2*Res2
                    result = resultNorm*0.8 + reflectedColor*0.2;
                }
                else
                {
                    result = resultNorm;
                }
            }
        }
    }
    return result;
}
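A note on the specular term: the comment inside the shadow test describes it as specColor(obj) * (clampToZero(RayS . miroirV))^n * lightColor, but the code above neither clamps the dot product nor raises it to an exponent. A minimal, self-contained sketch of that factor, assuming a hypothetical shininess exponent that is not part of the original code:

#include <algorithm>
#include <cmath>

// Clamped, exponentiated Phong specular factor: cosAngle is the dot product
// between the shadow-ray direction and the mirrored vector, shininess is a
// hypothetical exponent chosen by the material.
inline float phongSpecularFactor(float cosAngle, float shininess)
{
    return std::pow(std::max(cosAngle, 0.0f), shininess);
}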
Example #2
bool Transform::Intersect(const Ray &r, Hit &h, float tmin) const
{
  bool result = false;
  
  Matrix m = m_matrix;
  if ( m.Inverse() )
    {
      Vec3f org = r.getOrigin();
      Vec3f dir = r.getDirection();
      m.Transform(org);
      m.TransformDirection(dir);
      Ray r2 (dir, org);
      result = m_pObject->Intersect(r2, h, tmin);
      
      if (result)
	{
	  Matrix m1 = m;
	  m1.Transpose();
	  Vec3f n = h.getNormal();
	  m1.TransformDirection(n);
	  n.Normalize();
	  h.set(h.getT(), h.getMaterial(), n, r);
	}
    }
  return result;
}
Example #3
Vec3f RayTracer::shadow(const Vec3f &point,
			const Vec3f &pointOnLight,
			const Face *f,
			const Ray &ray,
			const Hit &hit) const
{
        const Vec3f normal(hit.getNormal());
        const Material *m = hit.getMaterial();

	Vec3f dirToLight = pointOnLight - point;
	dirToLight.Normalize();
	/* If dot product < 0, surface is not facing light */
	if (normal.Dot3(dirToLight) > 0) {
		Ray rayToLight(point, dirToLight);
		Hit hLight;
		bool blocked = CastRay(rayToLight, hLight, false);
		while (std::fabs(hLight.getT()) < SURFACE_EPSILON &&
				std::fabs((pointOnLight - point).Length()) > SURFACE_EPSILON) {
			rayToLight = Ray(rayToLight.pointAtParameter(SURFACE_EPSILON),
					dirToLight);
			blocked = CastRay(rayToLight, hLight, false);
		}
		if (hLight.getT() == FLT_MAX || hLight.getMaterial() != f->getMaterial()) {
			return Vec3f(0, 0, 0);
		}

		const Vec3f lightColor = 0.2 * f->getMaterial()->getEmittedColor() * f->getArea();
		return m->Shade(ray,hit,dirToLight,lightColor,args);
	}
	return Vec3f(0, 0, 0);
}
Example #4
Vec3f find_color(Ray ray,Hit hit,Group* group,Camera* camera)
{
	int num_lights = sceneParser->getNumLights();
	Vec3f cambient = sceneParser->getAmbientLight();
	if (group->intersect(ray, hit, camera->getTMin()))//the ray hit something
	{
		Vec3f cobject = hit.getMaterial()->getDiffuseColor();
		Vec3f canswer = cambient * cobject;
		Vec3f clight;
		Vec3f light_dir;
		Vec3f normal_dir = hit.getNormal();
		float distolight;
		for (int i = 0; i < num_lights; i++)
		{
			Light *light = sceneParser->getLight(i);
			//light_dir : the direction to the light
			// this call returns the direction to the light, the light color, and the distance to the light
			light->getIllumination(hit.getIntersectionPoint(), light_dir, clight, distolight);
			//cpixel  =  cambient * cobject + SUMi [ clamped(Li . N) * clighti * cobject ]
			//add the local illumination
			canswer = canswer + hit.getMaterial()->Shade(ray, hit, light_dir, clight)*cobject;
			canswer.Clamp();
		}
		return canswer;
	}
	else
		return sceneParser->getBackgroundColor();
}
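The formula in the comment, cpixel = cambient * cobject + SUM_i [ clamped(Li . N) * clight_i * cobject ], can also be written out explicitly. A sketch of one loop iteration directly under that formula, reusing the names from the snippet above (whether Shade already folds in the diffuse color depends on the Material implementation, so this is illustrative only):

			// one light's contribution, written straight from the commented formula:
			// clamped(Li . N) * clight_i * cobject
			float LdotN = light_dir.Dot3(normal_dir);
			if (LdotN < 0.0f) LdotN = 0.0f;   // clamp back-facing lights to zero
			canswer = canswer + clight * cobject * LdotN;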
Example #5
/*
The intersect routine will first transform the ray, 
then delegate to the intersect routine of the contained 
object. Make sure to correctly transform the resulting 
normal according to the rule seen in lecture. You may 
choose to normalize the direction of the transformed ray 
or leave it un-normalized. If you decide not to normalize 
the direction, you might need to update some of your intersection code.
*/
bool Transform::intersect(const Ray &r, Hit &h, float tmin)
{
	Vec3f r0 = r.getOrigin();
	Vec3f rd = r.getDirection();
	Matrix inv;
	matrix.Inverse(inv);
	inv.Transform(r0);
	inv.TransformDirection(rd);
	if (object != NULL)
	{
		//the h passed in here was problematic; modified as follows:
		bool judge = object->intersect(Ray(r0,rd), h, tmin);
		Vec3f normal = h.getNormal();
		//odd: the normal direction was never corrected here, yet the result looked right,
		//and after "fixing" it the image came out wrong!!
		//the normal computed here is definitely correct, so the problem must be in the
		//code that uses the normal later on
		//...OK, the problem really was here after all
		//after aligning the geometry, the computed normal turned out fine -- it just had never been normalized!
		matrix.TransformDirection(normal);
		normal.Normalize();
		//or:
		//Matrix change,res;
		//matrix.Inverse(change);
		//change.Transpose(res);
		//res.TransformDirection(normal);
		h.set(h.getT(), h.getMaterial(), normal, r);
		return judge;
	}
	return false;
}
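The commented-out alternative above spells out the rule from the comment block: a normal transforms by the transpose of the inverse of the object-to-world matrix. A small self-contained sketch of that rule on a plain 3x3 matrix, independent of the Matrix class used in the snippet:

#include <array>
#include <cmath>

using Mat3 = std::array<std::array<float, 3>, 3>;
using Vec3 = std::array<float, 3>;

// n_world = (M^-1)^T * n_object for the 3x3 linear part M of the
// object-to-world transform. (M^-1)^T equals cofactor(M) / det(M), and the
// 1/det(M) scale disappears after normalization (assuming det(M) > 0; a
// negative determinant would also flip the normal's direction).
Vec3 transformNormal(const Mat3& m, const Vec3& n)
{
    Vec3 r{
        (m[1][1]*m[2][2]-m[1][2]*m[2][1])*n[0] + (m[1][2]*m[2][0]-m[1][0]*m[2][2])*n[1] + (m[1][0]*m[2][1]-m[1][1]*m[2][0])*n[2],
        (m[0][2]*m[2][1]-m[0][1]*m[2][2])*n[0] + (m[0][0]*m[2][2]-m[0][2]*m[2][0])*n[1] + (m[0][1]*m[2][0]-m[0][0]*m[2][1])*n[2],
        (m[0][1]*m[1][2]-m[0][2]*m[1][1])*n[0] + (m[0][2]*m[1][0]-m[0][0]*m[1][2])*n[1] + (m[0][0]*m[1][1]-m[0][1]*m[1][0])*n[2]
    };
    float len = std::sqrt(r[0]*r[0] + r[1]*r[1] + r[2]*r[2]);
    for (float& c : r) c /= len;   // re-normalize after the transform
    return r;
}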
Example #6
bool Transform::intersect(const Ray& r, Hit& h, float tmin)
{

	bool b = o->intersect(Ray((mInverse*Vector4f(r.getOrigin(),1)).xyz(), mInverse3*r.getDirection()), h, tmin);
	if (b) { h.setNormal((mInverseT3*(h.getNormal())).normalized()); }
	return b;

}
Example #7
void PhotonMapping::TracePhoton(const Vec3f &position, const Vec3f &direction, 
				const Vec3f &energy, int iter) {
  
  if(iter>args->num_bounces){
    return;
  }

  Hit h = Hit();
  Ray R = Ray(position, direction);
  bool intersect = raytracer->CastRay(R, h, false);
  if(!intersect){
    return;
  }
  Material *m = h.getMaterial();
  Vec3f normal = h.getNormal();
  Vec3f point = R.pointAtParameter(h.getT());
  Vec3f opDirec = direction;
  opDirec.Negate();
  opDirec.Normalize();
  Vec3f diffuse = m->getDiffuseColor(), reflec = m->getReflectiveColor();
  double diffuseAnswer = diffuse.x()+diffuse.y()+diffuse.z();
  double reflecAnswer = reflec.x()+reflec.y()+reflec.z();
  double total = reflecAnswer+diffuseAnswer;
  diffuseAnswer /= total;
  reflecAnswer /= total;
  double seed = GLOBAL_mtrand.rand();
  if(seed <= diffuseAnswer && seed >= 0){
    Vec3f newEnergy = energy * diffuse;
    Vec3f newPosition = point;
    Vec3f newDirection = Vec3f(GLOBAL_mtrand.rand(),GLOBAL_mtrand.rand(),GLOBAL_mtrand.rand());
    newDirection.Normalize();
    Photon answer = Photon(point,opDirec,newEnergy,iter+1);
    kdtree->AddPhoton(answer);
    TracePhoton(newPosition, newDirection, newEnergy, iter+1);
  }
  else if(seed>diffuseAnswer && seed <= 1){
    Vec3f newEnergy = energy * reflec;
    Vec3f newPosition = point;
    Vec3f newDirection = direction - 2 * direction.Dot3(normal) * normal;
    Photon answer = Photon(point,opDirec,newEnergy,iter+1);
    kdtree->AddPhoton(answer);
    TracePhoton(newPosition, newDirection, newEnergy, iter+1);
  }
  // ==============================================
  // ASSIGNMENT: IMPLEMENT RECURSIVE PHOTON TRACING
  // ==============================================

  // Trace the photon through the scene.  At each diffuse or
  // reflective bounce, store the photon in the kd tree.

  // One optimization is to *not* store the first bounce, since that
  // direct light can be efficiently computed using classic ray
  // tracing.



}
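The comment block above mentions the optimization of not storing the first bounce, which the code does not apply (it stores a photon on every diffuse or reflective hit). A minimal sketch of that tweak, reusing the names from the snippet (illustrative only):

  // store the photon only after the first bounce; the first (direct) hit is
  // handled by classic ray tracing, as the comment above suggests
  if (iter > 0) {
    Photon answer = Photon(point, opDirec, newEnergy, iter+1);
    kdtree->AddPhoton(answer);
  }
  TracePhoton(newPosition, newDirection, newEnergy, iter+1);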
Example #8
Vec3f PhongMaterial::Shade(const Ray &ray, const Hit &hit, const Vec3f &dirToLight, const Vec3f &lightColor) const
{
    Vec3f eyeDir = ray.getDirection();
    eyeDir.Negate();

    Vec3f eyePlusLight = eyeDir + dirToLight;
    eyePlusLight.Normalize(); 
    
    float hn = eyePlusLight.Dot3(hit.getNormal());
    hn = pow(hn, mPhongComponent);

    Vec3f color = lightColor * mHighLightColor;
    color = hn * color;

    return color;
}
Example #9
bool Transform::intersect( const Ray& r , Hit& h , float tmin)
{
    Matrix4f iMatrix=TMatrix_.inverse();
	Matrix4f tMatrix=TMatrix_.transposed() ;
    
    //Ray newRay = Ray((iMatrix * Vector4f(r.getOrigin(), 1.0f)).xyz(), (iMatrix* Vector4f(r.getDirection(), 0.0f)).xyz().normalized());
	Ray newRay = Ray((iMatrix * Vector4f(r.getOrigin(), 1.0f)).xyz(), (iMatrix* Vector4f(r.getDirection(), 0.0f)).xyz());
    if(o->intersect(newRay, h , tmin))
    {
    	
		h.set(h.getT(), h.getMaterial(), (iMatrix.transposed() * Vector4f(h.getNormal(), 0.0f)).xyz().normalized());
		return true;

	}

    return false;

}
Example #10
File: Material.cpp  Project: samfin/6837-4
Vector3f Material::Shade( const Ray& ray, const Hit& hit,
                          const Vector3f& dirToLight, const Vector3f& lightColor ) {
    Vector3f kd;
    if(t.valid() && hit.hasTex) {
        Vector2f texCoord = hit.texCoord;
        Vector3f texColor = t(texCoord[0],texCoord[1]);
        kd = texColor;
    } else {
        kd = this->diffuseColor;
    }
    //Diffuse Shading
    if(noise.valid()) {
        kd = noise.getColor(ray.getOrigin()+ray.getDirection()*hit.getT());
    }
    Vector3f n = hit.getNormal().normalized();
    Vector3f color = clampedDot( dirToLight ,n )*pointwiseDot( lightColor , kd);
    return color;
}
Example #11
Vec3f PhongMaterial::Shade(const Ray& ray, const Hit& hit, const Vec3f& dirToLight, const Vec3f& lightColor) const {
	Vec3f v = ray.getDirection() * -1.0f;
    Vec3f n = hit.getNormal();
	// Vec3f h = dirToLight + ray.getDirection() * -1.0f;
	// h.Normalize();
	float diffuse = dirToLight.Dot3(n);
	Vec3f r = n * 2.0 * n.Dot3(dirToLight) - dirToLight;
	float specular = v.Dot3(r);
	
	if (diffuse < 0.0f) diffuse = 0.0f;
	if (specular < 0.0f) specular = 0.0f;
	
	Vec3f color = this->getDiffuseColor();
	color = color * diffuse + m_specularColor * pow(specular, m_exponent);
	color.Scale(lightColor);
	
	return color;
}
Example #12
void PhotonMapping::TracePhoton(const Vec3f &position, const Vec3f &direction, 
				const Vec3f &energy, int iter) {


  // ==============================================
  // ASSIGNMENT: IMPLEMENT RECURSIVE PHOTON TRACING
  // ==============================================

  // Trace the photon through the scene.  At each diffuse or
  // reflective bounce, store the photon in the kd tree.

  // One optimization is to *not* store the first bounce, since that
  // direct light can be efficiently computed using classic ray
  // tracing.

  //do ray cast
  Ray r(position,direction*(1/direction.Length()));
  Hit h;
  raytracer->CastRay(r,h,true);
  if (h.getT()>1000)
	  return;
  MTRand mtrand;
  Vec3f refl = h.getMaterial()->getReflectiveColor();
  Vec3f diff = h.getMaterial()->getDiffuseColor();
  double ran=mtrand.rand();
  if (iter==0)
	  ran= mtrand.rand(refl.Length()+diff.Length());
  //std::cout<<iter<<" "<<h.getT()<<" "<<refl.Length()+diff.Length()<<std::endl;
  //send reflective photon
  if (iter<args->num_bounces&&ran<=refl.Length())
	  TracePhoton(r.pointAtParameter(h.getT()),r.getDirection()-2*(r.getDirection().Dot3(h.getNormal()))*h.getNormal(),energy,iter+1);
  else if (iter<args->num_bounces&&ran<=refl.Length()+diff.Length())
	  TracePhoton(r.pointAtParameter(h.getT()),RandomDiffuseDirection(h.getNormal()),energy,iter+1);
  else
  {
	  Photon p(position,direction,energy,iter);
	  kdtree->AddPhoton(p);
  }


}
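RandomDiffuseDirection is called above but not shown, and its real implementation is not part of this snippet. A common, self-contained way to write such a helper is to rejection-sample the unit sphere and flip the result into the hemisphere around the normal; this is an illustrative stand-in, not the original code:

#include <cmath>
#include <cstdlib>

struct V3 { double x, y, z; };

static double unitRand() { return std::rand() / (double)RAND_MAX; }

// Illustrative stand-in: uniformly distributed direction in the hemisphere around n.
V3 randomHemisphereDirection(const V3 &n)
{
  V3 d; double len2;
  do {
    d = V3{ 2*unitRand()-1, 2*unitRand()-1, 2*unitRand()-1 };
    len2 = d.x*d.x + d.y*d.y + d.z*d.z;
  } while (len2 > 1.0 || len2 < 1e-12);     // keep samples inside the unit sphere
  double len = std::sqrt(len2);
  d = V3{ d.x/len, d.y/len, d.z/len };
  if (d.x*n.x + d.y*n.y + d.z*n.z < 0.0)    // flip into n's hemisphere
    d = V3{ -d.x, -d.y, -d.z };
  return d;
}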
Example #13
Vec3f RayTracer::reflections(const Ray &ray, const Hit &hit, int bounce_count, double roughness) const
{
	if (bounce_count <= 0)
		return Vec3f(0, 0, 0);
	const Vec3f point = ray.pointAtParameter(hit.getT());

	/* Get mirror direction */
	const Vec3f orig_dir = ray.getDirection();
	Vec3f norm = hit.getNormal();
	norm.Normalize();
	const Vec3f new_dir = orig_dir - 2 * orig_dir.Dot3(norm) * norm;

	const Ray new_ray(point, new_dir);
	Vec3f rand_vec(0, 0, 0);
	double answerx = 0, answery = 0, answerz = 0;
	/* sphere projection, what if the center of the sphere misses the
	 * object?
	 */

	{
		for (int i = 0; i <= args->num_glossy_samples; ++i) {
			/* Getting gloss ray */
			Ray start_ray(new_ray.getOrigin(),
					new_ray.getDirection() + rand_vec);
			const Vec3f answer(reflection(start_ray, bounce_count - 1));

			answerx += answer.x();
			answery += answer.y();
			answerz += answer.z();
			rand_vec = Vec3f(roughness * (static_cast<double>(rand()) / RAND_MAX),
				roughness * (static_cast<double>(rand()) / RAND_MAX),
				roughness * (static_cast<double>(rand()) / RAND_MAX));
		}
	}
	Vec3f answer = Vec3f(answerx, answery, answerz);
	answer *= static_cast<double>(1)/(args->num_glossy_samples + 1);
	return answer;
}
Example #14
bool Transform::intersect(const Ray &r, Hit &h, float tmin)
{
	Vec3f original = r.getOrigin();
	Vec3f dir = r.getDirection();

	mReverseMat.Transform(original);
	mReverseMat.TransformDirection(dir);


	Ray transformedRay(original, dir);

	if (mObj->intersect(transformedRay, h, tmin))
	{
        Vec3f normal = h.getNormal();
        Matrix t;
        mReverseMat.Transpose(t);
        t.TransformDirection(normal);
        h.set(h.getT(), h.getMaterial(), normal, r);
        return true;
	}

    return false;
}
Example #15
bool Transform::intersect(const Ray & r, Hit & h, float tmin)
{

	// Need to Transform Ray before do intersection

	Matrix reverseMat = transformMat;
	reverseMat.Inverse();
	
	// cout << "Original Origin:\t" << r.getOrigin() << endl;
	// cout << "Original Direction:\t" << r.getDirection() << endl;

	Vec4f aug_origin(r.getOrigin(), 1);
	Vec3f aug_dir = r.getDirection();

	reverseMat.Transform(aug_origin);
	reverseMat.TransformDirection(aug_dir);

	// cout << "Now Origin:\t" << r.getOrigin() << endl;
	// cout << "Now Direction:\t" << r.getDirection() << endl;

	// aug_dir.Normalize();
	Ray transRay(aug_dir, Vec3f(aug_origin[0], aug_origin[1], aug_origin[2]));
	if ( !( object -> intersect(transRay, h, tmin) ) )
		return false;
	
	// After transforming Ray, we need to transform Normal
	Matrix transposeRevMat = reverseMat;
	transposeRevMat.Transpose();
	Vec3f hn = h.getNormal();
	transposeRevMat.Transform(hn);
	hn.Normalize();

	h.set(h.getT(), NULL, hn, r);

	return true;

}
Example #16
glm::vec3 Material::Shade(const Ray &ray, const Hit &hit, 
                      const glm::vec3 &dirToLight, 
                      const glm::vec3 &lightColor, ArgParser *args) const {
  
  glm::vec3 point = ray.pointAtParameter(hit.getT());
  glm::vec3 n = hit.getNormal();
  glm::vec3 e = ray.getDirection()*-1.0f;
  glm::vec3 l = dirToLight;
  
  glm::vec3 answer = glm::vec3(0,0,0);

  // emitted component
  // -----------------
  answer += getEmittedColor();

  // diffuse component
  // -----------------
  float dot_nl = glm::dot(n,l);
  if (dot_nl < 0) dot_nl = 0;
  answer += lightColor * getDiffuseColor(hit.get_s(),hit.get_t()) * dot_nl;

  // specular component (Phong)
  // ------------------
  // make up reasonable values for other Phong parameters
  glm::vec3 specularColor = reflectiveColor;
  float exponent = 100;

  // compute ideal reflection angle
  glm::vec3 r = (l*-1.0f) + n * (2 * dot_nl);
  r = glm::normalize(r);
  float dot_er = glm::dot(e,r);
  if (dot_er < 0) dot_er = 0;
  answer += lightColor*specularColor*float(pow(dot_er,exponent))* dot_nl;

  return answer;
}
Example #17
Vec3f Material::Shade(const Ray &ray, const Hit &hit, 
                      const Vec3f &dirToLight, 
                      const Vec3f &lightColor, ArgParser *args) const {
  
  Vec3f point = ray.pointAtParameter(hit.getT());
  Vec3f n = hit.getNormal();
  Vec3f e = ray.getDirection()*-1.0f;
  Vec3f l = dirToLight;
  
  Vec3f answer = Vec3f(0,0,0);

  // emitted component
  // -----------------
  answer += getEmittedColor();

  // diffuse component
  // -----------------
  double dot_nl = n.Dot3(l);
  if (dot_nl < 0) dot_nl = 0;
  answer += lightColor * getDiffuseColor(hit.get_s(),hit.get_t()) * dot_nl;

  // specular component (Phong)
  // ------------------
  // make up reasonable values for other Phong parameters
  Vec3f specularColor = reflectiveColor;
  double exponent = 100;

  // compute ideal reflection angle
  Vec3f r = (l*-1.0f) + n * (2 * dot_nl);
  r.Normalize();
  double dot_er = e.Dot3(r);
  if (dot_er < 0) dot_er = 0;
  answer += lightColor*specularColor*pow(dot_er,exponent)* dot_nl;

  return answer;
}
Example #18
int main( int argc, char* argv[] )
{
    // Fill in your implementation here.

    // This loop loops over each of the input arguments.
    // argNum is initialized to 1 because the first
    // "argument" provided to the program is actually the
    // name of the executable (in our case, "a4").
    for( int argNum = 1; argNum < argc; ++argNum )
    {
        std::cout << "Argument " << argNum << " is: " << argv[argNum] << std::endl;
    }
	
    int w, h ; //img size
    float depthMin, depthMax;
    char filename[80];
    char output[80];
    char depthOutput[80];
    char normalsOutput[80];
    bool depthMode = false, normalsMode = false, imageMode = false;

    int max_bounces = 0;
    float cutoff_weight;
    bool shadows = false;
    bool refraction = false;

    int uniform_samples = 0;
    int jitter_samples = 0;
    float box_filter_radius;

    bool render_samples = false;
    char* render_samples_outfile;
    int zoom_factor;

    for( int i = 0 ; i < argc ; i++)
    {
        if(!strcmp(argv[i], "-input")){
                strcpy(filename, argv[i+1]);
        }
        else if(!strcmp(argv[i], "-size")){
                w = atoi(argv[i+1]);
                h = atoi(argv[i+2]);
        }
        else if(!strcmp(argv[i], "-output")){
                strcpy(output , argv[i+1]);
                imageMode = true;
        }
        else if(!strcmp(argv[i], "-depth")){
                depthMode = true;
                depthMin = atof(argv[i+1]);
                depthMax = atof(argv[i+2]);
                strcpy(depthOutput , argv[i+3]);
        }
        else if(!strcmp(argv[i], "-normals")){
                normalsMode = true;
                strcpy(normalsOutput , argv[i+1]);
        }
        else if(!strcmp(argv[i], "-bounces")){
            max_bounces = atoi(argv[i+1]);
        }
        else if(!strcmp(argv[i], "-weight")){
            cutoff_weight = atoi(argv[i+1]);
        }
        else if(!strcmp(argv[i], "-shadows")){
            shadows = true;
        }
        else if(!strcmp(argv[i], "-uniform_samples")){
            uniform_samples = atoi(argv[i+1]);
        }
        else if(!strcmp(argv[i], "-jittered_samples")){
            jitter_samples = atoi(argv[i+1]);
        }
        else if(!strcmp(argv[i], "-box_filter")){
            box_filter_radius = atof(argv[i+1]);
        }
        else if(!strcmp(argv[i], "-render_samples")){
//            strcpy(render_samples_outfile, argv[i+1]);
            render_samples_outfile = argv[i+1];
            zoom_factor = atoi(argv[i+2]);
            render_samples = true;
        }
        else if (!strcmp(argv[i], "-refraction")){
            refraction = true;
        }
    }
	
    // First, parse the scene using SceneParser.
    // Then loop over each pixel in the image, shooting a ray
    // through that pixel and finding its intersection with
    // the scene.  Write the color at the intersection to that
    // pixel in your output image.

    SceneParser sp = SceneParser(filename);

    RayTracer rt = RayTracer(&sp, max_bounces, cutoff_weight, shadows, refraction); 

    Image image( w , h );
    Image depth( w , h );
    Image normals( w,h ); 

    Camera* camera = sp.getCamera();

    // Variables for anti-aliasing
    SampleDebugger *sd;
    
    Hit hit;
    int num_samples = max(uniform_samples, jitter_samples);

    if (render_samples)
    {    
//        cout << "render samples - now making the sample_debugger" << endl;
        sd = new SampleDebugger(w, h, num_samples);
    }

//    cout << "now starting iteration through pixels" << endl;
    for( int  j = 0 ; j < h ; j++ )
    {
        for ( int i = 0 ; i < w ; i++ )
        {
//            if (i > 144 && j > 43) {cout << "at beginning of loop i = " << i<< "j = " << j << endl;}
            
            Vector3f pixelColor;
            Vector3f normalVal;
            
            
//            if (i > 144 && j > 43) {cout << " checking num_samples" << endl;}
            if (num_samples > 0)
            {
                float grid_width = sqrt(num_samples); 
                float max_offset = 1.0/grid_width;
                float offset = 0;
                
//                if (i > 144 && j > 43) {cout << " where is this getting stuck - jitter samples?" << endl;}
                if (jitter_samples > 0)
                {
                    offset += (float)rand()/RAND_MAX * max_offset;
                }
                int count = 0;
                Vector3f color_sum = Vector3f(0.0, 0.0, 0.0);


//                if (i > 144 && j > 43) {cout << " where is this getting stuck - for loop?" << endl;}
                for (int grid_x = 0; grid_x < grid_width; grid_x++)
                {
                    for (int grid_y = 0; grid_y < grid_width; grid_y++)
                    {
//                        if (i > 144 && j > 43) {cout << " in second for loop: grid_x = " << grid_x << "grid y =" << grid_y << endl;}
                        float xin = grid_x*max_offset + i + offset;
                        float yin = grid_y*max_offset + j + offset;

                        float normX = (float)((float)(xin-((float)w)/2))/((float)w/2);
                        float normY = (float)((float)(yin-((float)h)/2))/((float)h/2);

                        Ray ray = camera->generateRay( Vector2f( normX , normY ) );
                        
                        hit = Hit(INFINITY, NULL, Vector3f(1,0,0));
                        Vector3f local_color = rt.traceRay(ray, camera->getTMin(), max_bounces, cutoff_weight, hit);
                        color_sum += local_color;
                        
//                        if (i > 144 && j > 43) {cout << " where is this getting stuck first render sampels?" << endl;}
                        if (render_samples)
                        {
                            cout << "1) count = " << count << endl;
                            Vector2f offset_vec = Vector2f(max_offset*grid_x+offset, max_offset*grid_y+offset);
                            sd->setSample(i, j, count, offset_vec, local_color);
                            count++;
                        }

                    }

                }
//                if (i > 144 && j > 43) {cout << " where is this getting stuck - setting pixel color?" << endl;}
                pixelColor = color_sum/num_samples;
                
            }

            else
            {
//                float x = 2*((float)j/((float)w - 1.0f)) - 1.0f;
//                float y = 2*((float)i/((float)h - 1.0f)) - 1.0f;

                float x = (float)((float)(i+0.25-((float)w)/2))/((float)w/2);
                float y = (float)((float)(j+0.25-((float)h)/2))/((float)h/2);
                
                
                Ray ray = camera->generateRay( Vector2f( x , y ) );

//                        if (i > 144 && j > 43) {cout << " where is this getting stuck - tracing the ray?" << endl;}

                
                //			group->intersect( ray , hit , camera->getTMin() ) ; 			

                hit = Hit(INFINITY, NULL, Vector3f(1,0,0));
                Vector3f color_normal = rt.traceRay(ray, camera->getTMin(), max_bounces, cutoff_weight, hit);

//                if (i > 144 && j > 43) {cout << " made it through traceRay?" << endl;}
                pixelColor = color_normal;
                
//			if( hit.getMaterial()==NULL){ //background
//				
//				pixelColor = Scene.getBackgroundColor();
//				normalVal = Vector3f(0.0,0.0,0.0);
//			}
//			else{
//				//ambient light
//				pixelColor = PhongMaterial::pointwiseDot( Scene.getAmbientLight(), hit.getMaterial()->getDiffuseColor());
//				//defussion light
//				for( int i = 0 ; i < Scene.getNumLights(); i++){
//					Light* light = Scene.getLight(i);
//					Vector3f dirToLight, lightColor ;
//					Vector3f position = ray.pointAtParameter(hit.getT());
//					float dist = hit.getT(); 
//					light->getIllumination( position , dirToLight , lightColor , dist);
//
//					pixelColor += hit.getMaterial()->Shade( ray , hit , dirToLight , lightColor ) ; 
//				}
//				
//				//normal map			
//				Vector3f n = hit.getNormal();
//				normalVal = Vector3f( abs(n[0]),abs(n[1]),abs(n[2]));
//			}
            }
            float d = clampedDepth( hit.getT(), depthMin , depthMax);

//            cout << "setting pixel for i,j = " << i << ", " << j << endl;
            depth.SetPixel( i , j , Vector3f(d,d,d));
            image.SetPixel( i ,  j , pixelColor );
            
//            if (i > 144) {cout << "where is this getting stuck?" << endl;}
            
            normalVal = hit.getNormal();
            for (int k = 0; k < 3; k++)
            {
//                if (i > 144) {cout << "where is this getting stuck? in normals?" << endl;}
                normalVal[k] = fabs(normalVal[k]);
            }
//            if (i > 144) {cout << "where is this getting stuck? setting normals?" << endl;}
            normals.SetPixel( i , j , normalVal) ; 
//            if (i > 144) {cout << "where is this getting stuck? redner samples??" << endl;}

//            if (i > 144) {cout << "where is this getting stuck? before starting the next loop?" << endl;}

        }
    }

    cout << "output = " << output << "should not be null!" << endl;
    if(imageMode){image.SaveTGA(output);}
    if( depthMode){ depth.SaveTGA(depthOutput);}
    if( normalsMode){ normals.SaveTGA(normalsOutput);}

    if (render_samples)
    {
        sd->renderSamples(render_samples_outfile, zoom_factor);
    }
    return 0;
}
Example #19
   //Six intersecting sides
   Hit h1 = box.intersect(Ray(Vector3(0,0,10),Vector3(0,0,-1)),1);
   Hit h2 = box.intersect(Ray(Vector3(10,0,0),Vector3(-1,0,0)),1);
   Hit h3 = box.intersect(Ray(Vector3(0,10,0),Vector3(0,-1,0)),1);
   Hit h4 = box.intersect(Ray(Vector3(0,0,-10),Vector3(0,0,1)),1);
   Hit h5 = box.intersect(Ray(Vector3(-10,0,0),Vector3(1,0,0)),1);
   Hit h6 = box.intersect(Ray(Vector3(0,-10,0),Vector3(0,1,0)),1);
   
   REQUIRE(h1.didHit());
   REQUIRE(h2.didHit());
   REQUIRE(h3.didHit());
   REQUIRE(h4.didHit());
   REQUIRE(h5.didHit());
   REQUIRE(h6.didHit());

   REQUIRE(h1.getNormal() == Vector3(0,0,1));
   REQUIRE(h2.getNormal() == Vector3(1,0,0));
   REQUIRE(h3.getNormal() == Vector3(0,1,0));
   REQUIRE(h4.getNormal() == Vector3(0,0,-1));
   REQUIRE(h5.getNormal() == Vector3(-1,0,0));
   REQUIRE(h6.getNormal() == Vector3(0,-1,0));
   

}

TEST_CASE("Box internal intersection", "[Box]")
{
   Box box(Vector3(-1,-1,-1), Vector3(1,1,1));
   //Six intersecting sides
   Hit h1 = box.intersect(Ray(Vector3(0,0,0),Vector3(0,0,-1)),1);
   Hit h2 = box.intersect(Ray(Vector3(0,0,0),Vector3(-1,0,0)),1);
Example #20
Vector3f RayTracer::traceRay( Ray& c_ray, float tmin, float refractI, int bounces, Hit& h ) const
{
  h = Hit( FLT_MAX, NULL, Vector3f( 0, 0, 0 ) );
  Vector3f pixelPass = m_scene->getBackgroundColor(c_ray.getDirection());
  bool intersect = g->intersect(c_ray, h , tmin);
  Vector3f pixelIntersect = m_scene->getAmbientLight();
  if(intersect){
    float intersect_t = h.getT();
    Vector3f intersect_p = c_ray.pointAtParameter(intersect_t);
    pixelIntersect = h.getMaterial()->getDiffuseColor()*pixelIntersect;

    for(int l = 0; l < m_scene->getNumLights(); l++){
      //Light
      Light* light = m_scene->getLight(l);
      Vector3f light_dir = Vector3f();
      Vector3f light_col = Vector3f();
      float light_distance = float(0.0f);
      light->getIllumination(intersect_p, light_dir, light_col, light_distance);

      //Shadows
      //cout << "LIGHT_COL:" << light_col.x() << ':' << light_col.y() << ':' << light_col.z(); 
      if(m_shadow){
        Ray test_shade = Ray(intersect_p,light_dir);
        Hit h_shade = Hit(light_distance, NULL, NULL);
        bool intersect_shade = g->intersect(test_shade, h_shade, EPSILON);
        if(h_shade.getT() >= light_distance){
          pixelIntersect += h.getMaterial()->Shade(c_ray,h,light_dir,light_col);
        }
      }
      else{
        pixelIntersect += h.getMaterial()->Shade(c_ray,h,light_dir,light_col);
      }

    }

    if(bounces > 0){
      bool refract = false;
      Vector3f norm = h.getNormal().normalized();

      float nt = h.getMaterial()->getRefractionIndex();

      float R = 1.0;

      //Mirror reflection
      Vector3f mirrorDir = mirrorDirection(norm,c_ray.getDirection().normalized());
      Ray mirrorRay = Ray(intersect_p,mirrorDir);
      Hit mir_h = Hit();
      Vector3f reflColor = traceRay(mirrorRay, EPSILON, refractI, bounces - 1, mir_h);

      Vector3f refrDir = Vector3f();
      Vector3f refrColor = Vector3f();
      if(REFRACTING){
      //Refraction
        if(h.getMaterial()->getRefractionIndex() > 0){

          if (Vector3f::dot(c_ray.getDirection(), norm) > 0)
          {
            nt = 1.0f;
            norm *= -1;
          }
          refract = transmittedDirection(norm, c_ray.getDirection(), refractI, nt, refrDir);

          if(refract){
            Hit refr_h = Hit();
            //cout << "refrDir:" << refrDir.x() << ":" << refrDir.y() << ":" << refrDir.z();
            Ray refrRay = Ray(intersect_p,refrDir);
            refrColor = traceRay(refrRay, EPSILON, nt, bounces - 1, refr_h);

            //Weighting
            float c = 0.0f;
            if(refractI <= nt){
              c = abs(Vector3f::dot(c_ray.getDirection(),norm));
            }
            else{
              c = abs(Vector3f::dot(refrDir,norm));
            }
            float R_0 = pow(((nt - refractI)/(nt + refractI)),2);
            R = R_0 + (1-R_0)*pow((1-c),5);
          }
        }
      }

      pixelIntersect += h.getMaterial()->getSpecularColor()*(R*reflColor + (1-R)*refrColor);
    }
    return pixelIntersect;
  }
  else{
    return pixelPass;
  }
}
Example #21
//weight is not used
Vec3f RayTracer::traceRay(Ray &ray, float tmin, int bounces, float indexOfRefraction, Hit &hit, Grid *grid) const
{
	if(bounces >= max_bounces)
		return Vec3f(0,0,0);
	RayTracingStats::IncrementNumNonShadowRays();
	//Group *group = s->getGroup();
	//group->intersect(ray,hit,tmin);  //this was the code path without the grid
	grid->intersect(ray,hit,tmin);
	
	if(hit.getMaterial()==NULL)
		return s->getBackgroundColor();
	else
	{
		RayTracingStats::IncrementNumShadowRays();
		Vec3f col(0,0,0);
		Vec3f hitPoint = hit.getIntersectionPoint();
		Vec3f tempAmb;
		Vec3f::MultRow(tempAmb,s->getAmbientLight(),hit.getMaterial()->getDiffuse(hitPoint));  //Kd La
		col += tempAmb;
		int lightNumber = s->getNumLights();
		Light *light;
		for(int i=0; i<lightNumber; i++)
		{
			light = s->getLight(i);
			Vec3f lightColor;
			Vec3f dirToLight;
			//Vec3f interPoint = hit.getIntersectionPoint();
			float distanceToLight;
			light->getIllumination(hitPoint,dirToLight,lightColor,distanceToLight);
			if(!castShadowRayGrid(hitPoint-ray.getDirection()*EPSILON,dirToLight,distanceToLight,grid))
			{
				Vec3f tempShade = hit.getMaterial()->Shade(ray,hit,dirToLight,lightColor);  //diffuse specular
				col += tempShade;
			}
		}
		if(hit.getMaterial()->isReflect(hitPoint))  
		{
			Ray rayReflect(mirrorDirection(hit.getNormal(),ray.getDirection()),hitPoint);
			Vec3f tempRefl;
			Hit hit2(1000,NULL,Vec3f(1,1,1));
			Vec3f::MultRow(tempRefl,hit.getMaterial()->getReflect(hitPoint),traceRay(rayReflect,tmin,bounces+1,indexOfRefraction,hit2,grid)); //weight,indexOfRefrection
			col += tempRefl;
		}
		if(hit.getMaterial()->isTransparent(hitPoint))
		{
			bool notTotalReflective;
			Vec3f transmittedDir;
			if(ray.getDirection().Dot3(hit.getNormal())>0) //ray is inside the object
			{
				notTotalReflective = transmittedDirection(hit.getNormal()*(-1.0f),ray.getDirection(),hit.getMaterial()->getIndexOfRefrac(hitPoint),indexOfRefraction,transmittedDir);
			}
			else  //outside
			{
				notTotalReflective = transmittedDirection(hit.getNormal(),ray.getDirection(),indexOfRefraction,hit.getMaterial()->getIndexOfRefrac(hitPoint),transmittedDir);
			}
			
			if(notTotalReflective)
			{
				Ray rayTransparent(transmittedDir,hitPoint);
				Vec3f tempTrans;
				Hit hit3(10000,NULL,Vec3f(1,1,1));
				Vec3f::MultRow(tempTrans,hit.getMaterial()->getTrans(hitPoint),traceRay(rayTransparent,tmin,bounces+1,indexOfRefraction,hit3,grid));
				col += tempTrans;
			}
			else
			{
				Vec3f tempTotalTrans;
				Vec3f::MultRow(tempTotalTrans,hit.getMaterial()->getTrans(hitPoint),hit.getMaterial()->getDiffuse(hitPoint));
				col += tempTotalTrans;
			}
		}
		return col;
	}
}
Example #22
Vec3f RayTracer::traceRay(Ray &ray, float tmin, int bounces, float weight,
	float indexOfRefraction, Hit &hit) const
{
	//printf("rays traced so far:\n");
	//RayTree::Print();

	Vec3f canswer;
	if (bounces > max_bounces)
		return Vec3f(0.0f, 0.0f, 0.0f); 
	Camera *camera = sceneParser->getCamera();
	Group *group = sceneParser->getGroup();
	int num_lights = sceneParser->getNumLights();
	Vec3f cambient = sceneParser->getAmbientLight();
	//it turned out the problem was here: once the ray hits a transformed object, the values
	//carried back in hit are what the transformed ray sees, not what the original ray sees
	//so the fix is: the distance t is unchanged, recompute the intersection point from that distance
	if (group->intersect(ray, hit, tmin))//the ray hit something
	{
		if (is_view_ray)
		{
			RayTree::SetMainSegment(ray, 0, hit.getT());
			is_view_ray = false;
		}
		Vec3f cobject = hit.getMaterial()->getDiffuseColor();
		Vec3f hitPoint = hit.getIntersectionPoint();
		//ambient component
		canswer = cambient * cobject;
		Vec3f clight;//color of the light
		Vec3f light_dir;//direction toward the light
		Vec3f normal_dir = hit.getNormal();//normal at the intersection point
		float distolight;//distance to the light source
		for (int i = 0; i < num_lights; i++)
		{
			Light *light = sceneParser->getLight(i);
			//light_dir : the direction to the light
			// this call returns the direction to the light, the light color, and the distance to the light
			// the first argument is the intersection point
			light->getIllumination(hitPoint, light_dir, clight, distolight);

			Ray ray2(hitPoint, light_dir);
			Vec3f init_normal(0, 0, 0);
			Hit hit2(distolight, NULL, init_normal);
			//shadow test
			if (shadow)
			{
				if (group->intersect(ray2, hit2, tmin)){
					RayTree::AddShadowSegment(ray2, 0, hit2.getT());
					continue;
				}
				RayTree::AddShadowSegment(ray2, 0, hit2.getT());
			}
			//cpixel  =  cambient * cobject + SUMi [ clamped(Li . N) * clighti * cobject ]
			//add the local illumination
			canswer = canswer + hit.getMaterial()->Shade(ray, hit, light_dir, clight);
		}

		//printf("rays traced so far:\n");
		//RayTree::Print();

		
		//reflected component
		Material *material = hit.getMaterial();
		Vec3f rc = material->getReflectiveColor();
		if (rc.r() > 0 && rc.g() > 0 && rc.b() > 0)
		{
			Vec3f mirrorDir;
			Vec3f incoming = ray.getDirection();
			mirrorDir = mirrorDirection(normal_dir, incoming);
			// The ray weight is simply multiplied by the magnitude of the reflected color
			Ray ray3(hitPoint, mirrorDir);
			Vec3f init_normal(0, 0, 0);
			Hit hit3(distolight, NULL, init_normal);
			//forgot to multiply by the object's own reflective coefficient...
			canswer += traceRay(ray3, tmin, bounces + 1, weight*rc.Length(), indexOfRefraction, hit3)*rc;
			if (bounces + 1 < max_bounces)
				RayTree::AddReflectedSegment(ray3, 0, hit3.getT());
		}

		//printf("rays traced so far:\n");
		//RayTree::Print();


		//everything from here on still has problems!!!!!
		//refracted component
		Vec3f transmitted;
		Vec3f tc = material->getTransparentColor();
		float index = material->getIndexOfRefraction();
		if (tc.r() > 0 && tc.g() > 0 && tc.b() > 0)
		{
			Vec3f init_normal(0, 0, 0);
			Hit hit4(distolight, NULL, init_normal);
			//once refraction is possible, check whether the ray is inside or outside the object
			//decided here by the dot product of normal and incoming
			Vec3f incoming = ray.getDirection();
			float judge = normal_dir.Dot3(incoming);
			if (judge < 0)//the ray is outside the object
			{
				if (transmittedDirection(normal_dir, incoming, 1, index, transmitted))
				{
					Ray ray4(hitPoint, transmitted);
					canswer += traceRay(ray4, tmin, bounces+1, weight*rc.Length(), index, hit4)*tc;
					RayTree::AddTransmittedSegment(ray4, 0, hit4.getT());
				}
			}
			else//the ray is inside the object
			{
				normal_dir.Negate();
				if (transmittedDirection(normal_dir, incoming, index, 1, transmitted))
				{
					Ray ray4(hitPoint, transmitted);
					canswer += traceRay(ray4, tmin, bounces+1, weight*rc.Length(), 1, hit4)*tc;
					RayTree::AddTransmittedSegment(ray4, 0, hit4.getT());
				}
			}
		}

		//printf("rays traced so far:\n");
		//RayTree::Print();

	}
	else
		canswer = sceneParser->getBackgroundColor();

	canswer.Clamp();
	return canswer;
}
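The comment near the top of this function describes the fix for transformed objects: the distance t carried back in hit is still valid, so the intersection point should be recomputed from the original ray rather than taken from the transformed one. A one-line sketch of that idea, reusing the names from the snippet and assuming the Ray class provides pointAtParameter as in the other snippets here (hit.getIntersectionPoint() may already behave this way, depending on how Hit is filled in):

		//recompute the hit point from the original ray and the unchanged t,
		//instead of trusting a point produced while intersecting in object space
		Vec3f hitPoint = ray.pointAtParameter(hit.getT());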
Example #23
bool Transform::intersect(const Ray &r, Hit &h, float tmin)
{
	float scaler;
	Vec3f d, o, n_ws;
	Ray r1;
	Matrix m;

	r1 = r;
	m = matrix;

	// get the length of direction vector
	scaler = r.getDirection().Length();

	// normalize the direction vector
	d = r1.getDirection();
	d.Normalize();

	// transform the ray
	o = r1.getOrigin();
	m.Transform(o);
	m.TransformDirection(d);

	// Now, Ray origin and direction is transformed to Object Space
	// intersect at Object space
	if (object->intersect(r1, h, tmin) == false)
		return false;

	// transform the hit point and 
	// normal vector back to world space
	//
	// In order to transform the normal vector,
	// let,
	// n_ws,  normal vector in world space,
	// v_ws,  perpendicular to normal in world space,
	// n_os,  normal vector in object space
	// v_os,  perpendicular to normal in object space,
	//
	// given,
	// n_os^T * v_os = 0 and v_ws^T * n_ws = 0
	// 
	// where,
	//   ^T present transport of the vector(matrix)
	//   ^-1 present inverse of the matrix
	//
	// n_os^T * v_os = 0 
	// ==> n_os^T * (M^-1 * M) * v_os = 0 
	// ==> (n_os^T * M^-1) * v_ws = 0
	// and
	//   n_ws * v_ws = 0
	// thus 
	//   n_ws = (n_os^T) * M^-1 = (M^T)^-1 * n_os

	m.Transpose();
	m.Inverse();
	n_ws = h.getNormal();
	m.Transform(n_ws);

	h.set(h.getT() * scaler, h.getMaterial(), n_ws, r);

	return true;
}
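Following the derivation in the comments, n_ws = (M^T)^-1 * n_os. If, as the ray transform above assumes, m already holds the world-to-object matrix M^-1, then (M^T)^-1 is simply m transposed, so no second inversion is needed. A short sketch of that step, reusing the snippet's Matrix calls (illustrative only):

	// normal back to world space with (M^-1)^T, where m is world-to-object
	Matrix normalMatrix = matrix;
	normalMatrix.Transpose();          // (M^-1)^T == (M^T)^-1
	n_ws = h.getNormal();
	normalMatrix.Transform(n_ws);
	n_ws.Normalize();                  // re-normalize after the transform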
Example #24
void
Renderer::Render(int width,
                 int height,
                 const std::string &outputFilename,
                 float minDepth,
                 float maxDepth,
                 const std::string &depthFilename,
                 const std::string &normalFilename)
{
    // TODO: implement

    // Loop over each pixel in the image, shooting a ray
    // through that pixel and finding its intersection with
    // the scene. Write the color at the intersection to that
    // pixel in your output image.
    //
    // Use the Image class to write out the requested output files.

	SceneParser Scene(fname); //create scene here 

	//scene is properly being read in
	cout << "Number of materials in scene " << Scene.getNumMaterials() << endl; 

	//just initialize these all anyways, very little memory not to, and annoying to deal with conditional
	//initialization
	Image IM=Image(width,height); 
	Image ID=Image(width,height); 
	Image IN=Image(width,height); 

	bool depth=false;
	if(!(depthFilename.empty()))  //we will produce depth image
		depth=true; 

	bool normal=false;
	if(!(normalFilename.empty()))  //we will produce normal image
		normal=true; 

	float xp;
	float yp; 
	Ray R;
	bool hitSomething; 
	Vector3f color; 
	float T; 

	for(int x=0; x<width; x++ )
	{
		for(int y=0; y<height; y++)
		{
			Hit H;  //need to reinitialize every single time 
	                        //unless we want it to remember things like the texture state

			H.set(std::numeric_limits<float>::max(),NULL,Vector3f(0,0,0)); //initialization for Hit

			//cout << "pixel " << x<< " " <<y <<endl; 

			//compute coordinates for ray we want to generate 
			xp=2*((float)x)/((float)(width-1))-1; 			
			yp=2*((float)y)/((float)(height-1))-1; 		
			//generate ray object for camera 
			R=(Scene.getCamera())->generateRay(Vector2f(xp,yp));			

			//cout << "generated ray " <<endl;

			Group * gro=Scene.getGroup();

			//iterate through group objects and see which is closest hit for ray
			hitSomething=gro->intersect(R,(Scene.getCamera())->getTMin(),H);

			//cout << " hit " <<hitSomething <<endl;
			
			//set color of pixels for images based on what we get back for the ray
			if(hitSomething==false)  //set pixel to be background color, didn't hit anything
			{
				//cout << "setbackground " <<endl;
				IM.setPixel(x,y,Scene.getBackgroundColor());
				if(depth)
					ID.setPixel(x,y,Vector3f(0,0,0)); 	
				if(normal)
					IN.setPixel(x,y,Vector3f(0,0,0));

				//cout << "finished set background"<<endl;  	
			}
			else
			{
				//cout << "set Material "<<endl; 
				IM.setPixel(x,y,shade(R,H,&Scene)); //shade method is defined above 
				//IM.setPixel(x,y,Vector3f(0.5,0.5,0.5));
				if(depth)  //calculate Pixel color based on depth, set to gray scale 
				{
					T=H.getT();
					if(T<minDepth)
						T=minDepth; 
					if(T>maxDepth)
						T=maxDepth;
					color=Vector3f(1,1,1)-((T-minDepth)/(maxDepth-minDepth)*Vector3f(1,1,1)); 					
					ID.setPixel(x,y,color);
				}
				if(normal)
				{
					color=H.getNormal();
					color=Vector3f(fabs(color[0]),fabs(color[1]),fabs(color[2])); 					
					IN.setPixel(x,y,color);
				}
				//cout << "finished set material " <<endl; 
			}
		}
	}

	//save the images 

	IM.savePNG(outputFilename); 

	if(depth)  //we will produce depth image
		ID.savePNG(depthFilename); 

	if(normal)  //we will produce depth image
		IN.savePNG(normalFilename); 
	

}
Example #25
Vector3f shade(const Ray &ray, const Hit &hit, SceneParser * Scene )
{
	Material * material=hit.getMaterial();

	//need to get a specific diffuse color depending on whether or not we have triangle object
	//and whether or not this triangle object contains a texture 

	//as default set as diffuse color
	Vector3f _diffuseColor=material->getDiffuseColor();

	Vector2f uv; 
	Texture * tex; 

	if(hit.hasTex())
	{
		if((*material).isLoaded())
			cout << "texture should have been loaded" <<endl; 
		else
			cout << "Material is not loaded!" <<endl;

		uv=hit.getTexCoord();
		tex=(*material).getTexture();
		cout << "texture coordinates" << uv[0] << " " << uv[1] <<endl; 
		_diffuseColor=(*tex).getTexel(uv[0],uv[1]); //set diffuse color based on texture map
	}

	Vector3f _specularColor=material->getSpecularColor();
	float _shininess=material->getShininess(); 

        //first we need to figure out where this section of material is located
        Vector3f n=hit.getNormal();
        Vector3f dray=ray.getDirection();
        Vector3f p=ray.pointAtParameter(hit.getT());
        Vector3f c_amb=Scene->getAmbientLight();
        int num=Scene->getNumLights();

        Vector3f color=_diffuseColor*c_amb; //* is direct vector product

        Light * bob;
        Vector3f ldir, col,ref;
        float dist,val,val2;

        //iterate through light sources
        for(int i=0; i<num; i++)
        {
                bob=Scene->getLight(i);
                bob->getIllumination(p,ldir,col,dist);

                //compute contribution from diffuse reflection
                val=ldir.dot(ldir,n);
                if(val<0) //clamp value
                        val=0;
                color=color+_diffuseColor*col*val;

                //compute contribution from specular reflection
                ref=ldir-2*ldir.dot(ldir,n)*n;
                val2=pow(dray.dot(dray,ref),_shininess); //as dray moves off reflection we suppress 
                if(dray.dot(dray,ref)<0) //clamp value
                        val2=0;

                color=color+_specularColor*col*val2;
        }

        return color;
}
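In the specular part above, pow is applied before the clamp, so a fractional _shininess could see a negative base. A small reordering sketch that clamps the dot product first, reusing the names from the snippet:

                //clamp the specular dot product before exponentiation
                float specDot = dray.dot(dray, ref);
                float specTerm = (specDot > 0.0f) ? pow(specDot, _shininess) : 0.0f;
                color = color + _specularColor*col*specTerm;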
Example #26
File: main.cpp  Project: sdoctor/Graphics
int main( int argc, char* argv[] )
{
    // Fill in your implementation here.
    
    // Things we get from the input arguments
    const char* input_filename;
    int img_height;
    int img_width;
    const char* output_filename;
    int depth_min;
    int depth_max;
    const char* depth_output_filename = NULL;
    const char* normals_output_filename = NULL;
    
    cout << "All variables declared in main..." << endl;

    // This loop loops over each of the input arguments.
    // argNum is initialized to 1 because the first
    // "argument" provided to the program is actually the
    // name of the executable (in our case, "a4").
    for( int argNum = 1; argNum < argc; ++argNum )
    {
        std::cout << "Argument " << argNum << " is: " << argv[argNum] << std::endl;
        
        if (string(argv[argNum]) == "-input")
        {
            assert (argNum+1 <= argc && "Did not specify argument for flag -input"); // make sure there is something after the flag
            input_filename = argv[argNum+1];
            argNum += 1; // move past the flag you just read
        }
        if (string(argv[argNum]) == "-size")
        {
            assert (argNum+2 <= argc && "Did not specify enough arguments for flag -size"); // make sure there is something after the flag
            img_height = atoi(argv[argNum+1]);
            img_width = atoi(argv[argNum+2]);
            argNum += 2;
        }
        if (string(argv[argNum]) == "-output")
        {
            assert (argNum+1 <= argc && "Did not specify argument for flag -output"); // make sure there is something after the flag
            output_filename = argv[argNum+1];
            argNum+=1;
        }
        if (string(argv[argNum]) == "-depth")
        {
            assert (argNum+3 <= argc && "Did not specify enough arguments for flag -depth (should have 3)");
            depth_min = atoi(argv[argNum+1]);
            depth_max = atoi(argv[argNum+2]);
            depth_output_filename = argv[argNum+3];
            argNum+=3;
        }
        if (string(argv[argNum]) == "-normals")
        {
            assert (argNum+1 <= argc && "Did not specify enough arguments for flag -normals (should have 1)");
            normals_output_filename = argv[argNum+1];
            argNum+=1;
        }
        
    }
    
    cout << "Input arguments parsed" << endl;
    // From the handout: Write a main function that:
    //    1) reads the scene (using the parsing code provided)
    //    2) loops over the pixels in the image plane
    //        3) generates a ray using your camera class
    //        4) intersects it with the high level Group that stores the objects of the scene
    //        5) writes the color of the closest intersected object
    
    
    // read the scene using SceneParser
    SceneParser* parser = new SceneParser(input_filename);
    Camera* camera = parser->getCamera();
    Group* group = parser->getGroup();
    Vector3f background_color = parser->getBackgroundColor(); 
    
    
    
    // Loop over pixels in the image plane
    Image* img = new Image(img_width, img_height);
    Image* depth_img = new Image(img_width, img_height);
    Image* normals_img = new Image(img_width, img_height);
    
    cout << "now starting to produce image" << endl;
    cout << "camera's tmin = " << camera->getTMin() << endl;
    for (int j = 0; j < img_height; j++) 
    {
        for (int i = 0; i < img_width; i++) 
        {
//            cout << "for pixel " << i << "," << j << ": ";
            // Generate ray using camera class
            float tmin = camera->getTMin();
            Hit hit = Hit(FLT_MAX, NULL, NULL); // is this supposed to be something more...profound?
            
            
            // we want it to map from (-1,-1) to (1,1)
            float x = (2.0/(float)img_width)*(float)i - 1.0;
            float y = (2.0/(float)img_height)*(float)j - 1.0;
            Vector2f pixel = Vector2f(x, y);

            Ray ray = camera->generateRay(pixel);
            
            // intersect the ray with the high level group for the scene
            if (group->intersect(ray, hit, tmin))
            {
                              
                // write color of the closest intersected object
                Material* material = hit.getMaterial();
                if (material->getDiffuseColor()[0] == 1)
                {
                    cout << "color = " << material->getDiffuseColor();
                }
//                cout << "material = " << material << endl;
                if (parser->getNumLights() == 0)
                {
                    Vector3f color = material->getDiffuseColor();
                    if (material->getDiffuseColor()[0] == 1)
                    {
                        cout << "writing color = " << material->getDiffuseColor();
                    }
                    img->SetPixel(i, j, color); 
                }
                else
                {
                    Vector3f light_color(0,0,0);// we want to iteratively add the light from black
                    for (int k = 0; k < parser->getNumLights(); k++)
                    {
                        Light* light = parser->getLight(k);
                        Vector3f p = ray.pointAtParameter(hit.getT());
                        Vector3f dir;
                        Vector3f col;
                        float distanceToLight = 10.0;
                        light->getIllumination(p, dir, col, distanceToLight); // what should distance to light actually be?                        
                        float coeff = Vector3f::dot(hit.getNormal().normalized(), dir.normalized());
                        light_color += material->getDiffuseColor() * col * coeff;
                                                
                    }
                    if (material->getDiffuseColor()[0] == 1)
                    {
                        cout << "writing color = " << material->getDiffuseColor();
                    }
                    img->SetPixel(i, j, light_color);
                }
                               
                
                // Visualize Depth (QUESTION: should this be a separate routine?
                // I don't think so because all of the tests also have depth 
                // but this can be commented out for testing if it doesn't work...
                
                if (depth_output_filename != NULL)
                {
                    float d = hit.getT();
//                    d+=1;
//                    d = -d;
//                    cout << "d = " << d << endl;
                    // if t is within the correct range
//                    cout << "depthMin = " << depth_min << ", depth max = " << depth_max << endl;
                    if (d >= depth_min && d <= depth_max)
                    {
                        float depth_val = (d - depth_min)/(depth_max - depth_min);
//                        cout << "depth val = " << depth_val << endl;
                        depth_val = 1-depth_val;
                        depth_img->SetPixel(i, j, Vector3f(depth_val, depth_val, depth_val));
                    }
                }
                
                
                if (normals_output_filename != NULL)
                {
                    Vector3f normal = hit.getNormal();
                    Vector3f fab_norm = Vector3f(fabs(normal[0]), fabs(normal[1]), fabs(normal[2]));
                    normals_img->SetPixel(i, j, fab_norm); // QUESTION: just color the normal?
                }
                    
            } 
            else
            {
//                cout << "writing background color" << endl;
                img->SetPixel(i, j, background_color);
                depth_img->SetPixel(i, j, background_color);
            }
            
            
            
            
            
        }      
    }
    
    
    // Instructions for Depth:
    // Implement a second rendering style to visualize the depth t of
    // objects in the scene. Two input depth values specify the range
    // of depth values which should be mapped to shades of gray in 
    // the visualization. Depth values outside this range = clamped
    
    
    // Instructions for Normals:
    // Implement a new rendering mode, normal visualization
    
    
    
    
    // Write outfiles!
    cout << "images produced, writing outfiles" << endl;
    img->SaveTGA(output_filename);
    if (depth_output_filename != NULL)
    {
        depth_img->SaveTGA(depth_output_filename);
    }
    if (normals_output_filename != NULL)
    {
        normals_img->SaveTGA(normals_output_filename);

    }
           
    

    return 0;
}
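One thing the lighting loop above does not do is clamp the Lambertian term, so a light behind the surface (negative coeff) subtracts color. A minimal sketch of the clamped version of that accumulation, reusing the loop's variable names (illustrative only):

                        // clamp the diffuse term so back-facing lights contribute nothing
                        float coeff = Vector3f::dot(hit.getNormal().normalized(), dir.normalized());
                        if (coeff < 0.0f) coeff = 0.0f;
                        light_color += material->getDiffuseColor() * col * coeff;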
Example #27
File: main.cpp  Project: dsngiem/6.837
int main( int argc, char* argv[] )
{
    // Fill in your implementation here.

    // This loop loops over each of the input arguments.
    // argNum is initialized to 1 because the first
    // "argument" provided to the program is actually the
    // name of the executable (in our case, "a4").
	string sceneInput;
	int sizeX;
	int sizeY;
	string outputFile;
	string normalFile;
	int depth1;
	int depth2;
	string depthFile;
	
	bool shadows = false;
	int bounces = 0;
	float weight = 0.1;
	
	int numSamples = 0;
	bool uniformSamples = true;
	bool jitteredSamples = false;
	float boxFilterRadius = 0.0f;
	
	bool antialiasing = false;
	string renderSamplesFile;
	int renderSamplesFactor = 1;
	
    for( int argNum = 1; argNum < argc; ++argNum )
    {
        //std::cout << "Argument " << argNum << " is: " << argv[argNum] << std::endl;
		
		string arg = argv[argNum];
		
		if (arg == "-input") {
			argNum++;
			sceneInput = argv[argNum];
			
		} else if (arg == "-size") {
			argNum++;
			sscanf(argv[argNum], "%d", &sizeX);
			argNum++;
			sscanf(argv[argNum], "%d", &sizeY);
			
		} else if (arg == "-output") {
			argNum++;
			outputFile = argv[argNum];
			
		} else if (arg == "-normals") {
			argNum++;
			normalFile = argv[argNum];
			
		} else if (arg == "-depth") {
			argNum++;
			sscanf(argv[argNum], "%d", &depth1);
			argNum++;
			sscanf(argv[argNum], "%d", &depth2);
			argNum++;
			depthFile = argv[argNum];
			
		} else if (arg == "-bounces") {
			argNum++;
			sscanf(argv[argNum], "%d", &bounces);
			
		} else if (arg == "-weight") {
			argNum++;
			sscanf(argv[argNum], "%f", &weight);
			
		} else if (arg == "-shadows") {
			shadows = true;
			
		} else if (arg == "-uniform_samples") {
			uniformSamples = true;
			argNum++;
			sscanf(argv[argNum], "%d", &numSamples);
			
		} else if (arg == "-jittered_samples") {
			jitteredSamples = true;
			argNum++;
			sscanf(argv[argNum], "%d", &numSamples);
			
		} else if (arg == "-box_filter") {
			argNum++;
			sscanf(argv[argNum], "%f", &boxFilterRadius);
			antialiasing = true;
			
		} else if (arg == "-render_samples") {
			argNum++;
			renderSamplesFile = argv[argNum];
			argNum++;
			sscanf(argv[argNum], "%d", &renderSamplesFactor);
			
		} else {
			std::cout << "Argument not implemented " << argNum << " is: " << argv[argNum] << std::endl;

		}
    }
	
	assert(sceneInput != "");
	
	SceneParser* sceneParser = new SceneParser( (char*)sceneInput.c_str() );

	Camera* camera = sceneParser->getCamera();
	Group* objects = sceneParser->getGroup();
	
    // First, parse the scene using SceneParser.
    // Then loop over each pixel in the image, shooting a ray
    // through that pixel and finding its intersection with
    // the scene.  Write the color at the intersection to that
    // pixel in your output image.

	float stepX = 2.0f/(sizeX);
	float stepY = 2.0f/(sizeY);
	float stepXStart = 3.0 * stepX / 8.0f;
	float stepYStart = 3.0 * stepY / 8.0f;
	
	
	int rootNumSamples = (int)sqrt(numSamples);
	float stepRoot = 1.0f / rootNumSamples;
	float stepRootStart = stepRoot / 2.0f;
	
	//float stepXrender = 2.0f/(sizeX - 1)/renderSamplesFactor;
	//float stepYrender = 2.0f/(sizeY - 1)/renderSamplesFactor;
	
	Image* output;
	Image* depth;
	Image* normal;
	SampleDebugger* render;
	
	if (outputFile != "") {
		output = new Image( sizeX, sizeY );
		output->SetAllPixels( sceneParser->getBackgroundColor() );
	}
	
	if (depthFile != "") {
		depth = new Image( sizeX, sizeY );
		depth->SetAllPixels( Vector3f::ZERO );
	}
	
	if (normalFile != "") {
		normal = new Image( sizeX, sizeY );
		normal->SetAllPixels( Vector3f::ZERO );
	}
	
	if (renderSamplesFile != "") {
		render = new SampleDebugger( sizeX, sizeY, numSamples );
	}
	
	RayTracer rayTracer = RayTracer( sceneParser, bounces, weight, shadows );
	
	for (int x = 0; x < sizeX; x++) {
		for (int y = 0; y < sizeY; y++) {
			
			Vector2f point = Vector2f(-1 + ((x + 0.5f) * stepX), -1 + ((y + 0.5f) * stepY));
			
			Ray ray = camera->generateRay( point );
			Hit hit = Hit();
			float tmin = camera->getTMin();

			if (renderSamplesFile != "") {
				for (int i = 0; i < numSamples; i++) {
					int row = floor((float)i / (float)rootNumSamples);
					int col = i % rootNumSamples;
					Vector2f offset = Vector2f( stepRootStart + col * stepRoot, stepRootStart + row * stepRoot);
					
					if (jitteredSamples) {
						offset = Vector2f( nextFloat(), nextFloat());
					}
					
					Vector3f color = sceneParser->getBackgroundColor(); 
					
					Ray renderRay = camera->generateRay( point - Vector2f(0.5f * stepX, 0.5f * stepY) + (offset) * Vector2f(stepX, stepY) );
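					// The sample ray above is offset from the pixel center by up to half a pixel in
					// each direction, so the samples cover the pixel's footprint in the [-1,1]^2 image plane.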
					Hit renderHit = Hit();
					
					if (objects->intersect(renderRay, renderHit, tmin)) {
						color = rayTracer.traceRay( renderRay, tmin, 0, 1.0, renderHit, 1.0 );
					}
					
					render->setSample( x, y, i, offset, color );
				}
			}
			
			//cout << "testing ray at " << ray << tmin << endl; 
			
			bool intersected = objects->intersect( ray, hit, tmin );
			
			if (intersected || antialiasing) {
				
				//cout << "found an intersection for " << ray << "at " << hit.getT() << endl;
				
				if (outputFile != "") {
					
					Vector3f pixelColor = sceneParser->getBackgroundColor();
					
					if (antialiasing) {
						
						Vector3f color = Vector3f( 0 );
						
						for (int i = 0; i < numSamples; i++) {
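							// Same stratified grid as the sample-debugger loop above: sample i maps to
							// cell (row, col), and the cell center is used unless jittering is enabled.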
							int row = floor((float)i / (float)rootNumSamples);
							int col = i % rootNumSamples;
							Vector2f offset = Vector2f( stepRootStart + col * stepRoot, stepRootStart + row * stepRoot);
							
							if (jitteredSamples) {
								offset = Vector2f( nextFloat(), nextFloat() );
							}
							
							Vector3f aColor = sceneParser->getBackgroundColor();
							
							Ray renderRay = camera->generateRay( point - Vector2f(0.5f * stepX, 0.5f * stepY) + (offset) * Vector2f(2.0 * boxFilterRadius * stepX, 2.0 * boxFilterRadius * stepY) );
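							// For antialiasing, the sample offsets are scaled by the box filter's
							// diameter (2 * boxFilterRadius) in pixel units, spreading the samples
							// over the filter's support instead of just the pixel itself.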
							Hit renderHit = Hit();
							
							if (objects->intersect(renderRay, renderHit, tmin)) {
								aColor = rayTracer.traceRay( renderRay, tmin, 0, 1.0, renderHit, 1.0 );
							}
							
							color += aColor;
						}
						
						pixelColor = color / numSamples;
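						// The division above is the box-filter reconstruction: a plain average
						// of the accumulated samples.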
						
					} else if (intersected) {
						
						pixelColor = rayTracer.traceRay( ray, tmin, 0, 1.0, hit, 1.0 );
					}
					
					/*
					
					Vector3f pixelColor = sceneParser->getAmbientLight() * hit.getMaterial()->getDiffuseColor();
					
					for (int i = 0; i < sceneParser->getNumLights(); i++) {
						Light* light = sceneParser->getLight(i);
						
						Vector3f p = ray.pointAtParameter( hit.getT() );
						Vector3f dir = Vector3f();
						Vector3f col = Vector3f();
						float distance = 0;
						
						light->getIllumination(p, dir, col, distance);
						
						pixelColor += hit.getMaterial()->shade( ray, hit, dir, col );
					}
					
					
					//cout << "final pixel color: ";
					//pixelColor.print();
					 //cout << endl;
					 
					 */
					
					output->SetPixel(x, y, VecUtils::clamp(pixelColor));
				}
				
				if (depthFile != "") {
					Vector3f clamped = VecUtils::clamp(Vector3f(hit.getT()), depth1, depth2);
					Vector3f grayscale = (Vector3f(depth2) - clamped) / (float)(depth2 - depth1);
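					// hit.getT() is clamped to [depth1, depth2] above and mapped linearly so that
					// depth1 (near) renders white and depth2 (far) renders black.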
					
					//clamped.print();
					//grayscale.print();
					depth->SetPixel(x, y, grayscale);
				}
				
				if (normalFile != "") {
					normal->SetPixel(x, y, VecUtils::absoluteValue(hit.getNormal()) );
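					// The component-wise absolute value of the surface normal is used directly
					// as an RGB color for the normal visualization.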
				}
				
			}
		}
	}
	
	if (outputFile != "") {
		output->SaveTGA( (char *)outputFile.c_str() );
	}
	
	if (depthFile != "") {
		depth->SaveTGA( (char *)depthFile.c_str() );
		
		//printf("depth %d %d\n", depth1, depth2);
	}
	
	if (normalFile != "") {
		normal->SaveTGA( (char *)normalFile.c_str() );
	}
	
	if (renderSamplesFile != "") {
		render->renderSamples( (char *)renderSamplesFile.c_str(), renderSamplesFactor );
	}
	
	
    // Free the images, sample debugger, and scene parser allocated above.
    // The pointers are initialized to NULL, so unconditional delete is safe.
    delete output;
    delete depth;
    delete normal;
    delete render;
    delete sceneParser;

    return 0;
}
예제 #28
0
void render( RayTracer* rayTracer, SceneParser* scene, Args* args )
{
    assert( rayTracer != NULL && scene != NULL && args != NULL );

	Vec2i imageSize( args->width, args->height );

    // construct images
	Image* image = 0;
    Image* depth_image = 0;
	Image* normals_image = 0;
	if ( args->output_file != NULL )
    {
		image = new Image( imageSize, ImageFormat::RGBA_Vec4f );
        image->clear(Vec4f(0, 0, 0, 0));
    }
	if ( args->depth_file != NULL )
    {
		depth_image = new Image( imageSize, ImageFormat::RGBA_Vec4f );
        depth_image->clear(Vec4f(0, 0, 0, 0));
    }
    if ( args->normals_file != NULL )
    {
		normals_image = new Image( imageSize, ImageFormat::RGBA_Vec4f );
        normals_image->clear(Vec4f(0, 0, 0, 0));
    }

	// Construct sampler
	Sampler* sampler = Sampler::constructSampler( args->sampling_pattern, args->num_samples );

	/*** EXTRA
		The Filter and Film objects are not used by the starter code. They provide starting points
		for implementing smarter supersampling, whereas the required type of less fancy supersampling
		can be implemented by taking the average of the samples drawn from each pixel.
	**/
	Filter* filter = Filter::constructFilter( args->reconstruction_filter, args->filter_radius );
    Film* film = new Film( image, filter );

	/*** Main render loop!
		 Loop through all the pixels in the image
		 generate all the samples
		 fire rays
		 compute shading
		 accumulate into image
	*/
	// loop over scanlines
	for( int j = 0; j < args->height; ++j )
    {
		// print progress info
		if ( args->show_progress )
			::printf( "%.2f%%      \r", j*100.0f/imageSize.y );

		// loop over pixels on a scanline
		for( int i = 0; i < args->width; ++i )
        {
			Vec3f sampleColor(0.0f, 0.0f, 0.0f);

			// Loop through all the samples for this pixel.
            for( int n = 0; n < args->num_samples; ++n )
            {
				// Get the offset of the sample inside the pixel. 
				// You need to fill in the implementation for this function when implementing supersampling.
				// The starter implementation only supports one sample per pixel through the pixel center.
                Vec2f offset = sampler->getSamplePosition( n );

				// Convert floating-point pixel coordinate to canonical view coordinates in [-1,1]^2
				// You need to fill in the implementation for Camera::normalizedImageCoordinateFromPixelCoordinate.
				Vec2f ray_xy = Camera::normalizedImageCoordinateFromPixelCoordinate( Vec2f( float(i), float(j) ) + offset, imageSize );

				// Generate the ray using the view coordinates
				// You need to fill in the implementation for this function.
                Ray r = scene->getCamera()->generateRay( ray_xy );

				// trace the ray!
                Hit hit;
                float tmin = scene->getCamera()->getTMin();
				// !!Trace!!
				// You should fill in the gaps in the implementation of traceRay().
				// args->bounces gives the maximum number of reflections/refractions that should be traced.
                sampleColor += rayTracer->traceRay( r, tmin, args->bounces, 1.0f, hit );

				/*** Supersampling note:
				     The color of each sample is accumulated into sampleColor here, and the
					 average over all samples is written to the image after this loop. That
					 average is the basic "box filtering" required by the assignment.

					 For extra credit, more sophisticated reconstruction filters such as the
					 "tent" and bicubic "Mitchell-Netravali" filters can be implemented via the
					 addSample() function of the Film class instead of setting pixel values
					 in the image directly.
				**/
            }

			// Guard against the case where no output image was requested (image stays NULL).
			if ( image != NULL )
				image->setVec4f( Vec2i(i,j), Vec4f(sampleColor/args->num_samples,1.0f) );

			// if normal and depth images are requested, shoot special rays through the center,
			// i.e., do not supersample.
            if( depth_image != NULL || normals_image != NULL )
            {
				// Get canonical image coordinates from pixel units...
				Vec2f ray_xy = Camera::normalizedImageCoordinateFromPixelCoordinate( Vec2f( float(i), float(j) ) + Vec2f( 0.5f, 0.5f ), imageSize );
				// ..generate the ray..
                Ray r = scene->getCamera()->generateRay( ray_xy );

				// ..trace..
                Hit hit;
                float tmin = scene->getCamera()->getTMin();
                Vec3f shade = rayTracer->traceRay( r, tmin, args->bounces, 1.0f, hit );
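				// The returned color ('shade') is not used for the depth/normal outputs;
				// the call is made so that 'hit' is filled with the intersection's t and normal.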
				// ...and store results.
                if( depth_image != NULL )
                {
					/*** YOUR CODE HERE:
					     Here you should map the t range [depth_min, depth_max] to the inverted range [1,0] for visualization
						 Note that closer objects should appear brighter.
					**/
					float t = hit.getT();

					float dmax = Args::instance()->depth_max;
					float dmin = Args::instance()->depth_min;

					// Clamp t to [depth_min, depth_max] so misses and out-of-range hits
					// saturate to black/white instead of producing values outside [0, 1].
					if ( t < dmin ) t = dmin;
					if ( t > dmax ) t = dmax;

					float f = 1.0f - ( (t - dmin) / (dmax - dmin) );
					
                    depth_image->setVec4f( Vec2i( i, j ), Vec4f( f, f, f, 1 ) );
                }
                if( normals_image != NULL )
                {
                    Vec3f n = hit.getNormal();
                    Vec3f color( fabs( n[0] ), fabs( n[1] ), fabs( n[2] ) );
					color = color.clamp( Vec3f(0), Vec3f(1) );
                    normals_image->setVec4f( Vec2i( i, j ), Vec4f( color, 1 ) );
                }
            } // close depth/normals block
        } // close width
    } // close height

	/*** YOUR CODE HERE (EXTRA)
	     When you implement smarter filtering,
		 you should normalize the filter weight
		 carried in the 4th channel.
	**/
    if( image != NULL )
    {
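        // A minimal sketch of the extra-credit weight normalization, assuming the 4th channel
        // accumulates the filter weight and that Image exposes getVec4f() alongside setVec4f()
        // and Vec4f provides getXYZ():
        //
        //   for ( int y = 0; y < imageSize.y; ++y )
        //       for ( int x = 0; x < imageSize.x; ++x )
        //       {
        //           Vec4f p = image->getVec4f( Vec2i( x, y ) );
        //           if ( p.w > 0.0f )
        //               image->setVec4f( Vec2i( x, y ), Vec4f( p.getXYZ() / p.w, 1.0f ) );
        //       }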
    }

	// Clean up
    delete filter;
    delete sampler;
    delete film;

    // And finally, save the images as PNG!
    if( image != NULL )
    { 
		::printf( "Done, writing out %s\n", args->output_file );
		FW::File f( args->output_file, FW::File::Create );
		exportLodePngImage( f, image );
		delete image;
    }
    if( depth_image != NULL )
    { 
		FW::File f( args->depth_file, FW::File::Create );
		exportLodePngImage( f, depth_image );
        delete depth_image; 
    }
    if( normals_image != NULL)
    { 
		FW::File f( args->normals_file, FW::File::Create );
		exportLodePngImage( f, normals_image );
        delete normals_image; 
    }
}