Code Example #1
File: primitives.cpp Project: shihongzhi/RayTracing
bool Sphere::intersect(const Ray &r, Hit &h, float tmin)
{
	Vec3f v = center - r.getOrigin();
	float tp = v.Dot3(r.getDirection());
	float det = tp*tp - v.Dot3(v) + radius*radius;
	//intersect
	if(det > 0)
	{
		//t'
		det = sqrtf(det); 

		float t1 = tp - det;
		float t2 = tp + det;

		if(t1 > tmin && t1 < h.getT())
		{
			Vec3f normal = (r.pointAtParameter(t1) - center);
			normal /= radius;
			normal.Normalize();
			h.set(t1,material,normal,r);
			return 1;
		}
		else if(t2 > tmin && t2 < h.getT())
		{
			//sphere's normal
			Vec3f normal = (r.pointAtParameter(t2) - center);
			normal /= radius;
			normal.Normalize();
			h.set(t2,material,normal,r);
			return 1;
		}
	}
	return 0;
}
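Code Example #2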
void PhotonMapping::TracePhoton(const Vec3f &position, const Vec3f &direction, 
				const Vec3f &energy, int iter) {
  
  if(iter>args->num_bounces){
    return;
  }

  Hit h = Hit();
  Ray R = Ray(position, direction);
  bool intersect = raytracer->CastRay(R, h, false);
  if(!intersect){
    return;
  }
  Material *m = h.getMaterial();
  Vec3f normal = h.getNormal();
  Vec3f point = R.pointAtParameter(h.getT());
  Vec3f opDirec = direction;
  opDirec.Negate();
  opDirec.Normalize();
  Vec3f diffuse = m->getDiffuseColor(), reflec = m->getReflectiveColor();
  double diffuseAnswer = diffuse.x()+diffuse.y()+diffuse.z();
  double reflecAnswer = reflec.x()+reflec.y()+reflec.z();
  double total = reflecAnswer+diffuseAnswer;
  diffuseAnswer /= total;
  reflecAnswer /= total;
  double seed = GLOBAL_mtrand.rand();
  if(seed <= diffuseAnswer && seed >= 0){
    Vec3f newEnergy = energy * diffuse;
    Vec3f newPosition = point;
    Vec3f newDirection = Vec3f(GLOBAL_mtrand.rand(),GLOBAL_mtrand.rand(),GLOBAL_mtrand.rand());
    newDirection.Normalize();
    Photon answer = Photon(point,opDirec,newEnergy,iter+1);
    kdtree->AddPhoton(answer);
    TracePhoton(newPosition, newDirection, newEnergy, iter+1);
  }
  else if(seed>diffuseAnswer && seed <= 1){
    Vec3f newEnergy = energy * reflec;
    Vec3f newPosition = point;
    Vec3f newDirection = direction - 2 * direction.Dot3(normal) * normal;
    Photon answer = Photon(point,opDirec,newEnergy,iter+1);
    kdtree->AddPhoton(answer);
    TracePhoton(newPosition, newDirection, newEnergy, iter+1);
  }
  // ==============================================
  // ASSIGNMENT: IMPLEMENT RECURSIVE PHOTON TRACING
  // ==============================================

  // Trace the photon through the scene.  At each diffuse or
  // reflective bounce, store the photon in the kd tree.

  // One optimization is to *not* store the first bounce, since that
  // direct light can be efficiently computed using classic ray
  // tracing.



}
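The optimization described in the comment above (do not store the photon on its first bounce, since direct illumination is handled by ordinary ray tracing) could be sketched roughly as below. It reuses the Photon, kdtree, and iter names from this snippet; the assumption that the initial call from the light passes iter == 0 is mine, not something the code states.

  // Hypothetical guard inside either bounce branch: only store photons that
  // have already bounced at least once (iter == 0 is assumed to mean the
  // photon came straight from the light source).
  if (iter > 0) {
    Photon stored(point, opDirec, newEnergy, iter + 1);
    kdtree->AddPhoton(stored);
  }
  TracePhoton(newPosition, newDirection, newEnergy, iter + 1);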
Code Example #3
Vec3f cCameraManager::getRight()
{
   Vec3f right;
   Vec3f lookingVec = m_vecCamPos - m_vecCamLookAt;
   lookingVec.Normalize();
   Vec3f::Cross3(right, lookingVec, m_vecCamUp);
   right.Normalize();

   return right;
}
Code Example #4
File: camera.cpp Project: perfect28/MIT-Graphics
PerspectiveCamera::PerspectiveCamera(Vec3f cer, Vec3f &direction, Vec3f &up, float angle)
{
	this->center = cer;
	direction.Normalize();
	this->dir = direction;
	up.Normalize();
	this->up = up;
	Vec3f::Cross3(this->hor, this->dir, this->up);
	this->hor.Normalize();
	this->angle = angle;
	float theta = angle / 2.0f;
	this->dis = 1.0f / (sin(theta) * 2.0f);
}
Code Example #5
File: RayTracer.cpp Project: shihongzhi/RayTracing
Vec3f RayTracer::mirrorDirection(const Vec3f &normal, const Vec3f &incoming) const
{
	Vec3f reflectionDir;
	reflectionDir = incoming - normal * (incoming.Dot3(normal)) * 2;
	reflectionDir.Normalize();
	return reflectionDir;
}
Code Example #6
File: triangle.cpp Project: perfect28/MIT-Graphics
bool Triangle::intersect(const Ray &r, Hit &h, float tmin)
{
	Vec3f r0 = r.getOrigin();
	Vec3f rd = r.getDirection();

	Vec3f E1 = a - b;
	Vec3f E2 = a - c;
	Vec3f S = a - r0;

	//Parameter mistake: rd had accidentally been written as r0 here...

	float de = det3x3(rd.x(), rd.y(), rd.z(), E1.x(), E1.y(), E1.z(), E2.x(), E2.y(), E2.z());
	if (de == 0.0f)
		return false;
	float t = det3x3(S.x(), S.y(), S.z(), E1.x(), E1.y(), E1.z(), E2.x(), E2.y(), E2.z())/de;
	float belta = det3x3(rd.x(), rd.y(), rd.z(), S.x(), S.y(), S.z(), E2.x(), E2.y(), E2.z()) / de;
	float lamda = det3x3(rd.x(), rd.y(), rd.z(), E1.x(), E1.y(), E1.z(), S.x(), S.y(), S.z()) / de;

	Vec3f normal;
	Vec3f::Cross3(normal, b - a, c - a);
	normal.Normalize();
	if (t >= tmin && belta > 0.0f && lamda > 0.0f && belta + lamda < 1.0f)
	{
		h.set(t, material, normal, r);	//record the hit only for a valid intersection
		return true;
	}
	else
		return false;
}
Code Example #7
File: transform.cpp Project: perfect28/MIT-Graphics
/*
The intersect routine will first transform the ray, 
then delegate to the intersect routine of the contained 
object. Make sure to correctly transform the resulting 
normal according to the rule seen in lecture. You may 
choose to normalize the direction of the transformed ray 
or leave it un-normalized. If you decide not to normalize 
the direction, you might need to update some of your intersection code.
*/
bool Transform::intersect(const Ray &r, Hit &h, float tmin)
{
	Vec3f r0 = r.getOrigin();
	Vec3f rd = r.getDirection();
	Matrix inv;
	matrix.Inverse(inv);
	inv.Transform(r0);
	inv.TransformDirection(rd);
	if (object != NULL)
	{
		//The h passed in here was problematic; it is handled as follows:
		bool judge = object->intersect(Ray(r0,rd), h, tmin);
		Vec3f normal = h.getNormal();
		//Odd: at first the normal's direction was not corrected, yet the result looked right,
		//and after correcting it the result came out wrong!!
		//The normal computed here is definitely fine, so the problem must be in how
		//the normal is used afterwards.
		//OK, the problem really was here:
		//after aligning the model, the computed normal turned out to be correct -- it just had not been normalized!
		matrix.TransformDirection(normal);
		normal.Normalize();
		//or:
		//Matrix change,res;
		//matrix.Inverse(change);
		//change.Transpose(res);
		//res.TransformDirection(normal);
		h.set(h.getT(), h.getMaterial(), normal, r);
		return judge;
	}
	return false;
}
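For reference, the "rule seen in lecture" that the comment block above mentions (and that the commented-out lines hint at) transforms a normal by the transpose of the inverse matrix and then renormalizes it, because the mapping does not preserve lengths. A minimal sketch, using only the Matrix and Vec3f calls that already appear in this snippet (Inverse, Transpose, TransformDirection, Normalize):

	Matrix inv, invTranspose;
	matrix.Inverse(inv);                      // inverse of the object-to-world transform
	inv.Transpose(invTranspose);              // (M^-1)^T carries object-space normals to world space
	Vec3f worldNormal = h.getNormal();
	invTranspose.TransformDirection(worldNormal);
	worldNormal.Normalize();                  // renormalize, since lengths are not preserved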
Code Example #8
File: scene.cpp Project: nbajiaoshi/RayTrace
bool Scene::getIntersection(Line l,Vec3f& N,Vec3f& IntersectPoint,Vec3f& Color,Material& material,double len_limit) const{
    double t,dis = len_limit;
    bool haveIntersection = false;
    Vec3f n;
    for (int i = 0;i < models.size();i++)
        if (models[i]->getIntersection(l,N,IntersectPoint,Color,material,dis)){
            haveIntersection = true;
            dis = (IntersectPoint - l.start_point).Len();
        }
    /*if (tri_tree.getIntersection(l,N,IntersectPoint,Color,material,len_limit)){
        haveIntersection = true;
        dis = (IntersectPoint - l.start_point).Len();
    }*/
    for (int i = 0;i < _objects.size();i++){
        t = _objects[i]->getIntersection(l);
        if (t > EPS && t < dis){
            haveIntersection = true;
            dis = t;
            IntersectPoint = l.start_point + l.dir * t;
            N = _objects[i]->getN(IntersectPoint);
            material = _objects[i]->material;
            Color =_objects[i]->getColor(IntersectPoint);
        }
    }

    N.Normalize();
    if (N * l.dir < 0){
        N = - N;
        material.refract_n = 1.0 / material.refract_n;
    }
    return haveIntersection;
}
Code Example #9
void ElevatorSimRenderWindow::rayCasting(int x, int y) {
   SimulationState& simState = SimulationState::acquire();

   float fovX = (GLWindow_width/GLWindow_height) * 45.f;

   float mx = (float)((x - GLWindow_width * 0.5) *
            (1.0 / GLWindow_width) * fovX * 0.5);
   float my = (float)((y - GLWindow_height * 0.5) *
            (1.0 / GLWindow_width) * fovX * 0.5);
   Vec3f dx = simState.getCameraManager().getRight() * mx;
   Vec3f dy = simState.getCameraManager().GetCameraUp() * my;

   Vec3f dir = simState.getCameraManager().GetCameraLookAt() + (dx + dy) * 2.0;
   dir.Normalize();

   const int eachFloorHeight = simState.getBuilding().gfxEachFloorHeight;
   std::vector<Elevator*> & elevators = simState.getBuilding().getElevators();

   std::for_each(
            elevators.begin(),
            elevators.end(),
            [this, &eachFloorHeight] ( const Elevator* thisElev ) {
      float pos = 1.0f + thisElev->getYVal() /
               Floor::YVALS_PER_FLOOR * eachFloorHeight;

      (void) pos;
   });
}
Code Example #10
File: rayTracer.cpp Project: perfect28/MIT-Graphics
Vec3f mirrorDirection(const Vec3f &normal, const Vec3f &incoming)
{
	//(reflection direction: R = V - 2(V.N)N)
	Vec3f reflectDir = incoming - 2 * (incoming.Dot3(normal))*normal;
	reflectDir.Normalize();
	return reflectDir;
}
Code Example #11
File: Sphere.cpp Project: ecattell/CattellRT
// Detects if ray intersects sphere.
// TODO: Manage special case for transformed spheres by transforming vector.
Intersection* Sphere::hit(const Ray& r)
{		
	// Transform vector to use inverse of sphere's transformation matrix

	Vec3f e = Matrix4f::pntMult(Ti,r.e);
	Vec3f d = Matrix4f::vecMult(Ti,r.d);

	// Calculate coefficients of quadratic
	//Isn't this always 1?
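	// (Only when d is a unit vector; after the inverse transform above, d is
	//  generally no longer normalized, so the full quadratic form is kept.)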
	float a = Vec3f::Dot(d,d);
	float b = Vec3f::Dot(2*d,e-cntr);
	float c = Vec3f::Dot(e-cntr,e-cntr)-pow(rad,2);

	float discriminant = b*b-4*a*c;

	if (discriminant<0)
	{
		return NULL;
	}
	float t = RTMin((-b + sqrt(discriminant))/(2*a),(-b - sqrt(discriminant))/(2*a));

	Pnt3f p = Matrix4f::pntMult(T,e+t*d); // transform point  back to world coordinates
	Vec3f n = Matrix4f::vecMult(Tti,(p-cntr)/rad); // transform normal back to world coordinates
	n.Normalize();

	if (!SameDirection(d,n)) return NULL;  // Backface culling
	
	if (t > r.max || t < r.min) return NULL; // check range
	return new Intersection(t, p, n, parent);
}
Code Example #12
bool Transform::Intersect(const Ray &r, Hit &h, float tmin) const
{
  bool result = false;
  
  Matrix m = m_matrix;
  if ( m.Inverse() )
    {
      Vec3f org = r.getOrigin();
      Vec3f dir = r.getDirection();
      m.Transform(org);
      m.TransformDirection(dir);
      Ray r2 (dir, org);
      result = m_pObject->Intersect(r2, h, tmin);
      
      if (result)
	{
	  Matrix m1 = m;
	  m1.Transpose();
	  Vec3f n = h.getNormal();
	  m1.TransformDirection(n);
	  n.Normalize();
	  h.set(h.getT(), h.getMaterial(), n, r);
	}
    }
  return result;
}
Code Example #13
Vec3f RayTracer::shadow(const Vec3f &point,
			const Vec3f &pointOnLight,
			const Face *f,
			const Ray &ray,
			const Hit &hit) const
{
	const Vec3f normal(hit.getNormal());
	const Material *m = hit.getMaterial();

	Vec3f dirToLight = pointOnLight - point;
	dirToLight.Normalize();
	/* If dot product < 0, surface is not facing light */
	if (normal.Dot3(dirToLight) > 0) {
		Ray rayToLight(point, dirToLight);
		Hit hLight;
		bool blocked = CastRay(rayToLight, hLight, false);
		while (std::fabs(hLight.getT()) < SURFACE_EPSILON &&
				std::fabs((pointOnLight - point).Length()) > SURFACE_EPSILON) {
			rayToLight = Ray(rayToLight.pointAtParameter(SURFACE_EPSILON),
					dirToLight);
			blocked = CastRay(rayToLight, hLight, false);
		}
		if (hLight.getT() == FLT_MAX || hLight.getMaterial() != f->getMaterial()) {
			return Vec3f(0, 0, 0);
		}

		const Vec3f lightColor = 0.2 * f->getMaterial()->getEmittedColor() * f->getArea();
		return m->Shade(ray,hit,dirToLight,lightColor,args);
	}
	return Vec3f(0, 0, 0);
}
Code Example #14
File: primitives.cpp Project: shihongzhi/RayTracing
bool Transform::intersect(const Ray &r, Hit &h, float tmin)
{
	Vec3f rTransOri = r.getOrigin();
	Vec3f rTransDir = r.getDirection();
	Matrix mInverse;
	m.Inverse(mInverse);
	mInverse.Transform(rTransOri);
	mInverse.TransformDirection(rTransDir);
	
	rTransDir.Normalize();
	Ray rTrans(rTransDir,rTransOri);
	Hit hTrans(10000,NULL,Vec3f(0,0,0));  //need a new Hit because the coordinate frame has changed -- not using a fresh Hit here once cost two days of debugging
	instance->intersect(rTrans,hTrans,tmin);
	if(hTrans.getT()<10000)
	{
		//world's t
		float t;
		//Vec3f hitPoint = rTransOri + rTransDir * hTrans.getT();
		Vec3f hitPoint = rTrans.pointAtParameter(hTrans.getT());
		m.Transform(hitPoint);
		Vec3f rOri = r.getOrigin();
		Vec3f rDir = r.getDirection();
		if((fabs(rDir[0])>=fabs(rDir[1]))&&(fabs(rDir[0])>=fabs(rDir[2]))){
			t = (hitPoint[0] - rOri[0]) / rDir[0]; 	
		}
		else if((fabs(rDir[1])>=fabs(rDir[0]))&&(fabs(rDir[1])>=fabs(rDir[2]))){
			t = (hitPoint[1] - rOri[1]) / rDir[1];
		}
		else if((fabs(rDir[2])>=fabs(rDir[0]))&&(fabs(rDir[2])>=fabs(rDir[1]))){
			t = (hitPoint[2] - rOri[2]) / rDir[2];
		}

		//world's normal
		mInverse.Transpose();
		Vec3f wNormal = hTrans.getNormal();
		mInverse.TransformDirection(wNormal);
		wNormal.Normalize();  //need normalize
		//h.setNormal(wNormal);

		if(t>=tmin && t<=h.getT())
		{
			h.set(t,hTrans.getMaterial(),wNormal,r);
			return 1;
		}
	}
	return 0;
}
Code Example #15
File: mesh.cpp Project: nathanfaucett/Odin.cpp
	inline void Mesh::CalculateNormals(void) {
		Vec3f* u = new Vec3f();
		Vec3f* v = new Vec3f();
		Vec3f* uv = new Vec3f();
		Vec3f* faceNormal;
		Vec3f* va;
		Vec3f* vb;
		Vec3f* vc;

		uint32 vertexCount = vertices.Length(),
			   normalCount = normals.Length(),
			   a, b, c, i, il;
		
		if (vertexCount < normalCount) {
			// trim the extra normals beyond the vertex count, iterating downwards
			for (i = normalCount; i > vertexCount; i--) {
				Vec3f* normal = normals[i - 1];
				normals.Splice(i - 1, 1);
				delete normal;
			}
		} else {
			i = vertexCount;
			normalCount = normals.Length();
			while(i-- > normalCount) normals.Push(new Vec3f());
			for (i = 0, il = vertexCount; i < il; i++) normals[i]->Set(0.0f, 0.0f, 0.0f);
		}

		for (i = 0, il = indices.Length(); i < il; i += 3) {
			a = indices[i];
			b = indices[i + 1];
			c = indices[i + 2];

			va = vertices[a];
			vb = vertices[b];
			vc = vertices[c];
			
			Vec3Sub<float32>(*vc, *vb, *u);
			Vec3Sub<float32>(*va, *vb, *v);
			
			Vec3Cross<float32>(*u, *v, *uv);

			faceNormal = uv;
			faceNormal->Normalize();

			*(normals[a]) += *faceNormal;
			*(normals[b]) += *faceNormal;
			*(normals[c]) += *faceNormal;
		}
		
		for (i = 0, il = indices.Length(); i < il; i += 3) {
			normals[indices[i]]->Normalize();
			normals[indices[i + 1]]->Normalize();
			normals[indices[i + 2]]->Normalize();
		}

		delete u;
		delete v;
		delete uv;
		m_needsUpdate = true;
	}
Code Example #16
File: sphere.cpp Project: kinikibu/RayTracer
Vec3f sphere::getNormal(Vec3f eye, Vec3f dir)
{
	Vec3f normal;

	normal = (eye + dir * testIntersection(eye, dir)) - center;
	normal.Normalize();

	return normal;
}
Code Example #17
inline Vec3f ComputeNormal(const Vec3f &p1, const Vec3f &p2, const Vec3f &p3) {
	Vec3f v12 = p2;
	v12 -= p1;
	Vec3f v23 = p3;
	v23 -= p2;
	Vec3f normal;
	Vec3f::Cross3(normal,v12,v23);
	normal.Normalize();
	return normal;
}
Code Example #18
File: orthocamera.C Project: drankez/uni
// Constructor
OrthographicCamera::OrthographicCamera(Vec3f &c, Vec3f &p, Vec3f &u,
                                       float sz) {
    size = sz;
    center = c;
    p.Normalize();
    projection = p;
    Vec3f::Cross3(horizontal, u, p);
    horizontal.Normalize();
    Vec3f::Cross3(up, p, horizontal);
}
Code Example #19
File: app.cpp Project: bcrowell/planetfinder_ios
void updateOrientation() {
	if ( orientationDirty == false ) {
		return;
	}
	orientationDirty = false;
	
	float decay = 0.9f;
	
	float hyst = cos( ToRadians( app_orientationHysteresis.GetVal() ) );

	if ( GotCompassUpdate ) {
		if ( headingSmooth.Dot( heading ) < hyst ) {
			headingSmooth *= decay;
			headingSmooth += (1.0f - decay) * heading;
		}
		headingSmooth.Normalize();
	} 		

	if ( accelSmooth.Dot( accel ) < hyst ) {
		accelSmooth *= decay;
		accelSmooth += (1.0f - decay) * accel;		
	}
	accelSmooth.Normalize();		
	
	Vec3f up = -accelSmooth;
	up.Normalize();
	Vec3f north = headingSmooth;
	north -= up * up.Dot( north );
	north.Normalize();

	Matrix3f toTrueNorth = Rotationf( up, ToRadians( trueHeadingDiff ) ).GetMatrix3();
	north = toTrueNorth * north;
	
	Vec3f east = north.Cross( up );

	Matrix4f o;
	o.SetRow( 0, Vec4f(  east.x,  east.y,  east.z, 0.0f ) );
	o.SetRow( 1, Vec4f( north.x, north.y, north.z, 0.0f ) );
	o.SetRow( 2, Vec4f(    up.x,    up.y,    up.z, 0.0f ) );
	o.SetRow( 3, Vec4f(     0.0,     0.0,     0.0, 1.0f ) );
	
	platformOrientation = o.Transpose();
}
Code Example #20
File: Light.cpp Project: slajar/BeatDetection
//---------------------------------------------------------------------------
Light::Light( Vec3f& position, Rgba& color, float ambientness, float innerFalloffRadius, float outerFalloffRadius, Vec3f& direction, Degrees innerAperture, Degrees outerAperture )
	: m_position( position ),
	m_color( color ),
	m_ambientness( ambientness ),
	m_innerFalloffRadius( innerFalloffRadius ),
	m_outerFalloffRadius( outerFalloffRadius ),
	m_direction( direction )
{
	SetApertureRangeDegrees( innerAperture, outerAperture );
	m_direction.Normalize();	// normalize the stored copy; m_direction was already initialized from direction above
}
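Code Example #21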
void QRenderOutputWidget::Pan(float DownDegrees, float RightDegrees)
{
	Vec3f LoS = FocalPoint - Position;

	Vec3f right		= LoS.Cross(ViewUp);
	Vec3f orthogUp	= LoS.Cross(right);

	right.Normalize();
	orthogUp.Normalize();

	const float Length = (FocalPoint - Position).Length();

	const unsigned int WindowWidth	= this->Image.Width();

	const float U = Length * (RightDegrees / WindowWidth);
	const float V = Length * (DownDegrees / WindowWidth);

	Position	= Position + right * U - ViewUp * V;
	FocalPoint	= FocalPoint + right * U - ViewUp * V;
}
Code Example #22
File: mesh.cpp Project: Terranlee/mesh
void Mesh::calculate_args(Face& temp)                //compute the plane equation coefficients for each face
{
	Vec3f e1 = points[temp.v0].num - points[temp.v1].num;
	Vec3f e2 = points[temp.v1].num - points[temp.v2].num;
	Vec3f e = e1.cross(e2);
	e.Normalize();
	for(int i=0; i<3; i++)
		temp.arg[i] = e[i];
	temp.arg[3] = - e.product(points[temp.v0].num);
	temp.change_Qf();
}
Code Example #23
File: app.cpp Project: bcrowell/planetfinder_ios
void setAccel( float x, float y, float z ) {
	
	accels[ accelCount ] = Vec3f( x, y, z );
	accelCount++;
	accelCount &= ( ARRAY_ELEMENTS( accels ) - 1 );
	
	for( int i = 0; i < ARRAY_ELEMENTS( accels ); i++ ) {
		accel += accels[ i ];
	}
	accel.Normalize();
	//Output( "accel: (%.2f, %.2f, %.2f)", x, y, z );
	orientationDirty = true;
}
Code Example #24
Quaternion::Quaternion(const Vec3f &axis, float angle)
{
	angle /= 2.0f;

	Vec3f normalizedAxis = axis.Normalize();

	float sinAngle = sinf(angle);

	x = (normalizedAxis.x * sinAngle);
	y = (normalizedAxis.y * sinAngle);
	z = (normalizedAxis.z * sinAngle);
	w = cosf(angle);
}
Code Example #25
Ray PerspectiveCamera::generateRay(Vec2f point)
{
    float x_ndc = point.x();
    float y_ndc = point.y();
#ifdef DEBUG
    printf("PerspectiveCamera::generateRay, x_ndc=%f, y_ndc=%f\n", x_ndc, y_ndc);
#endif
    float screenWidth = 0.f;
    float screenHeight = 0.f;

    if (mRatio > 1.f)
    {
        screenWidth = 2 * mRatio;
        screenHeight = 2.f;
    }
    else
    {
        screenWidth = 2.f;
        screenHeight = 2 / mRatio;   // keep screenWidth / screenHeight equal to mRatio in this branch as well
    }
#ifdef DEBUG
    printf("screenWidth=%f, screenHeight=%f\n", screenWidth, screenHeight);
#endif
    //float height = 2 * tan(mAngle * PI / 360.0);
    //float width = height * mRatio;

    float left = - screenWidth / 2.0;
    float top  = - screenHeight / 2.0;

    float u = x_ndc * screenWidth + left;
    float v = y_ndc * screenHeight + top;
#ifdef DEBUG
    printf("u=%f, v=%f\n", u, v);
#endif
    float near = screenHeight / (2.f * tanf(mAngle / 2.0));
#ifdef DEBUG
    printf("near=%f\n", near);
#endif
    Vec3f originalDir = near * mDirection + u * mHorizontal + v * mUp;

    if (originalDir.Length() != 0)
    {
        originalDir.Normalize();
    }

    Ray r(mCenter, originalDir);
#ifdef DEBUG
    cout<<r<<endl;
#endif
    return r;
}
Code Example #26
File: RayTracer.cpp Project: shihongzhi/RayTracing
//When does this return true?  When there is no total internal reflection; otherwise it returns false.
bool RayTracer::transmittedDirection(const Vec3f &normal, const Vec3f &incoming, float index_i, float index_t, Vec3f &transmitted) const
{
	float nr = index_i / index_t;
	Vec3f I = incoming*(-1.0f);
	float cosI = I.Dot3(normal);
	float isAllTrans = 1 - nr*nr*(1 - cosI*cosI);
	if(isAllTrans < 0)  //total internal reflection
		return false;

	float cosT = sqrt(isAllTrans);
	transmitted = normal*(nr*cosI - cosT) - I*nr;
	transmitted.Normalize();
	return true;
}
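On the calling side it is the boolean result that distinguishes refraction from total internal reflection. A rough usage sketch inside a RayTracer method, reusing mirrorDirection from Code Example #5; the local variables (normal, incoming, index_i, index_t) are illustrative placeholders:

	Vec3f transmitted;
	if (transmittedDirection(normal, incoming, index_i, index_t, transmitted))
	{
		// refraction occurs: trace a secondary ray along the transmitted direction
	}
	else
	{
		// total internal reflection: all the energy goes to the mirrored ray
		Vec3f reflected = mirrorDirection(normal, incoming);
	}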
Code Example #27
Vec3f triangle::getNormal(Vec3f eye, Vec3f dir)
{
	//construct the barycentric coordinates for the plane
	Vec3f bary1 = alpha;
	Vec3f bary2 = beta;

	//cross them to get the normal to the plane
	//note that the normal points in the direction given by right-hand rule
	//(this can be important for refraction to know whether you are entering or leaving a material)
	Vec3f normal;
	Vec3f::Cross3(normal,bary1,bary2);
	normal.Normalize();

	return normal;
}
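Because the normal above follows the right-hand rule, refraction code usually compares it with the ray direction to tell whether the ray is entering or leaving the material, flipping the normal and inverting the index ratio when leaving, as Code Example #8 does. A small illustrative sketch; the eta, n_outside, and n_inside names are hypothetical, and Dot3/Negate are assumed to behave as in the other Vec3f examples on this page:

	Vec3f n = getNormal(eye, dir);
	float eta = n_outside / n_inside;   // hypothetical ratio of refractive indices
	if (n.Dot3(dir) > 0)                // normal and ray agree, so the ray is leaving the material
	{
		n.Negate();
		eta = 1.0f / eta;
	}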
Code Example #28
File: camera.cpp Project: shihongzhi/RayTracing
Ray PerspectiveCamera::generateRay(Vec2f point)
{
	Ray r;
	r.setOrigin(center);
	float fovScale = tan(angle*0.5) * 2;  //the angle from the parser is already in radians
	up.Normalize();
	Vec3f uAdd = up;
	Vec3f rAdd;
	direction.Normalize();
	Vec3f::Cross3(rAdd,direction,uAdd);
	Vec3f rDirNormal = direction + uAdd*(point[0]-0.5)*fovScale + rAdd*(point[1]-0.5)*fovScale;  
	rDirNormal.Normalize();
	r.setDirection(rDirNormal);  //the direction needs to be normalized

	return r;
}
Code Example #29
File: PhongMaterial.cpp Project: alexunder/X-toys
Vec3f PhongMaterial::Shade(const Ray &ray, const Hit &hit, const Vec3f &dirToLight, const Vec3f &lightColor) const
{
    Vec3f eyeDir = ray.getDirection();
    eyeDir.Negate();

    Vec3f eyePlusLight = eyeDir + dirToLight;
    eyePlusLight.Normalize(); 
    
    float hn = eyePlusLight.Dot3(hit.getNormal());
    hn = pow(hn, mPhongComponent);

    Vec3f color = lightColor * mHighLightColor;
    color = hn * color;

    return color;
}
Code Example #30
File: triangle.cpp Project: perfect28/MIT-Graphics
void Triangle::paint(void)
{
	material->glSetMaterial();
	glBegin(GL_TRIANGLES);
	Vec3f normal;
	Vec3f::Cross3(normal, b - a, c - a);
	normal.Normalize();
	
	Vec3f diffuseColor = material->getDiffuseColor();

	glColor3f(diffuseColor.x(), diffuseColor.y(), diffuseColor.z());
	glNormal3f(normal.x(), normal.y(), normal.z());
	glVertex3f(a.x(), a.y(), a.z());
	glVertex3f(b.x(), b.y(), b.z());
	glVertex3f(c.x(), c.y(), c.z());
	
	glEnd();
}