Example #1
vec3 Scene::calculPixel(const Rayon& ray, float dist, const vec3& oeil, float &oclu) const
{
    #if 0
        return vec3(dist*0.17, dist*0.19, dist*0.23);
    #else

        const vec3 dRay = ray.getDirection();
        vec3 p(ray.getOrigine() + dRay*dist);
        node->repositionne(p);
        const vec3 n(node->getNormal(p));

        Material texture = node->getMaterial(p);
        //return texture.getColor();

        std::vector<Lumiere> lumieres2;
        //lumieres2.reserve(NB_RAYONS_CIEL+this->lumieres.size());
        float ombre = calculPoisson(p, n, lumieres2);
        oclu = ombre;
        if(ombre > 0)
        {
            //for(const Lumiere& l: this->lumieres)
                //lumieres2.push_back(l);

            vec3 color = phong(texture, p, lumieres2, n, oeil);
            //color *= ombre;
            color = glm::clamp(color, vec3(0,0,0), vec3(1,1,1));
            return color;
        }
        else
            return NOIR;


    #endif
}
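The phong() routine called above is not part of this snippet. As a rough, hedged sketch of the per-light evaluation such a call typically performs (the kd/ks/shininess parameters and the phongSketch name are assumptions for illustration, standing in for the project's Material/Lumiere types, not its actual API):

#include <cmath>
#include <vector>
#include <glm/glm.hpp>
using glm::vec3;

// Minimal Phong evaluation: sum a diffuse and a specular term per light,
// then clamp, mirroring the clamp done by the caller above.
vec3 phongSketch(const vec3& kd, const vec3& ks, float shininess,
                 const vec3& p, const vec3& n, const vec3& eye,
                 const std::vector<vec3>& lightPositions)
{
    vec3 result(0.0f);
    const vec3 v = glm::normalize(eye - p);               // direction toward the viewer
    for (const vec3& lp : lightPositions)
    {
        const vec3 l = glm::normalize(lp - p);            // direction toward the light
        const float diff = glm::max(glm::dot(n, l), 0.0f);
        const vec3  r = glm::reflect(-l, n);              // mirror of l about n
        const float spec = std::pow(glm::max(glm::dot(r, v), 0.0f), shininess);
        result += kd * diff + ks * spec;
    }
    return glm::clamp(result, vec3(0.0f), vec3(1.0f));
}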
Example #2
void KeyboardKeys(unsigned char key, int x, int y) {
   if(key == 'c') {
      memset(cov_img, 0, width*height*sizeof(*cov_img)); // zero the whole buffer (byte count, not element count)
      generateCovariance = !generateCovariance;
   } else if(key == 'B') {
      generateReference = !generateReference;
   } else if(key == 'b') {
      generateBackground = !generateBackground;
   } else if(key == 'h') {
      displayBackground  = !displayBackground;
   } else if(key == 'f') {
       useCovFilter = !useCovFilter;
   } else if(key == '+') {
      Material phong(Vector(), Vector(), Vector(1,1,1)*.999, spheres[1].mat.exponent * 10);
      spheres[1].mat = phong;
      nPasses = 0;
   } else if(key == '-') {
      Material phong(Vector(), Vector(), Vector(1,1,1)*.999, fmax(spheres[1].mat.exponent / 10, 1.0));
      spheres[1].mat = phong;
      nPasses = 0;
   } else if(key == 'p') {
      ExportImage(width*mouse.X, height*mouse.Y);
   } else if(key == 'd') {
      std::cout << sout.str() << std::endl;
   }
   glutPostRedisplay();
}
Example #3
t_color		compute_light(t_scene *scene, t_ray *ray)
{
	t_list	*current;
	t_ray	lray;
	t_color	color[3];
	t_phpa	ph;

	current = scene->lights;
	ph.normal = get_normal(*ray);
	set_ambiant_light(&ph, scene, ray, color);
	while (current)
	{
		lray.pos = ((t_light *)current->content)->pos;
		lray.dir = norm_vect(mtx_add(mtx_sub(mtx_mult(ray->dir, ray->t),
			lray.pos), ray->pos));
		lray.invdir = get_inv_vect(&lray.dir);
		if (find_closest(scene, &lray) && ray->closest == lray.closest
			&& near_enough(ray, &lray))
		{
			set_params(&ph, &lray, ray);
			ph.camera = scene->camera;
			ph.light = (t_light *)current->content;
			phong(&ph);
		}
		current = current->next;
	}
	set_color_max(&ph);
	return (*color);
}
Example #4
void trace(float * p, float * v, int step){
    Sphere * sp = intersect(p, v);
    if(sp != NULL){
        normal(sp);
        reflect(v);
        phong(v, sp);
        TRIPLE(color[i] += inters_c[i]);

        if(step < TRACE_MAX) trace(inter, ref, step+1);
    }
}
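The helpers used here (intersect, normal, reflect, phong, and the inter/ref buffers) live elsewhere in that project. As a hedged sketch, the reflect(v) step usually amounts to mirroring the ray direction about the surface normal, r = d - 2(d·n)n; the reflectDir name and the raw float[3] signature below are illustrative only:

// Mirror an incoming direction d about a unit normal n.
void reflectDir(const float d[3], const float n[3], float r[3])
{
    const float dn = d[0]*n[0] + d[1]*n[1] + d[2]*n[2];   // d·n
    for (int i = 0; i < 3; ++i)
        r[i] = d[i] - 2.0f * dn * n[i];
}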
Example #5
/************************************************************************
 * This is the recursive ray tracer - you need to implement this!
 * You should decide what arguments to use.
 ************************************************************************/
RGB_float recursive_ray_trace(Point &pos, Vector &ray, int num, bool inside=false) {
	IntersectionInfo end;
	const Object *s = getClosestObject(pos, ray, end);
	if (s == nullptr) {
		return background_clr;
	}

	Vector norm = s->getNormal(end);
	if (inside) {
		norm *= -1;
	}
	RGB_float color = phong(end.pos, ray, norm, s);
	if (num <= step_max) {
		Vector h;
		RGB_float ref({0,0,0});
		RGB_float ract({0,0,0});
		if (!inside && reflect_on) {
			h = vec_reflect(ray, norm);
			ref = recursive_ray_trace(end.pos, h, num + 1);
		}
		if (stochdiff_on) {
			RGB_float diff = {0,0,0};
			std::default_random_engine generator;
			std::uniform_int_distribution<int> distribution(-10,10);
			for (int i = 0; i < STOCH_RAYS; ++i) {
				h = vec_reflect(ray, norm);
				h = RotateX(distribution(generator)) *
					RotateY(distribution(generator)) *
					RotateZ(distribution(generator)) * h;
				diff += recursive_ray_trace(end.pos, h,  num+1);
			}
			diff /= 6;
			color += (diff*s->reflectance);
		}

		if (refract_on) {
			if (inside) {
				h = vec_refract(ray, norm, 1.5, 1);
			} else {
				h = vec_refract(ray, norm, 1, 1.5);
			}
			ract = recursive_ray_trace(end.pos, h, num + 1, !inside);
		}
		float reflectWeight = s->reflectance;
		float refractWeight = 0;
		if (refract_on && s->transparency > 0) {
			refractWeight = s->transparency;
			reflectWeight = (1-refractWeight)*s->reflectance;
		}
		color += (ref * reflectWeight + ract * refractWeight);
	}
	return color;
}
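vec_refract above is called with the index pair (1, 1.5) swapped depending on whether the ray is inside the object, but its body is not shown. A minimal Snell's-law sketch of that kind of helper, assuming the normal faces the incoming ray (the name and the bool return for total internal reflection are illustrative):

#include <cmath>
#include <glm/glm.hpp>

// Refract direction dir through a surface with unit normal n, going from a
// medium with index n1 into one with index n2; returns false on total internal reflection.
bool refractSketch(const glm::vec3& dir, const glm::vec3& n,
                   float n1, float n2, glm::vec3& out)
{
    const glm::vec3 d = glm::normalize(dir);
    const float eta  = n1 / n2;
    const float cosI = -glm::dot(d, n);
    const float k    = 1.0f - eta * eta * (1.0f - cosI * cosI);
    if (k < 0.0f)
        return false;
    out = eta * d + (eta * cosI - std::sqrt(k)) * n;
    return true;
}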
Example #6
void Image::generateSamplesFromPhongBRDF(float exponent, uint sampleNum) {
    float n = exponent;
    vecpairuu samples;
    for (uint i = 0; i < sampleNum; ++i) {
        float a = (float) rand()/(float) RAND_MAX;
        float b = (float) rand()/(float) RAND_MAX;
        latlong ll(phong(n, a, b));
        int x = (int) ((ll.phi/(2.0*PI)) * width);
        int y = (int) ((ll.theta/PI) * height);
        samples.push_back(std::pair<uint,uint>(x,y));
    }

    this->highlightSamples(samples);

}
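The phong(n, a, b) call maps two uniform random numbers to a lat/long direction before the conversion to pixel coordinates. A common mapping for importance-sampling a Phong lobe of exponent n is theta = acos(a^(1/(n+1))), phi = 2·pi·b; the sketch below assumes that is what the helper does and uses a stand-in struct in place of the snippet's latlong type:

#include <cmath>

struct LatLongSketch { float theta; float phi; };   // stand-in for latlong

// Map uniform (a, b) in [0,1) to angles concentrated around the lobe axis.
LatLongSketch samplePhongLobe(float exponent, float a, float b)
{
    const float PI_F = 3.14159265358979f;
    LatLongSketch ll;
    ll.theta = std::acos(std::pow(a, 1.0f / (exponent + 1.0f)));
    ll.phi   = 2.0f * PI_F * b;
    return ll;
}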
Example #7
/************************************************************************
 * This is the recursive ray tracer
 ************************************************************************/
vec3 recursive_ray_trace(vec3 eye, vec3 ray,int ignore, int step) {
    Object* S = NULL;
    vec3 hit;

    S = intersectScene(eye, ray, &hit, ignore);
    if(S == NULL)
        return background_clr;

    vec3 color(0,0,0);
    vec3 viewDir = glm::normalize(eye - hit);
    vec3 surf_norm = S->GetNormal(hit);

    return phong(hit,viewDir, surf_norm, ray, S , step);

}
Example #8
/**
 * Simple program that starts our raytracer
 */
int main(int argc, char *argv[]) {
	try {
		RayTracer* rt;
		Timer t;
		rt = new RayTracer(800, 600);
		
		std::shared_ptr<SceneObjectEffect> color(new ColorEffect(glm::vec3(0.0, 1.0, 0.0)));
		std::shared_ptr<SceneObjectEffect> phong(new PhongEffect(glm::vec3(0.0, 0.0, 10.0)));
		std::shared_ptr<SceneObjectEffect> steel(new SteelEffect());
		std::shared_ptr<SceneObjectEffect> fresnel(new FresnelEffect());
		
		std::shared_ptr<SceneObject> s1(new Sphere(glm::vec3(-3.0f, 0.0f, 6.0f), 2.0f, steel));
		rt->addSceneObject(s1);
		std::shared_ptr<SceneObject> s2(new Sphere(glm::vec3(3.0f, 0.0f, 3.0f), 2.0f, fresnel));
		rt->addSceneObject(s2);
		std::shared_ptr<SceneObject> s3(new Sphere(glm::vec3(0.0f, 3.0f, 9.0f), 2.0f, steel));
		rt->addSceneObject(s3);


		std::string path = "cubemaps/SaintLazarusChurch3/";
		std::shared_ptr<SceneObject> cubeMap(new CubeMap(path + "posx.jpg", path + "negx.jpg",
			path + "posy.jpg", path + "negy.jpg",
			path + "posz.jpg", path + "negz.jpg"));
		rt->addSceneObject(cubeMap);


		std::shared_ptr<SceneObject> triangle(new Triangle(glm::vec3(0.0f, 2.0f, -1.0f), 
			glm::vec3(-2.0f, -2.0f, -1.0f), glm::vec3(2.0f, -2.0f, 0.0f),  steel));
		rt->addSceneObject(triangle);
				
		t.restart();
		rt->render();
		double elapsed = t.elapsed();
		std::cout << "Computed in " << elapsed << " seconds" <<  std::endl;
		rt->save("test", "bmp"); //We want to write out bmp's to get proper bit-maps (jpeg encoding is lossy)

		delete rt;
	} catch (std::exception &e) {
		std::string err = e.what();
		std::cout << err.c_str() << std::endl;
		return -1;
	}
	return 0;
}
Example #9
RGB_float recursive_ray_trace(Vector ray, Point p, int step) {

	RGB_float color = background_clr;
	RGB_float reflected_color = {0,0,0};
	RGB_float refracted_color = {0,0,0};
	
	Spheres *closest_sph;
	Point *hit = new Point;
	closest_sph = intersect_scene(p, ray, scene, hit);
	

	//get the point color here
	//intersects a sphere
	Point *plane_hit = new Point;
	color = background_clr;
	if(chessboard_on && intersect_plane(p, ray, N_plane, p0, plane_hit))
	{
		Vector eye_vec = get_vec(*plane_hit, eye_pos);
		Vector light_vec = get_vec(*plane_hit, p);
		normalize(&light_vec);
		normalize(&eye_vec);
		color = colorPlane(*plane_hit);
		Vector shadow_vec = get_vec(*plane_hit, light1);
		Spheres *sph = NULL;
		if(inShadow(*plane_hit, shadow_vec, scene, sph) && shadow_on)
		{
			color = clr_scale(color, .5);
		}

	}

	if(closest_sph != NULL)
	{
		Vector eye_vec = get_vec(*hit, eye_pos);
		Vector surf_norm = sphere_normal(*hit, closest_sph);
		Vector light_vec = get_vec(*hit, p);
		normalize(&light_vec);
		normalize(&surf_norm);
		normalize(&eye_vec);

		color = phong(*hit, eye_vec, surf_norm, closest_sph);

		if(step < step_max && reflection_on)
		{
			Vector reflect_vec = vec_minus(vec_scale(surf_norm, vec_dot(surf_norm, light_vec)*2), light_vec);
			step += 1;
			normalize(&reflect_vec);

			reflected_color = recursive_ray_trace(reflect_vec, *hit, step);
			reflected_color = clr_scale(reflected_color, closest_sph->reflectance);
			color = clr_add(color, reflected_color);

		}

		if(step < step_max && refraction_on)
		{
			Vector refracted_ray = getRefractedRay(1.51, closest_sph, surf_norm, light_vec);
			step += 1;
			normalize(&refracted_ray);
			
			refracted_ray.x = hit->x + refracted_ray.x;
			refracted_ray.y = hit->y + refracted_ray.y;
			refracted_ray.z = hit->z + refracted_ray.z;
			refracted_color = recursive_ray_trace(refracted_ray, *hit, step);
			color = clr_add(color, refracted_color);

		}
		return color;
	}
	else
	{

		return color;
	}


}
Example #10
avtImage_p
avtVolumeFilter::RenderImage(avtImage_p opaque_image,
                             const WindowAttributes &window)
{
    if (atts.GetRendererType() == VolumeAttributes::RayCastingSLIVR)
    {
        return RenderImageRaycastingSLIVR(opaque_image, window);
    }

    //
    // We need to create a dummy pipeline with the volume renderer that we
    // can force to execute within our "Execute".  Start with the source.
    //
    avtSourceFromAVTDataset termsrc(GetTypedInput());


    //
    // Set up the volume renderer.
    //
    avtRayTracer *software = new avtRayTracer;
    software->SetInput(termsrc.GetOutput());
    software->InsertOpaqueImage(opaque_image);
    software->SetRayCastingSLIVR(false);

    unsigned char vtf[4*256];
    atts.GetTransferFunction(vtf);
    avtOpacityMap om(256);
    if ((atts.GetRendererType() == VolumeAttributes::RayCasting) && (atts.GetSampling() == VolumeAttributes::Trilinear))
        om.SetTable(vtf, 256, atts.GetOpacityAttenuation()*2.0 - 1.0, atts.GetRendererSamples());
    else
        om.SetTable(vtf, 256, atts.GetOpacityAttenuation());
    double actualRange[2];
    bool artificialMin = atts.GetUseColorVarMin();
    bool artificialMax = atts.GetUseColorVarMax();
    if (!artificialMin || !artificialMax)
    {
        GetDataExtents(actualRange, primaryVariable);
        UnifyMinMax(actualRange, 2);
    }
    double range[2];
    range[0] = (artificialMin ? atts.GetColorVarMin() : actualRange[0]);
    range[1] = (artificialMax ? atts.GetColorVarMax() : actualRange[1]);
    if (atts.GetScaling() == VolumeAttributes::Log)
    {
        if (artificialMin)
            if (range[0] > 0)
                range[0] = log10(range[0]);
        if (artificialMax)
            if (range[1] > 0)
                range[1] = log10(range[1]);
    }
    else if (atts.GetScaling() == VolumeAttributes::Skew)
    {
        if (artificialMin)
        {
            double newMin = vtkSkewValue(range[0], range[0], range[1],
                                         atts.GetSkewFactor());
            range[0] = newMin;
        }
        if (artificialMax)
        {
            double newMax = vtkSkewValue(range[1], range[0], range[1],
                                         atts.GetSkewFactor());
            range[1] = newMax;
        }
    }
    om.SetMin(range[0]);
    om.SetMax(range[1]);

    if (atts.GetRendererType() == VolumeAttributes::RayCastingIntegration)
    {
        if (!artificialMin)
            range[0] = 0.;
        if (!artificialMax)
        {
/* Don't need this code, because the rays will be in depth ... 0->1.
            double bounds[6];
            GetSpatialExtents(bounds);
            UnifyMinMax(bounds, 6);
            double diag = sqrt((bounds[1]-bounds[0])*(bounds[1]-bounds[0]) +
                               (bounds[3]-bounds[2])*(bounds[3]-bounds[2]) +
                               (bounds[5]-bounds[4])*(bounds[5]-bounds[4]));
            range[1] = (actualRange[1]*diag) / 2.;
 */
            range[1] = (actualRange[1]) / 4.;
        }
    }

    //
    // Determine which variables to use and tell the ray function.
    //
    VarList vl;
    avtDataset_p input = GetTypedInput();
    avtDatasetExaminer::GetVariableList(input, vl);
    int primIndex = -1;
    int opacIndex = -1;
    int gradIndex = -1;
    int count = 0;
    char gradName[128];
    const char *gradvar = atts.GetOpacityVariable().c_str();
    if (strcmp(gradvar, "default") == 0)
        gradvar = primaryVariable;
    // This name is explicitly sent to the avtGradientExpression in
    // the avtVolumePlot.
    SNPRINTF(gradName, 128, "_%s_gradient", gradvar);

    for (int i = 0 ; i < vl.nvars ; i++)
    {
        if ((strstr(vl.varnames[i].c_str(), "vtk") != NULL) &&
            (strstr(vl.varnames[i].c_str(), "avt") != NULL))
            continue;

        if (vl.varnames[i] == primaryVariable)
        {
            primIndex = count;
        }
        if (vl.varnames[i] == atts.GetOpacityVariable())
        {
            opacIndex = count;
        }
        if (vl.varnames[i] == gradName)
        {
            gradIndex = count;
        }
        count += vl.varsizes[i];
    }

    if (primIndex == -1)
    {
        if (vl.nvars <= 0)
        {
            debug1 << "Could not locate primary variable "
                   << primaryVariable << ", assuming that we are running "
                   << "in parallel and have more processors than domains."
                   << endl;
        }
        else
        {
            EXCEPTION1(InvalidVariableException, primaryVariable);
        }
    }
    if (opacIndex == -1)
    {
        if (atts.GetOpacityVariable() == "default")
        {
            opacIndex = primIndex;
        }
        else if (vl.nvars <= 0)
        {
            debug1 << "Could not locate opacity variable "
                   << atts.GetOpacityVariable().c_str() << ", assuming that we "
                   << "are running in parallel and have more processors "
                   << "than domains." << endl;
        }
        else
        {
            EXCEPTION1(InvalidVariableException,atts.GetOpacityVariable());
        }
    }
    if (  atts.GetRendererType() != VolumeAttributes::RayCastingIntegration &&
          atts.GetLightingFlag() &&
          gradIndex == -1)
    {
        if (vl.nvars <= 0)
        {
            debug1 << "Could not locate gradient variable, assuming that we "
                   << "are running in parallel and have more processors "
                   << "than domains." << endl;
        }
        else
        {
            EXCEPTION1(InvalidVariableException,gradName);
        }
    }

    int newPrimIndex = UnifyMaximumValue(primIndex);
    if (primIndex >= 0 && newPrimIndex != primIndex)
    {
        //
        // We shouldn't ever have different orderings for our variables.
        //
        EXCEPTION1(InvalidVariableException, primaryVariable);
    }
    primIndex = newPrimIndex;

    int newOpacIndex = UnifyMaximumValue(opacIndex);
    if (opacIndex >= 0 && newOpacIndex != opacIndex)
    {
        //
        // We shouldn't ever have different orderings for our variables.
        //
        EXCEPTION1(InvalidVariableException, atts.GetOpacityVariable());
    }
    opacIndex = newOpacIndex;

    int newGradIndex = UnifyMaximumValue(gradIndex);
    if (gradIndex >= 0 && newGradIndex != gradIndex)
    {
        //
        // We shouldn't ever have different orderings for our variables.
        //
        EXCEPTION1(InvalidVariableException, gradName);
    }
    gradIndex = newGradIndex;

    //
    // Set up lighting
    //
    avtFlatLighting fl;
    avtLightingModel *lm = &fl;
    double gradMax = 0.0, lightingPower = 1.0;
    if (atts.GetLowGradientLightingReduction() != VolumeAttributes::Off)
    {
        gradMax = atts.GetLowGradientLightingClampValue();
        if (atts.GetLowGradientLightingClampFlag() == false)
        {
            double gradRange[2] = {0,0};
            GetDataExtents(gradRange, gradName);
            gradMax = gradRange[1];
        }
        switch (atts.GetLowGradientLightingReduction())
        {
          case VolumeAttributes::Lowest:   lightingPower = 1./16.; break;
          case VolumeAttributes::Lower:    lightingPower = 1./8.;  break;
          case VolumeAttributes::Low:      lightingPower = 1./4.;  break;
          case VolumeAttributes::Medium:   lightingPower = 1./2.;  break;
          case VolumeAttributes::High:     lightingPower = 1.;     break;
          case VolumeAttributes::Higher:   lightingPower = 2.;     break;
          case VolumeAttributes::Highest:  lightingPower = 4.;     break;
          default: break;
        }
    }
    avtPhong phong(gradMax, lightingPower);
    if (atts.GetLightingFlag())
    {
        lm = &phong;
    }
    else
    {
        lm = &fl;
    }

    avtOpacityMap *om2 = NULL;
    if (primIndex == opacIndex)
    {
        // Note that we are forcing the color variables range onto the
        // opacity variable.
        om2 = &om;
    }
    else
    {
        om2 = new avtOpacityMap(256);
        om2->SetTable(vtf, 256, atts.GetOpacityAttenuation());
        double range[2];

        bool artificialMin = atts.GetUseOpacityVarMin();
        bool artificialMax = atts.GetUseOpacityVarMax();
        if (!artificialMin || !artificialMax)
        {
            InputSetActiveVariable(atts.GetOpacityVariable().c_str());
            avtDatasetExaminer::GetDataExtents(input, range);
            UnifyMinMax(range, 2);
            InputSetActiveVariable(primaryVariable);
        }
        range[0] = (artificialMin ? atts.GetOpacityVarMin() : range[0]);
        range[1] = (artificialMax ? atts.GetOpacityVarMax() : range[1]);
        om2->SetMin(range[0]);
        om2->SetMax(range[1]);
        // LEAK!!
    }
    avtCompositeRF *compositeRF = new avtCompositeRF(lm, &om, om2);
    if (atts.GetRendererType() == VolumeAttributes::RayCasting && atts.GetSampling() == VolumeAttributes::Trilinear){
        compositeRF->SetTrilinearSampling(true);
        double *matProp = atts.GetMaterialProperties();
        double materialPropArray[4];
        materialPropArray[0] = matProp[0];
        materialPropArray[1] = matProp[1];
        materialPropArray[2] = matProp[2];
        materialPropArray[3] = matProp[3];
        compositeRF->SetMaterial(materialPropArray);
    }
    else
        compositeRF->SetTrilinearSampling(false);
    avtIntegrationRF *integrateRF = new avtIntegrationRF(lm);

    compositeRF->SetColorVariableIndex(primIndex);
    compositeRF->SetOpacityVariableIndex(opacIndex);
    if (atts.GetLightingFlag())
        compositeRF->SetGradientVariableIndex(gradIndex);
    integrateRF->SetPrimaryVariableIndex(primIndex);
    integrateRF->SetRange(range[0], range[1]);
    if (atts.GetSampling() == VolumeAttributes::KernelBased)
    {
        software->SetKernelBasedSampling(true);
        compositeRF->SetWeightVariableIndex(count);
    }

    if (atts.GetRendererType() == VolumeAttributes::RayCasting && atts.GetSampling() == VolumeAttributes::Trilinear)
        software->SetTrilinear(true);
    else
        software->SetTrilinear(false);
    
    if (atts.GetRendererType() == VolumeAttributes::RayCastingIntegration)
        software->SetRayFunction(integrateRF);
    else
        software->SetRayFunction(compositeRF);

    software->SetSamplesPerRay(atts.GetSamplesPerRay());

    const int *size = window.GetSize();
    software->SetScreen(size[0], size[1]);

    const View3DAttributes &view = window.GetView3D();
    avtViewInfo vi;
    CreateViewInfoFromViewAttributes(vi, view);

    avtDataObject_p inputData = GetInput();
    int width_,height_,depth_;
    if (GetLogicalBounds(inputData, width_,height_,depth_))      
    {
        // if we have logical bounds, compute the slices automatically
        double viewDirection[3];
        int numSlices;
        
        viewDirection[0] = (view.GetViewNormal()[0] > 0)? view.GetViewNormal()[0]: -view.GetViewNormal()[0];
        viewDirection[1] = (view.GetViewNormal()[1] > 0)? view.GetViewNormal()[1]: -view.GetViewNormal()[1];
        viewDirection[2] = (view.GetViewNormal()[2] > 0)? view.GetViewNormal()[2]: -view.GetViewNormal()[2];

        numSlices = (width_*viewDirection[0] + height_*viewDirection[1] + depth_*viewDirection[2]) * atts.GetRendererSamples();

        if (atts.GetRendererType() == VolumeAttributes::RayCasting && atts.GetSampling() == VolumeAttributes::Trilinear)
            software->SetSamplesPerRay(numSlices);
    }
    software->SetView(vi);
    if (atts.GetRendererType() == VolumeAttributes::RayCastingIntegration)
    {
        integrateRF->SetDistance(view.GetFarPlane()-view.GetNearPlane());
        integrateRF->SetWindowSize(size[0], size[1]);
    }

    double view_dir[3];
    view_dir[0] = vi.focus[0] - vi.camera[0];
    view_dir[1] = vi.focus[1] - vi.camera[1];
    view_dir[2] = vi.focus[2] - vi.camera[2];
    double mag = sqrt(view_dir[0]*view_dir[0] + view_dir[1]*view_dir[1]
                      + view_dir[2]*view_dir[2]);
    if (mag != 0.) // only 0 if focus and camera are the same
    {
        view_dir[0] /= mag;
        view_dir[1] /= mag;
        view_dir[2] /= mag;
    }
    lm->SetViewDirection(view_dir);
    lm->SetViewUp(vi.viewUp);
    lm->SetLightInfo(window.GetLights());
    const RenderingAttributes &render_atts = window.GetRenderAtts();
    if (render_atts.GetSpecularFlag())
    {
        lm->SetSpecularInfo(render_atts.GetSpecularFlag(),
                            render_atts.GetSpecularCoeff(),
                            render_atts.GetSpecularPower());
    }

    //
    // Set the volume renderer's background color and mode from the
    // window attributes.
    //
    software->SetBackgroundMode(window.GetBackgroundMode());
    software->SetBackgroundColor(window.GetBackground());
    software->SetGradientBackgroundColors(window.GetGradBG1(),
                                          window.GetGradBG2());

    //
    // We have to set up a sample point "arbitrator" to allow small cells
    // to be included in the final picture.
    //
    avtOpacityMapSamplePointArbitrator arb(om2, opacIndex);
    avtRay::SetArbitrator(&arb);

    //
    // Do the funny business to force an update.
    //
    avtDataObject_p dob = software->GetOutput();
    dob->Update(GetGeneralContract());

    if (atts.GetRendererType() == VolumeAttributes::RayCastingIntegration)
        integrateRF->OutputRawValues("integration.data");

    //
    // Free up some memory and clean up.
    //
    delete software;
    avtRay::SetArbitrator(NULL);
    delete compositeRF;
    delete integrateRF;

    //
    // Copy the output of the volume renderer to our output.
    //
    avtImage_p output;
    CopyTo(output, dob);
    return  output;
}
Example #11
/************************************************************************
 * This is the recursive ray tracer 
 ************************************************************************/
glm::vec3 recursive_ray_trace(glm::vec3 eye, glm::vec3 ray,int ignore, int step) {
    Object* S = NULL;
    glm::vec3 hit;

    S = intersectScene(eye, ray, &hit, ignore);
    //printf("%d : after intersect scene (type: '%c')\n", step, S==NULL?'N':S->type);

    glm::vec3 color;

    if(S == NULL) 
        color = background_clr; 
    else {
        //color = glm::vec3(1.0,1.0,1.0);
        glm::vec3 viewDir = glm::normalize(eye - hit);
        glm::vec3 surf_norm = S->GetNormal(hit);

        //printf("  after get normal\n");
        color = phong(hit,viewDir, surf_norm, S );

        //printf("  after phong\n");

        if(reflect_on && step < step_max){
            //printf("  enter reflect\n");
            glm::vec3 reflectDir = glm::normalize(glm::rotate(viewDir, glm::radians(180.0f), surf_norm));
            glm::vec3 color_rf = recursive_ray_trace(hit, reflectDir, S->index, step+1);

            color += color_rf * S->reflectance ;
            //printf("  exit reflect\n");
        }

        if(refract_on && step < step_max && S->refract ){
            if(S->type == 'S'){
                glm::vec3 outRay, outPoint;
                if(S->Refract(ray, hit, &outRay, &outPoint)){
                    glm::vec3 color_rfr = recursive_ray_trace(outPoint, outRay, S->index, step+2);
                    color += color_rfr * S->refractance;
                }  
            }
            else{
                glm::vec3 outRay;
                if(S->GetRefractRay(ray, hit, &outRay)){
                    glm::vec3 color_rfr = recursive_ray_trace(hit,outRay, S->index, step+1);
                    color += color_rfr * S->refractance;
                }
            }
        }

        
        if(difref_on && step < 2){
            for(int i=0;i<DIFFUSE_RAYS;i++){
                glm::vec3 difrefDir = glm::normalize(glm::rotate(viewDir, glm::radians(180.0f), surf_norm));
                glm::vec3 axis = glm::cross(viewDir, surf_norm);
                float angle1 = random(-5.0f,0.0f);
                difrefDir = glm::rotate(difrefDir, glm::radians(angle1), axis);
                float angle2 = random(-5.0f,5.0f);
                difrefDir = glm::rotate(difrefDir, glm::radians(angle2), surf_norm);
                difrefDir = glm::normalize(difrefDir);

                glm::vec3 color_difref = recursive_ray_trace(hit, difrefDir, S->index, step+1);

                color += color_difref * float(0.1);
            }
        }


    }

    return color;
}
Example #12
/************************************************************************
 * This is the recursive ray tracer - you need to implement this!
 * You should decide what arguments to use.
 ************************************************************************/
vec3 recursive_ray_trace(vec3 eye, vec3 ray, int num, bool inobj) {
//
// do your thing here
//
	if(num>step_max) return null_clr;
	vec3 hit;
	int isplane;
	void *sph = intersect_scene(eye, ray, scene, &hit, &isplane);

	vec3 color = null_clr;
	if(sph==NULL) { 
		return background_clr;
	}

	vec3 lightvec = light1 - hit;
	vec3 lightvec_normal = normalize(lightvec);
	vec3 lighthit;
	int lightisplane;
	void * light_sph = intersect_scene(hit, lightvec_normal, scene, &lighthit, &lightisplane);
	
	vec3 surf_normal = isplane?vec3(0,1,0):sphere_normal(hit, (Spheres*)sph);

	if(light_sph==NULL) {
		color += phong(-1*ray, lightvec, surf_normal, sph, hit, isplane);
	}
	else {
		if(!shadow_on) {
			color += phong(-1*ray, lightvec, surf_normal, sph, hit, isplane);
		}
		else {
			color += get_shadow(-1*ray, lightvec, surf_normal, sph, hit, isplane);
		}
	}

	if(reflect_on) {
		vec3 reflect_vector = 2*dot(-1*ray, surf_normal)*surf_normal + ray;
		reflect_vector = normalize(reflect_vector);
		if(isplane) {
			color += ((struct plane*)sph)->reflectance * recursive_ray_trace(hit, reflect_vector,num+1, inobj);
		}
		else if(!isplane)
			color += ((Spheres *)sph)->reflectance * recursive_ray_trace(hit, reflect_vector, num+1, inobj);
	}
	
	if(refract_on) {
		vec3 outlightvector; 
		if(refraction(hit, -1*ray, sph, isplane, inobj, &outlightvector)) {
			if(!isplane) {
//				printf("refraction point\n");
				Spheres * refractsph = (Spheres *)sph;
				color += refractsph->refr*recursive_ray_trace(hit, outlightvector, num+1, !inobj);
			}
		}
	}
	
	if(diffuse_reflection_on && num<2) {
		int i;
		for (i=0;i<DIFFUSE_REFLECTION;i++) {
			float xtheta = 2.0 * static_cast <float> (rand()) / static_cast <float> (RAND_MAX) - 1.0;
			float ytheta = 2.0 * static_cast <float> (rand()) / static_cast <float> (RAND_MAX) - 1.0;
			float ztheta = 2.0 * static_cast <float> (rand()) / static_cast <float> (RAND_MAX) - 1.0;
			vec3 dfray;
			dfray = rotateX(xtheta*M_PI,surf_normal);
			dfray = rotateY(ytheta*M_PI,dfray);
			dfray = rotateZ(ztheta*M_PI,dfray);
			color += (0.1/DIFFUSE_REFLECTION)*recursive_ray_trace(hit, dfray, num+1, inobj);
		}
	}

	return color;

}
Example #13
int main(int argc, char* argv[])
{

	Display display(800, 600, "TSBK07 Level of Detail on Terrain");
	Basic_Shader base_shader("./shaders/space");
	Phong_Shader phong("./shaders/phong");
	Texture texture("./textures/dirt.tga");
	Camera camera(glm::vec3(0, 1, 0), 70.0f, display.GetAspectRation(), 0.01f, 1000.0f);

	Terrain terr("./textures/terrain2.jpg", "./textures/terrain2.jpg");
	
	Skybox sky;
	sky.SkyboxInit("./textures/skybox/", "back.jpg", "front.jpg", "left.jpg", "right.jpg", "top.jpg", "bottom.jpg");
	Transform transform;
	Keyboard keyboard;
	Mouse mouse;

	float counter = 0.0f;
	Mesh monkey("./models/monkey3.obj");
	Mesh box("./models/box.obj");

	std::cout << "init complete" << std::endl;

	bool wireframe = true;
	bool lock = false;

	while (!display.IsClosed())
	{
		display.Clear(1, 0, 1, 1);

		SDL_Event e;

		while (SDL_PollEvent(&e))
		{
			if (e.type == SDL_QUIT)
			{
				display.HandleEvent(e);
			}
			if (e.type == SDL_MOUSEMOTION || e.type == SDL_MOUSEBUTTONDOWN || e.type == SDL_MOUSEBUTTONUP)
			{
				mouse.HandleEvent(e, camera);
			}
		}

		const Uint8* currentKeyStates = SDL_GetKeyboardState(NULL);

		keyboard.HandleEvent(currentKeyStates, camera);
		
		sky.Draw(transform, camera);

		if (currentKeyStates[SDL_SCANCODE_B])
		{
			lock = !lock;
		}
		if (currentKeyStates[SDL_SCANCODE_F])
		{
			wireframe = !wireframe;
		}
	
		terr.Draw(transform, camera, lock, wireframe);

		display.Update();

		counter += 0.001f;
	}
	return 0;
}
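One note on the key handling above: SDL_GetKeyboardState reports the current level of each key, so the SDL_SCANCODE_B and SDL_SCANCODE_F branches toggle lock and wireframe on every frame while the key is held. A small edge-detection sketch (the struct and names are illustrative, not part of the example's Keyboard class):

#include <SDL2/SDL.h>

struct KeyEdge
{
    Uint8 prev = 0;
    // True only on the frame the key transitions from released to pressed.
    bool pressed(const Uint8* keys, SDL_Scancode sc)
    {
        const bool edge = keys[sc] != 0 && prev == 0;
        prev = keys[sc];
        return edge;
    }
};

// Usage: declare KeyEdge bKey, fKey; then inside the main loop:
//   if (bKey.pressed(currentKeyStates, SDL_SCANCODE_B)) lock = !lock;
//   if (fKey.pressed(currentKeyStates, SDL_SCANCODE_F)) wireframe = !wireframe;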
Example #14
/***************************
 * void drawPhongSpan()    *
 *                         *
 * This routine sets the   *
 * buffer values for each  *
 * span of pixels which    *
 * intersect the current   *
 * scanline.               *
 ***************************/
void
drawPhongSpan(triple pt,float N[3],int dFlag)
{
  int                xpixel,hue,shade;
  float              colorindx, col;
  triple             hs;


  /* negative values of xleft and xright have been pushed to machine0 */

  xpixel = (int)xleft;


  while (xpixel <= (int)xright) {
    /* if z is closer to viewer than value in zBuffer continue */
    if ( (zC < get_zBuffer(xpixel)) ) {
      /* get the intensity for current point */
      col = phong(pt,N);
      put_cBuffer_axes(xpixel,'0');
      put_zBuffer(xpixel,zC);
      /* if mono (bw dsply) do black and white semi-random dithering */
      if (mono || (dFlag == PSoption) || viewport->monoOn) {
        if (get_random() < 100.0*exp((double)-1.3*(pi_sq*(col-.2)*(col-.2))))  {
          put_cBuffer_indx(xpixel,black);
        } else {
          put_cBuffer_indx(xpixel,white);
        }
      } else {
        /* glossy shading for one hue else dithered for many hues */
        if (viewport->hueOffset == viewport->hueTop && !smoothError) {
          colorindx = (float)(smoothConst+1) * col;
          if (colorindx > (smoothConst+1)) colorindx = smoothConst+1;
          put_cBuffer_indx(xpixel,XPixelColor((int)colorindx-1));
        } else { /* probabilistic multi-hued dithering */
          hs = norm_dist();
          hue = (int)(intersectColor[0]+hs.x/20.0);
          /* cannot dither out of color map range */
          if (viewport->hueOffset < viewport->hueTop) {
            if (hue < viewport->hueOffset)
              hue = viewport->hueOffset;
            else {
              if (hue > viewport->hueTop)
                hue = viewport->hueTop;
            }
          } else {
            if (hue < viewport->hueTop)
              hue = viewport->hueTop;
            else {
              if (hue > viewport->hueOffset)
                hue = viewport->hueOffset;
            }
          }
          col += hs.y/6.0;  /* perturb intensity */
          if (col > 1.0) put_cBuffer_indx(xpixel,white);
          else {
            if (col < 0.0) put_cBuffer_indx(xpixel,black);
            else {
              shade = (int)(col * 4.0);
              put_cBuffer_indx(xpixel,XSolidColor(hue,shade));
            }
          }
        }
      }
    } /* zC < zBuffer */
    zC += dzdx;
    if (viewport->hueOffset != viewport->hueTop || smoothError ||
        viewport->monoOn)
      intersectColor[0] += dcolor;
    N[0] += dnorm.x;  N[1] += dnorm.y;  N[2] += dnorm.z;
    pt.x += dpt.x;  pt.y += dpt.y;  pt.z += dpt.z;
    xpixel++;
  } /* while each pixel */

}
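The phong(pt, N) call above returns a scalar intensity that the span loop then dithers or maps to a palette index. A hedged sketch of what such a per-pixel evaluation typically computes, re-normalizing the incrementally interpolated normal first (the light/eye positions and the ka/kd/ks/shine coefficients are assumptions for illustration; the real routine presumably reads them from viewport state):

#include <cmath>

float phongIntensitySketch(const float pt[3], const float N[3],
                           const float light[3], const float eye[3],
                           float ka, float kd, float ks, float shine)
{
    float n[3], l[3], v[3], r[3];
    float len = std::sqrt(N[0]*N[0] + N[1]*N[1] + N[2]*N[2]);
    for (int i = 0; i < 3; ++i) n[i] = N[i] / len;            // re-normalize interpolated normal

    len = 0.0f;
    for (int i = 0; i < 3; ++i) { l[i] = light[i] - pt[i]; len += l[i]*l[i]; }
    len = std::sqrt(len);
    for (int i = 0; i < 3; ++i) l[i] /= len;                  // unit vector toward the light

    len = 0.0f;
    for (int i = 0; i < 3; ++i) { v[i] = eye[i] - pt[i]; len += v[i]*v[i]; }
    len = std::sqrt(len);
    for (int i = 0; i < 3; ++i) v[i] /= len;                  // unit vector toward the eye

    float nl = n[0]*l[0] + n[1]*l[1] + n[2]*l[2];
    if (nl < 0.0f) nl = 0.0f;
    for (int i = 0; i < 3; ++i) r[i] = 2.0f*nl*n[i] - l[i];   // mirror of l about n
    float rv = r[0]*v[0] + r[1]*v[1] + r[2]*v[2];
    if (rv < 0.0f) rv = 0.0f;

    return ka + kd*nl + ks*std::pow(rv, shine);               // caller handles values outside [0,1]
}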
Example #15
void exportTerrain::printShadingData(const MFnMesh& theMesh, MString texture)
{
    MObjectArray    shaders;
    MIntArray	    indices;
    MPlug	    tPlug;
    MPlugArray      connections,inConnections;
    MObject	    node,shaderObject;
    MFnDependencyNode dpNode;
    MStatus	    status;
    
    int i,j;

    theMesh.getConnectedShaders(0 , shaders, indices);

    fout << "Shading Data:" << endl;

    //Will assume that only one shader is used, and therefore only prints
    //data for the first index;
    
    //Assuming only one shader
    dpNode.setObject( shaders[0] );
    dpNode.getConnections(connections);
    for(i=0;i < connections.length();++i){
	connections[i].connectedTo(inConnections,true,true);
	for(j=0;j<inConnections.length();++j){
	    node = inConnections[j].node();	
	    dpNode.setObject(node);
	    if(node.hasFn(MFn::kLambert) ){
		shaderObject = node;
	    }
	}
    }
    

    MFnLambertShader shader(shaderObject, &status);
    if(!status){
	status.perror("Unable to create MFnLambertShader!");
	return;
    }
    //Collect all the data
    fout << "Diffuse_Color: " << (shader.diffuseCoeff(&status)*(MColor(1.0,1.0,1.0) )*shader.color(&status)) * 
						(MColor(1.0,1.0,1.0) - shader.transparency(&status) )<< endl;
    fout << "Ambient: " << shader.ambientColor(&status) << endl;
    fout << "Emmision_Color: " << shader.incandescence(&status) << endl;	

	
    if(shaderObject.hasFn(MFn::kBlinn) ){
	MFnBlinnShader blinn(shaderObject);
	fout << "Specular_Color: " << blinn.specularColor() << endl;
	fout << "Shininess: " << blinn.eccentricity() << endl;
    }
    else if(shaderObject.hasFn(MFn::kPhong) ){
	MFnPhongShader phong(shaderObject);
	fout << "Specular_Color: " << phong.specularColor() << endl;
	fout << "Shininess: " << phong.cosPower() << endl;
    }
    else{
	fout << "Specular_Color: " << MColor() << endl;
	fout << "Shininess: " << double(0) << endl;
    }
    	
    fout << "Texture: " << texture << endl;

}