Example #1
int main(void)
{

	ConfigVFR(frame[0].vfr_register,
	          frame[0].width, frame[0].height,
	          frame[0].base0,
	          frame[0].base1,
	          frame[0].words_divider,
	          frame[0].cpr_halve);


	cleanscreen((unsigned int*)frame[0].base0);

	Raytracer_int raytracer(640, 480);
	raytracer.setreal_frame_width(SCREEN_WIDTH);

	raytracer.generateSimpleScene();
	unsigned char count = 0;
	while (1)
	{
		printf("%d\n", count);
		raytracer.render((unsigned int*)frame[0].base0);
		// activate the video frame
		//VIPFR_ActiveDrawFrame(pReader);
		count++;
	}


	return 0;
}
Example #2
int main(int argc, char *argv[])
{
	const int width = 300;
	const int height = 200;

	const int numSamples = 9;
	const int sqSamples = int(sqrt(double(numSamples)));	// samples per axis of the sub-pixel grid

	Vector3 origin(0, 0, 0);
	Vector3 target(0, 0, -1);
	Vector3 up(0, 1, 0);

	Camera camera(width, height, origin, target, up, 90.0);

	RayTracer raytracer(width, height, "WriteImage3.ppm");


//	raytracer.addShape(new Sphere(Vector3(0.0, -2.0, -10.0), 3.0,  new Diffusive(Color(1.0, 0.0, 0.0), 0.4)));
	raytracer.addShape(new Sphere(Vector3(0.0, -2.0, -10.0), 3.0,  new Reflective(new ConstantTexture(Color(0.1, 0.1, 0.1)), 0.1, 0.6, 0.9, 20)));
	raytracer.addShape(new Sphere(Vector3(7.0, -2.0, -13.0), 3.0,  new Diffusive(new ConstantTexture(Color(0.3, 0.4, 0.1)), 0.4)));
	raytracer.addShape(new Sphere(Vector3(-7.0, -2.0, -17.0), 3.0,  new Specular(new ConstantTexture(Color(0.1, 0.3, 0.3)), 0.33,  0.6, 20)));

	raytracer.addShape(new Sphere(Vector3(-7.0, -2.0, -13.0), 3.0,  new Refractive(new ConstantTexture(Color(0.1, 0.1, 0.1)), 0.1,  0.99, 0.4, 20, 1.33)));

	raytracer.addShape(new Sphere(Vector3(0.0, 4.0, -13.0), 3.0,  new Reflective(new ConstantTexture(Color(0.1, 0.1, 0.1)), 0.1, 0.6, 0.6, 20)));
	raytracer.addShape(new Plane(Vector3(0.0, -5.0, 0.0), Vector3(0.0, 1.0, 0.0), new Diffusive(new CheckerTexture(Color(0.1, 0.1, 0.1), Color(0.8, 0.8, 0.1), 2), 0.6)));
	raytracer.addLight(new Light(Vector3(0.0, 10, 0.0), Color(1.3, 1.3, 1.3)));
	raytracer.addLight(new Light(Vector3(10.0, 10, -5.0), Color(1.3, 1.3, 1.3)));

	Color pixcolor(0);
	for (int Y = 0; Y < height; ++Y)
	{
		for (int X = 0; X < width; ++X)
		{
			pixcolor = Color(0);

			for (int i = 0; i < sqSamples; ++i)
			{
				for (int j = 0; j < sqSamples; ++j)
				{
					ShadeRec rec(false);
//					Ray camRay = raytracer.castRay(double(X) + (double(i) / double(sqrt(numSamples))), double(Y) + (double(j) / double(sqrt(numSamples)) ));
//					Ray camRay = camera.getRay((double(X) + (double(i) / double(sqrt(numSamples))) / double(width)), (double(Y) + (double(j) / double(sqrt(numSamples))) / double(height)));
					Ray camRay = camera.getRay(double(X) + (double(i) / double(sqSamples)) , double(Y) + (double(j) / double(sqSamples)));

					pixcolor += raytracer.calculate_pixel_color(camRay, 0);
				}
			}
			pixcolor /= numSamples;

			raytracer.image->pixel(X, Y, pixcolor);
		}
	}

	//raytracer.image->clamp(1.0);

	raytracer.write();

	return 0;
}
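A note on the sampling loop above: with numSamples = 9 it takes sqSamples = 3 strata per axis, so each pixel is traced at sub-pixel offsets 0, 1/3 and 2/3 in both x and y before the accumulated colour is divided by numSamples. Below is a minimal standalone sketch of that offset grid, independent of the Camera and RayTracer classes used in the example.

#include <cmath>
#include <cstdio>

int main()
{
	const int numSamples = 9;
	const int sqSamples = int(std::sqrt(double(numSamples))); // 3 strata per axis

	// Same (i / sqSamples, j / sqSamples) offsets as the render loop above,
	// shown here for a single pixel.
	for (int i = 0; i < sqSamples; ++i)
		for (int j = 0; j < sqSamples; ++j)
			std::printf("sub-pixel offset: (%.3f, %.3f)\n",
			            double(i) / sqSamples, double(j) / sqSamples);
	return 0;
}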
Example #3
void	antialiasing(t_thread_data* my_data, double real_x,
		     double real_y, double offset)
{
	t_thread_pixel	pixel[ANTIALIASING * ANTIALIASING];
	int		i;
	int		j;
	double		pixel_x;
	double		pixel_y;

	i = 0;
	pixel_x = real_x;
	pixel_y = real_y;
	while (i < ANTIALIASING * ANTIALIASING)
	{
		raytracer(my_data, pixel_x, pixel_y, &pixel[i]);
		pixel_y = pixel_y + offset;
		i = i + 1;
		if (i % (ANTIALIASING) == 0)
		{
			pixel_y = real_y;
			pixel_x = pixel_x + offset;
		}
	}
	j = (int)(real_x * my_data->env->height + real_y);
	make_avg(pixel, &my_data->pixels[j]);
	my_data->pixels[j].pixel_x = real_x;
	my_data->pixels[j].pixel_y = real_y;
}
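make_avg itself is not shown in this snippet. Purely as an illustration, here is a sketch of how such a step could average the ANTIALIASING * ANTIALIASING sub-samples into one pixel; the t_px struct and its fields are invented for the sketch and are not the project's actual t_thread_pixel layout.

/*
** Hypothetical sketch only: averages `count` colour samples into `out`.
** The t_px type below is a stand-in, not the project's t_thread_pixel.
*/
typedef struct	s_px
{
	double	r;
	double	g;
	double	b;
}				t_px;

static void	average_samples(const t_px *samples, int count, t_px *out)
{
	int	i;

	out->r = 0;
	out->g = 0;
	out->b = 0;
	i = 0;
	while (i < count)
	{
		out->r += samples[i].r;
		out->g += samples[i].g;
		out->b += samples[i].b;
		i++;
	}
	out->r /= count;
	out->g /= count;
	out->b /= count;
}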
Example #4
void	init_env(t_env *e, int ac, char **av)
{
	if (ac < 2)
		exit(ft_dprintf(2, "usage : %s <map>\n", av[0]));
	e->ac = ac;
	e->av = av;
	e->img = ft_new_img(e->mlx, WIN_WIDTH, WIN_HEIGHT);
	e->dir.x = 0;
	e->dir.y = 0;
	e->dir.z = 1;
	e->pos.x = 0;
	e->pos.y = 0;
	e->pos.z = -5;
	e->screen = ft_memalloc(sizeof(t_obj));
	init_tab_obj(e, av[1]);
	raytracer(e);
}
Example #5
int main(int argc, char *argv[])
{
    QApplication a(argc, argv);

    QMainWindow* window = new QMainWindow();
    RenderGui::Viewer* viewer = new RenderGui::Viewer();
    window->setCentralWidget(viewer);
    window->show();

    int width = 640;
    int height = 480;

    SimpleScene scene;
    PhongMaterial metal(scene,
            RGBColor(0.1,0.1,0.1),
            RGBColor(0.8,0.8,0.8),
            RGBColor(0.8,0.8,0.8),
            2.0);

//    Sphere sphere(Coord(0,0,4), metal);
//    scene.push_obj(&sphere);

    Plane triangle(Vector3d(1.0,0.0,1.0),
            Vector3d(0.0,1.0,1.0),
            Vector3d(0.0,0.0,1.0),
            metal);
    scene.push_obj(&triangle);

    PhongPointLight light(Coord(-5, -0.2, -0.2),
            RGBColor(0.2,0.2,0.2),
            RGBColor(0.2,0.2,0.2),
            RGBColor(0.2,0.2,0.2));
    scene.push_light(&light);

    Coord cam(0,0,-1);
    SimpleRaytracer raytracer(scene);
    QImageBuffer buffer(width, height);
    raytracer.render(buffer, 60, cam);

    //buffer.setPixel(50, 50, ColorOps::white);
    viewer->showImage(buffer.qimage());

    return a.exec();
}
Example #6
int		main()
{
  t_imgdata	imgdata;
  t_pov		pov;
  t_coordinate	lightsource;

  start_minilibx(&imgdata);
  pov.x = -700;
  pov.y = 0;
  pov.z = 200;
  pov.anglex = 0;
  pov.angley = 0;
  pov.anglez = 0;
  lightsource.x = -500;
  lightsource.y = 400;
  lightsource.z = 600;
  raytracer(&imgdata, &pov, objects, &lightsource);
  mlx_expose_hook(imgdata.win_ptr, expose, &imgdata);
  mlx_loop(imgdata.mlx_ptr);
  return (0);
}
Example #7
int main( int argc, const char* argv[] )
{
	if ( argc < 5 )
	{
		std::cerr << "usage: " << argv[0] << " <width> <height> <start> <end>\n";
		return EXIT_FAILURE;
	}
	int width = atoi(argv[1]);
	int height = atoi(argv[2]);
	int start = atoi(argv[3]);
	int end = atoi(argv[4]);
	rayTracer::Camera cam( 0.3, 5, 60 );
	//cam.SampleLens(3);
    rayTracer::Scene scene( cam );
    //rayTracer::RayTracer raytracer( &scene, 400,300  );
    rayTracer::RayTracer raytracer( &scene, width, height );
	//raytracer.SetDepthOfField(3);
	raytracer.SetAntialias(3);
	raytracer.CreateRays();
    for (int i = start; i < end; ++i)
    {
        scene.Update(i);
        // Cast rays into scene and write to image
        raytracer.CastRay( i, -2.5);
        std::cout<<"frame "<<i<<"\n";
    }
    return EXIT_SUCCESS;
}
Example #8
void thread_loop(AppData& app_data, CommonData& common, x11::Display& display, glx::Context& context)
{
	Context gl;
	ResourceAllocator alloc;
	RaytraceCopier::Params rtc_params(
		display,
		context,
		common.context
	);
	RaytraceCopier rt_copier(app_data, common.rt_target);
	RaytracerResources rt_res(app_data, common.rt_data, alloc);
	Raytracer raytracer(app_data, rt_res);
	raytracer.Use(app_data);

	// start with an empty backlog; reserve one entry per column of tiles
	std::vector<unsigned> backlog;
	backlog.reserve(app_data.cols());
	std::chrono::milliseconds bl_interval(100);

	while(!common.Done())
	{
		// all threads must wait until
		// the raytrace target is cleared
		common.master_ready.Wait();

		if(common.Done()) break;

		raytracer.InitFrame(app_data, common.Face());

		auto bl_begin = std::chrono::steady_clock::now();
		unsigned tile = 0;
		while(common.NextFaceTile(tile))
		{
			raytracer.Raytrace(app_data, tile);
			backlog.push_back(tile);

			auto now = std::chrono::steady_clock::now();

			if(bl_begin + bl_interval < now)
			{
				gl.Finish();
				auto lock = common.Lock();
				for(unsigned bl_tile : backlog)
				{
					rt_copier.Copy(
						app_data,
						rtc_params,
						raytracer,
						bl_tile
					);
				}
				lock.unlock();
				backlog.clear();
				bl_begin = now;
			}
			else gl.Finish();
		}
		auto lock = common.Lock();
		for(unsigned bl_tile : backlog)
		{
			rt_copier.Copy(
				app_data,
				rtc_params,
				raytracer,
				bl_tile
			);
		}
		lock.unlock();
		backlog.clear();
		gl.Finish();

		// signal to the master that the raytracing
		// of the current face has finished
		common.thread_ready.Signal();

		if(common.Done()) break;

		// wait for the master to save the face image
		common.master_ready.Wait();
	}
}
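The tile loop above batches finished tiles into backlog and flushes them through rt_copier at most once every 100 ms, so the shared lock from common.Lock() is taken once per batch instead of once per tile. Below is a stripped-down, standalone sketch of that flush-on-interval pattern, with a process callback standing in for the rt_copier.Copy() calls.

#include <chrono>
#include <functional>
#include <vector>

// Collects work items and flushes them in batches, at most once per
// interval, mirroring the backlog handling in thread_loop() above.
void drain_in_batches(
	const std::vector<unsigned>& tiles,
	std::chrono::milliseconds interval,
	const std::function<void(unsigned)>& process)
{
	std::vector<unsigned> backlog;
	auto begin = std::chrono::steady_clock::now();

	for(unsigned tile : tiles)
	{
		backlog.push_back(tile);

		auto now = std::chrono::steady_clock::now();
		if(begin + interval < now)
		{
			// flush the whole batch (in the real code: under one lock)
			for(unsigned t : backlog) process(t);
			backlog.clear();
			begin = now;
		}
	}
	// flush whatever remains after the loop, as thread_loop() does
	for(unsigned t : backlog) process(t);
}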
Example #9
void render_loop(AppData& app_data)
{
	ResourceAllocator alloc;

	RaytracerTarget raytrace_tgt(app_data, alloc);

	RaytracerData raytrace_data(app_data);
	RaytracerResources raytrace_res(app_data, raytrace_data, alloc);

	Raytracer raytracer(app_data, raytrace_res);
	RaytraceCopier::Params copy_params;
	RaytraceCopier copier(app_data, raytrace_tgt);

	Renderer renderer(app_data, raytrace_tgt.tex_unit);
	Saver saver(app_data);

	unsigned face = 0;
	unsigned tile = 0;
	const unsigned tiles = app_data.tiles();


	while(true)
	{
		if(app_data.skip_face[face])
		{
			++face;
			continue;
		}

		if(app_data.verbosity > 0)
		{
			app_data.logstr()
				<< "Rendering cube face "
				<< face
				<< std::endl;
		}

		raytracer.Use(app_data);

		if(tile == 0)
		{
			raytrace_tgt.Clear(app_data);
			raytracer.InitFrame(app_data, face);
			renderer.InitFrame(app_data, face);
		}

		if(tile < tiles)
		{
			raytracer.BeginWork(app_data);
			raytracer.Raytrace(app_data, tile);
			raytracer.EndWork(app_data);

			copier.Copy(app_data, copy_params, raytracer, tile);

			renderer.Use(app_data);
			renderer.Render(app_data);

			glfwSwapBuffers();

			tile++;
		}
		else if(face < 6)
		{
			glfwSwapBuffers();
			saver.SaveFrame(app_data, raytrace_tgt, face);
			if(face < 5)
			{
				tile = 0;
				face++;
			}
			else break;
		}
		glfwPollEvents();

		int new_x, new_y;
		glfwGetWindowSize(&new_x, &new_y);
		if(new_x > 0)
		{
			app_data.render_width = unsigned(new_x);
		}
		if(new_y > 0)
		{
			app_data.render_height = unsigned(new_y);
		}

		if(glfwGetKey(GLFW_KEY_ESC))
		{
			glfwCloseWindow();
			break;
		}
		if(!glfwGetWindowParam(GLFW_OPENED))
		{
			break;
		}
	}
}
Example #10
bool draw( char* outputName, scene *myScene )
{
	ofstream imageFile(outputName,ios_base::binary);
	if (!imageFile)
		return false;
	// Addition of the TGA header
	imageFile.put(0).put(0);
	imageFile.put(2);        // RGB not compressed

	imageFile.put(0).put(0);
	imageFile.put(0).put(0);
	imageFile.put(0);

	imageFile.put(0).put(0); // origin X
	imageFile.put(0).put(0); // origin Y

	imageFile.put((unsigned char)(myScene->sizex & 0x00FF)).put(
		(unsigned char)((myScene->sizex & 0xFF00) / 256));
	imageFile.put((unsigned char)(myScene->sizey & 0x00FF)).put(
		(unsigned char)((myScene->sizey & 0xFF00) / 256));
	imageFile.put(24);       // 24 bit bitmap
	imageFile.put(0);
	// end of the TGA header

	pairInt intersections[6];
	TrigRefList* tref;
	kdTreeNode* currentPacket;

	// Scanning
	for (integer y = 0; y < myScene->sizey; ++y)
	{
		for (integer x = 0; x < myScene->sizex; ++x)
		{
			cout << "treating: ( " << x << " , " << y << " )" << endl;
			//float red = 0, green = 0, blue = 0;
			//float coef = 1.0f;
			//int level = 0;
			color_fixed c1={0,0,0};
			// Cast the ray.
			// Because this is not a conic projection (single point of
			// origin) but an orthographic one, there is no natural starting
			// point. We have to place it far enough back to enclose the
			// whole scene, but not so far that floating-point precision
			// problems (acne and other artifacts) appear.
			// 1000.0f seems like a good compromise for now.
			ray viewRay = { {(coord)( x / ( ( integer ) LIMIT_SCALE ) ),
			                 (coord)( y / ( ( integer ) LIMIT_SCALE ) ),
			                 (coord)(  -1000 / ( ( integer ) LIMIT_SCALE )  ) },
			                {0,0,1} };

			point_fixed searchPoint;
			//cout << "getting first intersection" << endl;
			getKdNodeIntersections( &viewRay, myScene->tree, intersections );
			{
				int i, j;
				for( j = 0; j < 6; j ++ )
				{
					if( intersections[j].valid == true )
					{
						break;
					}
				}
				for( i = j + 1; i < 6; i++ )
				{
					if( intersections[i].valid == true )
					{
						break;
					}
				}
				if( disq( myScene->tree, intersections[i].I )
				    < disq( myScene->tree, intersections[j].I ) )
				{
					searchPoint = intersections[i].I;
				}
				else
				{
					searchPoint = intersections[j].I;
				}
			}
			// cout << "search Point - closer point - x: "
			//      << searchPoint.x << " y: " << searchPoint.y
			//      << " z: " << searchPoint.z << endl;

			//finding the first packet
			currentPacket = traverse( myScene->tree, searchPoint );
			//putting the ray start point inside the packet
			viewRay.start = searchPoint;

			// cout << "first triangle packet: " << endl;
			// tref = currentPacket->trig;
			// for( int k = 0; k < currentPacket->pnum; k++ )
			// {
			// 	cout << "index: " << tref->ref->index << endl;
			// 	cout << "v1  - x: " << tref->ref->V1.x << " y: "
			// 	     << tref->ref->V1.y << " z: " << tref->ref->V1.z << endl;
			// 	cout << "v2  - x: " << tref->ref->V2.x << " y: "
			// 	     << tref->ref->V2.y << " z: " << tref->ref->V2.z << endl;
			// 	cout << "v3  - x: " << tref->ref->V3.x << " y: "
			// 	     << tref->ref->V3.y << " z: " << tref->ref->V3.z << endl;
			// 	cout << endl;
			// 	tref = tref->next;
			// }

			c1 = raytracer( &viewRay, currentPacket, myScene );
			// cout << " c1 R " << c1.red *255
			//      << " G " << c1.green*255
			//      << " B " << c1.blue*255 << endl;
			imageFile.put((unsigned char)min(float(c1.blue)*255.0f,255.0f)).put(
				(unsigned char)min(float(c1.green)*255.0f, 255.0f)).put(
					(unsigned char)min(float(c1.red)*255.0f, 255.0f));
		}
	}
	return true;
}
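The sequence of put() calls at the top of draw() emits the standard 18-byte header of an uncompressed 24-bit true-colour TGA file (little-endian width and height, 24 bits per pixel). An equivalent, self-contained sketch that writes the same header from a single byte array:

#include <fstream>

// Writes the 18-byte header of an uncompressed 24-bit TGA image,
// byte-for-byte equivalent to the put() sequence in draw() above.
static void write_tga_header(std::ofstream& out, int width, int height)
{
	unsigned char header[18] = {0};
	header[2]  = 2;                                  // image type 2: uncompressed true-colour
	header[12] = (unsigned char)(width & 0xFF);      // width, low byte
	header[13] = (unsigned char)((width >> 8) & 0xFF);
	header[14] = (unsigned char)(height & 0xFF);     // height, low byte
	header[15] = (unsigned char)((height >> 8) & 0xFF);
	header[16] = 24;                                 // bits per pixel
	out.write(reinterpret_cast<const char*>(header), sizeof(header));
}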