std::vector<Pixel> const Scene::render() const {
    std::vector<Pixel> rendered;
    Raytracer rt;
    std::shared_ptr<Camera> rcam(rcam_);
    double d(rcam->calc_distance(resx_));
    for (unsigned short y = 0; y < resy_; y++) {
        for (unsigned short x = 0; x < resx_; x++) {
            Pixel p(x, y);
            double viewx, viewy;
            if (resx_ % 2 == 0) viewx = -resx_ / 2 + x;
            else viewx = (-resx_ + 1) / 2 + x;
            if (resy_ % 2 == 0) viewy = -resy_ / 2 + y;
            else viewy = (-resy_ + 1) / 2 + y;
            math3d::Ray r(rcam->cast_ray(viewx, viewy, d));
            p.color = rt.trace(r, shapes_, lights_);
            rendered.push_back(p);
        }
    }
    return rendered;
}
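calc_distance() is not part of this snippet; a minimal sketch of what it is presumed to compute (the view-plane distance for a given horizontal resolution), assuming the camera stores a horizontal field of view fovx_ in degrees — the member name and formula are assumptions, not this project's code:

// Sketch only: distance to the view plane such that resx pixels span the
// horizontal field of view fovx_ (degrees). Requires <cmath>.
double Camera::calc_distance(unsigned short resx) const {
    return (resx / 2.0) / std::tan((fovx_ / 2.0) * M_PI / 180.0);
}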
int main(int argc, char** argv) {
    Raytracer* raytraceDemo = new Raytracer();
    raytraceDemo->initPhysics();
    raytraceDemo->setCameraDistance(6.f);
    return glutmain(argc, argv, screenWidth, screenHeight, "Minkowski-Sum Raytracer Demo", raytraceDemo);
}
int main(int argc, char** argv) {
    Raytracer* raytraceDemo = new Raytracer();
    raytraceDemo->initPhysics();
    raytraceDemo->setCameraDistance(6.f);
    return glutmain(argc, argv, 640, 640, "Bullet GJK Implicit Shape Raytracer Demo", raytraceDemo);
}
int main(int argc, char **argv) {
    string scenefile; // if not specified, the default scene (a single cube) is rendered
    string outfilename("scenes/default_output.bmp"); // NOTE: only BMP output is supported
    string outfilename_depth("scenes/default_depth_output.bmp");
    // Scene filename specified
    if (argc > 1) {
        scenefile = string(argv[1]);
        int dot = scenefile.find_last_of('.');
        outfilename = scenefile.substr(0, dot) + "_output.bmp";
        outfilename_depth = scenefile.substr(0, dot) + "_depth_output.bmp";
    }
    std::cout << "Rendering " << (scenefile.empty() ? "default scene" : scenefile) << std::endl;
    std::cout << "Output to " << outfilename << std::endl;
    Raytracer raytracer;
    if (scenefile.empty()) {
        // Render the default scene.
        raytracer.render(outfilename.c_str(), outfilename_depth.c_str(), Scene());
    } else {
        // Parse the scene file.
        Parser parser(new std::ifstream(scenefile.c_str()));
        if (!parser.parse()) {
            puts("Scene file could not be parsed; using the default scene.");
            raytracer.render(outfilename.c_str(), outfilename_depth.c_str(), Scene());
        } else {
            // Render the parsed scene with our raytracer.
            raytracer.render(outfilename.c_str(), outfilename_depth.c_str(), parser.scene);
        }
    }
    // Uncomment when running under Visual Studio.
    // system("pause");
    return 0;
}
void Scene::raytrace() {
    camera.setWidthAndHeight(screen.getWidth(), screen.getHeight());
    srand(time(NULL));
    int numberOfPixels = screen.getWidth() * screen.getHeight();
    double spp = (double) samplesPerPixel;
    double invSpp = 1.0 / spp;
    if (traceType == RAY) {
        cout << "Raytracing\n";
    } else if (traceType == PATH) {
        cout << "Pathtracing\n";
    }
    cout << "Samples per pixel: " << samplesPerPixel << "\n";
    cout << "Indirect lighting: " << (indirectLighting ? "on\n" : "off\n");
    cout << "Direct lighting: " << (directLighting ? "on\n" : "off\n");
    Raytracer tracer = Raytracer(this);
    #pragma omp parallel for
    for (int i = 0; i < numberOfPixels; i++) {
        Ray ray;
        dvec3 color;
        dvec3 totalColor = dvec3(0.0f);
        int x = i % screen.getWidth();
        int y = i / screen.getWidth();
        float t = 0;
        Sample sample;
        Sampler sampler = Sampler(x, y, samplesPerPixel, samplerType);
        while (sampler.hasSamples()) {
            sample = sampler.getSample();
            camera.generateRay(ray, sample.x, sample.y, t);
            t += invSpp;
            color = tracer.pathTraceRay(ray, 0, 1.0f, 0);
            totalColor += color;
        }
        totalColor /= spp;
        screen.writePixel(totalColor, x, y);
    }
    screen.saveScreenshot(("testscenes/" + outputFilename).c_str());
}
void runCoordinator(igcl::Coordinator * coord) { Raytracer raytracer; raytracer.setupScene(); // setup scene objects ::raytracer = &raytracer; ::coord = coord; coord->start(); coord->waitForNodes(nParticipants); th = new std::thread(runThread); th->detach(); image = new BmpImage(IMAGE_WIDTH, IMAGE_HEIGHT); imageSize = IMAGE_HEIGHT*IMAGE_WIDTH; for (int i=0; i<nTests; i++) { countJobs = 0; std::unique_lock<std::mutex> bufferLock(bufferingAccessMutex); igcl::NBuffering buf(bufferingLevel, imageSize, blockSize, sendJob); buffering = &buf; buffering->addPeers(coord->downstreamPeers()); buffering->addPeer(0); bufferLock.unlock(); timeval iniTime; start(iniTime); bufferLock.lock(); buffering->bufferToAll(); bufferLock.unlock(); while (1) { //cout << "lock buffering->allJobsCompleted()" << endl; bufferLock.lock(); //cout << buffering->allJobsCompleted() << " " << buffering->getNCompletedJobs() << endl; if (buffering->allJobsCompleted()) { bufferLock.unlock(); break; } bufferLock.unlock(); receiveResult(); } cout << "count: " << countJobs << endl; finish(iniTime); } coord->terminate(); }
void RaytracerApplication::handle_event( const SDL_Event& event ) {
    int width, height;
    if ( !raytracing ) {
        camera_control.handle_event( this, event );
    }
    switch ( event.type ) {
    case SDL_KEYDOWN:
        switch ( event.key.keysym.sym ) {
        case KEY_RAYTRACE:
            get_dimension( &width, &height );
            toggle_raytracing( width, height );
            break;
        case KEY_SEND_PHOTONS:
            raytracer.initialize( &scene, options.num_samples, 0, 0 );
            queue_render_photon = true;
            break;
        case KEY_SCREENSHOT:
            output_image();
            break;
        default:
            break;
        }
    default:
        break;
    }
}
void RaytracerApplication::update( real_t delta_time ) {
    if ( raytracing ) {
        // do part of the raytrace
        if ( !raytrace_finished ) {
            assert( buffer );
            raytrace_finished = raytracer.raytrace( buffer, &delta_time );
        } else {
            raytracer.progressive_photon_mapping( buffer, &delta_time );
        }
    } else {
        // copy camera over from camera control (if not raytracing)
        camera_control.update( delta_time );
        scene.camera = camera_control.camera;
    }
}
void RaytracerApplication::toggle_raytracing( int width, int height ) {
    assert( width > 0 && height > 0 );
    // do setup if starting a new raytrace
    if ( !raytracing ) {
        // only re-allocate if the dimensions changed
        if ( buf_width != width || buf_height != height ) {
            free( buffer );
            buffer = (unsigned char*) malloc( BUFFER_SIZE( width, height ) );
            if ( !buffer ) {
                std::cout << "Unable to allocate buffer.\n";
                return; // leave untoggled since we have no buffer.
            }
            buf_width = width;
            buf_height = height;
        }
        // initialize the raytracer (first make sure camera aspect is correct)
        scene.camera.aspect = real_t( width ) / real_t( height );
        if ( !raytracer.initialize( &scene, options.num_samples, width, height ) ) {
            std::cout << "Raytracer initialization failed.\n";
            return; // leave untoggled since initialization failed.
        }
        // reset flag that says we are done
        raytrace_finished = false;
    }
    raytracing = !raytracing;
}
void RaytracerApplication::toggle_raytracing( int width, int height ) {
    assert( width > 0 && height > 0 );
    if ( !raytracing ) {
        if ( buf_width != width || buf_height != height ) {
            free( buffer );
            buffer = (unsigned char*) malloc( BUFFER_SIZE( width, height ) );
            if ( !buffer ) {
                std::cout << "Unable to allocate buffer.\n";
                return;
            }
            buf_width = width;
            buf_height = height;
        }
        scene.camera.aspect = real_t( width ) / real_t( height );
        if ( !raytracer.initialize( &scene, width, height ) ) {
            std::cout << "Raytracer initialization failed.\n";
            return;
        }
        raytrace_finished = false;
    }
    raytracing = !raytracing;
}
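Both toggle_raytracing variants above rely on a BUFFER_SIZE macro that is not part of these snippets; a minimal sketch of an assumed definition, using four bytes per pixel (RGBA8) — the channel count and layout are assumptions:

// Hypothetical definition: bytes needed for a width x height RGBA8 framebuffer.
#define BUFFER_SIZE( width, height ) ( (width) * (height) * 4 )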
Color Reflective::shade(Raytracer &tracer, HitInfo &info) {
    // direct lighting from the Phong model
    Color directColor = directLight.shade(tracer, info);
    // reflect the direction back toward the camera about the surface normal
    Vector3 toCam = -info.ray.getDirection();
    Vector3 reflDir = Vector3::reflect(toCam, info.normal);
    Ray reflRay = Ray(info.hitPoint, reflDir);
    // trace the reflected ray
    Color traced = tracer.shadeRay(*(info.scene), reflRay);
    return directColor + traced * reflectionColor * reflectivity;
}
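Vector3::reflect is not shown here; a minimal free-function sketch of the mirror-reflection formula it is presumed to compute, assuming Vector3 exposes x, y, z members and a three-float constructor (names and convention are assumptions, not this project's implementation):

// Sketch only: mirror v about a unit-length normal n, i.e. r = 2(v.n)n - v.
// With v = the direction back toward the camera, r is the mirror-reflection direction.
Vector3 reflectAboutNormal(const Vector3 &v, const Vector3 &n) {
    float vDotN = v.x * n.x + v.y * n.y + v.z * n.z;
    return Vector3(2.0f * vDotN * n.x - v.x,
                   2.0f * vDotN * n.y - v.y,
                   2.0f * vDotN * n.z - v.z);
}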
int main(int argc, char* argv[]) {
    char* input_filename;
    char* output_filename;
    int height, width;
    // Expected usage: ./start_trace input_file output_file width height
    if (argc >= 5) {
        input_filename = argv[1];
        output_filename = argv[2];
        width = atoi(argv[3]);
        height = atoi(argv[4]);
    } else {
        cout << "Error, not called with correct arguments." << endl;
        cout << "./start_trace input_file output_file width height" << endl;
        return 1;
    }
    Raytracer r;
    r.ray_trace_start(input_filename, output_filename, width, height);
    return 0;
}
void runPeer(igcl::Peer * peer) { peer->start(); Raytracer raytracer; raytracer.setupScene(); // setup scene objects timeval globalIni, end; gettimeofday(&globalIni, NULL); imageSize = IMAGE_HEIGHT*IMAGE_WIDTH; while (1) { uint startIndex = 0; igcl::result_type res = peer->waitRecvFrom(0, startIndex); if (res != igcl::SUCCESS) break; //std::cout << "received " << startIndex << endl; uint nIndexes = std::min(blockSize, imageSize-startIndex); uint endIndex = startIndex + nIndexes; color_s_char array[nIndexes]; for (uint i=startIndex; i<endIndex; ++i) { int row = i / IMAGE_WIDTH; int col = i % IMAGE_WIDTH; const color_s & c = raytracer.castRayFromScreen(row, col); // calculate color of pixel (CPU-HEAVY) array[i-startIndex] = color_s_char(c); } peer->sendTo(0, array+0, nIndexes); } gettimeofday(&end, NULL); cout << "total time:\t" << timeDiff(globalIni, end) << " ms\n"; peer->hang(); }
void runCoordinator(igcl::Coordinator * node) {
    const uint blockSize = 10000;
    Raytracer raytracer;
    raytracer.setupScene(); // setup scene objects
    for (int t = 0; t < 30; t++) {
        timeval iniTime;
        start(iniTime);
        //#pragma omp parallel for num_threads(N_THREADS)
        #pragma omp parallel for schedule(dynamic, 10000) num_threads(N_THREADS)
        for (uint i = 0; i < imageSize; i++) {
            int row = i / IMAGE_WIDTH;
            int col = i % IMAGE_WIDTH;
            color_s c = raytracer.castRayFromScreen(row, col); // calculate color of pixel (CPU-HEAVY)
            image->setPixel(row, col, c.r, c.g, c.b);
        }
        finish(iniTime);
    }
}
int main(int argc, char *argv[]) { cout << "Computer Graphics and Scientific Visualization - Raytracer" << endl << endl; if (argc < 2 || argc > 5) { cerr << "Usage: " << argv[0] << " in-file [out-file.png] [width] [height]" << endl; return 1; } Raytracer raytracer; if (!raytracer.readScene(argv[1])) { cerr << "Error: reading scene from " << argv[1] << " failed - no output generated."<< endl; return 1; } std::string ofname; if (argc>=3) { ofname = argv[2]; } else { ofname = argv[1]; if (ofname.size()>=5 && ofname.substr(ofname.size()-5)==".yaml") { ofname = ofname.substr(0,ofname.size()-5); } ofname += ".png"; } // read width and height arguments int w = 400; int h = 400; if (argc>=5) { w = atoi(argv[3]); h = atoi(argv[4]); } raytracer.renderToFile(ofname, w, h); return 0; }
void dofDemo(Raytracer& raytracer, Material& glass, Material& jade, Material& gold, Material& weird, Material& shiny) {
    // Define the ambient term and two point light sources.
    raytracer.setAmbientLight(Colour(0.9, 0.9, 0.9));
    raytracer.addLightSource( new PointLight(Point3D(0, 0, 5), Colour(0.9, 0.9, 0.9)) );
    raytracer.addLightSource( new PointLight(Point3D(0, 5, -5), Colour(0.4, 0.4, 0.4)) );
    // Add five unit spheres with the gold material.
    SceneDagNode* sphere  = raytracer.addObject( new UnitSphere(), &gold );
    SceneDagNode* sphere2 = raytracer.addObject( new UnitSphere(), &gold );
    SceneDagNode* sphere3 = raytracer.addObject( new UnitSphere(), &gold );
    SceneDagNode* sphere4 = raytracer.addObject( new UnitSphere(), &gold );
    SceneDagNode* sphere5 = raytracer.addObject( new UnitSphere(), &gold );
    // Place the spheres at increasing depth so the depth-of-field effect is visible.
    raytracer.translate(sphere,  Vector3D(-3  , -1  ,  -5));
    raytracer.translate(sphere2, Vector3D(-1.5, -0.5,  -7));
    raytracer.translate(sphere3, Vector3D( 0  ,  0  ,  -9));
    raytracer.translate(sphere4, Vector3D( 1.5,  0.5, -11));
    raytracer.translate(sphere5, Vector3D( 3  ,  1  , -13));
}
void Worker::Render(void) { QTime timer; timer.start(); Raytracer R; if(!R.loadScene(filePath)){ emit renderInvalid(); return; } float setupTime = (float)timer.elapsed() / 1000.0f; Log::writeLine("Setup Time: " + Log::floatToString(setupTime)); timer.restart(); image = new QImage(R.getWidth(), R.getHeight(), QImage::Format_ARGB32); img = new UIimage("image", R.getWidth(), R.getHeight(), image->bits()); emit imageReady(img); manager = new Manager(threads, blocks, img, &R); manager->setEventHandler(handler); if(!interrupted) manager->Render(); float renderTime = (float)timer.elapsed() / 1000.0f; Log::writeLine("Render Time: " + Log::floatToString(renderTime)); image->save("image.png", "PNG"); if(interrupted) emit renderInterrupted(); else emit renderComplete(); }
int main(int argc, char* argv[]) { // Build your scene and setup your camera here, by calling // functions from Raytracer. The code here sets up an example // scene and renders it from two different view points, DO NOT // change this if you're just implementing part one of the // assignment. Raytracer raytracer; int width = 320; int height = 240; int aa = 2; int sceneNum = 0; double toRadian = 2*M_PI/360.0; fprintf(stderr, "Using options:\n"); #ifdef USE_EXTENDEDLIGHTS fprintf(stderr, "\tExtended light sources\n"); #else fprintf(stderr, "\tPoint light sources\n"); #endif #ifdef USE_REFRACTIONS fprintf(stderr, "\tRefractions\n"); #else fprintf(stderr, "\tNo refractions\n"); #endif #ifdef USE_REFLECTIONS fprintf(stderr, "\tReflections\n"); #else fprintf(stderr, "\tNo reflections\n"); #endif #ifdef IGNORE_SHADOWS fprintf(stderr, "\tNo shadows\n"); #else { #ifdef USE_TRANSMISSIONSHADOWS fprintf(stderr, "\tTransmission-based shadows\n"); #else fprintf(stderr, "\tSimple shadows\n"); #endif } #endif #ifdef USE_FINERFLUX fprintf(stderr, "\tFiner numerical flux intergrations\n"); #else fprintf(stderr, "\tCoarser numerical flux intergrations\n"); #endif if (argc == 3) { width = atoi(argv[1]); height = atoi(argv[2]); } else if (argc == 4) { width = atoi(argv[1]); height = atoi(argv[2]); aa = atoi(argv[3]); } else if (argc == 5) { width = atoi(argv[1]); height = atoi(argv[2]); aa = atoi(argv[3]); sceneNum = atoi(argv[4]); } // SceneNum should not exceed total scenes if ((sceneNum > 3)|| (sceneNum <0)) { sceneNum = 0; } // Camera parameters. Point3D eye(0, 0, 1); Vector3D view(0, 0, -1); Vector3D up(0, 1, 0); double fov = 60; // Defines materials for shading. Material gold( Colour(0.3, 0.3, 0.3), Colour(0.75164, 0.60648, 0.22648), Colour(0.628281, 0.555802, 0.366065), 51.2, 0.001, 0.0, 1/2.4 ); Material jade( Colour(0.22, 0.38, 0.33), Colour(0.52, 0.73, 0.57), Colour(0.316228, 0.316228, 0.316228), 12.8, 0.2 , 0.0, 0.0 ); Material polishedGold( Colour(0.24725, 0.2245, 0.0645), Colour(0.34615, 0.3143, 0.0903), Colour(0.797357, 0.723991, 0.208006), 83.2, 0.01,0.0,0.0); Material glass( Colour(0.15, 0.15, 0.15), Colour(0.08, 0.08, 0.08), Colour(0.2, 0.2, 0.2), 50.1,0.08,0.9,0.6667 ); Material glass1( Colour(0.2, 0.2, 0.2), Colour(0.2, 0.2, 0.2), Colour(0.7, 0.7, 0.7), 10.1,0.03,0.9,0.6667 ); Material steel( Colour(0.1, 0.1, 0.1), Colour(0.1, 0.1, 0.1), Colour(0.8, 0.8, 0.8), 80, 0.03, 0.0, 1.0 ); Material blueSolid( Colour(0, 0, 1), Colour(0, 0, 1), Colour(0, 0, 0), 0, 0.0, 0.0, 1.0 ); Material redSolid( Colour(1, 0, 0), Colour(1, 0, 0), Colour(0, 0, 0), 0, 0.0, 0.0, 1.0 ); Material chrome( Colour(0.25, 0.25, 0.25), Colour(0.4,0.4,0.4), Colour(0.7746, 0.7746, 0.7746), 77, 0.42, 0.0, 1.0); Material ruby( Colour(0.1745, 0.01175, 0.01175), Colour(0.61424, 0.04136, 0.04136), Colour(0.727811, 0.626959, 0.626959) , 76.8, 0.01, 0.0, 0.565); Material pearl( Colour(0.25, 0.20725, 0.20725), Colour(1, 0.829, 0.829), Colour(0.296648, 0.296648, 0.296648), 11.264, 0.1,0.0,1.0 ); Material silver(Colour(0.23125, 0.23125, 0.23125), Colour(0.2775, 0.2775, 0.2775), Colour(0.773911, 0.773911, 0.773911), 89.6, 0.4,0.0, 1.0); Material emerald(Colour(0.0215, 0.1745, 0.0215),Colour(0.07568, 0.61424, 0.07568), Colour(0.633, 0.727811, 0.633), 76.8, 0.1, 0.25, 0.637); Material brass(Colour(0.329412, 0.223529, 0.027451),Colour(0.780392, 0.568627, 0.113725), Colour(0.992157, 0.941176, 0.807843),27.8974, 0.3, 0.0, 1.0 ); Material bronze(Colour(0.2125, 0.1275, 0.054), Colour(0.714, 0.4284, 0.18144), Colour(0.393548, 0.271906, 0.166721), 
25.6, 0.1, 0.0, 1.0 ); Material bronzeShiny(Colour(0.25, 0.148, 0.06475), Colour(0.4, 0.2368, 0.1036), Colour(0.774597, 0.458561, 0.200621), 76.86, 0.15, 0.0, 1.0 ); Material turquoise(Colour(0.1, 0.18725, 0.1745), Colour(0.396, 0.74151, 0.69102), Colour(0.297254, 0.30829, 0.306678), 12.8, 0.01, 0.2, 0.9); Material obsidian(Colour(0.05375, 0.05, 0.06625), Colour(0.18275, 0.17, 0.22525), Colour(0.332741, 0.328634, 0.346435), 38.4, 0.05, 0.18, 0.413); Material copper(Colour(0.19125, 0.0735, 0.0225), Colour(0.7038, 0.27048, 0.0828), Colour(0.256777, 0.137622, 0.086014), 12.8, 0.1, 0.0, 1.0 ); Material copperPolished(Colour(0.2295, 0.08825, 0.0275), Colour(0.5508, 0.2118, 0.066), Colour(0.580594, 0.223257, 0.0695701), 51.2, 0.15, 0.0, 1.0 ); Material pewter(Colour(0.105882, 0.058824, 0.113725), Colour(0.427451, 0.470588, 0.541176), Colour(0.333333, 0.333333, 0.521569), 9.84615, 0.0, 0.0, 1.0 ); // Light Sources //===================== //raytracer.addLightSource( new PointLight(Point3D(1, 1, 2),Colour(0.5, 0.5, 0.5)) ); #ifdef USE_EXTENDEDLIGHTS // Defines a ball light source raytracer.addLightSource( new BallLight(Point3D(-1, 1, 1), 2.0, Colour(0.9, 0.9, 0.9), 4) ); #else // Defines a point light source. raytracer.addLightSource( new PointLight(Point3D(0, 0, 5), Colour(0.9, 0.9,0.9) ) ); #endif if (sceneNum==0) { // Defines a point light source. //raytracer.addLightSource( new PointLight(Point3D(0, 0, 5), // Colour(0.9, 0.9, 0.9) ) ); // Add a unit square into the scene with material mat. SceneDagNode* sphere = raytracer.addObject( new UnitSphere(), &gold); SceneDagNode* plane = raytracer.addObject( new UnitSquare(), &jade ); // Apply some transformations to the unit square. double factor1[3] = { 1.0, 2.0, 1.0 }; double factor2[3] = { 6.0, 6.0, 1.0 }; double factor3[3] = { 4.0, 4.0, 4.0 }; double factor4[3] = { 3.7, 3.7, 3.7 }; raytracer.translate(sphere, Vector3D(0, 0, -5)); raytracer.rotate(sphere, 'x', -45); raytracer.rotate(sphere, 'z', 45); raytracer.scale(sphere, Point3D(0, 0, 0), factor1); raytracer.translate(plane, Vector3D(0, 0, -7)); raytracer.rotate(plane, 'z', 45); raytracer.scale(plane, Point3D(0, 0, 0), factor2); /* SceneDagNode* bigSphere = raytracer.addObject( new UnitSphere(), &glass1); raytracer.scale(bigSphere, Point3D(0, 0, 0), factor3); raytracer.translate(bigSphere, Vector3D(0, 0, -7)); SceneDagNode* bigSphere2 = raytracer.addObject( new UnitSphere(), &glass1); raytracer.scale(bigSphere2, Point3D(0, 0, 0), factor4); raytracer.translate(bigSphere2, Vector3D(0, 0, -7)); */ }// end of scene 0 if (sceneNum==1) { /* raytracer.addLightSource( new BallLight(Point3D(-1, 1, 1), 5.0, Colour(0.9, 0.9, 0.9), 0.888) ); raytracer.addLightSource( new PointLight(Point3D(0, 0, 2),Colour(0.5, 0.5, 0.5)) ); */ // Add a unit square into the scene with material mat. SceneDagNode* sphere = raytracer.addObject( new UnitSphere(), &glass); SceneDagNode* sphere1 = raytracer.addObject( new UnitSphere(), &brass); SceneDagNode* plane = raytracer.addObject( new UnitSquare(), &jade); SceneDagNode* cylinder = raytracer.addObject( new UnitCylinder(), &brass); // Apply some transformations to the unit square. 
double factor1[3] = { 1.0, 2.0, 1.0 }; double factor2[3] = { 6.0, 6.0, 1.0 }; double factor3[3] = { 0.5, 0.5, 2.0 }; raytracer.translate(sphere, Vector3D(0, 0, -5)); raytracer.rotate(sphere, 'x', -45); raytracer.rotate(sphere, 'z', 45); raytracer.scale(sphere, Point3D(0, 0, 0), factor1); raytracer.translate(sphere1, Vector3D(-2.5, 0, -5)); raytracer.translate(plane, Vector3D(0, 0, -7)); raytracer.rotate(plane, 'z', 45); raytracer.scale(plane, Point3D(0, 0, 0), factor2); raytracer.translate(cylinder, Vector3D(3, 0, -5)); //raytracer.rotate(cylinder, 'y', -20); raytracer.rotate(cylinder, 'z', 45); raytracer.rotate(cylinder, 'x', -75); raytracer.scale(cylinder, Point3D(0, 0, 0), factor3); }// end of scene1 //=============== Scene 2 ============================== //===================================================== if(sceneNum == 2) { /* raytracer.addLightSource( new BallLight(Point3D(-1, 1, 1), 5.0, Colour(0.9, 0.9, 0.9), 0.888) );*/ //raytracer.addLightSource( new PointLight(Point3D(0, 0, 2),Colour(0.5, 0.5, 0.5)) ); //Set up walls //======================================================== SceneDagNode* planeBack = raytracer.addObject( new UnitSquare(), &brass); SceneDagNode* planeBottom = raytracer.addObject( new UnitSquare(), &chrome); SceneDagNode* planeTop = raytracer.addObject( new UnitSquare(), &copperPolished); SceneDagNode* planeLeft = raytracer.addObject( new UnitSquare(), &bronzeShiny); SceneDagNode* planeRight = raytracer.addObject( new UnitSquare(), &brass); SceneDagNode* planeRear = raytracer.addObject( new UnitSquare(), &brass); double scaleFactor[3] = {8.0,8.0,1.0}; double scaleFactor1[3] = {20.01,20.01,1.0}; raytracer.translate(planeBottom, Vector3D(0, -10, 0)); raytracer.translate(planeTop, Vector3D(0, 10, 0)); raytracer.translate(planeLeft, Vector3D(-10, 0, 0)); raytracer.translate(planeRight, Vector3D(10, 0, 0)); raytracer.translate(planeBack, Vector3D(0, 0, -19.9)); raytracer.translate(planeBottom, Vector3D(0, 0, -10)); raytracer.translate(planeTop, Vector3D(0, 0, -10)); raytracer.translate(planeLeft, Vector3D(0, 0, -10)); raytracer.translate(planeRight, Vector3D(0, 0, -10)); raytracer.translate(planeRear, Vector3D(0, 0, 20)); raytracer.rotate(planeTop, 'x', 90); raytracer.rotate(planeBottom, 'x',-90); raytracer.rotate(planeLeft, 'y', -90); raytracer.rotate(planeRight, 'y', 90); raytracer.rotate(planeRear, 'x', 180); raytracer.scale(planeBack, Point3D(0, 0, 0), scaleFactor1); raytracer.scale(planeBottom, Point3D(0, 0, 0), scaleFactor1); raytracer.scale(planeTop, Point3D(0, 0, 0), scaleFactor1); raytracer.scale(planeLeft, Point3D(0, 0, 0), scaleFactor1); raytracer.scale(planeRight, Point3D(0, 0, 0), scaleFactor1); raytracer.scale(planeRear, Point3D(0, 0, 0), scaleFactor1); //=========================================================== double scaleEgg[3] = { 1.0, 1.5, 1.0 }; double scaleBall[3] = {2,2,2}; SceneDagNode* sphere = raytracer.addObject( new UnitSphere(), &glass1); SceneDagNode* sphere1 = raytracer.addObject( new UnitSphere(), &ruby); SceneDagNode* sphere2 = raytracer.addObject( new UnitSphere(), &chrome); //SceneDagNode* cone = raytracer.addObject(sphere, new UnitCone(), &emerald); //raytracer.translate(cone, Vector3D(0,0,-2)); raytracer.translate(sphere, Vector3D(-1,-1,-11)); raytracer.scale(sphere, Point3D(0,0,0), scaleBall); raytracer.translate(sphere1, Vector3D(2.5,-1,-11)); raytracer.translate(sphere2, Vector3D(2,3,-11)); //raytracer.translate(cone, Vector3D(-1,-1,-12)); raytracer.rotate(sphere1, 'z', -45); raytracer.scale(sphere1, Point3D(0,0,0), 
scaleEgg); //raytracer.rotate(cone, 'x', 90); }//end of scene 2 //==================== Scene 3 ================= //=============================================== if(sceneNum == 3) { #ifdef USE_EXTENDEDLIGHTS raytracer.addLightSource( new BallLight(Point3D(-5, 5, -3), 2.0, Colour(0.4, 0.4, 0.4), 2) ); raytracer.addLightSource( new BallLight(Point3D(5, 5, -3), 2.0, Colour(0.4, 0.4, 0.4), 2) ); #else raytracer.addLightSource( new PointLight(Point3D(-5, 5, 0), Colour(0.5, 0.0, 0.0) ) ); raytracer.addLightSource( new PointLight(Point3D(5, 5, 0), Colour(0.0, 0.5, 0.0) ) ); raytracer.addLightSource( new PointLight(Point3D(0, -5, 0), Colour(0.0, 0.0, 0.5) ) ); #endif double planeScale[3] = {10.0, 10.0, 1.0}; double sphereScale[3]= {1.5,1.5,1.5}; double coneScale[3] = {1.5,1.5,5}; SceneDagNode* plane = raytracer.addObject( new UnitSquare(), &pearl); SceneDagNode* sphere1 = raytracer.addObject( new UnitSphere(), &chrome); SceneDagNode* sphere2 = raytracer.addObject( new UnitSphere(), &brass); //SceneDagNode* cone = raytracer.addObject( new UnitCone(), &turquoise); raytracer.translate(sphere1, Vector3D(1, 1.5, -6.5)); raytracer.translate(sphere2, Vector3D(-1, -1.5, -6.5)); raytracer.scale(sphere2, Point3D(0,0,0), sphereScale); raytracer.scale(sphere1, Point3D(0,0,0), sphereScale); raytracer.rotate(plane, 'z', 45); raytracer.scale(plane, Point3D(0,0,0), planeScale); raytracer.translate(plane, Vector3D(0, 0, -8)); /* raytracer.translate(cone, Vector3D(2.0,-1.0,-3)); raytracer.rotate(cone, 'x', 180); raytracer.scale(cone, Point3D(0,0,0), coneScale); */ } // Render the scene, feel free to make the image smaller for // testing purposes. raytracer.render(width, height, eye, view, up, fov, aa, "sig1.bmp", 's'); //raytracer.render(width, height, eye, view, up, fov, aa, "diffuse1.bmp",'d'); //raytracer.render(width, height, eye, view, up, fov, aa, "view1.bmp",'p'); // Render it from a different point of view. Point3D eye2(4, 2, 1); Vector3D view2(-4, -2, -6); raytracer.render(width, height, eye2, view2, up, fov, aa, "sig2.bmp", 's'); //raytracer.render(width, height, eye2, view2, up, fov, aa, "diffuse2.bmp",'d'); //raytracer.render(width, height, eye2, view2, up, fov, aa, "view2.bmp",'p'); Point3D eye3(-4, -2, 1); Vector3D view3(4, 2, -6); raytracer.render(width, height, eye3, view3, up, fov, aa, "sig3.bmp", 's'); //raytracer.render(width, height, eye3, view3, up, fov, aa, "diffuse3.bmp",'d'); raytracer.render(width, height, eye3, view3, up, fov, aa, "view3.bmp",'p'); return 0; }
int main(int argc, char* argv[]) { // Build your scene and setup your camera here, by calling // functions from Raytracer. The code here sets up an example // scene and renders it from two different view points, DO NOT // change this if you're just implementing part one of the // assignment. Raytracer raytracer; int width = 320; int height = 240; if (argc == 3) { width = atoi(argv[1]); height = atoi(argv[2]); } // Camera parameters. Point3D eye(0., 0., 1.); Vector3D view(0., 0., -1.); Vector3D up(0., 1., 0.); double fov = 60; // Defines a material for shading. Material::Ptr gold = std::make_shared<Material>( Colour(0.3, 0.3, 0.3), Colour(0.75164, 0.60648, 0.22648), Colour(0.628281, 0.555802, 0.366065), 51.2 ); Material::Ptr jade = std::make_shared<Material>( Colour(0, 0, 0), Colour(0.54, 0.89, 0.63), Colour(0.316228, 0.316228, 0.316228), 12.8 ); // Defines a point light source. raytracer.addLightSource( std::make_shared<PointLight>(Point3D(0., 0., 5.), Colour(0.9, 0.9, 0.9) ) ); // Add a unit square into the scene with material mat. SceneDagNode::Ptr sphere = raytracer.addObject( std::make_shared<UnitSphere>(), gold ); SceneDagNode::Ptr plane = raytracer.addObject( std::make_shared<UnitSquare>(), jade ); // Apply some transformations to the unit square. double factor1[3] = { 1.0, 2.0, 1.0 }; double factor2[3] = { 6.0, 6.0, 6.0 }; raytracer.translate(sphere, Vector3D(0., 0., -5.)); raytracer.rotate(sphere, 'x', -45); raytracer.rotate(sphere, 'z', 45); raytracer.scale(sphere, Point3D(0., 0., 0.), factor1); raytracer.translate(plane, Vector3D(0., 0., -7.)); raytracer.rotate(plane, 'z', 45); raytracer.scale(plane, Point3D(0., 0., 0.), factor2); // Render the scene, feel free to make the image smaller for // testing purposes. raytracer.render(width, height, eye, view, up, fov, "view1.bmp"); // Render it from a different point of view. Point3D eye2(4., 2., 1.); Vector3D view2(-4., -2., -6.); raytracer.render(width, height, eye2, view2, up, fov, "view2.bmp"); return 0; }
void View::drawSkeleton(bool drawTransparent) const { if (doc->mesh.balls.isEmpty()) return; // draw model if (drawTransparent) { // set depth buffer before so we never blend the same pixel twice glClear(GL_DEPTH_BUFFER_BIT); glColorMask(GL_FALSE, GL_FALSE, GL_FALSE, GL_FALSE); doc->mesh.drawKeyBalls(); if (drawInterpolated) doc->mesh.drawInBetweenBalls(); else doc->mesh.drawBones(); glColorMask(GL_TRUE, GL_TRUE, GL_TRUE, GL_TRUE); // draw blended key balls and bones glDepthFunc(GL_EQUAL); glEnable(GL_BLEND); glEnable(GL_LIGHTING); doc->mesh.drawKeyBalls(0.25); glColor4f(0.75, 0.75, 0.75, 0.25); if (drawInterpolated) doc->mesh.drawInBetweenBalls(); else doc->mesh.drawBones(); glDisable(GL_LIGHTING); glDisable(GL_BLEND); glDepthFunc(GL_LESS); } else { // draw key balls and in-between balls glEnable(GL_LIGHTING); doc->mesh.drawKeyBalls(); glColor3f(0.75, 0.75, 0.75); if (drawInterpolated) doc->mesh.drawInBetweenBalls(); else doc->mesh.drawBones(); glDisable(GL_LIGHTING); } // draw box around selected ball if (selectedBall != -1) { const Ball &selection = doc->mesh.balls[selectedBall]; float radius = selection.maxRadius(); // enable line drawing glDepthMask(GL_FALSE); glEnable(GL_BLEND); if (mode == MODE_ADD_JOINTS || mode == MODE_ANIMATE_MESH) { glDisable(GL_DEPTH_TEST); glColor4f(0, 0, 0, 0.25); drawWireCube(selection.center - radius, selection.center + radius); glEnable(GL_DEPTH_TEST); glColor3f(0, 0, 0); drawWireCube(selection.center - radius, selection.center + radius); // find the currently selected cube face and display the cursor Raytracer tracer; Vector3 ray = tracer.getRayForPixel(mouseX, mouseY); HitTest result; if (Raytracer::hitTestCube(selection.center - radius, selection.center + radius, currentCamera->eye, ray, result)) { float size = (result.hit - currentCamera->eye).length() * CURSOR_SIZE / height(); Vector2 angles = result.normal.toAngles(); glColor3f(0, 0, 0); glDisable(GL_DEPTH_TEST); glPushMatrix(); glTranslatef(result.hit.x, result.hit.y, result.hit.z); glRotatef(90 - angles.x * 180 / M_PI, 0, 1, 0); glRotatef(-angles.y * 180 / M_PI, 1, 0, 0); glScalef(size, size, size); drawMoveCursor(); glPopMatrix(); glEnable(GL_DEPTH_TEST); } } else if (mode == MODE_SCALE_JOINTS) { // display the cursor Raytracer tracer; Vector3 ray = tracer.getRayForPixel(mouseX, mouseY); HitTest result; if (Raytracer::hitTestSphere(selection.center, radius, currentCamera->eye, ray, result)) { camera2D(); glColor3f(0, 0, 0); glDisable(GL_DEPTH_TEST); glTranslatef(mouseX, mouseY, 0); glScalef(CURSOR_SIZE, CURSOR_SIZE, 0); drawScaleCursor(); glEnable(GL_DEPTH_TEST); camera3D(); } Vector3 delta = currentCamera->eye - selection.center; Vector2 angles = delta.toAngles(); // adjust the radius to the profile of the ball as seen from the camera radius = radius / sinf(acosf(radius / delta.length())); // draw a circle around the selected ball radius *= 1.1; glPushMatrix(); glTranslatef(selection.center.x, selection.center.y, selection.center.z); glRotatef(90 - angles.x * 180 / M_PI, 0, 1, 0); glRotatef(-angles.y * 180 / M_PI, 1, 0, 0); glScalef(radius, radius, radius); glDisable(GL_DEPTH_TEST); glColor4f(0, 0, 0, 0.25); drawWireDisk(); glEnable(GL_DEPTH_TEST); glColor3f(0, 0, 0); drawWireDisk(); glPopMatrix(); } // disable line drawing glDisable(GL_BLEND); glDepthMask(GL_TRUE); } }
/** * Wooden Monkey Scene 1 */ void wmonkey_scene_1() { printf("WOODEN MONKEY SCENE : 1 ----------------------------------\n\n"); Raytracer rt; int width = 16 * 20 * 2; int height = 12 * 20 * 2; // Camera parameters. Point3D eye1(0, 0, 1), eye2(4, 2, 1); Vector3D view1(0, 0, -1), view2(-4, -2, -6); Vector3D up(0, 1, 0); double fov = 60; // Defines a material for shading. Material gold( Colour(0.3, 0.3, 0.3), Colour(0.75164, 0.60648, 0.22648), Colour(0.628281, 0.555802, 0.366065), 51.2, 0.8 ); Material jade( Colour(0, 0, 0), Colour(0.54, 0.89, 0.63), Colour(0.316228, 0.316228, 0.316228), 12.8); // Defines a point light source. double l0c = 0.5; PointLight * light0 = new PointLight( Point3D(-2, 2, 5), Colour(l0c, l0c, l0c), 0.2); rt.addLightSource(light0); // Add a unit square into the scene with material mat. SceneDagNode* sphere = rt.addObject( new UnitSphere(), &gold ); SceneDagNode* sphere2 = rt.addObject( new UnitSphere(), &gold ); SceneDagNode* plane = rt.addObject( new UnitSquare(), &jade ); // Apply some transformations to the unit square. double factor1[3] = { 1.0, 2.0, 1.0 }; double factor2[3] = { 6.0, 6.0, 6.0 }; rt.translate(sphere, Vector3D(0, 0, -5)); rt.rotate(sphere, 'x', -45); rt.rotate(sphere, 'z', 45); rt.scale(sphere, Point3D(0, 0, 0), factor1); rt.translate(plane, Vector3D(0, 0, -7)); rt.rotate(plane, 'z', 45); rt.scale(plane, Point3D(0, 0, 0), factor2); double f[3] = { 0.5, 0.5, 0.5 }; rt.translate(sphere2, Vector3D(3, 0, -5)); rt.scale(sphere2, Point3D(0, 0, 0), f); rt.setAAMode(Raytracer::AA_SUPER_SAMPLING); rt.setShadingMode(Raytracer::SCENE_MODE_PHONG); rt.setShadows(Raytracer::SHADOW_CAST); rt.setEnvMapMode(Raytracer::ENV_MAP_CUBE_SKYBOX); rt.setColorSpaceMode(Raytracer::COLOR_ENC_SRGB_GAMMA_CORRECT); rt.setReflDepth(4); if ( rt.getEnvMapMode() != Raytracer::NONE ) { // load images EnvMap env; if ( _DEBUG ) { env = EnvMap( "EnvMaps/DebugMaps/posx.bmp", "EnvMaps/DebugMaps/posy.bmp", "EnvMaps/DebugMaps/posz.bmp", "EnvMaps/DebugMaps/negx.bmp", "EnvMaps/DebugMaps/negy.bmp", "EnvMaps/DebugMaps/negz.bmp" ); } else { env = EnvMap( "EnvMaps/SaintLazarusChurch/posx.bmp", "EnvMaps/SaintLazarusChurch/posy.bmp", "EnvMaps/SaintLazarusChurch/posz.bmp", "EnvMaps/SaintLazarusChurch/negx.bmp", "EnvMaps/SaintLazarusChurch/negy.bmp", "EnvMaps/SaintLazarusChurch/negz.bmp" ); } rt.setEnvMap(env); } printf("WOODEN MONKEY SCENE : 1 :: Rendering...\n"); rt.render(width, height, eye2, view2, up, fov, "wmonkey_1.bmp"); printf("WOODEN MONKEY SCENE : 1 :: Done!\n"); }
int main(int argc, char* argv[]) {
    // init random number generator for distribution rendering
    std::srand(int(time(nullptr)));
    if (argc <= 1) {
        std::cerr << "No Scene XML specified. If more than one XML is specified, scenes and settings will be combined into one." << std::endl;
        std::cerr << " Usage:" << std::endl;
        std::cerr << " ./raytracer <path to xml> ..." << std::endl;
        return 0;
    }
    // Create the 3 main objects that handle all functionality
    Scene scene;
    Raytracer raytracer;
    raytracer.setScene(&scene);
    CameraContainer cameras;
    // parse all scene info
    SceneXmlParser xmlParser(raytracer, scene, cameras);
    for (int i = 1; i < argc; ++i) {
        std::string sceneFilename(argv[i]);
        if (!xmlParser.parseSceneDefinition(sceneFilename)) {
            std::cerr << "Parsing failed... Exiting." << std::endl;
            return 1;
        }
    }
    // preprocess the scene before rendering
    scene.preprocess();
    std::string bmpSuffix(".bmp");
    std::string rawSuffix(".rsd");
    // Render for each camera.
    for (auto& cam : cameras) {
        std::cout << "Rendering camera \"" << cam->name << "\"" << std::endl;
        // if previous raw sensor data exists, use it as the starting point
        std::string rawFileName = cam->name + rawSuffix;
        std::ifstream rawFile(rawFileName.c_str());
        if (rawFile.good()) {
            Image<SensorPixel> accummulatedSensor = readImageFromFile<Image<SensorPixel> >(rawFile);
            if (accummulatedSensor) {
                std::cout << "Reusing previously rendered data for iterative raytracing." << std::endl;
                cam->mergeSensor(accummulatedSensor);
            }
            rawFile.close();
        }
        // render and dump to file
        raytracer.render(*cam.get());
        cam->dumpToBMP(cam->name + bmpSuffix);
        // if raytracer flag says to also dump raw, do so
        if (raytracer.dumpRaw) {
            cam->dumpRawData(rawFileName);
        }
    }
    return 0;
}
int main(int argc, char* argv[]) { // Build your scene and setup your camera here, by calling // functions from Raytracer. The code here sets up an example // scene and renders it from two different view points, DO NOT // change this if you're just implementing part one of the // assignment. Raytracer raytracer; int width = 16 * 20 * 2; int height = 12 * 20 * 2; if (argc == 3) { width = atoi(argv[1]); height = atoi(argv[2]); } // Camera parameters. Point3D eye1(0, 0, 1), eye2(4, 2, 1); Vector3D view1(0, 0, -1), view2(-4, -2, -6); // Point3D eye1(0, 0, 1), eye2(4, 2, -6); // Vector3D view1(0, 0, -1), view2(-4, -2, 1); Vector3D up(0, 1, 0); double fov = 60; // Defines a material for shading. Material gold( Colour(0.3, 0.3, 0.3), Colour(0.75164, 0.60648, 0.22648), Colour(0.628281, 0.555802, 0.366065), 51.2, LARGE_SPH_REFLECT, LARGE_SPH_REFRAC_INDX, LARGE_SPH_REFRACT); Material jade( Colour(0, 0, 0), Colour(0.54, 0.89, 0.63), Colour(0.316228, 0.316228, 0.316228), 12.8); Material red( Colour(0, 0, 0), Colour(0.9, 0.05, 0.05), Colour(0.4, 0.2, 0.2), 12.8); // Defines a point light source. Point3D light_pos; if (LIGHT_DEFAULT) { light_pos = Point3D(0, 0, 5); } else { light_pos = LIGHT_POS_TEST; } PointLight * light0 = new PointLight( light_pos, Colour(0.9, 0.9, 0.9), 0.1); raytracer.addLightSource(light0); // Add a unit square into the scene with material mat. SceneDagNode* sphere = raytracer.addObject( new UnitSphere(), &gold ); SceneDagNode* sphere2 = raytracer.addObject( new UnitSphere(), &gold ); SceneDagNode* plane = raytracer.addObject( new UnitSquare(), &jade ); //set the texture map for the objects of interest in the scene if texture map flag is ON if (TEXTURE_MAP_FLAG) { // load texture image TextureMap txtmp; txtmp = TextureMap(TEXTURE_IMG); raytracer.setTextureMap(txtmp); //for now, we are only using texture map for sphere sphere->useTextureMapping = true; sphere->obj->setTextureMap(txtmp); } // Apply some transformations to the unit square. 
double factor1[3] = { 1.0, 2.0, 1.0 }; double factor2[3] = { 6.0, 6.0, 6.0 }; raytracer.translate(sphere, Vector3D(0, 0, -5)); raytracer.rotate(sphere, 'x', -45); raytracer.rotate(sphere, 'z', 45); raytracer.scale(sphere, Point3D(0, 0, 0), factor1); raytracer.translate(plane, Vector3D(0, 0, -7)); raytracer.rotate(plane, 'z', 45); raytracer.scale(plane, Point3D(0, 0, 0), factor2); double f[3] = { 0.5, 0.5, 0.5 }; raytracer.translate(sphere2, Vector3D(0, 0, -8)); raytracer.scale(sphere2, Point3D(0, 0, 0), f); bool DO_SIGNATURE = false; bool DO_SIGNATURE_SS = false; bool DO_DIFFUSE = false; bool DO_PHONG = false; bool DO_PHONG_SS = false; bool DO_FULL_FEATURED = false; bool DO_WOODEN_MONKEY_SCENES = true; bool DO_REFRACTION_SCENE = false; bool RENDER_FIRST_VIEW = true; bool RENDER_SECOND_VIEW = true; raytracer.setReflDepth(0); raytracer.setEnvMapMode(Raytracer::NONE); // render signature if ( DO_SIGNATURE ) { raytracer.setAAMode(Raytracer::NONE); raytracer.setShadingMode(Raytracer::SCENE_MODE_SIGNATURE); if ( RENDER_FIRST_VIEW ) raytracer.render(width, height, eye1, view1, up, fov, "sig1.bmp"); if ( RENDER_SECOND_VIEW ) raytracer.render(width, height, eye2, view2, up, fov, "sig2.bmp"); } // render signature with SS AA if ( DO_SIGNATURE_SS ) { raytracer.setAAMode(Raytracer::AA_SUPER_SAMPLING); raytracer.setShadingMode(Raytracer::SCENE_MODE_SIGNATURE); if ( RENDER_FIRST_VIEW ) raytracer.render(width, height, eye1, view1, up, fov, "sigSS1.bmp"); if ( RENDER_SECOND_VIEW ) raytracer.render(width, height, eye2, view2, up, fov, "sigSS2.bmp"); } // render diffuse if ( DO_DIFFUSE ) { raytracer.setAAMode(Raytracer::NONE); raytracer.setShadingMode(Raytracer::SCENE_MODE_DIFFUSE); if ( RENDER_FIRST_VIEW ) raytracer.render(width, height, eye1, view1, up, fov, "diffuse1.bmp"); if ( RENDER_SECOND_VIEW ) raytracer.render(width, height, eye2, view2, up, fov, "diffuse2.bmp"); } // render phong if ( DO_PHONG ) { raytracer.setAAMode(Raytracer::NONE); raytracer.setShadingMode(Raytracer::SCENE_MODE_PHONG); if ( RENDER_FIRST_VIEW ) raytracer.render(width, height, eye1, view1, up, fov, "phong1.bmp"); if ( RENDER_SECOND_VIEW ) raytracer.render(width, height, eye2, view2, up, fov, "phong2.bmp"); } // phong with super sampling AA if ( DO_PHONG_SS ) { raytracer.setAAMode(Raytracer::AA_SUPER_SAMPLING); raytracer.setShadingMode(Raytracer::SCENE_MODE_PHONG); if ( RENDER_FIRST_VIEW ) raytracer.render(width, height, eye1, view1, up, fov, "phongSS1.bmp"); if ( RENDER_SECOND_VIEW ) raytracer.render(width, height, eye2, view2, up, fov, "phongSS2.bmp"); } // refraction if it's turned on if (REFRACTION_FLAG) { raytracer.setRefractionMode(REFRACTION_FLAG); } // all features enabled or turned to max if ( DO_FULL_FEATURED ) { raytracer.setAAMode(Raytracer::NONE); raytracer.setAAMode(Raytracer::AA_SUPER_SAMPLING); raytracer.setShadingMode(Raytracer::SCENE_MODE_PHONG); raytracer.setShadows(Raytracer::SHADOW_CAST); // raytracer.setShadows(Raytracer::NONE); raytracer.setEnvMapMode(Raytracer::ENV_MAP_CUBE_SKYBOX); // raytracer.setEnvMapMode(Raytracer::NONE); raytracer.setReflDepth(4); if ( raytracer.getEnvMapMode() != Raytracer::NONE ) { // load images EnvMap env; if ( _DEBUG ) { env = EnvMap( "EnvMaps/DebugMaps/posx.bmp", "EnvMaps/DebugMaps/posy.bmp", "EnvMaps/DebugMaps/posz.bmp", "EnvMaps/DebugMaps/negx.bmp", "EnvMaps/DebugMaps/negy.bmp", "EnvMaps/DebugMaps/negz.bmp" ); } else { env = EnvMap( "EnvMaps/SaintLazarusChurch/posx.bmp", "EnvMaps/SaintLazarusChurch/posy.bmp", "EnvMaps/SaintLazarusChurch/posz.bmp", 
"EnvMaps/SaintLazarusChurch/negx.bmp", "EnvMaps/SaintLazarusChurch/negy.bmp", "EnvMaps/SaintLazarusChurch/negz.bmp" ); } raytracer.setEnvMap(env); } // adjust lighting? if ( raytracer.getReflDepth() > 0 ) { double l0i = 0.5; light0->setAmbient(Colour(l0i, l0i, l0i)); } if ( RENDER_FIRST_VIEW ) raytracer.render(width, height, eye1, view1, up, fov, "all1.bmp"); if ( RENDER_SECOND_VIEW ) raytracer.render(width, height, eye2, view2, up, fov, "all2.bmp"); } // different scenes just for the wooden monkey thing if ( DO_WOODEN_MONKEY_SCENES ) { // wmonkey_scene_1(); wmonkey_scene_2(); // TODO add more scenes here as required... } //render the 2nd refraction scene if ( REFRACTION_FLAG && DO_REFRACTION_SCENE ) { refraction_scene_1(); } printf("Press enter to terminate...\n"); std::string s; std::getline(std::cin, s); return 0; }
int main(int argc, char* argv[]) { // Build your scene and setup your camera here, by calling // functions from Raytracer. The code here sets up an example // scene and renders it from two different view points, DO NOT // change this if you're just implementing part one of the // assignment. Raytracer raytracer; //_render_mode = MODE_SIGNATURE; //_render_mode = MODE_SPECULAR; _render_mode = MODE_FULL_PHONG; //_render_mode = (mode)(MODE_AMBIENT | MODE_DIFFUSE); int width = 320; int height = 240; if (argc == 3) { width = atoi(argv[1]); height = atoi(argv[2]); } // Camera parameters. Point3D eye(0, 0, 1); Vector3D view(0, 0, -1); Vector3D up(0, 1, 0); double fov = 60; // Defines a material for shading. Material gold( Colour(0.3, 0.3, 0.3), Colour(0.75164, 0.60648, 0.22648), Colour(0.628281, 0.555802, 0.366065), 51.2 ); Material jade( Colour(0, 0, 0), Colour(0.54, 0.89, 0.63), Colour(0.316228, 0.316228, 0.316228), 12.8 ); // Defines a point light source. raytracer.addLightSource( new PointLight(Point3D(0, 0, 5), Colour(0.9, 0.9, 0.9) ) ); // Add a unit square into the scene with material mat. SceneDagNode* sphere = raytracer.addObject( new UnitSphere(), &gold ); SceneDagNode* plane = raytracer.addObject( new UnitSquare(), &jade ); // Apply some transformations to the unit square. double factor1[3] = { 1.0, 2.0, 1.0 }; double factor2[3] = { 6.0, 6.0, 6.0 }; raytracer.translate(sphere, Vector3D(0, 0, -5)); raytracer.rotate(sphere, 'x', -45); raytracer.rotate(sphere, 'z', 45); raytracer.scale(sphere, Point3D(0, 0, 0), factor1); raytracer.translate(plane, Vector3D(0, 0, -7)); raytracer.rotate(plane, 'z', 45); raytracer.scale(plane, Point3D(0, 0, 0), factor2); // Render the scene, feel free to make the image smaller for // testing purposes. raytracer.render(width, height, eye, view, up, fov, "phong1.bmp"); // Render it from a different point of view. Point3D eye2(4, 2, 1); Vector3D view2(-4, -2, -6); raytracer.render(width, height, eye2, view2, up, fov, "phong2.bmp"); return 0; }
void RenderTask::operator()(const unsigned int depth, RenderProgressCallback* callback) { double pixelWidth = 1.0/m_frameBuffer->getWidth(); double pixelHeight = 1.0/m_frameBuffer->getHeight(); Raytracer* rt = new Raytracer(m_scene); mutexBuckets.lock(); Bucket bucket = m_order->getNextBucket(); mutexBuckets.unlock(); while (bucket != Bucket::INVALID) { if (callback) { mutexCallback.lock(); callback->onBucketStart(bucket); mutexCallback.unlock(); } for (unsigned int y=bucket.y0; y<=bucket.yd; y++) { for (unsigned int x=bucket.x0; x<=bucket.xd; x++) { Color rawColor(0,0,0); Camera* camera = m_scene->getCameraMount()->getCamera(); double normX = XScreenToViewPlane(x); double normY = YScreenToViewPlane(y); unsigned int haltonSeq = 0/*Random::Instance().generate(256)*/; for (unsigned int i=1; i<=m_aaSamples; i++) { haltonSeq++; std::vector<Ray> rays = camera->makeSampleRays( normX+Random::Instance().haltonSeq(haltonSeq,2)*pixelWidth-pixelWidth*0.5, normY+Random::Instance().haltonSeq(haltonSeq,3)*pixelHeight-pixelHeight*0.5); for (unsigned int r=0; r<rays.size(); r++) { //m_primaryRaysCount++; rays[r] = m_scene->getCameraMount()->T(rays[r]); // Local to World double distance; rawColor += rt->traceRay(rays[r], depth, distance) * (1.0/(double)rays.size()); } } rawColor.r /= (double)m_aaSamples; rawColor.g /= (double)m_aaSamples; rawColor.b /= (double)m_aaSamples; rawColor.r = GAMMA(rawColor.r, 2.2); rawColor.g = GAMMA(rawColor.g, 2.2); rawColor.b = GAMMA(rawColor.b, 2.2); m_frameBuffer->setColorAt(x,y, rawColor); } } if (callback) { mutexCallback.lock(); callback->onBucketEnd(bucket); mutexCallback.unlock(); } mutexBuckets.lock(); bucket = m_order->getNextBucket(); mutexBuckets.unlock(); } delete rt; }
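The GAMMA macro used above to gamma-encode the accumulated color before writing it to the framebuffer is not defined in this snippet; a minimal sketch of an assumed definition (requires <cmath>; the exact form in this codebase may differ):

// Hypothetical gamma-encoding helper: maps a linear value into display space.
#define GAMMA(value, gamma) (std::pow((value), 1.0 / (gamma)))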
int main(int argc, char* argv[]) { // Build your scene and setup your camera here, by calling // functions from Raytracer. The code here sets up an example // scene and renders it from two different view points, DO NOT // change this if you're just implementing part one of the // assignment. Raytracer raytracer; int width = 320; int height = 240; if (argc == 3) { width = atoi(argv[1]); height = atoi(argv[2]); } /***********************************************************Testing ******************************** // Camera parameters. Point3D eye(0, 0, 1); Vector3D view(0, 0, -1); Vector3D up(0, 1, 0); double fov = 60; // Defines a material for shading. Material gold( Colour(0.3, 0.3, 0.3), Colour(0.75164, 0.60648, 0.22648), Colour(0.628281, 0.555802, 0.366065), 51.2,0.3,0,NULL ); Material jade( Colour(0, 0, 0), Colour(0.54, 0.89, 0.63), Colour(0.316228, 0.316228, 0.316228), 12.8,0.3,0,NULL); // Defines a point light source. raytracer.addLightSource( new PointLight(Point3D(0.0, 0, 5), Colour(0.9, 0.9, 0.9) ) ); // Add a unit square into the scene with material mat. SceneDagNode* sphere = raytracer.addObject( new UnitSphere(), &gold ); SceneDagNode* plane = raytracer.addObject( new UnitSquare(), &jade ); // Apply some transformations to the unit square. double factor1[3] = { 1.0, 2.0, 1.0 }; double factor2[3] = { 6.0, 6.0, 6.0 }; raytracer.translate(sphere, Vector3D(0, 0, -5)); raytracer.rotate(sphere, 'x', -45); raytracer.rotate(sphere, 'z', 45); raytracer.scale(sphere, Point3D(0, 0, 0), factor1); raytracer.translate(plane, Vector3D(0, 0, -7)); raytracer.rotate(plane, 'z', 45); raytracer.scale(plane, Point3D(0, 0, 0), factor2); // Render the scene, feel free to make the image smaller for // testing purposes. raytracer.render(width, height, eye, view, up, fov, "view4.bmp"); // Render it from a different point of view. Point3D eye2(4, 2, 1); Vector3D view2(-4, -2, -6); raytracer.render(width, height, eye2, view2, up, fov, "view5.bmp"); ***********************************************************Testing ********************************/ /***********************************************************Final Scene********************************/ // Camera parameters. // Point3D eye(0, 8, -3); // Vector3D view(0, -1,0); Point3D eye(0, 0, 1); Vector3D view(0, 0, -1); Vector3D up(0, 1, 0); double fov = 60; // Defines a material for shading. Material gold( Colour(0.3, 0.3, 0.3), Colour(0.75164, 0.60648, 0.22648), Colour(0.628281, 0.555802, 0.366065), 51.2,0.2,NULL); // Material jade( Colour(0, 0, 0), Colour(0.54, 0.89, 0.63), // Colour(0.316228, 0.316228, 0.316228), // 12.8,0.5,NULL); Material jade( Colour(0, 0, 0), Colour(0.47, 0.576, 0.859), Colour(0.316228, 0.316228, 0.316228), 12.8,0.5,NULL); Material red( Colour(0.3, 0.3, 0.3), Colour(1, 0, 0), Colour(0.628281, 0.555802, 0.366065), 51.2,0.2,NULL); Material white( Colour(0.3, 0.3, 0.3), Colour(1, 0.8549, 0.7255), Colour(0.628281, 0.555802, 0.366065), 51.2,0.2,NULL); Material pink( Colour(0.3, 0.3, 0.3), Colour(0.9412, 0.502, 0.502), Colour(0.628281, 0.555802, 0.366065), 51.2,0.2,NULL); Material mirror( Colour(0.0, 0.0, 0.0), Colour(0.0, 0.0, 0.0), Colour(0.0, 0.0, 0.0), 51.2,1,NULL); Material glass( Colour(0.3, 0.3, 0.3), Colour(1, 1, 1), Colour(0.628281, 0.555802, 0.366065), 51.2,0,1,NULL); glass.R_index = 1.3; glass.transparency_coef=1; // Defines a point light source. 
raytracer.addLightSource( new PointLight(Point3D(0, 0, 5), Colour(0.9, 0.9, 0.9) ) ); raytracer.addLightSource( new PointLight(Point3D(0, 6, -1), Colour(0.9, 0.3, 0.1) ) ); Material test( Colour(0.3, 0.3, 0.3), Colour(0.3, 0.60648, 0.22648), Colour(0.628281, 0.555802, 0.366065), 51.2 ,0.1,NULL); Material test3( Colour(0.3, 0.3, 0.3), Colour(0.3, 0.5, 0.22648), Colour(0.628281, 0.555802, 0.366065), 51.2,1,NULL ); Material test2( Colour(0, 0, 0), Colour(0.3, 0.3, 0.3), Colour(1.0, 1.0, 1.0), 51.2,0,NULL ); Texture sky("/Users/bingxu/Documents/graphics/COMP3271_assignment_4_template/raytracerMacOS/sky.bmp"); Texture board("/Users/bingxu/Documents/graphics/COMP3271_assignment_4_template/raytracerMacOS/board.bmp"); Material starrysky(Colour(0, 0, 0),Colour(0, 0, 0), Colour(0.1, 0.1, 0.1), 11.264, 0, &sky); Material board_mat(Colour(0, 0, 0),Colour(0, 0, 0), Colour(0.1, 0.1, 0.1), 11.264, 1, &board); SceneDagNode* plane = raytracer.addObject( new UnitSquare(), &jade ); SceneDagNode* plane1 = raytracer.addObject( new UnitSquare(), &jade ); SceneDagNode* plane2 = raytracer.addObject( new UnitSquare(), &board_mat );//the bottom SceneDagNode* sphere = raytracer.addObject( new UnitSphere(), &mirror); SceneDagNode* sphere1 = raytracer.addObject( new UnitSphere(), &white ); SceneDagNode* mars = raytracer.addObject( new UnitSphere(), &glass ); SceneDagNode* earth = raytracer.addObject( new UnitSphere(), &pink ); SceneDagNode* cylinder1 = raytracer.addObject( new UnitFiniteCylinder(), &gold ); SceneDagNode* cylinder2 = raytracer.addObject( new UnitFiniteCylinder(), &gold ); SceneDagNode* cylinder3 = raytracer.addObject( new UnitFiniteCylinder(), &gold ); SceneDagNode* cone = raytracer.addObject( new UnitFiniteCone(), &red ); double factor1[3] = { 2.0, 2.0, 2.0 }; double factor2[3] = {50,50,50}; double factor3[3] = { 1.0, 1.0, 1.0}; double factor4[3] = { 1.0, 2, 1.0}; double factor5[3] = {0.5,0.5,0.5}; double factor6[3] = {0.5,1.5,0.5}; double factor7[3] = {1.0,4.0,1.0}; //3 squares raytracer.translate(plane, Vector3D(0, 0, -15)); raytracer.scale(plane, Point3D(0, 0, 0), factor2); raytracer.translate(plane1, Vector3D(-15, 0, 0)); raytracer.rotate(plane1, 'y', 90); raytracer.scale(plane1, Point3D(0, 0, 0), factor2); raytracer.translate(plane2, Vector3D(0, -8, 0)); raytracer.rotate(plane2, 'x', -90); raytracer.scale(plane2, Point3D(0, 0, 0), factor2); //four balls raytracer.translate(sphere, Vector3D(-1, -6, -2)); raytracer.scale(sphere, Point3D(0, 0, 0), factor3); raytracer.translate(sphere1,Vector3D(-4.5, -6, 1)); raytracer.scale(sphere1, Point3D(0, 0, 0), factor3); raytracer.translate(mars, Vector3D(3, -3, -1)); raytracer.scale(mars, Point3D(0, 0, 0), factor3); raytracer.translate(earth, Vector3D(-8, -6, -2)); raytracer.scale(earth, Point3D(0, 0, 0), factor3); raytracer.rotate(cylinder1, 'z', -30); //raytracer.rotate(cylinder1, 'x', -15); raytracer.translate(cylinder1, Vector3D(0, -4, -2)); raytracer.scale(cylinder1, Point3D(0, 0, 0), factor7); raytracer.rotate(cylinder2, 'z', -30); raytracer.translate(cylinder2, Vector3D(1.5, -3, -2)); raytracer.scale(cylinder2, Point3D(0, 0, 0), factor6); raytracer.rotate(cylinder3, 'z', -30); raytracer.translate(cylinder3, Vector3D(-1.5, -3, -2)); raytracer.scale(cylinder3, Point3D(0, 0, 0), factor6); raytracer.rotate(cone, 'z', -30); raytracer.translate(cone, Vector3D(0, 2, -2)); raytracer.scale(cone, Point3D(0, 0, 0), factor4); std::clock_t start; double duration; start = std::clock(); // raytracer.render(width, height, eye, view, up, fov, "view4.bmp"); duration = ( 
std::clock() - start ) / (double) CLOCKS_PER_SEC; //std::cout<<"The rendering duration 1 is .......: "<< duration <<'\n'; // Render it from a different point of view. Point3D eye2(3, 1, 5); Vector3D view2(-10, -8, -15); std::clock_t start1; double duration1; start1 = std::clock(); raytracer.render(width, height, eye2, view2, up, fov, "view5.bmp"); duration1 = ( std::clock() - start1 ) / (double) CLOCKS_PER_SEC; // std::cout<<"The rendering duration 2 is .......: "<< duration1 <<'\n'; /***********************************************************Final Scene********************************/ return 0; }
int main(int argc, char* argv[]) { // Build your scene and setup your camera here, by calling // functions from Raytracer. The code here sets up an example // scene and renders it from two different view points, DO NOT // change this if you're just implementing part one of the // assignment. Raytracer raytracer; int width = 160; int height = 120; if (argc == 3) { width = atoi(argv[1]); height = atoi(argv[2]); } // Camera parameters. Point3D eye(0, 0, 1); Vector3D view(0, 0, -1); Vector3D up(0, 1, 0); double fov = 60; // Defines a material for shading. Material chrome( Colour(0.25, 0.25, 0.25), Colour(0.4, 0.4, 0.4), Colour(0.774597, 0.774597, 0.774597), 51.2 ); Material jade( Colour(0, 0, 0), Colour(0.54, 0.89, 0.63), Colour(0.316228, 0.316228, 0.316228), 12.8 ); // Defines a point light source. raytracer.addLightSource( new PointLight(Point3D(0, 3, 2), Colour(0.9, 0.9, 0.9) ) ); // Add a unit square into the scene with material mat. SceneDagNode* sphere = raytracer.addObject( new UnitSphere(), &chrome ); SceneDagNode* plane = raytracer.addObject( new UnitSquare(), &jade ); //SceneDagNode* sphere2 = raytracer.addObject( new UnitSphere(), &chrome ); // Apply some transformations to the unit square. double factor1[3] = { 1.0, 2.0, 1.0 }; double factor2[3] = { 6.0, 6.0, 6.0 }; raytracer.translate(sphere, Vector3D(0, 0, -5)); raytracer.rotate(sphere, 'x', -45); raytracer.rotate(sphere, 'z', 45); //raytracer.scale(sphere, Point3D(0, 0, 0), factor1); //raytracer.translate(sphere2, Vector3D(0, 0, -2)); //raytracer.rotate(sphere2, 'x', -45); //raytracer.rotate(sphere2, 'z', 45); raytracer.translate(plane, Vector3D(0, -3, -5)); raytracer.rotate(plane, 'x', -80); raytracer.scale(plane, Point3D(0, 0, 0), factor2); // Render the scene, feel free to make the image smaller for // testing purposes. raytracer.render(width, height, eye, view, up, fov, "view1.bmp", 3, 5, true); // Render it from a different point of view. Point3D eye2(4, 2, 1); Vector3D view2(-4, -2, -6); //raytracer.render(width, height, eye2, view2, up, fov, "view2.bmp", 3, 15, false); //std::cin.get(); return 0; }
int raytrace(string fileName) {
    Raytracer *raytracer = new Raytracer();
    int retValue = raytracer->start(fileName);
    delete raytracer;
    return retValue;
}
int main(int argc, char** argv) { ObjectProperties *redSphereProperties = new ObjectProperties(); redSphereProperties->setColour({ MAX_COLOUR, MIN_COLOUR, MIN_COLOUR }); redSphereProperties->setSpecularColor({ 0.0f, 0.0f, 0.0f }); redSphereProperties->setPhongExponent(10000); Sphere *redSphere = new Sphere(Vec3(-6, 0, 5), 3, redSphereProperties); ObjectProperties *greenSphereProperties = new ObjectProperties(); greenSphereProperties->setColour({ MAX_COLOUR, 0.84f, MIN_COLOUR }); greenSphereProperties->setSpecularColor({ 1.0f, 0.84f, 0.2f }); greenSphereProperties->setPhongExponent(10000); Sphere *greenSphere = new Sphere(Vec3(0, 3, 14), 6, greenSphereProperties); ObjectProperties *blueSphereProperties = new ObjectProperties(); blueSphereProperties->setColour({ MIN_COLOUR, MIN_COLOUR, MAX_COLOUR }); blueSphereProperties->setSpecularColor({ 0.0f, 0.0f, 0.0f }); blueSphereProperties->setPhongExponent(10000); Sphere *blueSphere = new Sphere(Vec3(4, -2, 7), 1, blueSphereProperties); ObjectProperties *whiteSphereProperties = new ObjectProperties(); whiteSphereProperties->setColour({ 0.3f, 0.3f, 0.3f }); whiteSphereProperties->setSpecularColor({ MAX_COLOUR, MAX_COLOUR, MAX_COLOUR }); // whiteSphereProperties->setSpecularColor({ MIN_COLOUR, MIN_COLOUR, MIN_COLOUR }); whiteSphereProperties->setPhongExponent(10000); ObjectProperties *darkSphereProperties = new ObjectProperties(); darkSphereProperties->setColour({ 0.3f, 0.3f, 0.3f }); darkSphereProperties->setSpecularColor({ MIN_COLOUR, MIN_COLOUR, MIN_COLOUR }); darkSphereProperties->setPhongExponent(10000); Dimension xLeft = -CAMERA_WIDTH / 2; Dimension xRight = CAMERA_WIDTH / 2; Dimension yBottom = -3; Dimension yTop = CAMERA_HEIGHT - 3; Dimension zFront = 1; Dimension zBack = 50; Quad *floor = new Quad({ xLeft, yBottom, zFront }, { xRight, yBottom, zFront }, { xRight, yBottom, zBack }, { xLeft, yBottom, zBack }, whiteSphereProperties); Quad *leftWall = new Quad({ xLeft, yBottom, zFront }, { xLeft, yBottom, zBack }, { xLeft, yTop, zBack }, { xLeft, yTop, zFront }, whiteSphereProperties); Quad *rightWall = new Quad({ xRight, yBottom, zFront }, { xRight, yTop, zFront }, { xRight, yTop, zBack }, { xRight, yBottom, zBack }, whiteSphereProperties); Quad *backWall = new Quad({ xLeft, yBottom, zBack }, { xRight, yBottom, zBack }, { xRight, yTop, zBack }, { xLeft, yTop, zBack }, whiteSphereProperties); Quad *ceiling = new Quad({ xLeft, yTop, zFront }, { xLeft, yTop, zBack }, { xRight, yTop, zBack }, { xRight, yTop, zFront }, whiteSphereProperties); PointLight light(Vec3(-5, 8, 4), { 0.6f, 0.6f, 0.6f }); PointLight light2(Vec3(0, 15, 4), { 0.4f, 0.4f, 0.4f }); PointLight light3(Vec3(14, 13, 2), { 0.5f, 0.5f, 0.5f }); Scene scene; scene.addObject(redSphere); scene.addObject(greenSphere); scene.addObject(blueSphere); scene.addObject(floor); // scene.addObject(leftWall); // scene.addObject(rightWall); // scene.addObject(backWall); // scene.addObject(ceiling); scene.addPointLight(&light); scene.addPointLight(&light2); // scene.addPointLight(&light3); std::clock_t start = clock(); raytracer.traceScene(scene, p_camera, image); std::clock_t finish = clock(); double duration = (finish - start) / (double)CLOCKS_PER_SEC; std::cout << "Time taken to raytrace scene: " << duration << " seconds." 
              << std::endl;
    // Initialize glut and create the display window
    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_RGB | GLUT_DOUBLE | GLUT_DEPTH);
    glutInitWindowSize(WIDTH, HEIGHT);
    glutInitWindowPosition(50, 50);
    glutCreateWindow("Display");
    // GL state can only be set once a context exists, i.e. after glutCreateWindow
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
    glEnable(GL_DEPTH_TEST);
    glClearColor(0.0, 0.0, 0.0, 1.0);
    glutDisplayFunc(display);
    glutMainLoop();
    return 0;
}
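The display callback registered with glutDisplayFunc above is not part of this snippet; a minimal sketch, assuming the traced result lives in a WIDTH x HEIGHT buffer of packed RGB floats named image (the buffer name and layout are assumptions):

// Sketch only: blit the traced image to the window each frame.
void display() {
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glDrawPixels(WIDTH, HEIGHT, GL_RGB, GL_FLOAT, image);
    glutSwapBuffers();
}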
void display(char* fileName) { Ray ray; long startTime = getMsTime(); long setupTime = 0; long fileWritingTime = 0; Intersection intersection; if (!state->drawn) { vector float negZero = (vector float) vec_splat_u32(-1); negZero = (vector float) vec_sl( (vector unsigned int) negZero, (vector unsigned int) negZero); VertexGroup raystart = VertexGroup(*state->camera); float xStepFactor __attribute__ ((aligned (16))) = 4.0 * params->screenXStep; float yStepFactor __attribute__ ((aligned (16))) = params->screenYStep; VertexGroup start = VertexGroup(*state->camera); VertexGroup xStep = VertexGroup(*state->xBasis * xStepFactor); VertexGroup yStep = VertexGroup(*state->yBasis * yStepFactor); VertexGroup screenCorner = VertexGroup((*state->screenCorner - *state->camera)); vector float offset = (vector float){0, .25, .5, .75}; VertexGroup pixel = xStep.madd(offset, screenCorner); BMP myImage; myImage.SetSize(params->xPixels, params->yPixels); myImage.SetBitDepth(24); VertexGroup rowPixel = VertexGroup(Vertex()); VertexGroup raydir = VertexGroup(Vertex()); vector bool int allHit = vec_cmplt(negativeZero(), vectorOne()); VectorSInt rInt; VectorSInt gInt; VectorSInt bInt; int xPixel_4 = params->xPixels / 4; setupTime = getMsTime() - startTime; for (int i = 0; i < params->yPixels; i++) { rowPixel = pixel; for (int j = 0; j < xPixel_4; j++) { raydir = rowPixel; raydir.normalize(); int index = i * xPixel_4 + j; ray.start = &raystart; ray.direction = &raydir; intersection.hit = allHit; raytracer.raytrace(&ray, &pixels[index], scene, params, intersection); // cout << "Raycasting (" << index << ") - (" << 4*j << "," << i << ")" << endl; rInt.vec = vec_cts(pixels[index].r, 8); gInt.vec = vec_cts(pixels[index].g, 8); bInt.vec = vec_cts(pixels[index].b, 8); for (int k = 0; k < 4; k++) { myImage(4*j+k,i)->Red = round(rInt.points[k]); myImage(4*j+k,i)->Green = round(gInt.points[k]); myImage(4*j+k,i)->Blue = round(bInt.points[k]); } rowPixel += xStep; } pixel -= yStep; } long fileStart = getMsTime(); myImage.WriteToFile(fileName); fileWritingTime = getMsTime() - fileStart; state->drawn = true; } long rayCasting = getMsTime() - startTime; long tracing = rayCasting - setupTime - fileWritingTime; cout << "Ray casting elapsed time: " << rayCasting << " ms (setup: " << setupTime << " ms, tracing: " << tracing << " ms, file writing: " << fileWritingTime << " ms)" << endl; }