// Project a world-space point into screen space by forwarding to the
// underlying camera; the proxy itself adds no transformation.
kmVec3 CameraProxy::project_point(const RenderTarget& target, const Viewport &viewport, const kmVec3& point) {
    const kmVec3 projected = camera()->project_point(target, viewport, point);
    return projected;
}
// Entry point of the Bezier fragment-shader demo.
// Creates a window + GL context (SDL2 build, with an Android-specific size;
// otherwise GLUT), compiles/links the bezier shaders, then runs the event
// loop (SDL) or hands control to GLUT callbacks.
// Returns 0 on normal exit, non-zero on any init/shader failure.
// NOTE: the stray #endif below closes an #if that selects an alternative
// main() signature outside this excerpt.
int main(int argc, char *argv[])
#endif
{
#ifdef USE_SDL
    if (SDL_Init(SDL_INIT_VIDEO) < 0) { report("Can't initialize SDL\n"); return 1; }
    // TODO fullscreen + get screen size
#ifndef __ANDROID__
    width=height=700;
#else
    // Hard-coded portrait size on Android; the query-based code is kept for reference.
    width=320; height=480;
    // const SDL_VideoInfo* vinfo=SDL_GetVideoInfo();
    // width = vinfo->current_w;
    // height = vinfo->current_h;
    // report("Detected %dx%d resolution.\n",width,height);
#endif
    window = SDL_CreateWindow("Bezier Fragment Shader Demo", SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, width, height, SDL_WINDOW_RESIZABLE | SDL_WINDOW_SHOWN | SDL_WINDOW_OPENGL);
    if(window == NULL) { report("Can't create window: %s\n", SDL_GetError()); return -1; }
    glcontext = SDL_GL_CreateContext(window);
    if(glcontext == NULL) { report("Can't create context: %s\n", SDL_GetError()); return -1; }
    SDL_GL_MakeCurrent(window, glcontext);
    // NOTE(review): SDL_GL_SetAttribute normally must be called BEFORE context
    // creation to take effect -- verify double buffering is actually on.
    SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
#else
    glutInit(&argc,argv);
    glutInitWindowSize(700,700);
    glutInitDisplayMode(GLUT_RGB|GLUT_DOUBLE|GLUT_DEPTH);
    glutCreateWindow("Bezier Fragment Shader Demo");
#endif
    glViewport(0, 0, width, height);
    glClearColor(0.0, 0.0, 0.0, 0.0);
    glEnable(GL_DEPTH_TEST);
    // Compile both shaders; abort if either fails.
    fshader = loadShader("bezier.glsl",GL_FRAGMENT_SHADER);
    vshader = loadShader("bezier-vertex.glsl",GL_VERTEX_SHADER);
    if (!(fshader&&vshader)) { report("One of shaders failed, aborting.\n"); return -1; }
    program = glCreateProgram();
    glAttachShader(program, fshader);
    glAttachShader(program, vshader);
    // Fixed attribute locations: 0 = position, 1 = bezier coordinates.
    glBindAttribLocation(program, 0, "vertexPos");
    glBindAttribLocation(program, 1, "bezCoordsAttr");
    glLinkProgram(program);
    glGetProgramiv(program, GL_LINK_STATUS, &linked);
    if (!linked) { report("Can't link the shader\n"); return -1; }
    glUseProgram(program);
    // Default render mode: fill + stroke, bezier evaluation enabled.
    glUniform1i(glGetUniformLocation(program, "drawFill"), 1);
    glUniform1i(glGetUniformLocation(program, "useBezier"), 1);
    glUniform1i(glGetUniformLocation(program, "drawStroke"), 1);
#ifndef __ANDROID__
    // glEnableClientState(GL_VERTEX_ARRAY); // Why don't they work like glEnable(A|B) did before? or am I dumb?
    // glEnableClientState(GL_TEXTURE_COORD_ARRAY);
#endif
    glEnableVertexAttribArray(0);
    glEnableVertexAttribArray(1);
    createShapes();
#ifdef USE_SDL
    // SDL main loop: poll events (non-blocking) and redraw every iteration.
    int running = 1;
    timemark=SDL_GetTicks();
    while(running) {
        if ( SDL_PollEvent(&event) > 0 ) {
            // SDL_WaitEvent(&event);
            switch(event.type) {
                case SDL_KEYDOWN:
                    switch(event.key.keysym.sym) {
#ifdef __ANDROID__
                        case SDLK_AC_SEARCH:
#endif
                        case SDLK_F1: performanceReport(); break;
#ifndef __ANDROID__
                        case SDLK_ESCAPE: running = 0; break;
#else
                        case SDLK_AC_BACK: running = 0; break;
#endif
                        // Arrow keys orbit the camera in 5-degree (pi/36) steps.
                        case SDLK_LEFT: Camera.beta += M_PI / 36; camera(); break;
                        case SDLK_RIGHT: Camera.beta -= M_PI / 36; camera(); break;
                        case SDLK_UP: Camera.alpha += M_PI / 36; camera(); break;
                        case SDLK_DOWN: Camera.alpha -= M_PI / 36; camera(); break;
                        default: keyb(event.key.keysym.scancode); break;
                    }
                    break;
                // Wheel zooms by +/-10% per notch.
                case SDL_MOUSEWHEEL: Camera.dist*= (event.wheel.y < 0)? 1.1 : 0.9; camera(); break;
                // Drag with left button (state bit 1) rotates via motion().
                case SDL_MOUSEMOTION: if(event.motion.state == 1) motion(event.motion.xrel, event.motion.yrel); break;
                // Note, the first frame flickers, TODO workaround
                // TODO: track the real sequence of WINDOWEVENT_ENTER and WINDOWEVENT_SIZE_CHANGED events
                case SDL_WINDOWEVENT: if (event.window.event==SDL_WINDOWEVENT_SIZE_CHANGED) size(event.window.data1, event.window.data2); camera(); break;
                // handle touch events here
                case SDL_QUIT: running = 0; break;
            }
        }
        draw();
    }
    performanceReport();
    // Orderly teardown: release context before destroying window.
    SDL_GL_MakeCurrent(NULL, NULL);
    SDL_GL_DeleteContext(glcontext);
    SDL_DestroyWindow(window);
    SDL_Quit();
#else
    // GLUT build: register callbacks and enter the GLUT main loop (never returns).
    glutReshapeFunc(size);
    glutDisplayFunc(draw);
    glutIdleFunc(draw);
    glutMouseFunc(mouse);
    glutMotionFunc(motion);
    glutSpecialFunc(keybs);
    glutKeyboardFunc(keyb);
    glutTimerFunc(TIMER_RESOLUTION,timer,0);
    glutMainLoop();
#endif
    return 0;
}
// Render one frame: a perspective primary view of the scene (optionally lit,
// fogged, textured, and with a NURBS surface) plus an orthographic secondary
// view in the top-right quarter, using scissored viewports.
void display (void)
{
    glClearColor (0.0,0.0,0.0,1.0); //clear the screen to black
    glClear (GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); //clear the color buffer and the depth buffer
    glMatrixMode( GL_MODELVIEW );
    glColor4f(1.0f,0.0f,0.0f,1.0f);
    glEnable(GL_SCISSOR_TEST);
    glMatrixMode (GL_PROJECTION); //set the matrix to projection
    // Draw the primary view
    glViewport (0, 0, (GLsizei)screen_width, (GLsizei)screen_height); //set the viewport to the current window specifications
    glScissor(0, 0, (GLsizei)screen_width, (GLsizei)screen_height);
    glLoadIdentity ();
    if (lighting) {
        // Ambient term on light 0, white diffuse on light 1 at (xpos,ypos,zpos).
        GLfloat AmbientLight[] = {0.1, 0.1, 0.2};
        glLightfv (GL_LIGHT0, GL_AMBIENT, AmbientLight);
        GLfloat DiffuseLight[] = {1, 1, 1};
        glLightfv (GL_LIGHT1, GL_DIFFUSE, DiffuseLight);
        GLfloat LightPosition[] = {xpos, ypos, zpos, 1};
        glLightfv(GL_LIGHT1, GL_POSITION, LightPosition);
    }
    gluPerspective (60, (GLfloat)screen_width / (GLfloat)screen_height, 1.0, 100.0);
    gluLookAt (0.0, 0.0, 5.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0); //camera position, x,y,z, looking at x,y,z, Up Positions of the camera
    camera();
    glPushMatrix();
    if (lighting) {
        // NOTE(review): textures are re-loaded from disk and freed every single
        // frame -- expensive; consider loading once at init. TODO confirm intent.
        texture1 = LoadTexture( "texture.raw", 256, 256 );
        texture2 = LoadTexture( "/dev/urandom", 256, 256 );
        texture3 = LoadTexture( "water.raw", 500, 375 );
        texture4 = LoadTexture( "bubbles.raw", 200, 200 );
        glEnable( GL_TEXTURE_2D ); //enable 2D texturing
        glEnable(GL_TEXTURE_GEN_S); //enable texture coordinate generation
        glEnable(GL_TEXTURE_GEN_T);
        if (fog) {
            GLfloat fogColor[4]= {0.5f, 1.0f, 0.5f, 1}; // Fog Color
            glFogi(GL_FOG_MODE, GL_EXP); // Fog Mode
            glFogfv(GL_FOG_COLOR, fogColor); // Set Fog Color
            glFogf(GL_FOG_DENSITY, 1.0f); // How Dense Will The Fog Be
            glHint(GL_FOG_HINT, GL_DONT_CARE); // Fog Hint Value
            glFogf(GL_FOG_START, n*space); // Fog Start Depth
            glFogf(GL_FOG_END,-n*space); // Fog End Depth
        }
        if (nurb) {
            // Build and render a 4x4 bicubic NURBS patch positioned inside the grid.
            glPushMatrix();
            glRotatef(270,0.0,1.0,0.0);
            glTranslated(n*space/2,n*space/2,-n*space);
            GLfloat ctlpoints[4][4][3];
            GLUnurbsObj *theNurb;
            // Clamped knot vector for a single cubic span.
            GLfloat knots[8] = {0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0};
            int u, v;
            for (u = 0; u < 4; u++) {
                for (v = 0; v < 4; v++) {
                    ctlpoints[u][v][0] = 2.0*((GLfloat)u - 1.5);
                    ctlpoints[u][v][1] = 2.0*((GLfloat)v - 1.5);
                    // Raise the four inner control points, sink the outer ring.
                    if ( (u == 1 || u == 2) && (v == 1 || v == 2)) ctlpoints[u][v][2] = 3.0;
                    else ctlpoints[u][v][2] = -3.0;
                }
            }
            theNurb = gluNewNurbsRenderer();
            // NOTE(review): theNurb is never gluDeleteNurbsRenderer()'d, so a
            // renderer object leaks every frame the NURBS branch runs.
            gluNurbsProperty(theNurb, GLU_SAMPLING_TOLERANCE, 25.0);
            gluNurbsProperty(theNurb, GLU_DISPLAY_MODE, GLU_FILL);
            gluBeginSurface(theNurb);
            gluNurbsSurface(theNurb, 8, knots, 8, knots, 4 * 3, 3, &ctlpoints[0][0][0], 4, 4, GL_MAP2_VERTEX_3);
            gluEndSurface(theNurb);
            glPopMatrix();
        }
    }
    cube(); //call the cube drawing function
    glPopMatrix();
    if (lighting) {
        FreeTexture( texture1 );
        FreeTexture( texture2 );
        FreeTexture( texture3 );
        FreeTexture( texture4 );
    }
    // Wireframe bounding cube around the n x n x n cell grid.
    glPushMatrix();
    glTranslated(space*((n/2)+1),space*(n/2),space*(n/2));
    glutWireCube(space*n);
    glPopMatrix();
    // Draw the secondary view
    // NOTE(review): the inset's height uses screen_width/4 rather than
    // screen_height/4 -- possibly intentional (square inset); verify.
    glViewport (3*(screen_width/4), 3*(screen_height/4), screen_width/4, screen_width/4); //set the viewport to the current window specifications
    glScissor(3*(screen_width/4), 3*(screen_height/4), screen_width/4, screen_width/4);
    glLoadIdentity();
    glOrtho(-1, 13, -1, 13, -1, 13);
    glRotatef(90,0.0,1.0,0.0);
    cube();
    glDisable(GL_SCISSOR_TEST);
    glutSwapBuffers(); //swap the buffers
}
// Handle a left-mouse press in the Maya viewport: cast a world-space ray from
// the click position, find the first (breadth-first) mesh it intersects, and
// seed screenPoints/splinePoints with the hit. Sets intersectionFound
// accordingly. Non-left clicks fall through to the base MPxContext handler.
MStatus HairToolContext::doPress( MEvent& event )
{
    // if we have a left mouse click
    if(event.mouseButton() == MEvent::kLeftMouse) {
        //Our Viewer
        m_View = M3dView::active3dView();
        //Get Screen click position
        event.getPosition( m_storage[0], m_storage[1] );
        // Reset the 2-D click trail to just this click.
        screenPoints = vector<vec2>();
        screenPoints.push_back(vec2(m_storage[0], m_storage[1]));
        //char buffer[200];
        //sprintf(buffer, "print \"%i, %i\\n\"", m_storage[0], m_storage[1]);
        //MGlobal::executeCommand(buffer);
        //Camera stuff
        // Convert the screen click into a world-space ray (origin + direction).
        MPoint origin = MPoint();
        MVector direction = MVector();
        m_View.viewToWorld(m_storage[0], m_storage[1], origin, direction);
        //Iterate through meshes in scene
        bool intersection = false;
        MPointArray points = MPointArray();
        MIntArray polygonIds = MIntArray();
        MItDag dagIter = MItDag(MItDag::kBreadthFirst, MFn::kInvalid);
        for( ; !dagIter.isDone(); dagIter.next() ){
            MDagPath dagPath;
            dagIter.getPath(dagPath);
            MFnDagNode dagNode( dagPath);
            //Object cannot be intermediate, it must be a mesh
            if( dagNode.isIntermediateObject() ) continue;
            if( !dagPath.hasFn(MFn::kMesh) ) continue;
            if( dagPath.hasFn(MFn::kTransform) ) continue;
            MGlobal::executeCommand(MString("print \"node is a mesh \\n\""));
            //MFnMesh mesh = MFnMesh(dagPath);
            MFnMesh mesh(dagPath);
            // Fresh output arrays per mesh; stop at the first mesh that is hit.
            points = MPointArray();
            polygonIds = MIntArray();
            intersection = mesh.intersect(origin, direction, points, 1e-010, MSpace::kWorld, &polygonIds);
            if(intersection){
                break;
            }
        }
        if(intersection){
            intersectionFound = true;
            // dagIter still points at the mesh that was hit (loop broke early).
            MDagPath dagPath;
            dagIter.getPath(dagPath);
            // MFnMesh mesh = MFnMesh(dagPath);
            MFnMesh mesh(dagPath);
            //Polygon Normal
            MVector polygonNormal;
            mesh.getPolygonNormal(polygonIds[0], polygonNormal, MSpace::kWorld);
            // NOTE(review): MVector::angle returns radians, so comparing with
            // 20.0f (radians) is effectively always true; the branch body is
            // empty anyway -- looks like unfinished work. TODO confirm.
            if(polygonNormal.normal().angle(direction.normal()) < 20.0f){
                //polygonNormal = mesh.get
            }
            //Camera Right
            // Reuses dagPath to fetch the active camera's right vector.
            m_View.getCamera(dagPath);
            MFnCamera camera(dagPath);
            MVector cameraRight = camera.rightDirection(MSpace::kWorld);
            //Resulting Plane
            //Point point = points[0];
            //Normal normal = cameraRight^polygonNormal;
            //pushback point
            // Start the spline at the first intersection point.
            splinePoints = vector<MPoint>();
            splinePoints.push_back(MPoint(points[0].x, points[0].y, points[0].z, points[0].w));
            /*//Calculate Tvalue
            tValue = (points[0].x - origin.x)/direction.x;*/
        }
        else{
            intersectionFound = false;
            MGlobal::executeCommand("print \" No Intersection \\n\"");
        }
        // yay!
        return MS::kSuccess;
    }
    // just let the base class handle the event*/
    return MPxContext::doPress(event);
}
// Command-line tool: acquire images from a Point Grey flycapture camera via a
// callback and stream them to stdout as serialized cv::Mat, optionally pushed
// through a tbb filter pipeline built from unrecognized CLI arguments.
// Returns 0 on success (or after --help/--list-*/--set), 1 on error.
int main( int argc, char** argv )
{
    try
    {
        unsigned int id;
        std::string fields;
        std::string setattributes;
        unsigned int discard;
        boost::program_options::options_description description( "options" );
        description.add_options()
            ( "help,h", "display help message" )
            ( "set", boost::program_options::value< std::string >( &setattributes ), "set camera attributes as comma-separated name-value pairs and exit" )
            ( "serial", boost::program_options::value< unsigned int >( &id )->default_value( 0 ), "camera serial; default: first available camera" )
            ( "discard,d", "discard frames, if cannot keep up; same as --buffer=1" )
            ( "buffer", boost::program_options::value< unsigned int >( &discard )->default_value( 0 ), "maximum buffer size before discarding frames" )
            ( "fields,f", boost::program_options::value< std::string >( &fields )->default_value( "t,rows,cols,type" ), "header fields, possible values: t,rows,cols,type,size" )
            ( "list-attributes", "output current camera attributes" )
            ( "list-cameras", "list all cameras and exit" )
            ( "verbose,v", "be more verbose" )
            ( "header", "output header only" )
            ( "no-header", "output image data only" );
        boost::program_options::variables_map vm;
        boost::program_options::store( boost::program_options::parse_command_line( argc, argv, description), vm );
        // Second parse with allow_unregistered(): leftover args become cv::Mat filters.
        boost::program_options::parsed_options parsed = boost::program_options::command_line_parser(argc, argv).options( description ).allow_unregistered().run();
        boost::program_options::notify( vm );
        if( vm.count( "header" ) + vm.count( "no-header" ) > 1 ) { COMMA_THROW( comma::exception, "--header, and --no-header are mutually exclusive" ); }
        if ( vm.count( "help" ) )
        {
            std::cerr << "acquire images from a point grey flycapture camera, same as flycapture-cat but using a callback " << std::endl;
            std::cerr << "instead of a thread to acquire the images" << std::endl;
            std::cerr << "output to stdout as serialized cv::Mat" << std::endl;
            std::cerr << "usage: flycapture-capture [<options>] [<filters>]" << std::endl;
            std::cerr << "known bug: freezes on slow consumers even with --discard, use flycapture-cat instead" << std::endl;
            std::cerr << description << std::endl;
            std::cerr << snark::cv_mat::filters::usage() << std::endl;
            return 1;
        }
        verbose = vm.count( "verbose" );
        if( vm.count( "list-cameras" ) )
        {
            const std::vector< unsigned int >& list = snark::camera::flycapture::list_camera_serials();
            std::cerr << "got " << list.size() << " cameras." << std::endl;
            for( std::size_t i = 0; i < list.size(); ++i ) // todo: serialize properly with name-value
            {
                std::cout << snark::camera::flycapture::describe_camera(list[i]) << std::endl;
            }
            return 0;
        }
        // --discard is shorthand for a single-slot buffer.
        if ( vm.count( "discard" ) ) { discard = 1; }
        discard_more_than = discard;
        snark::camera::flycapture::attributes_type attributes;
        if( vm.count( "set" ) )
        {
            // Despite the help text saying comma-separated, pairs are split on ';'.
            comma::name_value::map m( setattributes, ';', '=' );
            attributes.insert( m.get().begin(), m.get().end() );
        }
        if( verbose ) { std::cerr << "flycapture-cat: connecting..." << std::endl; }
        // Connecting with non-empty attributes applies them (--set mode exits below).
        snark::camera::flycapture camera( id, attributes );
        if( verbose ) { std::cerr << "flycapture-cat: connected to camera " << camera.id() << std::endl; }
        if( verbose ) { std::cerr << "flycapture-cat: total bytes per frame: " << camera.total_bytes_per_frame() << std::endl; }
        if( !attributes.empty() ) { return 0; }
        if( vm.count( "list-attributes" ) )
        {
            attributes = camera.attributes(); // quick and dirty
            for( snark::camera::flycapture::attributes_type::const_iterator it = attributes.begin(); it != attributes.end(); ++it )
            {
                if( it != attributes.begin() ) { std::cout << std::endl; }
                std::cout << it->first;
                if( it->second != "" ) { std::cout << '=' << it->second; }
            }
            return 0;
        }
        // Build the binary header format: timestamps as "t", everything else as uint.
        std::vector< std::string > v = comma::split( fields, "," );
        comma::csv::format format;
        for( unsigned int i = 0; i < v.size(); ++i )
        {
            if( v[i] == "t" ) { format += "t"; }
            else { format += "ui"; }
        }
        std::vector< std::string > filterStrings = boost::program_options::collect_unrecognized( parsed.options, boost::program_options::include_positional );
        std::string filters;
        if( filterStrings.size() == 1 ) { filters = filterStrings[0]; }
        if( filterStrings.size() > 1 ) { COMMA_THROW( comma::exception, "please provide filters as name-value string" ); }
        boost::scoped_ptr< snark::cv_mat::serialization > serialization;
        if( vm.count( "no-header" ) )
        {
            serialization.reset( new snark::cv_mat::serialization( "", format ) );
        }
        else
        {
            serialization.reset( new snark::cv_mat::serialization( fields, format, vm.count( "header" ) ) );
        }
        // Frames arrive via callback into a queue consumed by the tbb pipeline.
        callback.reset( new snark::camera::flycapture::callback( camera, on_frame_ ) );
        tbb::task_scheduler_init init;
        tbb::filter_t< void, Pair > read( tbb::filter::serial_in_order, boost::bind( read_, _1 ) );
        tbb::filter_t< Pair, void > write( tbb::filter::serial_in_order, boost::bind( write_, boost::ref( *serialization), _1 ) );
        tbb::filter_t< void, Pair > imageFilters = read;
        if( !filters.empty() )
        {
            // Chain each cv::Mat filter; parallel-capable filters run in parallel mode.
            std::vector< snark::cv_mat::filter > cvMatFilters = snark::cv_mat::filters::make( filters );
            for( std::size_t i = 0; i < cvMatFilters.size(); ++i )
            {
                tbb::filter::mode mode = tbb::filter::serial_in_order;
                if( cvMatFilters[i].parallel ) { mode = tbb::filter::parallel; }
                tbb::filter_t< Pair, Pair > filter( mode, boost::bind( cvMatFilters[i].filter_function, _1 ) );
                imageFilters = imageFilters & filter;
            }
        }
        if( verbose ) { std::cerr << "flycapture-cat: starting loop" << std::endl; }
        // Re-run the pipeline until shutdown; queue.wait() blocks for new frames.
        while( !is_shutdown && running )
        {
            tbb::parallel_pipeline( init.default_num_threads(), imageFilters & write );
            queue.wait();
        }
        if( verbose ) { std::cerr << "flycapture-cat: exited loop" << std::endl; }
        if( is_shutdown && verbose ) { std::cerr << "flycapture-cat: caught signal" << std::endl; }
        return 0;
    }
    catch( std::exception& ex )
    {
        std::cerr << "flycapture-cat: " << ex.what() << std::endl;
    }
    catch( ... )
    {
        std::cerr << "flycapture-cat: unknown exception" << std::endl;
    }
    return 1;
}
void Scene01::update(float deltaTime) { // ############################################################### // Make SuperScene do what it needs to do // - Escape key stops Scene // - Move Camera // ############################################################### SuperScene::update(deltaTime); SuperScene::moveCamera(deltaTime); // ############################################################### // Mouse cursor in screen coordinates // ############################################################### int mousex = input()->getMouseX(); int mousey = input()->getMouseY(); std::string cursortxt = "cursor ("; cursortxt.append(rt2d::to_string<int>(mousex)); cursortxt.append(","); cursortxt.append(rt2d::to_string<int>(mousey)); cursortxt.append(")"); text[9]->message(cursortxt); // ############################################################### // Rotate default_entity // ############################################################### default_entity->rotation -= 90 * DEG_TO_RAD * deltaTime; // 90 deg. per sec. 
if (default_entity->rotation < TWO_PI) { default_entity->rotation += TWO_PI; } // ############################################################### // alpha child1_entity + child2_entity // ############################################################### static float counter = 0; child1_entity->sprite()->color.a = std::abs(sin(counter)*255); child2_entity->sprite()->color.a = std::abs(cos(counter)*255); counter+=deltaTime/2; if (counter > TWO_PI) { counter = 0; } // ############################################################### // Animate animated_entity // ############################################################### animated_entity->rotation += 22.5 * DEG_TO_RAD * deltaTime; if (animated_entity->rotation > -TWO_PI) { animated_entity->rotation -= TWO_PI; } static int f = 0; if (f > 15) { f = 0; } animated_entity->sprite()->frame(f); if (t.seconds() > 0.25f) { static RGBAColor rgb = RED; animated_entity->sprite()->color = rgb; rgb = Color::rotate(rgb, 0.025f); f++; t.start(); } // ############################################################### // ui_element uvoffset // ############################################################### static float xoffset = 0.0f; xoffset += deltaTime / 2; if (input()->getKey( GLFW_KEY_SPACE )) { xoffset = 0.0f; } ui_element->sprite()->uvoffset.x = xoffset; ui_element->position = Point2(camera()->position.x + SWIDTH/2 - 150, camera()->position.y - SHEIGHT/2 + 20); }
// GLFW/GLXW demo: open an OpenGL 3.3 core-profile window and render a
// continuously rotating, vertex-colored cube with a free-look camera.
// Q quits; Z/X toggle wireframe/fill. Returns 0 on clean exit, -1 on init failure.
int main() {
    if(glfwInit() == GL_FALSE) {
        std::cerr << "Failed to initialize GLFW" << std::endl;
        return -1;
    }
    // defer() runs its statements at scope exit (RAII cleanup helper).
    defer(std::cout << "Calling glfwTerminate()" << std::endl; glfwTerminate());
    glfwDefaultWindowHints();
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
    glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
    GLFWwindow* window = glfwCreateWindow(800, 600, "Rotating cube", nullptr, nullptr);
    if(window == nullptr) {
        std::cerr << "Failed to open GLFW window" << std::endl;
        return -1;
    }
    defer(std::cout << "Calling glfwDestroyWindow()" << std::endl; glfwDestroyWindow(window));
    glfwMakeContextCurrent(window);
    if(glxwInit()) {
        std::cerr << "Failed to init GLXW" << std::endl;
        return -1;
    }
    glfwSwapInterval(1); // vsync
    glfwSetWindowSizeCallback(window, windowSizeCallback);
    glfwShowWindow(window);
    // Compile and link the shader program; shaders can be deleted once linked.
    bool errorFlag = false;
    std::vector<GLuint> shaders;
    GLuint vertexShaderId = loadShader("shaders/vertexShader.glsl", GL_VERTEX_SHADER, &errorFlag);
    if(errorFlag) {
        std::cerr << "Failed to load vertex shader (invalid working directory?)" << std::endl;
        return -1;
    }
    shaders.push_back(vertexShaderId);
    GLuint fragmentShaderId = loadShader("shaders/fragmentShader.glsl", GL_FRAGMENT_SHADER, &errorFlag);
    if(errorFlag) {
        std::cerr << "Failed to load fragment shader (invalid working directory?)" << std::endl;
        return -1;
    }
    shaders.push_back(fragmentShaderId);
    GLuint programId = prepareProgram(shaders, &errorFlag);
    if(errorFlag) {
        std::cerr << "Failed to prepare program" << std::endl;
        return -1;
    }
    defer(glDeleteProgram(programId));
    glDeleteShader(vertexShaderId);
    glDeleteShader(fragmentShaderId);
    // Upload vertex positions and per-vertex colors into two VBOs.
    GLuint vertexVBO;
    glGenBuffers(1, &vertexVBO);
    defer(glDeleteBuffers(1, &vertexVBO));
    glBindBuffer(GL_ARRAY_BUFFER, vertexVBO);
    glBufferData(GL_ARRAY_BUFFER, sizeof(globVertexBufferData), globVertexBufferData, GL_STATIC_DRAW);
    GLuint colorVBO;
    glGenBuffers(1, &colorVBO);
    defer(glDeleteBuffers(1, &colorVBO));
    glBindBuffer(GL_ARRAY_BUFFER, colorVBO);
    glBufferData(GL_ARRAY_BUFFER, sizeof(globColorBufferData), globColorBufferData, GL_STATIC_DRAW);
    // VAO: attribute 0 = position (vec3), attribute 1 = color (vec3).
    GLuint vao;
    glGenVertexArrays(1, &vao);
    defer(glDeleteVertexArrays(1, &vao));
    glBindVertexArray(vao);
    glBindBuffer(GL_ARRAY_BUFFER, vertexVBO);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, nullptr);
    glBindBuffer(GL_ARRAY_BUFFER, 0); // unbind VBO
    glBindBuffer(GL_ARRAY_BUFFER, colorVBO);
    glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 0, nullptr);
    glBindBuffer(GL_ARRAY_BUFFER, 0); // unbind VBO
    glBindVertexArray(0); // unbind VAO
    // NOTE(review): 80.0f suggests degrees -- older GLM takes degrees here,
    // newer GLM takes radians; verify against the GLM version in use.
    glm::mat4 projection = glm::perspective(80.0f, 4.0f / 3.0f, 0.3f, 100.0f);
    GLint matrixId = glGetUniformLocation(programId, "MVP");
    auto startTime = std::chrono::high_resolution_clock::now();
    auto prevTime = startTime;
    // hide cursor
    glfwSetInputMode(window, GLFW_CURSOR, GLFW_CURSOR_DISABLED);
    Camera camera(window, glm::vec3(0, 0, 5), 3.14f /* toward -Z */, 0.0f /* look at the horizon */);
    // NOTE(review): GL_DOUBLEBUFFER is not a valid glEnable() capability
    // (double buffering is a framebuffer/context property); this call likely
    // just raises GL_INVALID_ENUM. TODO confirm and remove.
    glEnable(GL_DOUBLEBUFFER);
    glEnable(GL_DEPTH_TEST);
    glEnable(GL_CULL_FACE);
    glDepthFunc(GL_LESS);
    glClearColor(0, 0, 0, 1);
    while(glfwWindowShouldClose(window) == GL_FALSE) {
        if(glfwGetKey(window, GLFW_KEY_Q) == GLFW_PRESS) break;
        if(glfwGetKey(window, GLFW_KEY_Z) == GLFW_PRESS) {
            glPolygonMode(GL_FRONT_AND_BACK, GL_LINE);
        }
        if(glfwGetKey(window, GLFW_KEY_X) == GLFW_PRESS) {
            glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);
        }
        // Track total elapsed time (drives rotation) and per-frame delta (drives camera).
        auto currentTime = std::chrono::high_resolution_clock::now();
        float startDeltaTimeMs = std::chrono::duration_cast<std::chrono::milliseconds>(currentTime - startTime).count();
        float prevDeltaTimeMs = std::chrono::duration_cast<std::chrono::milliseconds>(currentTime - prevTime).count();
        prevTime = currentTime;
        // One full revolution every rotationTimeMs; keep only the fractional turn.
        float rotationTimeMs = 3000.0f;
        float currentRotation = startDeltaTimeMs / rotationTimeMs;
        float angle = 360.0f*(currentRotation - (long)currentRotation);
        glm::mat4 view;
        camera.getViewMatrix(prevDeltaTimeMs, &view);
        glm::mat4 model = glm::rotate(angle, 0.0f, 1.0f, 0.0f);
        glm::mat4 mvp = projection * view * model; // matrix multiplication is the other way around
        glUniformMatrix4fv(matrixId, 1, GL_FALSE, &mvp[0][0]);
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
        glUseProgram(programId);
        glBindVertexArray(vao);
        glEnableVertexAttribArray(0); // could be done once before while loop ...  ->
        glEnableVertexAttribArray(1);
        glDrawArrays(GL_TRIANGLES, 0, 3*12); // 12 triangles = 6 cube faces
        glDisableVertexAttribArray(1); // -> ... in this case remove these two lines
        glDisableVertexAttribArray(0);
        glfwSwapBuffers(window);
        glfwPollEvents();
    }
    return 0;
}
// Render one roller-coaster frame: an unlit sky box drawn around the origin
// (so it never appears to move), then the spline track and a tiled ground
// plane from the rider's point of view.
void display() {
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // clear the buffer
	glLoadIdentity();
	current_frame++; // increment the current frame
	camera(); // get the up-vector, rider_point and lookat_point
	rider_point.x += 1.15*up_vector.x; // prop the camera up slightly
	rider_point.y += 1.15*up_vector.y;
	rider_point.z += 1.15*up_vector.z;
	lookat_point.x += up_vector.x; // make the lookat_point slightly higher
	lookat_point.y += up_vector.y;
	lookat_point.z += up_vector.z;
	// Sky camera sits at the origin with only the view DIRECTION applied, so
	// the sky box stays centred on the viewer regardless of rider position.
	gluLookAt(0, 0, 0, // ensure that the sky texture stays in place
		lookat_point.x - rider_point.x, lookat_point.y - rider_point.y, (lookat_point.z - rider_point.z),
		up_vector.x, up_vector.y, up_vector.z);
	glScalef(300, 300, 300); // scale it so that it fits the cube properly
	/*DRAW THE SKY*/
	glBindTexture(GL_TEXTURE_2D, sky_texture);
	glDisable(GL_LIGHTING);
	glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
	glEnable(GL_TEXTURE_2D);
	glBegin(GL_QUADS); // use quads and make SIX faces like a cube
	glTexCoord2f(0, 0); glVertex3f(-1, -1, -1); // -Z face
	glTexCoord2f(0, 1); glVertex3f(-1, 1, -1);
	glTexCoord2f(1, 1); glVertex3f(1, 1, -1);
	glTexCoord2f(1, 0); glVertex3f(1, -1, -1);
	glTexCoord2f(0, 0); glVertex3f(-1, -1, 1); // +Z face
	glTexCoord2f(0, 1); glVertex3f(-1, 1, 1);
	glTexCoord2f(1, 1); glVertex3f(1, 1, 1);
	glTexCoord2f(1, 0); glVertex3f(1, -1, 1);
	glTexCoord2f(0, 0); glVertex3f(-1, 1, -1); // +Y face
	glTexCoord2f(0, 1); glVertex3f(-1, 1, 1);
	glTexCoord2f(1, 1); glVertex3f(1, 1, 1);
	glTexCoord2f(1, 0); glVertex3f(1, 1, -1);
	glTexCoord2f(0, 0); glVertex3f(-1, -1, -1); // -Y face
	glTexCoord2f(0, 1); glVertex3f(-1, -1, 1);
	glTexCoord2f(1, 1); glVertex3f(1, -1, 1);
	glTexCoord2f(1, 0); glVertex3f(1, -1, -1);
	glTexCoord2f(0, 0); glVertex3f(-1, -1, -1); // -X face
	glTexCoord2f(0, 1); glVertex3f(-1, -1, 1);
	glTexCoord2f(1, 1); glVertex3f(-1, 1, 1);
	glTexCoord2f(1, 0); glVertex3f(-1, 1, -1);
	// NOTE(review): the +X face uses texcoords 0..2 (tiled) unlike the other
	// five faces' 0..1 -- looks like a copy/paste slip from the ground quads;
	// verify against the rendered sky.
	glTexCoord2f(0, 0); glVertex3f(1, -1, -1); // +X face
	glTexCoord2f(0, 2); glVertex3f(1, -1, 1);
	glTexCoord2f(2, 2); glVertex3f(1, 1, 1);
	glTexCoord2f(2, 0); glVertex3f(1, 1, -1);
	glEnd();
	glDisable(GL_TEXTURE_2D);
	glEnable(GL_LIGHTING);
	// Switch to the real rider camera for the rest of the scene.
	glLoadIdentity();
	gluLookAt(rider_point.x, rider_point.y, rider_point.z,
		lookat_point.x, lookat_point.y, lookat_point.z,
		up_vector.x, up_vector.y, up_vector.z); // camera to look at where the coaster is going
	GLfloat light0_pos[] = {-45, -45, 15, 1}; // set up the two different lights
	GLfloat light1_pos[] = {45, 45, 15, 1}; // second light
	glLightfv(GL_LIGHT0, GL_POSITION, light0_pos);
	glLightfv(GL_LIGHT1, GL_POSITION, light1_pos);
	glDisable(GL_LIGHTING);
	/*DRAW TRACK*/
	glLineWidth(18); // make the width of the lines 18
	glBegin(GL_LINES); // use lines
	for (int i=0; i<g_iNumOfSplines; i++) { // do it for the two splines in track.txt
		int l = g_Splines[i].numControlPoints;
		// Each draw() call gets a sliding window of four control points
		// (wrapping around via % l) for Catmull-Rom-style evaluation.
		for (int j=0; j<l; j++) {
			glColor3f(10.0*i,10.0,10.0*j); // color it differently depending which spline you're on
			draw(&(g_Splines[i].points[j%l]), // call the draw function
				&(g_Splines[i].points[(j+1)%l]),
				&(g_Splines[i].points[(j+2)%l]),
				&(g_Splines[i].points[(j+3)%l]));
		}
	}
	glEnd();
	glEnable(GL_LIGHTING);
	/*DRAW GROUND*/
	glBindTexture(GL_TEXTURE_2D, ground_texture);
	glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
	glEnable(GL_TEXTURE_2D);
	glBegin(GL_QUADS); // use quads
	for (int i =-400; i<400; i+=30) { // will cover large area
		for (int j=-400; j<400; j+=30) {
			glTexCoord2f(0, 0); glVertex3f(i, j, -10); // four points for quads
			glTexCoord2f(0, 2); glVertex3f(i, j+30, -10); // make it a little lower than 0-plane
			glTexCoord2f(2, 2); glVertex3f(i+30, j+30, -10);
			glTexCoord2f(2, 0); glVertex3f(i+30, j, -10);
		}
	}
	glEnd();
	glDisable(GL_TEXTURE_2D);
	glLoadIdentity();
	glutSwapBuffers();
}
// Translate the renderable Maya camera(s) into Corona camera data: position,
// look-at point (from center of interest), field of view, depth-of-field and
// optional custom bokeh settings, then install the result on the Corona scene.
void CoronaRenderer::defineCamera()
{
	MPoint rot, pos, scale;
	for(int objId = 0; objId < this->mtco_scene->camList.size(); objId++)
	{
		mtco_MayaObject *cam = (mtco_MayaObject *)this->mtco_scene->camList[objId];
		// Skip cameras that are neither renderable nor the interactive UI camera.
		if( !this->mtco_scene->isCameraRenderable(cam->mobject) && (!(cam->dagPath == this->mtco_scene->uiCamera)))
		{
			continue;
		}
		logger.debug(MString("using camera ") + cam->shortName);
		MFnCamera camera(cam->mobject);
		// Note: these locals shadow the pos/rot/scale declared above the loop.
		MPoint pos, rot, scale;
		MMatrix camMatrix = cam->transformMatrices[0] * this->mtco_renderGlobals->globalConversionMatrix;
		getMatrixComponents(camMatrix, pos, rot, scale);
		Corona::Pos cpos(pos.x, pos.y, pos.z);
		float focusDistance = 0.0;
		float fStop = 0.0;
		float focalLength = 35.0f;
		bool dof;
		float horizontalFilmAperture, verticalFilmAperture;
		float coi = 100.0f;
		// Pull the standard Maya camera attributes.
		getFloat(MString("horizontalFilmAperture"), camera, horizontalFilmAperture);
		getFloat(MString("verticalFilmAperture"), camera, verticalFilmAperture);
		getFloat(MString("focalLength"), camera, focalLength);
		getBool(MString("depthOfField"), camera, dof);
		getFloat(MString("focusDistance"), camera, focusDistance);
		getFloat(MString("fStop"), camera, fStop);
		getFloat(MString("centerOfInterest"), camera, coi);
		focusDistance *= this->mtco_renderGlobals->scaleFactor;
		// The look-at target: a point coi units down the camera's -Z axis,
		// transformed into world space.
		MPoint coiBase(0,0,-coi);
		MPoint coiTransform = coiBase * camMatrix;
		//logger.debug(MString("Center of interest: ") + coi + " transformed " + coiTransform.x + " " + coiTransform.y + " " + coiTransform.z);
		Corona::Pos center(coiTransform.x, coiTransform.y, coiTransform.z);
		// Horizontal FOV: aperture is in inches, focal length in mm;
		// 0.03937 = 1/25.4 converts the focal length to inches.
		float fov = 2.0 * atan((horizontalFilmAperture * 0.5f) / (focalLength * 0.03937));
		float fovDeg = fov * 57.29578; // radians -> degrees (for logging only)
		Corona::AnimatedFloat fieldOfView(fov);
		//logger.debug(MString("fov ") + fov + " deg: " + fovDeg);
		//Corona::AnimatedFloat fieldOfView(Corona::DEG_TO_RAD(45.f));
		Corona::CameraData cameraData;
		//cameraData.type
		cameraData.createPerspective(Corona::AnimatedPos(cpos), Corona::AnimatedPos(center), Corona::AnimatedDir(Corona::Dir::UNIT_Z), fieldOfView);
		Corona::AnimatedFloat focalDist(focusDistance);
		cameraData.perspective.focalDist = focalDist;
		cameraData.perspective.fStop = fStop;
		// Aperture inches -> millimeters (2.54 cm * 10).
		cameraData.perspective.filmWidth = this->mtco_renderGlobals->toMillimeters(horizontalFilmAperture * 2.54f * 10.0f); //film width in mm
		// DOF only if both the camera and the render globals enable it.
		if( dof && this->mtco_renderGlobals->doDof)
			cameraData.perspective.useDof = true;
		if (getBoolAttr("mtco_useBokeh", camera, false))
		{
			cameraData.perspective.bokeh.use = true;
			cameraData.perspective.bokeh.blades = getIntAttr("mtco_blades", camera, 6);
			// NOTE(review): a double default (0.0) is passed to getIntAttr and the
			// result is stored in what is presumably a rotation -- confirm the
			// attribute really is integral.
			cameraData.perspective.bokeh.bladesRotation = getIntAttr("mtco_bladeRotation", camera, 0.0);
			// Optional custom bokeh shape from a connected file texture.
			MPlug bokehBitMapPlug = camera.findPlug("mtco_bokehBitmap");
			if (!bokehBitMapPlug.isNull())
			{
				if (bokehBitMapPlug.isConnected())
				{
					MObject bitmapNode = getConnectedInNode(bokehBitMapPlug);
					if (bitmapNode.hasFn(MFn::kFileTexture))
					{
						MFnDependencyNode bitMapFn(bitmapNode);
						MPlug texNamePlug = bitMapFn.findPlug("fileTextureName");
						if (!texNamePlug.isNull())
						{
							MString fileName = texNamePlug.asString();
							logger.debug(MString("Found bokeh bitmap file: ") + fileName);
							Corona::Bitmap<Corona::Rgb> bokehBitmap;
							Corona::loadImage(fileName.asChar(), bokehBitmap);
							cameraData.perspective.bokeh.customShape = bokehBitmap;
						}
					}
				}
			}
		}
		// Install on the Corona scene; if several cameras are renderable the
		// last one processed wins.
		this->context.scene->getCamera() = cameraData;
	}
}
// Stereo SGM batch driver. Reads a parameter file (argv[1]) containing the
// enhanced-camera intrinsics, the camera-to-robot extrinsics, SGM settings,
// an image directory, and one "image-name + robot pose" line per frame.
// The first image is the fixed reference; each subsequent image is paired
// with it, the relative camera transform is derived from the two robot poses,
// and the resulting inverse-depth map is written as res<N>.png.
int main(int argc, char** argv)
{
    /*Polynomial2 poly2;
    poly2.kuu = -1; poly2.kuv = 1; poly2.kvv= -1; poly2.ku = 0.25; poly2.kv = 0.25; poly2.k1 = 5;
    CurveRasterizer<Polynomial2> raster(1, 1, -100, 100, poly2);
    CurveRasterizer2<Polynomial2> raster2(1, 1, -100, 100, poly2);
    auto tr0 = clock();
    int x1 = 0;
    int x2 = 0;
    for (int i = 0; i < 10000000; i++)
    {
        raster.step();
        x1 += raster.x;
    }
    auto tr1 = clock();
    for (int i = 0; i < 10000000; i++)
    {
        raster2.step();
        x2 += raster2.x;
    }
    auto tr2 = clock();
    cout << "optimized " << double(tr1 - tr0) / CLOCKS_PER_SEC << endl;
    cout << "simple " << double(tr2 - tr1) / CLOCKS_PER_SEC << endl;
    cout << x1 << " " << x2 << endl;
    return 0;*/
    ifstream paramFile(argv[1]);
    if (not paramFile.is_open())
    {
        cout << argv[1] << " : ERROR, file is not found" << endl;
        return 0;
    }
    // Six intrinsic parameters of the Enhanced Unified camera model.
    array<double, 6> params;
    cout << "EU Camera model parameters :" << endl;
    for (auto & p: params)
    {
        paramFile >> p;
        cout << setw(10) << p;
    }
    cout << endl;
    paramFile.ignore(); // skip trailing newline
    // Six-DOF pose of the camera in the robot frame.
    array<double, 6> cameraPose;
    cout << "Camera pose wrt the robot :" << endl;
    for (auto & e: cameraPose)
    {
        paramFile >> e;
        cout << setw(10) << e;
    }
    cout << endl;
    paramFile.ignore();
    Transformation<double> TbaseCamera(cameraPose.data());
    array<double, 6> robotPose1, robotPose2;
    // SGM configuration from the parameter file.
    SGMParameters stereoParams;
    stereoParams.verbosity = 3;
    stereoParams.salientPoints = false;
    paramFile >> stereoParams.u0;
    paramFile >> stereoParams.v0;
    paramFile >> stereoParams.dispMax;
    paramFile >> stereoParams.scale;
    paramFile.ignore();
    string imageDir;
    getline(paramFile, imageDir);
    // First image line: reference image name + robot pose 1.
    string imageInfo, imageName;
    getline(paramFile, imageInfo);
    istringstream imageStream(imageInfo);
    imageStream >> imageName;
    for (auto & x : robotPose1) imageStream >> x;
    Mat8u img1 = imread(imageDir + imageName, 0); // grayscale
    stereoParams.uMax = img1.cols;
    stereoParams.vMax = img1.rows;
    stereoParams.setEqualMargin();
    int counter = 2; // output files are numbered starting at res2.png
    EnhancedCamera camera(params.data());
    // Each remaining line: second image + robot pose 2 -> one stereo pair.
    while (getline(paramFile, imageInfo))
    {
        istringstream imageStream(imageInfo);
        imageStream >> imageName;
        for (auto & x : robotPose2) imageStream >> x;
        // Relative transform between the two CAMERA poses:
        // (T01 * TbaseCamera)^-1 * (T02 * TbaseCamera).
        Transformation<double> T01(robotPose1.data()), T02(robotPose2.data());
        Transformation<double> TleftRight = T01.compose(TbaseCamera).inverseCompose(T02.compose(TbaseCamera));
        Mat8u img2 = imread(imageDir + imageName, 0);
        // Same camera model for both views (one physical camera, two poses).
        EnhancedSGM stereo(TleftRight, &camera, &camera, stereoParams);
        DepthMap depth;
        auto t2 = clock();
        stereo.computeStereo(img1, img2, depth);
        auto t3 = clock();
        cout << double(t3 - t2) / CLOCKS_PER_SEC << endl;
        Mat32f dMat;
        depth.toInverseMat(dMat);
        // imwrite(imageDir + "res" + to_string(counter++) + ".png", depth*200);
        imwrite(imageDir + "res" + to_string(counter++) + ".png", dMat*30);
    }
    return 0;
}
// Renders the current scene through QGLViewer. Besides drawing, this also:
//  - optionally begins a gl2ps PDF page when a .pdf snapshot was requested
//    for this frame (YADE_GL2PS builds only),
//  - applies mouse manipulation of the selected body, deriving a velocity
//    from the displacement since the previous redraw,
//  - applies mouse manipulation of the active clip plane and rigidly drags
//    all clip planes bound to it.
void GLViewer::draw()
{
#ifdef YADE_GL2PS
	// Begin a gl2ps PDF page if a .pdf snapshot was requested for this frame.
	if(!nextFrameSnapshotFilename.empty() && boost::algorithm::ends_with(nextFrameSnapshotFilename,".pdf")){
		gl2psStream=fopen(nextFrameSnapshotFilename.c_str(),"wb");
		if(!gl2psStream){ int err=errno; throw runtime_error(string("Error opening file ")+nextFrameSnapshotFilename+": "+strerror(err)); }
		LOG_DEBUG("Start saving snapshot to "<<nextFrameSnapshotFilename);
		size_t nBodies=Omega::instance().getScene()->bodies->size();
		// BSP sorting is more accurate but expensive; use it only for small scenes.
		int sortAlgo=(nBodies<100 ? GL2PS_BSP_SORT : GL2PS_SIMPLE_SORT);
		gl2psBeginPage(/*const char *title*/"Some title", /*const char *producer*/ "Yade", /*GLint viewport[4]*/ NULL, /*GLint format*/ GL2PS_PDF, /*GLint sort*/ sortAlgo, /*GLint options*/GL2PS_SIMPLE_LINE_OFFSET|GL2PS_USE_CURRENT_VIEWPORT|GL2PS_TIGHT_BOUNDING_BOX|GL2PS_COMPRESS|GL2PS_OCCLUSION_CULL|GL2PS_NO_BLENDING, /*GLint colormode*/ GL_RGBA, /*GLint colorsize*/0, /*GL2PSrgba *colortable*/NULL, /*GLint nr*/0, /*GLint ng*/0, /*GLint nb*/0, /*GLint buffersize*/4096*4096 /* 16MB */, /*FILE *stream*/ gl2psStream, /*const char *filename*/NULL);
	}
#endif
	// Hand the current view direction to the renderer.
	qglviewer::Vec vd=camera()->viewDirection();
	renderer->viewDirection=Vector3r(vd[0],vd[1],vd[2]);
	if(Omega::instance().getScene()){
		const shared_ptr<Scene>& scene=Omega::instance().getScene();
		int selection = selectedName();
		// A body is selected and being dragged with the mouse: update its state
		// from the manipulated frame.
		if(selection!=-1 && (*(Omega::instance().getScene()->bodies)).exists(selection) && isMoving){
			static Real lastTimeMoved(0);
			float v0,v1,v2; manipulatedFrame()->getPosition(v0,v1,v2);
			if(last == selection) // delay by one redraw, so the body will not jump into 0,0,0 coords
			{
				Quaternionr& q = (*(Omega::instance().getScene()->bodies))[selection]->state->ori;
				Vector3r& v = (*(Omega::instance().getScene()->bodies))[selection]->state->pos;
				Vector3r& vel = (*(Omega::instance().getScene()->bodies))[selection]->state->vel;
				Vector3r& angVel = (*(Omega::instance().getScene()->bodies))[selection]->state->angVel;
				angVel=Vector3r::Zero();
				// Derive the velocity from the displacement since the previous redraw.
				Real dt=(scene->time-lastTimeMoved);
				lastTimeMoved=scene->time;
				// NOTE(review): velocity is the negated displacement over dt — presumably
				// because v still holds the pre-drag position here; confirm the sign convention.
				if (dt!=0) { vel[0]=-(v[0]-v0)/dt; vel[1]=-(v[1]-v1)/dt; vel[2]=-(v[2]-v2)/dt;}
				else vel[0]=vel[1]=vel[2]=0;
				//FIXME: should update spin like velocity above, when the body is rotated:
				double q0,q1,q2,q3; manipulatedFrame()->getOrientation(q0,q1,q2,q3);
				q.x()=q0;q.y()=q1;q.z()=q2;q.w()=q3;
			}
			(*(Omega::instance().getScene()->bodies))[selection]->userForcedDisplacementRedrawHook();
		}
		// A clip plane is being manipulated: update its pose and drag all planes
		// bound to it rigidly along.
		if(manipulatedClipPlane>=0){
			assert(manipulatedClipPlane<renderer->numClipPlanes);
			float v0,v1,v2; manipulatedFrame()->getPosition(v0,v1,v2);
			double q0,q1,q2,q3; manipulatedFrame()->getOrientation(q0,q1,q2,q3);
			Se3r newSe3(Vector3r(v0,v1,v2),Quaternionr(q0,q1,q2,q3));
			newSe3.orientation.normalize();
			const Se3r& oldSe3=renderer->clipPlaneSe3[manipulatedClipPlane];
			FOREACH(int planeId, boundClipPlanes){
				// Skip invalid, inactive and the manipulated plane itself.
				if(planeId>=renderer->numClipPlanes || !renderer->clipPlaneActive[planeId] || planeId==manipulatedClipPlane) continue;
				Se3r& boundSe3=renderer->clipPlaneSe3[planeId];
				// Pose of the bound plane relative to the manipulated plane's old pose...
				Quaternionr relOrient=oldSe3.orientation.conjugate()*boundSe3.orientation;
				relOrient.normalize();
				Vector3r relPos=oldSe3.orientation.conjugate()*(boundSe3.position-oldSe3.position);
				// ...re-applied with respect to the new pose.
				boundSe3.position=newSe3.position+newSe3.orientation*relPos;
				boundSe3.orientation=newSe3.orientation*relOrient;
				boundSe3.orientation.normalize();
			}
			renderer->clipPlaneSe3[manipulatedClipPlane]=newSe3;
		}
		scene->renderer=renderer;
		renderer->render(scene, selectedName());
	}
}
// Entry point for "The Floor Is Lava!": sets up the SFML window, scrolling
// camera, textures, font and player, then runs the event/update/draw loop.
int main(int argc, char **argv)
{
	// NOTE(review): the window is created 600 px wide but the camera and the
	// GUI view below are sized for 608 px — confirm whether 600 is a typo.
	sf::RenderWindow window(sf::VideoMode(600, 800), "The Floor Is Lava!");
	//window.setVerticalSyncEnabled(true);
	window.setFramerateLimit(60);
	Camera camera(608, 800, 1);
	camera.MoveTowards(0.0f, -1.0f); // camera scrolls continuously upward
	std::unique_ptr<sf::Texture> lava(new sf::Texture);
	std::unique_ptr<sf::Texture> solid(new sf::Texture);
	std::unique_ptr<sf::Texture> player(new sf::Texture);
	sf::Font font;
	sf::Text score; // on-screen score, drawn in the fixed GUI view
	if(!font.loadFromFile("Assets/Roboto-Regular.ttf"))
	{
		std::cerr << "Unable to load font!" << std::endl;
	}
	score.setString("0");
	score.setFont(font);
	score.setCharacterSize(50);
	// Fixed overlay view so the score does not scroll with the camera.
	sf::View gui_view;
	gui_view.setSize(608, 800);
	gui_view.setCenter(300, 400);
	// NOTE(review): these loadFromFile results are unchecked — a missing asset
	// draws blank sprites silently; consider logging like the font above.
	lava->loadFromFile("Assets/lava.png");
	solid->loadFromFile("Assets/solid.png");
	player->loadFromFile("Assets/player.png");
	// The unique_ptrs are handed to Tile/Player — presumably these take
	// ownership of the textures; verify against their constructors.
	Tile lava_sprite(lava, Tile::LAVA);
	Tile solid_sprite(solid, Tile::SOLID);
	Player player_sprite(player, camera);
	//sf::Sprite lava_sprite;
	//sf::Sprite solid_sprite;
	//lava_sprite.setTexture(lava);
	//solid_sprite.setTexture(solid);
	//solid_sprite.move(sf::Vector2f(32, 0));
	while (window.isOpen())
	{
		// Drain the event queue: close requests and (while alive) 32-px grid
		// movement on the arrow keys. Other keys fall through the switch.
		sf::Event event;
		while (window.pollEvent(event))
		{
			if (event.type == sf::Event::Closed)
			{
				window.close();
			}
			if(event.type == sf::Event::KeyPressed)
			{
				if (player_sprite.IsAlive()){
					switch (event.key.code)
					{
						case sf::Keyboard::Left: player_sprite.Move(-32.0f, 0.0f); break;
						case sf::Keyboard::Right: player_sprite.Move(32.0f, 0.0f); break;
						case sf::Keyboard::Up: player_sprite.Move(0.0f, -32.0f); break;
						case sf::Keyboard::Down: player_sprite.Move(0.0f, 32.0f);
					}
				}
			}
		}
		window.clear(sf::Color::Black);
		// World pass: draw through the scrolling camera view.
		window.setView(camera.GetView());
		// Game state only advances while the player is alive; death freezes
		// the camera, the player and the score.
		if (player_sprite.IsAlive())
		{
			camera.Update();
			player_sprite.Update();
			score.setString(std::to_string(player_sprite.GetScore()));
		}
		window.draw(lava_sprite.GetSprite());
		window.draw(solid_sprite.GetSprite());
		window.draw(player_sprite.GetPlayer());
		// GUI pass: switch to the fixed view for the score overlay.
		window.setView(gui_view);
		window.draw(score);
		window.display();
	}
	return 0;
}
int main(int argc, char ** argv[]) { Display display(800, 600, "TSBK07 Level of Detail on Terrain"); Basic_Shader base_shader("./shaders/space"); Phong_Shader phong("./shaders/phong"); Texture texture("./textures/dirt.tga"); Camera camera(glm::vec3(0, 1, 0), 70.0f, display.GetAspectRation(), 0.01f, 1000.0f); Terrain terr("./textures/terrain2.jpg", "./textures/terrain2.jpg"); Skybox sky; sky.SkyboxInit("./textures/skybox/", "back.jpg", "front.jpg", "left.jpg", "right.jpg", "top.jpg", "bottom.jpg"); Transform transform; Keyboard keyboard; Mouse mouse; float counter = 0.0f; Mesh monkey("./models/monkey3.obj"); Mesh box("./models/box.obj"); std::cout << "init complete" << std::endl; bool wireframe = true; bool lock = false; while (!display.IsClosed()) { display.Clear(1, 0, 1, 1); SDL_Event e; while (SDL_PollEvent(&e)) { if (e.type == SDL_QUIT) { display.HandleEvent(e); } if (e.type == SDL_MOUSEMOTION || e.type == SDL_MOUSEBUTTONDOWN || e.type == SDL_MOUSEBUTTONUP) { mouse.HandleEvent(e, camera); } } const Uint8* currentKeyStates = SDL_GetKeyboardState(NULL); keyboard.HandleEvent(currentKeyStates, camera); sky.Draw(transform, camera); if (currentKeyStates[SDL_SCANCODE_B]) { lock = !lock; } if (currentKeyStates[SDL_SCANCODE_F]) { wireframe = !wireframe; } terr.Draw(transform, camera, lock, wireframe); display.Update(); counter += 0.001f; } return 0; }
int main( int argc, char** argv ) { try { std::string config_string; std::string fields; std::string strobe_string; unsigned int discard; boost::program_options::options_description description( "options" ); description.add_options() ( "help,h", "display help message" ) ( "verbose,v", "more output; --help --verbose: more help message" ) ( "list", "list cameras on the bus with guids" ) ( "list-attributes", "output current camera attributes" ) ( "discard,d", "discard frames, if cannot keep up; same as --buffer=1" ) ( "config,c", boost::program_options::value< std::string >( &config_string ), "configuration file for the camera or semicolon-separated name=value string, see long help for details" ) ( "buffer", boost::program_options::value< unsigned int >( &discard )->default_value( 0 ), "maximum buffer size before discarding frames, default: unlimited" ) ( "fields,f", boost::program_options::value< std::string >( &fields )->default_value( "t,rows,cols,type" ), "header fields, possible values: t,rows,cols,type,size" ) ( "header", "output header only" ) ( "no-header", "output image data only" ) ( "strobe", boost::program_options::value< std::string >( &strobe_string ), "strobe control" ); boost::program_options::variables_map vm; boost::program_options::store( boost::program_options::parse_command_line( argc, argv, description), vm ); boost::program_options::parsed_options parsed = boost::program_options::command_line_parser(argc, argv).options( description ).allow_unregistered().run(); boost::program_options::notify( vm ); if( vm.count( "header" ) + vm.count( "no-header" ) > 1 ) { COMMA_THROW( comma::exception, "--header and --no-header are mutually exclusive" ); } if( vm.count( "fields" ) && vm.count( "no-header" ) > 1 ) { COMMA_THROW( comma::exception, "--fields and --no-header are mutually exclusive" ); } if ( vm.count( "help" ) || vm.count( "verbose" ) ) { std::cerr << "acquire images from a firewire camera using libdc1394 and output them to std::out in OpenCV 
format" << std::endl; std::cerr << "Usage: fire-cat [options] [<filters>]\n" << std::endl; std::cerr << "output header format: fields: t,cols,rows,type; binary: t,3ui\n" << std::endl; std::cerr << description << std::endl; std::cerr << "strobe: " << std::endl; std::cerr << " parameters: " << std::endl; std::cerr << " pin: GPIO pin for strobe (its direction should be set to 'Out', use point-grey utility if necessary)" << std::endl; std::cerr << " polarity: low/high" << std::endl; std::cerr << " delay: delay after start of exposure until the strobe signal asserts" << std::endl; std::cerr << " duration: duration of the strobe signal" << std::endl; std::cerr << " note: delay and duration are given in ticks of the 1.024MHz clock (needs to be confirmed)" << std::endl; std::cerr << " optional commands (by default strobe will be on while the camera is in use): " << std::endl; std::cerr << " on: turn strobe on and exit" << std::endl; std::cerr << " off: turn strobe off and exit" << std::endl; std::cerr << " examples: " << std::endl; std::cerr << " --strobe=\"pin=2;polarity=high;delay=4095;duration=4095\"" << std::endl; std::cerr << " --strobe=\"on;pin=2\"" << std::endl; std::cerr << " --strobe=\"off;pin=2\"" << std::endl; std::cerr << " default parameters: \"pin=0;polarity=high;delay=0;duration=0\"" << std::endl; std::cerr << std::endl; std::cerr << "examples:" << std::endl; std::cerr << "\tview all 3 bumblebee cameras: fire-cat --config=bumblebee.config \"split;bayer=4;resize=640,1440;view\" > /dev/null" << std::endl; std::cerr << "\tview all 6 ladybug cameras: fire-cat --config=ladybug.config \"bayer=1;resize=808,3696;transpose;view\" > /dev/null" << std::endl; std::cerr << std::endl; if ( vm.count( "verbose" ) ) { std::cerr << snark::cv_mat::filters::usage() << std::endl; std::cerr << std::endl << "config file options:" << std::endl; std::cerr << "\tvideo-mode: dc1394 video mode" << std::endl; std::cerr << "\toperation-mode: dc1394 operation mode" << std::endl; std::cerr 
<< "\tiso-speed: dc1394 iso speed" << std::endl; std::cerr << "\tframe-rate: dc1394 frame rate" << std::endl; std::cerr << "\tshutter: camera shutter speed (absolute)" << std::endl; std::cerr << "\tgain: camera gain (absolute)" << std::endl; std::cerr << "\trelative-shutter: camera shutter speed (relative)" << std::endl; std::cerr << "\trelative-gain: camera gain (relative)" << std::endl; std::cerr << "\t(shutter and gain work as a pair, a non-zero shutter activates its corresponding gain)" << std::endl; std::cerr << "\texposure: camera exposure" << std::endl; std::cerr << "\twidth and height default to 0, meaning no ROI is used" << std::endl; std::cerr << "\twidth: format7 image width (default 0 : maximum width in given video-mode)" << std::endl; std::cerr << "\theight: format7 image height (default 0 : maximum height in given video-mode)" << std::endl; std::cerr << "\tleft: format7 horizontal offset from left, must have non-zero width and height (default 0)" << std::endl; std::cerr << "\ttop: format7 vertical offset from top, must have non-zero width height (default 0)" << std::endl; std::cerr << "\tpacket-size: format7 data packet size (default 0 : maximum available)" << std::endl; std::cerr << "\tdeinterlace: splits one RAW/MONO16 image into 2 8bit mono images (default false)" << std::endl; std::cerr << std::endl << "allowed video modes, use coriander to see what your camera supports: " << std::endl; snark::camera::print_video_modes(); std::cerr << std::endl << "allowed operation modes: " << std::endl; snark::camera::print_operation_modes(); std::cerr << std::endl << "allowed iso speeds: " << std::endl; snark::camera::print_iso_speeds(); std::cerr << std::endl << "allowed frame rates (note: format7 ignores this, use packet-size to control format7 framerate): " << std::endl; snark::camera::print_frame_rates(); std::cerr << std::endl << "allowed color codings for format7 op modes: " << std::endl; snark::camera::print_color_coding(); std::cerr << std::endl << 
"config file example( --config=\"filename:bumblebee2\" ):" << std::endl; std::cerr << "{ " << std::endl; std::cerr << " \"bumblebee2\": " << std::endl; std::cerr << " { " << std::endl; std::cerr << " \"video-mode\": DC1394_VIDEO_MODE_FORMAT7_3, " << std::endl; std::cerr << " \"operation-mode\": DC1394_OPERATION_MODE_1394B, " << std::endl; std::cerr << " \"iso-speed\": DC1394_ISO_SPEED_400, " << std::endl; std::cerr << " \"frame-rate\": DC1394_FRAMERATE_240, " << std::endl; std::cerr << " \"color-coding\": DC1394_COLOR_CODING_RAW16, " << std::endl; std::cerr << " \"shutter\": 0.000075, " << std::endl; std::cerr << " \"gain\": 5, " << std::endl; std::cerr << " \"packet-size\": 1536, " << std::endl; std::cerr << " \"deinterlace\": 1, " << std::endl; std::cerr << " \"guid\": 49712223535733607 " << std::endl; std::cerr << " } " << std::endl; std::cerr << "} " << std::endl; std::cerr << std::endl; std::cerr << std::endl << "default values:" << std::endl; // todo comma::write_json< snark::camera::dc1394::config >( snark::camera::dc1394::config(), std::cerr ); std::cerr << std::endl; } return 1; } if ( vm.count( "list" ) ) { snark::camera::dc1394::list_cameras(); return 1; } if ( vm.count( "discard" ) ) { discard = 1; } std::vector< std::string > v = comma::split( fields, "," ); comma::csv::format format; for( unsigned int i = 0; i < v.size(); ++i ) { if( v[i] == "t" ) { format += "t"; } else { format += "ui"; } } std::vector< std::string > filterStrings = boost::program_options::collect_unrecognized( parsed.options, boost::program_options::include_positional ); std::string filters; if( filterStrings.size() == 1 ) { filters = filterStrings[0]; } if( filterStrings.size() > 1 ) { COMMA_THROW( comma::exception, "please provide filters as name-value string" ); } boost::scoped_ptr< snark::cv_mat::serialization > serialization; if( vm.count( "no-header" ) ) { serialization.reset( new snark::cv_mat::serialization( "", format ) ); } else { serialization.reset( new 
snark::cv_mat::serialization( fields, format, vm.count( "header" ) ) ); } if( config_string.empty() ) { std::cerr << name() << ": --config is not given" << std::endl; return 1; } snark::camera::dc1394::config config; bool config_from_command_line = config_string.find_first_of( '=' ) != std::string::npos; // quick and dirty if( config_from_command_line ) { config = comma::name_value::parser( ';', '=' ).get< snark::camera::dc1394::config >( config_string ); } else { std::vector< std::string > v = comma::split( config_string, ':' ); if( v.size() > 2 ) { std::cerr << name() << ": expected --config=filename or --config=filename:xpath, got '" << config_string << "'" << std::endl; return 1; } std::string filename = v[0]; std::string xpath = ( v.size() == 1 ) ? "" : v[1]; try { config = comma::read< snark::camera::dc1394::config >( filename, xpath.c_str() ); } catch(...) { config = comma::read_ini< snark::camera::dc1394::config >( filename, xpath.c_str() ); } } snark::camera::dc1394::strobe strobe; bool trigger_strobe_and_exit = false; if( vm.count( "strobe" ) ) { std::vector< std::string > v = comma::split( strobe_string, ';' ); if( v.empty() ) { std::cerr << name() << ": strobe parameters are not given (e.g. 
--strobe=\"pin=2\")" << std::endl; return 1; } if( v[0] == "on" || v[0] == "off" ) { trigger_strobe_and_exit = true; v[0] = "command=" + v[0]; } else { v.push_back( "command=auto" ); } strobe = comma::name_value::parser( ';', '=' ).get< snark::camera::dc1394::strobe >( comma::join< std::vector< std::string > >( v, ';' ) ); } snark::camera::dc1394 camera( config, strobe ); if( trigger_strobe_and_exit ) { return 0; } if( vm.count( "list-attributes" ) ) { camera.list_attributes(); return 0; } reader.reset( new snark::tbb::bursty_reader< Pair >( boost::bind( &capture, boost::ref( camera ) ), discard ) ); snark::imaging::applications::pipeline pipeline( *serialization, filters, *reader ); pipeline.run(); return 0; } catch( std::exception& ex ) { std::cerr << argv[0] << ": " << ex.what() << std::endl; } catch( ... ) { std::cerr << argv[0] << ": unknown exception" << std::endl; } return 1; }
// Schedules a one-shot render-to-texture pass updating the region
// (x, y, width, height) of the global-map overlay texture.
//   texture    - source texture drawn into the region (may be null: the region
//                is then only cleared, if 'clear' is set)
//   clear      - clear the region to transparent black before drawing
//   cpuCopy    - additionally read the result back into an osg::Image and
//                queue it in mPendingImageDest for CPU-side processing
//   srcLeft..srcBottom - texture coordinates of the source sub-rectangle
// The camera is added under mRoot and tracked in mActiveCameras; a
// CameraUpdateGlobalCallback presumably removes it after one frame — confirm.
void GlobalMap::requestOverlayTextureUpdate(int x, int y, int width, int height, osg::ref_ptr<osg::Texture2D> texture, bool clear, bool cpuCopy, float srcLeft, float srcTop, float srcRight, float srcBottom)
{
    // Pre-render ortho camera covering exactly the requested viewport.
    osg::ref_ptr<osg::Camera> camera (new osg::Camera);
    camera->setNodeMask(Mask_RenderToTexture);
    camera->setReferenceFrame(osg::Camera::ABSOLUTE_RF);
    camera->setViewMatrix(osg::Matrix::identity());
    camera->setProjectionMatrix(osg::Matrix::identity());
    camera->setProjectionResizePolicy(osg::Camera::FIXED);
    camera->setRenderOrder(osg::Camera::PRE_RENDER);
    y = mHeight - y - height; // convert top-left origin to bottom-left
    camera->setViewport(x, y, width, height);
    if (clear)
    {
        camera->setClearMask(GL_COLOR_BUFFER_BIT);
        camera->setClearColor(osg::Vec4(0,0,0,0)); // transparent black
    }
    else
        camera->setClearMask(GL_NONE);
    camera->setUpdateCallback(new CameraUpdateGlobalCallback(this));
    // Render into the overlay texture via an FBO (PBuffer as fallback).
    camera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT, osg::Camera::PIXEL_BUFFER_RTT);
    camera->attach(osg::Camera::COLOR_BUFFER, mOverlayTexture);
    // no need for a depth buffer
    camera->setImplicitBufferAttachmentMask(osg::DisplaySettings::IMPLICIT_COLOR_BUFFER_ATTACHMENT);
    if (cpuCopy)
    {
        // Attach an image to copy the render back to the CPU when finished
        osg::ref_ptr<osg::Image> image (new osg::Image);
        image->setPixelFormat(mOverlayImage->getPixelFormat());
        image->setDataType(mOverlayImage->getDataType());
        camera->attach(osg::Camera::COLOR_BUFFER, image);
        // Remember where the read-back pixels belong (y already flipped above).
        ImageDest imageDest;
        imageDest.mImage = image;
        imageDest.mX = x;
        imageDest.mY = y;
        mPendingImageDest.push_back(imageDest);
    }
    // Create a quad rendering the updated texture
    if (texture)
    {
        osg::ref_ptr<osg::Geometry> geom = createTexturedQuad(srcLeft, srcTop, srcRight, srcBottom);
        // Disable depth writes/tests and lighting: this is a plain 2D blit.
        osg::ref_ptr<osg::Depth> depth = new osg::Depth;
        depth->setWriteMask(0);
        osg::StateSet* stateset = geom->getOrCreateStateSet();
        stateset->setAttribute(depth);
        stateset->setTextureAttributeAndModes(0, texture, osg::StateAttribute::ON);
        stateset->setMode(GL_LIGHTING, osg::StateAttribute::OFF);
        stateset->setMode(GL_DEPTH_TEST, osg::StateAttribute::OFF);
        osg::ref_ptr<osg::Geode> geode = new osg::Geode;
        geode->addDrawable(geom);
        camera->addChild(geode);
    }
    mRoot->addChild(camera);
    mActiveCameras.push_back(camera);
}
// Heightmap tutorial entry point: loads "heightmap.bmp", turns it into a
// wireframe triangle grid (one vertex per texel, two triangles per cell),
// uploads it to a VAO/VBO/EBO and renders it with a basic MVP shader until
// the window is closed or escape is pressed. Returns -1 on any init failure.
int main(void)
{
    GLFWwindow* window;

    // The OpenGL context creation code is in
    // ../common/util.cpp
    window = init("Hello Heightmap", 640, 480);
    if(!window)
    {
        return -1;
    }

    // Hide the cursor (escape will exit the application)
    glfwSetInputMode(window, GLFW_CURSOR, GLFW_CURSOR_DISABLED);

    // We will need to enable depth testing, so that OpenGL draws further
    // vertices first
    glEnable(GL_DEPTH_TEST);

    // Draw wireframe
    glPolygonMode(GL_FRONT_AND_BACK, GL_LINE);

    // Enable backface culling
    glEnable(GL_CULL_FACE);
    glCullFace(GL_BACK);
    glFrontFace(GL_CCW); // front facing vertices are defined counter clock wise

    // Create the model matrix
    // NOTE(review): with GLM >= 0.9.9 a default-constructed mat4 is
    // uninitialized unless GLM_FORCE_CTOR_INIT is defined — confirm this
    // yields the identity on the project's GLM version (glm::mat4(1.0f) is safe).
    model = glm::mat4();
    // Rotate just a bit (the vector indicates the axes on which to rotate)
    model = glm::rotate(model, -glm::radians(35.0f), glm::vec3(0.0f, 1.0f, 1.0f));

    // Set the camera
    Camera camera(CAMERA_PERSPECTIVE, 45.0f, 0.1f, 1000.0f, 640.0f, 480.0f);
    camera.setPosition(0.0f, 0.0f, -3.0f);
    setCamera(&camera); // The camera updating is handled in ../common/util.cpp

    // Load the heightmap
    HeightMap map(20.0f);
    if(!map.load("heightmap.bmp"))
    {
        return -1;
    }

    // One vertex (x*SIZE, height, z*SIZE) per heightmap texel, row-major.
    const std::vector<float>& data = map.getData();
    int w = map.getWidth(), h = map.getHeight();
    std::vector<float> vertices;
    for(int i = 0; i < h; ++i)
    {
        for(int j = 0; j < w; ++j)
        {
            float x = (float)i;
            float z = (float)j;
            float height = data[i * w + j];
            vertices.push_back(x * SIZE);
            vertices.push_back(height);
            vertices.push_back(z * SIZE);
            // This is where you could add extra information,
            // like colour or texture coordinates.
            // You would have to change the vertex attribute pointer
            // code as well!
        }
    }

    // Two triangles (six indices) per grid cell of the (h-1) x (w-1) grid.
    std::vector<GLuint> indices;
    for(int i = 0; i < (h - 1); ++i)
    {
        for(int j = 0; j < (w - 1); ++j)
        {
            // We create six indices for each tile
            indices.push_back(i * w + j);
            indices.push_back((i + 1) * w + j);
            indices.push_back(i * w + j + 1);
            indices.push_back(i * w + j + 1);
            indices.push_back((i + 1) * w + j);
            indices.push_back((i + 1) * w + j + 1);
        }
    }

    // We start by creating a vertex and fragment shader
    // from the above strings
    GLuint vertex = createShader(VERTEX_SRC, GL_VERTEX_SHADER);
    if(!vertex)
    {
        return -1;
    }
    GLuint fragment = createShader(FRAGMENT_SRC, GL_FRAGMENT_SHADER);
    if(!fragment)
    {
        return -1;
    }

    // Now we must make a shader program: this program
    // contains both the vertex and the fragment shader
    GLuint program = createShaderProgram(vertex, fragment);
    if(!program)
    {
        return -1;
    }

    // We link the program, just like your C compiler links
    // .o files
    bool result = linkShader(program);
    if(!result)
    {
        return -1;
    }

    // We make sure the shader is validated
    result = validateShader(program);
    if(!result)
    {
        return -1;
    }

    // Detach and delete the shaders, because we no longer need them
    glDetachShader(program, vertex);
    glDeleteShader(vertex);
    glDetachShader(program, fragment);
    glDeleteShader(fragment);

    glUseProgram(program); // Set this as the current shader program

    // We now create the data to send to the GPU
    GLuint vao;
    glGenVertexArrays(1, &vao);
    glBindVertexArray(vao);

    GLuint vbo;
    glGenBuffers(1, &vbo);

    // Upload the vertices to the buffer
    glBindBuffer(GL_ARRAY_BUFFER, vbo);
    glBufferData(GL_ARRAY_BUFFER, sizeof(float) * vertices.size(), &vertices[0], GL_STATIC_DRAW);

    GLuint ebo;
    glGenBuffers(1, &ebo);
    // Upload the indices to the buffer
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ebo);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(GLuint) * indices.size(), &indices[0], GL_STATIC_DRAW);

    // Enable the vertex attributes and upload their data (see: layout(location=x))
    glEnableVertexAttribArray(0); // position
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(GLfloat), 0);

    // We have now successfully created a drawable Vertex Array Object

    // Set the clear color to a light grey
    glClearColor(0.75f, 0.75f, 0.75f, 1.0f);

    while(!glfwWindowShouldClose(window))
    {
        if(glfwGetKey(window, GLFW_KEY_ESCAPE) == GLFW_PRESS)
        {
            break;
        }
        updateCamera(640, 480, window);

        // Clear (note the addition of GL_DEPTH_BUFFER_BIT)
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

        // Upload the MVP matrices
        GLint modelUL = glGetUniformLocation(program, "model");
        glUniformMatrix4fv(modelUL, 1, GL_FALSE, glm::value_ptr(model));
        GLint viewUL = glGetUniformLocation(program, "view");
        glUniformMatrix4fv(viewUL, 1, GL_FALSE, glm::value_ptr(camera.getView()));
        // This can be moved out of the loop because it rarely changes
        GLint projUL = glGetUniformLocation(program, "projection");
        glUniformMatrix4fv(projUL, 1, GL_FALSE, glm::value_ptr(camera.getProjection()));

        // The VAO is still bound so just draw the vertices
        glDrawElements(GL_TRIANGLES, indices.size(), GL_UNSIGNED_INT, 0);
        // Tip: if nothing is drawn, check the return value of glGetError and google it

        // Swap buffers to show current image on screen (for more information google 'backbuffer')
        glfwSwapBuffers(window);
        glfwPollEvents();
    }

    // Clean up
    glDeleteBuffers(1, &vbo);
    glDeleteBuffers(1, &ebo);
    glDeleteVertexArrays(1, &vao);
    glfwTerminate();
    return 0;
}
// Builds a complete appleseed test project entirely in code: render
// configurations, one assembly holding a gray diffuse material applied to
// geometry loaded from "scene.obj", a point light, a constant-sky
// environment, a pinhole camera and a 640x480 frame. Returns the project
// ready to be handed to a renderer.
asf::auto_release_ptr<asr::Project> build_project()
{
    // Create an empty project.
    asf::auto_release_ptr<asr::Project> project(asr::ProjectFactory::create("test project"));
    project->get_search_paths().push_back("data");

    // Add default configurations to the project.
    project->add_default_configurations();

    // Set the number of samples. This is basically the quality parameter: the higher the number
    // of samples, the smoother the image but the longer the rendering time.
    project->configurations()
        .get_by_name("final")->get_parameters()
        .insert_path("generic_tile_renderer.min_samples", "25")
        .insert_path("generic_tile_renderer.max_samples", "25");

    // Create a scene.
    asf::auto_release_ptr<asr::Scene> scene(asr::SceneFactory::create());

    // Create an assembly.
    asf::auto_release_ptr<asr::Assembly> assembly(
        asr::AssemblyFactory::create(
            "assembly",
            asr::ParamArray()));

    //------------------------------------------------------------------------
    // Materials
    //------------------------------------------------------------------------

    // Create a color called "gray" and insert it into the assembly.
    static const float GrayReflectance[] = { 0.5f, 0.5f, 0.5f };
    assembly->colors().insert(
        asr::ColorEntityFactory::create(
            "gray",
            asr::ParamArray()
                .insert("color_space", "srgb"),
            asr::ColorValueArray(3, GrayReflectance)));

    // Create a BRDF called "diffuse_gray_brdf" and insert it into the assembly.
    assembly->bsdfs().insert(
        asr::LambertianBRDFFactory().create(
            "diffuse_gray_brdf",
            asr::ParamArray()
                .insert("reflectance", "gray")));

    // Create a physical surface shader and insert it into the assembly.
    assembly->surface_shaders().insert(
        asr::PhysicalSurfaceShaderFactory().create(
            "physical_surface_shader",
            asr::ParamArray()));

    // Create a material called "gray_material" and insert it into the assembly.
    assembly->materials().insert(
        asr::MaterialFactory::create(
            "gray_material",
            asr::ParamArray()
                .insert("surface_shader", "physical_surface_shader")
                .insert("bsdf", "diffuse_gray_brdf")));

    //------------------------------------------------------------------------
    // Geometry
    //------------------------------------------------------------------------

    // Load the scene geometry from disk.
    asr::MeshObjectArray objects;
    asr::MeshObjectReader::read(
        project->get_search_paths(),
        "cube",
        asr::ParamArray()
            .insert("filename", "scene.obj"),
        objects);

    // Insert all the objects into the assembly.
    for (size_t i = 0; i < objects.size(); ++i)
    {
        // Insert this object into the assembly.
        asr::MeshObject* object = objects[i];
        assembly->objects().insert(asf::auto_release_ptr<asr::Object>(object));

        // Create an instance of this object and insert it into the assembly,
        // binding the gray material to both material slots.
        const std::string instance_name = std::string(object->get_name()) + "_inst";
        assembly->object_instances().insert(
            asr::ObjectInstanceFactory::create(
                instance_name.c_str(),
                asr::ParamArray(),
                object->get_name(),
                asf::Transformd(asf::Matrix4d::identity()),
                asf::StringDictionary()
                    .insert("default", "gray_material")
                    .insert("default2", "gray_material")));
    }

    //------------------------------------------------------------------------
    // Light
    //------------------------------------------------------------------------

    // Create a color called "light_exitance" and insert it into the assembly.
    static const float LightExitance[] = { 1.0f, 1.0f, 1.0f };
    assembly->colors().insert(
        asr::ColorEntityFactory::create(
            "light_exitance",
            asr::ParamArray()
                .insert("color_space", "srgb")
                .insert("multiplier", "30.0"),
            asr::ColorValueArray(3, LightExitance)));

    // Create a point light called "light" and insert it into the assembly.
    asf::auto_release_ptr<asr::Light> light(
        asr::PointLightFactory().create(
            "light",
            asr::ParamArray()
                .insert("exitance", "light_exitance")));
    light->set_transform(asf::Transformd(
        asf::Matrix4d::translation(asf::Vector3d(0.6, 2.0, 1.0))));
    assembly->lights().insert(light);

    // Create an instance of the assembly and insert it into the scene.
    asf::auto_release_ptr<asr::AssemblyInstance> assembly_instance(
        asr::AssemblyInstanceFactory::create(
            "assembly_inst",
            asr::ParamArray(),
            "assembly"));
    assembly_instance
        ->transform_sequence()
            .set_transform(
                0.0,
                asf::Transformd(asf::Matrix4d::identity()));
    scene->assembly_instances().insert(assembly_instance);

    // Insert the assembly into the scene.
    scene->assemblies().insert(assembly);

    //------------------------------------------------------------------------
    // Environment
    //------------------------------------------------------------------------

    // Create a color called "sky_exitance" and insert it into the scene.
    static const float SkyExitance[] = { 0.75f, 0.80f, 1.0f };
    scene->colors().insert(
        asr::ColorEntityFactory::create(
            "sky_exitance",
            asr::ParamArray()
                .insert("color_space", "srgb")
                .insert("multiplier", "0.5"),
            asr::ColorValueArray(3, SkyExitance)));

    // Create an environment EDF called "sky_edf" and insert it into the scene.
    scene->environment_edfs().insert(
        asr::ConstantEnvironmentEDFFactory().create(
            "sky_edf",
            asr::ParamArray()
                .insert("exitance", "sky_exitance")));

    // Create an environment shader called "sky_shader" and insert it into the scene.
    scene->environment_shaders().insert(
        asr::EDFEnvironmentShaderFactory().create(
            "sky_shader",
            asr::ParamArray()
                .insert("environment_edf", "sky_edf")));

    // Create an environment called "sky" and bind it to the scene.
    scene->set_environment(
        asr::EnvironmentFactory::create(
            "sky",
            asr::ParamArray()
                .insert("environment_edf", "sky_edf")
                .insert("environment_shader", "sky_shader")));

    //------------------------------------------------------------------------
    // Camera
    //------------------------------------------------------------------------

    // Create a pinhole camera with film dimensions 0.980 x 0.735 in (24.892 x 18.669 mm).
    asf::auto_release_ptr<asr::Camera> camera(
        asr::PinholeCameraFactory().create(
            "camera",
            asr::ParamArray()
                .insert("film_dimensions", "0.024892 0.018669")
                .insert("focal_length", "0.035")));

    // Place and orient the camera. By default cameras are located in (0.0, 0.0, 0.0)
    // and are looking toward Z- (0.0, 0.0, -1.0).
    camera->transform_sequence().set_transform(
        0.0,
        asf::Transformd(
            asf::Matrix4d::rotation(asf::Vector3d(1.0, 0.0, 0.0), asf::deg_to_rad(-20.0)) *
            asf::Matrix4d::translation(asf::Vector3d(0.0, 0.8, 11.0))));

    // Bind the camera to the scene.
    scene->set_camera(camera);

    //------------------------------------------------------------------------
    // Frame
    //------------------------------------------------------------------------

    // Create a frame and bind it to the project.
    project->set_frame(
        asr::FrameFactory::create(
            "beauty",
            asr::ParamArray()
                .insert("camera", scene->get_camera()->get_name())
                .insert("resolution", "640 480")
                .insert("color_space", "srgb")));

    // Bind the scene to the project.
    project->set_scene(scene);

    return project;
}
/**
 * Renders the selected slice of the input 3D image into the target render data.
 *
 * Fetches the source volume and camera from \a data, builds a proxy geometry for the
 * slice plane, renders it with the bound transfer function, and publishes the FBO
 * contents under p_targetImageID. Logs an error for non-3D inputs and a debug message
 * when either input is missing.
 *
 * \param data  DataContainer to read the inputs from and write the result to.
 */
void SliceRenderer3D::updateResult(DataContainer& data) {
    ImageRepresentationGL::ScopedRepresentation img(data, p_sourceImageID.getValue());
    ScopedTypedData<CameraData> camera(data, p_camera.getValue());

    if (img != nullptr && camera != nullptr) {
        if (img->getDimensionality() == 3) {
            const cgt::Camera& cam = camera->getCamera();

            // Creating the slice proxy geometry works as follows:
            // Create the cube proxy geometry for the volume, then clip the cube against the
            // slice plane. The closing face is the slice proxy geometry.
            // This is probably not the fastest, but an elegant solution, which also supports
            // arbitrary slice orientations. :)
            cgt::Bounds volumeExtent = img->getParent()->getWorldBounds();
            std::unique_ptr<MeshGeometry> cube = GeometryDataFactory::createCube(volumeExtent, cgt::Bounds(cgt::vec3(0.f), cgt::vec3(1.f)));

            // Derive the slice plane from the selected orientation: a unit normal and the
            // world-space offset p of slice #p_sliceNumber along that axis.
            cgt::vec3 normal(0.f, 0.f, 0.f);
            float p = 0.0f;
            switch (p_sliceOrientation.getOptionValue()) {
                case XY_PLANE:
                    normal = cgt::vec3(0.f, 0.f, 1.f);
                    p = img->getParent()->getMappingInformation().getOffset().z
                        + (p_sliceNumber.getValue() * img->getParent()->getMappingInformation().getVoxelSize().z);
                    break;
                case XZ_PLANE:
                    normal = cgt::vec3(0.f, 1.f, 0.f);
                    p = img->getParent()->getMappingInformation().getOffset().y
                        + (p_sliceNumber.getValue() * img->getParent()->getMappingInformation().getVoxelSize().y);
                    break;
                case YZ_PLANE:
                    normal = cgt::vec3(1.f, 0.f, 0.f);
                    p = img->getParent()->getMappingInformation().getOffset().x
                        + (p_sliceNumber.getValue() * img->getParent()->getMappingInformation().getVoxelSize().x);
                    break;
            }

            MeshGeometry clipped = cube->clipAgainstPlane(p, normal, true);
            const FaceGeometry& slice = clipped.getFaces().back();    // the last face is the closing face

            glEnable(GL_DEPTH_TEST);
            _shader->activate();
            _shader->setIgnoreUniformLocationError(true);
            _shader->setUniform("_viewportSizeRCP", 1.f / cgt::vec2(getEffectiveViewportSize()));
            _shader->setUniform("_projectionMatrix", cam.getProjectionMatrix());
            _shader->setUniform("_viewMatrix", cam.getViewMatrix());

            cgt::TextureUnit inputUnit, tfUnit;
            img->bind(_shader, inputUnit);
            p_transferFunction.getTF()->bind(_shader, tfUnit);

            // BUGFIX: "FramebufferActivationGuard f*g(this);" is not valid C++ (a '*' cannot
            // appear in the declarator name) and did not compile. The guard must be a plain
            // named local; it activates the FBO for the remainder of this scope (RAII).
            FramebufferActivationGuard fag(this);
            createAndAttachColorTexture();
            createAndAttachDepthTexture();
            glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
            slice.render(GL_TRIANGLE_FAN);

            _shader->deactivate();
            cgt::TextureUnit::setZeroUnit();
            glDisable(GL_DEPTH_TEST);

            data.addData(p_targetImageID.getValue(), new RenderData(_fbo));
        }
        else {
            LERROR("Input image must have dimensionality of 3.");
        }
    }
    else {
        LDEBUG("No suitable input image found.");
    }
    // NOTE: the former "Entering/Exiting updateResult" std::cout prints were removed; they
    // were leftover debug output inconsistent with the LDEBUG/LERROR logging used here.
}
/*! Initializes the menu manager. */ void GameView::initializeMenuManager() { m_MenuManager = new MenuManager(camera(), this); connect(m_MenuManager, SIGNAL(newGame()), this, SLOT(loadLevel())); }
int main(){ TempSettings gamesettings; gamesettings.mapw = 10; gamesettings.maph = 6; gamesettings.mapx = 0; gamesettings.mapy = 0; gamesettings.mapmidx = gamesettings.mapw/2.0; gamesettings.mapmidy = gamesettings.maph/2.0; gamesettings.window_width = 1300; gamesettings.window_height = 800; // initialize window, renderer, textures if (SDL_Init(SDL_INIT_EVERYTHING) != 0){ std::cerr << "SDL_Init error: " << SDL_GetError() << std::endl; return 1; } if (TTF_Init() != 0){ std::cerr << "TTF_Init error: " << SDL_GetError() << std::endl; SDL_Quit(); return 1; } SDL_Window* window = SDL_CreateWindow("deathblade_floating", 0, 0, gamesettings.window_width, gamesettings.window_height, SDL_WINDOW_SHOWN); if (window == nullptr){ std::cerr << "SDL_CreateWindow error: " << SDL_GetError() << std::endl; SDL_Quit(); return 1; } SDL_Renderer* renderer = SDL_CreateRenderer(window, -1, SDL_RENDERER_ACCELERATED); if (renderer == nullptr){ std::cerr << "SDL_CreateRenderer error: " << SDL_GetError() << std::endl; SDL_DestroyWindow(window); SDL_Quit(); return 1; } std::string resource_path = getResourcePath(""); std::string charfile = resource_path + "initialcharacter.png"; std::string bgfile = resource_path + "initialbackgroundtile.png"; std::string starfile = resource_path + "star.png"; std::string wallfile = resource_path + "wall.png"; SDL_Texture* character_texture = IMG_LoadTexture(renderer, charfile.c_str()); SDL_Texture* bgtile_texture = IMG_LoadTexture(renderer, bgfile.c_str()); SDL_Texture* star_texture = IMG_LoadTexture(renderer, starfile.c_str()); SDL_Texture* wall_texture = IMG_LoadTexture(renderer,wallfile.c_str()); if (character_texture == nullptr || bgtile_texture == nullptr || star_texture == nullptr || wall_texture == nullptr){ std::cerr << "IMG_LoadTexture error: " << SDL_GetError() << std::endl; SDL_DestroyTexture(character_texture); SDL_DestroyTexture(bgtile_texture); SDL_DestroyRenderer(renderer); SDL_DestroyWindow(window); SDL_Quit(); return 1; } std::string fontfile = 
resource_path + "sample.ttf"; TTF_Font* font = TTF_OpenFont(fontfile.c_str(), 15); if (font == NULL){ std::cerr << "TTF_OpenFont error: " << SDL_GetError() << std::endl; } CameraControl camera(&gamesettings); ObjectController objects; DeveloperConsoleClass console(&gamesettings); console.add_controller(&console); console.add_controller(&camera); const double tilew = 0.5; const double tileh = 0.5; double mousex = gamesettings.mapmidx; double mousey = gamesettings.mapmidy; int mousepx = gamesettings.window_width/2; int mousepy = gamesettings.window_height/2; double wallthickness = 0.1; TextureWall bottomwall, topwall, leftwall, rightwall; bottomwall.x = gamesettings.mapw/2; bottomwall.y = gamesettings.maph+wallthickness/2; bottomwall.setTexture(wall_texture,gamesettings.mapw,wallthickness); objects.add_object(&bottomwall); topwall.x = gamesettings.mapw/2; topwall.y = -wallthickness/2; topwall.setTexture(wall_texture,gamesettings.mapw,wallthickness); objects.add_object(&topwall); leftwall.x = -wallthickness/2; leftwall.y = gamesettings.maph/2; leftwall.setTexture(wall_texture,wallthickness,gamesettings.maph); objects.add_object(&leftwall); rightwall.x = gamesettings.mapw + wallthickness/2; rightwall.y = gamesettings.maph/2; rightwall.setTexture(wall_texture,wallthickness,gamesettings.maph); objects.add_object(&rightwall); Player human; human.x = 5; human.y = 5; human.dx = -0.025; human.dy = -0.03; human.setTexture(character_texture, 0.05, 0.05); objects.add_object(&human); // map x [0, 10] // map y [0, 6] // star width 0.256 std::vector<vec2d> star_positions = { vec2d(6,4), vec2d(3,4.1), vec2d(9,0.2), vec2d(1,0.4), vec2d(2,2.5), vec2d(3,2.5), vec2d(9,4.9), vec2d(0.2,5.1), vec2d(4.1,4.1) }; std::vector<double> star_thetas = { 0, 45, 15, 60, 85, 4, 50, 66, 31 }; std::vector<Star*> star_field; for(unsigned int i = 0; i < star_positions.size(); ++i){ Star* newstar = new Star(); newstar->x = star_positions[i].x; newstar->y = star_positions[i].y; 
newstar->setTexture(star_texture, 0.256, 0.256); if(i < star_thetas.size()) newstar->rotate(star_thetas[i]*3.14159265359/180.0); star_field.push_back(newstar); objects.add_object(star_field[i]); } bool rightmouse_down = false; SDL_Event event; bool quitnow = false; Uint32 fps_lastframe = SDL_GetTicks(); while(!quitnow){ int zoomdirection = 0; while(SDL_PollEvent(&event)){ if (console.is_active()){ if (event.type == SDL_KEYDOWN){ switch(event.key.keysym.sym){ case SDLK_BACKQUOTE: console.toggle(); break; case SDLK_BACKSPACE: console.backspace(); break; case SDLK_RETURN: case SDLK_RETURN2: console.enter(); break; case SDLK_UP: console.goback_inhistory(); break; case SDLK_DOWN: console.goforward_inhistory(); break; default: break; } console.render_current_command(renderer); } else if (event.type == SDL_TEXTINPUT && event.text.text[0] != '`'){ console.addinput(event.text.text); console.render_current_command(renderer); } else if (event.type == SDL_MOUSEBUTTONDOWN){ if (event.button.button == SDL_BUTTON_LEFT){ if(!console.mouse_grab(true, event.button.x, event.button.y)) camera.mousecontrol_on(); } //if (event.button.button == SDL_BUTTON_RIGHT) } else if (event.type == SDL_MOUSEBUTTONUP){ if (event.button.button == SDL_BUTTON_LEFT){ console.mouse_grab(false, -1,-1); camera.mousecontrol_off(); } } else if (event.type == SDL_MOUSEMOTION){ mousepx = event.motion.x; mousepy = event.motion.y; console.handle_mouse(event.motion.xrel, event.motion.yrel); if (camera.mouse_controlling()){ camera.mousecontrol_move(mousepx, mousepy, event.motion.xrel, event.motion.yrel,SDL_GetModState()==KMOD_LCTRL); } else{ mousex = camera.xfrompixel(event.motion.x, event.motion.y, db::Player); mousey = camera.yfrompixel(event.motion.x, event.motion.y, db::Player); } } else if (event.type == SDL_MOUSEWHEEL){ if(!console.scroll(event.wheel.y, mousepx, mousepy)) zoomdirection += event.wheel.y; } else if (event.type == SDL_QUIT){ quitnow = true; } continue; } // if console is not up if (event.type == 
SDL_KEYDOWN){ switch(event.key.keysym.sym){ case SDLK_ESCAPE: quitnow = true; break; case SDLK_BACKQUOTE: console.toggle(); break; case SDLK_t: if (!camera.is_tracking()) camera.track_object(&(human.x), &(human.y)); else camera.stop_tracking(); break; case SDLK_w: case SDLK_UP: camera.pan_updown(-1); break; case SDLK_a: case SDLK_LEFT: camera.pan_leftright(-1); break; case SDLK_s: case SDLK_DOWN: camera.pan_updown(1); break; case SDLK_d: case SDLK_RIGHT: camera.pan_leftright(1); break; default: break; } } else if (event.type == SDL_KEYUP){ switch(event.key.keysym.sym){ case SDLK_w: case SDLK_UP: camera.pan_updown(0); break; case SDLK_a: case SDLK_LEFT: camera.pan_leftright(0); break; case SDLK_s: case SDLK_DOWN: camera.pan_updown(0); break; case SDLK_d: case SDLK_RIGHT: camera.pan_leftright(0); break; default: break; } } else if (event.type == SDL_MOUSEBUTTONDOWN){ if (event.button.button == SDL_BUTTON_LEFT){ camera.mousecontrol_on(); } else if (event.button.button == SDL_BUTTON_RIGHT){ rightmouse_down = true; human.bound.xclick = camera.xfrompixel(event.button.x,event.button.y,db::Player); human.bound.yclick = camera.yfrompixel(event.button.x,event.button.y,db::Player); human.bound.xdrag = 0; human.bound.ydrag = 0; human.bound.enabled = true; } } else if (event.type == SDL_MOUSEBUTTONUP){ if (event.button.button == SDL_BUTTON_LEFT){ camera.mousecontrol_off(); } else if (event.button.button == SDL_BUTTON_RIGHT){ rightmouse_down = false; } } else if (event.type == SDL_MOUSEWHEEL){ zoomdirection += event.wheel.y; } else if (event.type == SDL_MOUSEMOTION){ mousepx = event.motion.x; mousepy = event.motion.y; if (camera.mouse_controlling()){ camera.mousecontrol_move(mousepx, mousepy, event.motion.xrel, event.motion.yrel,SDL_GetModState()==KMOD_LCTRL); } else{ mousex = camera.xfrompixel(event.motion.x, event.motion.y, db::Player); mousey = camera.yfrompixel(event.motion.x, event.motion.y, db::Player); if(mousepx <= 1) camera.pan_leftright(-1); else if (mousepx >= 
(int)gamesettings.window_width-1) camera.pan_leftright(1); else if (mousepx - event.motion.xrel <= 1) camera.pan_leftright(0); else if (mousepx - event.motion.xrel >= (int)gamesettings.window_width-1) camera.pan_leftright(0); if(mousepy <= 1) camera.pan_updown(-1); else if (mousepy >= (int)gamesettings.window_height-1) camera.pan_updown(1); else if (mousepy - event.motion.yrel <= 1) camera.pan_updown(0); else if (mousepy - event.motion.yrel >= (int)gamesettings.window_height-1) camera.pan_updown(0); if(rightmouse_down){ human.bound.xdrag += event.motion.xrel; human.bound.ydrag += event.motion.yrel; } } } else if (event.type == SDL_QUIT){ quitnow = true; } } objects.step_time(); SDL_SetRenderDrawColor(renderer, 0,0,0,255); SDL_RenderClear(renderer); camera.adjust_zoom(zoomdirection, mousex, mousey); for (double x = gamesettings.mapx+tilew/2; x < gamesettings.mapx+gamesettings.mapw+tilew/2; x += tilew){ for (double y = gamesettings.mapy+tileh/2; y < gamesettings.mapy+gamesettings.maph+tileh/2; y += tileh){ SDL_Rect dst = camera.calculate_display_destination(x,y,tilew,tileh,db::Floor); SDL_RenderCopyEx(renderer, bgtile_texture, NULL, &dst, -camera.camyaw*180.0/3.14156033, NULL, SDL_FLIP_NONE); } } objects.drawon(renderer, &camera); if(console.is_active()) console.drawon(renderer); human.bound.drawon(renderer, &camera); Uint32 fps_newframe = SDL_GetTicks(); if((fps_newframe-fps_lastframe) < SCREEN_TICKS_PER_FRAME){ SDL_Delay(SCREEN_TICKS_PER_FRAME - (fps_newframe-fps_lastframe)); } draw_fps(renderer, font, 1.0/(fps_newframe/1000.0 - fps_lastframe/1000.0)); fps_lastframe = fps_newframe; SDL_RenderPresent(renderer); } SDL_DestroyTexture(character_texture); SDL_DestroyTexture(bgtile_texture); SDL_DestroyRenderer(renderer); SDL_DestroyWindow(window); SDL_Quit(); for(unsigned int i = 0; i < star_field.size(); ++i) delete star_field[i]; return 0; }
// Drawing void Scene::draw(GLWidget * dest) const { camera()->loadMatrix(dest); Container::draw(dest); camera()->unloadMatrix(); }
/**
 * Entry point for the ARIA multi-mode robot demo.
 *
 * Connects to a robot (or simulator), optionally to lasers and a compass, then
 * installs a set of interactive keyboard-driven "modes" (teleop, wander, sonar,
 * camera, ...) and blocks until the robot task loop exits.
 *
 * NOTE(review): the mode objects register themselves with the robot/key handler
 * in their constructors, so their declaration order and stack lifetime matter —
 * do not reorder or scope them differently without checking ArMode semantics.
 */
int main(int argc, char** argv)
{
  // Initialize some global data
  Aria::init();

  // If you want ArLog to print "Verbose" level messages uncomment this:
  //ArLog::init(ArLog::StdOut, ArLog::Verbose);

  // This object parses program options from the command line
  ArArgumentParser parser(&argc, argv);

  // Load some default values for command line arguments from /etc/Aria.args
  // (Linux) or the ARIAARGS environment variable.
  parser.loadDefaultArguments();

  // Central object that is an interface to the robot and its integrated
  // devices, and which manages control of the robot by the rest of the program.
  ArRobot robot;

  // Object that connects to the robot or simulator using program options
  ArRobotConnector robotConnector(&parser, &robot);

  // If the robot has an Analog Gyro, this object will activate it, and
  // if the robot does not automatically use the gyro to correct heading,
  // this object reads data from it and corrects the pose in ArRobot
  ArAnalogGyro gyro(&robot);

  // Connect to the robot, get some initial data from it such as type and name,
  // and then load parameter files for this robot.
  if (!robotConnector.connectRobot())
  {
    // Error connecting:
    // if the user gave the -help argument, then just print out what happened,
    // and continue so options can be displayed later.
    if (!parser.checkHelpAndWarnUnparsed())
    {
      ArLog::log(ArLog::Terse, "Could not connect to robot, will not have parameter file so options displayed later may not include everything");
    }
    // otherwise abort
    else
    {
      ArLog::log(ArLog::Terse, "Error, could not connect to robot.");
      Aria::logOptions();
      Aria::exit(1);
    }
  }

  if(!robot.isConnected())
  {
    ArLog::log(ArLog::Terse, "Internal error: robot connector succeeded but ArRobot::isConnected() is false!");
  }

  // Connector for laser rangefinders
  ArLaserConnector laserConnector(&parser, &robot, &robotConnector);

  // Connector for compasses
  ArCompassConnector compassConnector(&parser);

  // Parse the command line options. Fail and print the help message if the parsing fails
  // or if the help was requested with the -help option
  if (!Aria::parseArgs() || !parser.checkHelpAndWarnUnparsed())
  {
    Aria::logOptions();
    Aria::exit(1);
    return 1;
  }

  // Used to access and process sonar range data
  ArSonarDevice sonarDev;

  // Used to perform actions when keyboard keys are pressed
  ArKeyHandler keyHandler;
  Aria::setKeyHandler(&keyHandler);

  // ArRobot contains an exit action for the Escape key. It also
  // stores a pointer to the keyhandler so that other parts of the program can
  // use the same keyhandler.
  robot.attachKeyHandler(&keyHandler);
  printf("You may press escape to exit\n");

  // Attach sonarDev to the robot so it gets data from it.
  robot.addRangeDevice(&sonarDev);

  // Start the robot task loop running in a new background thread. The 'true' argument means if it loses
  // connection the task loop stops and the thread exits.
  robot.runAsync(true);

  // Connect to the laser(s) if lasers were configured in this robot's parameter
  // file or on the command line, and run laser processing thread if applicable
  // for that laser class. For the purposes of this demo, add all
  // possible lasers to ArRobot's list rather than just the ones that were
  // connected by this call so when you enter laser mode, you
  // can then interactively choose which laser to use from that list of all
  // lasers mentioned in robot parameters and on command line. Normally,
  // only connected lasers are put in ArRobot's list.
  if (!laserConnector.connectLasers(
        false,  // continue after connection failures
        false,  // add only connected lasers to ArRobot
        true    // add all lasers to ArRobot
  ))
  {
    printf("Could not connect to lasers... exiting\n");
    Aria::exit(2);
  }

  /* not needed, robot connector will do it by default
  if (!sonarConnector.connectSonars(
        false,  // continue after connection failures
        false,  // add only connected lasers to ArRobot
        true    // add all lasers to ArRobot
  ))
  {
    printf("Could not connect to sonars... exiting\n");
    Aria::exit(2);
  }
  */

  // Create and connect to the compass if the robot has one.
  ArTCM2 *compass = compassConnector.create(&robot);
  if(compass && !compass->blockingConnect()) {
    // connection failed: fall back to "no compass" (the created object is
    // intentionally not freed here; this demo exits via Aria::exit anyway)
    compass = NULL;
  }

  // Sleep for a second so some messages from the initial responses
  // from robots and cameras and such can catch up
  ArUtil::sleep(1000);

  // We need to lock the robot since we'll be setting up these modes
  // while the robot task loop thread is already running, and they
  // need to access some shared data in ArRobot.
  robot.lock();

  // now add all the modes for this demo
  // these classes are defined in ArModes.cpp in ARIA's source code.
  // (the gripper mode is heap-allocated and self-registering; it lives until exit)
  if(robot.getOrigRobotConfig()->getHasGripper())
    new ArModeGripper(&robot, "gripper", 'g', 'G');
  else
    ArLog::log(ArLog::Normal, "Robot does not indicate that it has a gripper.");
  ArModeActs actsMode(&robot, "acts", 'a', 'A');
  ArModeTCM2 tcm2(&robot, "tcm2", 'm', 'M', compass);
  ArModeIO io(&robot, "io", 'i', 'I');
  ArModeConfig cfg(&robot, "report robot config", 'o' , 'O');
  ArModeCommand command(&robot, "command", 'd', 'D');
  ArModeCamera camera(&robot, "camera", 'c', 'C');
  ArModePosition position(&robot, "position", 'p', 'P', &gyro);
  ArModeSonar sonar(&robot, "sonar", 's', 'S');
  ArModeBumps bumps(&robot, "bumps", 'b', 'B');
  ArModeLaser laser(&robot, "laser", 'l', 'L');
  ArModeWander wander(&robot, "wander", 'w', 'W');
  ArModeUnguardedTeleop unguardedTeleop(&robot, "unguarded teleop", 'u', 'U');
  ArModeTeleop teleop(&robot, "teleop", 't', 'T');

  // activate the default mode
  teleop.activate();

  // turn on the motors
  robot.comInt(ArCommands::ENABLE, 1);

  robot.unlock();

  // Block execution of the main thread here and wait for the robot's task loop
  // thread to exit (e.g. by robot disconnecting, escape key pressed, or OS
  // signal)
  robot.waitForRunExit();

  Aria::exit(0);
  return 0;
}
/**
 * Entry point for the rtabmap odometry viewer tool.
 *
 * Parses command-line options selecting the odometry approach (feature-based
 * BOW/mono, optical flow, or ICP), the input source (a recorded database or a
 * live RGB-D/stereo camera driver), builds the corresponding Odometry object,
 * and runs the Qt-based odometry viewer until the user quits.
 *
 * NOTE(review): `odom` is handed to OdometryThread and `camera` (live-camera
 * branch) to CameraThread — presumably those take ownership; verify before
 * adding explicit deletes.
 */
int main (int argc, char * argv[])
{
	ULogger::setType(ULogger::kTypeConsole);
	ULogger::setLevel(ULogger::kInfo);
	ULogger::setPrintTime(false);
	ULogger::setPrintWhere(false);

	// parse arguments -- defaults come from rtabmap's Parameters registry
	float rate = 0.0;
	std::string inputDatabase;
	int driver = 0;                // camera driver index, 0..7 (see below)
	int odomType = rtabmap::Parameters::defaultOdomFeatureType();
	bool icp = false;
	bool flow = false;
	bool mono = false;
	int nnType = rtabmap::Parameters::defaultOdomBowNNType();
	float nndr = rtabmap::Parameters::defaultOdomBowNNDR();
	float distance = rtabmap::Parameters::defaultOdomInlierDistance();
	int maxWords = rtabmap::Parameters::defaultOdomMaxFeatures();
	int minInliers = rtabmap::Parameters::defaultOdomMinInliers();
	float maxDepth = rtabmap::Parameters::defaultOdomMaxDepth();
	int iterations = rtabmap::Parameters::defaultOdomIterations();
	int resetCountdown = rtabmap::Parameters::defaultOdomResetCountdown();
	int decimation = 4;            // ICP-only settings
	float voxel = 0.005;
	int samples = 10000;
	float ratio = 0.7f;
	int maxClouds = 10;
	int briefBytes = rtabmap::Parameters::defaultBRIEFBytes();
	int fastThr = rtabmap::Parameters::defaultFASTThreshold();
	float sec = 0.0f;
	bool gpu = false;
	int localHistory = rtabmap::Parameters::defaultOdomBowLocalHistorySize();
	bool p2p = rtabmap::Parameters::defaultOdomPnPEstimation();

	// Each value-taking option consumes argv[i+1]; showUsage() exits on any
	// malformed or out-of-range value.
	for(int i=1; i<argc; ++i)
	{
		if(strcmp(argv[i], "-driver") == 0) { ++i; if(i < argc) { driver = std::atoi(argv[i]); if(driver < 0 || driver > 7) { showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-o") == 0) { ++i; if(i < argc) { odomType = std::atoi(argv[i]); if(odomType < 0 || odomType > 6) { showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-nn") == 0) { ++i; if(i < argc) { nnType = std::atoi(argv[i]); if(nnType < 0 || nnType > 4) { showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-nndr") == 0) { ++i; if(i < argc) { nndr = uStr2Float(argv[i]); if(nndr < 0.0f) { showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-hz") == 0) { ++i; if(i < argc) { rate = uStr2Float(argv[i]); if(rate < 0) { showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-db") == 0) { ++i; if(i < argc) { inputDatabase = argv[i]; if(UFile::getExtension(inputDatabase).compare("db") != 0) { printf("Database path (%s) should end with \"db\" \n", inputDatabase.c_str()); showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-clouds") == 0) { ++i; if(i < argc) { maxClouds = std::atoi(argv[i]); if(maxClouds < 0) { showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-sec") == 0) { ++i; if(i < argc) { sec = uStr2Float(argv[i]); if(sec < 0.0f) { showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-in") == 0) { ++i; if(i < argc) { distance = uStr2Float(argv[i]); if(distance <= 0) { showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-max") == 0) { ++i; if(i < argc) { maxWords = std::atoi(argv[i]); if(maxWords < 0) { showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-min") == 0) { ++i; if(i < argc) { minInliers = std::atoi(argv[i]); if(minInliers < 0) { showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-depth") == 0) { ++i; if(i < argc) { maxDepth = uStr2Float(argv[i]); if(maxDepth < 0) { showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-i") == 0) { ++i; if(i < argc) { iterations = std::atoi(argv[i]); if(iterations <= 0) { showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-reset") == 0) { ++i; if(i < argc) { resetCountdown = std::atoi(argv[i]); if(resetCountdown < 0) { showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-d") == 0) { ++i; if(i < argc) { decimation = std::atoi(argv[i]); if(decimation < 1) { showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-v") == 0) { ++i; if(i < argc) { voxel = uStr2Float(argv[i]); if(voxel < 0) { showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-s") == 0) { ++i; if(i < argc) { samples = std::atoi(argv[i]); if(samples < 0) { showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-cr") == 0) { ++i; if(i < argc) { ratio = uStr2Float(argv[i]); if(ratio < 0.0f) { showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-gpu") == 0) { gpu = true; continue; }
		if(strcmp(argv[i], "-lh") == 0) { ++i; if(i < argc) { localHistory = std::atoi(argv[i]); if(localHistory < 0) { showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-brief_bytes") == 0) { ++i; if(i < argc) { briefBytes = std::atoi(argv[i]); if(briefBytes < 1) { showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-fast_thr") == 0) { ++i; if(i < argc) { fastThr = std::atoi(argv[i]); if(fastThr < 1) { showUsage(); } } else { showUsage(); } continue; }
		if(strcmp(argv[i], "-icp") == 0) { icp = true; continue; }
		if(strcmp(argv[i], "-flow") == 0) { flow = true; continue; }
		if(strcmp(argv[i], "-mono") == 0) { mono = true; continue; }
		if(strcmp(argv[i], "-p2p") == 0) { p2p = true; continue; }
		if(strcmp(argv[i], "-debug") == 0) { ULogger::setLevel(ULogger::kDebug); ULogger::setPrintTime(true); ULogger::setPrintWhere(true); continue; }

		printf("Unrecognized option : %s\n", argv[i]);
		showUsage();
	}

	// Sanity check: binary descriptors (odomType > 1) cannot be matched with a
	// KD-tree, and float descriptors cannot be matched with LSH.
	if(odomType > 1 && nnType == rtabmap::VWDictionary::kNNFlannKdTree)
	{
		UERROR("You set \"-o %d\" (binary descriptor), you must use \"-nn 2\" (any \"-nn\" other than kNNFlannKdTree)", odomType);
		showUsage();
	}
	else if(odomType <= 1 && nnType == rtabmap::VWDictionary::kNNFlannLSH)
	{
		UERROR("You set \"-o %d\" (float descriptor), you must use \"-nn 1\" (any \"-nn\" other than kNNFlannLSH)", odomType);
		showUsage();
	}

	if(inputDatabase.size())
	{
		UINFO("Using database input \"%s\"", inputDatabase.c_str());
	}
	else
	{
		UINFO("Using OpenNI camera");
	}

	// Human-readable names for the log output below.
	std::string odomName;
	if(odomType == 0)
	{
		odomName = "SURF";
	}
	else if(odomType == 1)
	{
		odomName = "SIFT";
	}
	else if(odomType == 2)
	{
		odomName = "ORB";
	}
	else if(odomType == 3)
	{
		odomName = "FAST+FREAK";
	}
	else if(odomType == 4)
	{
		odomName = "FAST+BRIEF";
	}
	else if(odomType == 5)
	{
		odomName = "GFTT+FREAK";
	}
	else if(odomType == 6)
	{
		odomName = "GFTT+BRIEF";
	}
	else if(odomType == 7)
	{
		odomName = "BRISK";
	}

	if(icp)
	{
		odomName= "ICP";
	}
	if(flow)
	{
		odomName= "Optical Flow";
	}

	std::string nnName;
	if(nnType == 0)
	{
		nnName = "kNNFlannLinear";
	}
	else if(nnType == 1)
	{
		nnName = "kNNFlannKdTree";
	}
	else if(nnType == 2)
	{
		nnName= "kNNFlannLSH";
	}
	else if(nnType == 3)
	{
		nnName= "kNNBruteForce";
	}
	else if(nnType == 4)
	{
		nnName= "kNNBruteForceGPU";
	}

	UINFO("Odometry used = %s", odomName.c_str());
	UINFO("Camera rate = %f Hz", rate);
	UINFO("Maximum clouds shown = %d", maxClouds);
	UINFO("Delay = %f s", sec);
	UINFO("Max depth = %f", maxDepth);
	UINFO("Reset odometry coutdown = %d", resetCountdown);

	QApplication app(argc, argv);

	// Build the odometry object and its parameter map from the chosen approach.
	rtabmap::Odometry * odom = 0;
	rtabmap::ParametersMap parameters;
	parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomMaxDepth(), uNumber2Str(maxDepth)));
	parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomResetCountdown(), uNumber2Str(resetCountdown)));
	if(!icp)
	{
		// Feature-based odometry (optical flow, mono, or bag-of-words).
		UINFO("Min inliers = %d", minInliers);
		UINFO("Inlier maximum correspondences distance = %f", distance);
		UINFO("RANSAC iterations = %d", iterations);
		UINFO("Max features = %d", maxWords);
		UINFO("GPU = %s", gpu?"true":"false");
		parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomInlierDistance(), uNumber2Str(distance)));
		parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomMinInliers(), uNumber2Str(minInliers)));
		parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomIterations(), uNumber2Str(iterations)));
		parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomMaxFeatures(), uNumber2Str(maxWords)));
		parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomFeatureType(), uNumber2Str(odomType)));
		if(odomType == 0)
		{
			parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kSURFGpuVersion(), uBool2Str(gpu)));
		}
		if(odomType == 2)
		{
			parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kORBGpu(), uBool2Str(gpu)));
		}
		if(odomType == 3 || odomType == 4)
		{
			UINFO("FAST threshold = %d", fastThr);
			parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kFASTThreshold(), uNumber2Str(fastThr)));
			parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kFASTGpu(), uBool2Str(gpu)));
		}
		if(odomType == 4 || odomType == 6)
		{
			UINFO("BRIEF bytes = %d", briefBytes);
			parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kBRIEFBytes(), uNumber2Str(briefBytes)));
		}

		if(flow)
		{
			// Optical Flow
			odom = new rtabmap::OdometryOpticalFlow(parameters);
		}
		else
		{
			//BOW
			UINFO("Nearest neighbor = %s", nnName.c_str());
			UINFO("Nearest neighbor ratio = %f", nndr);
			UINFO("Local history = %d", localHistory);
			parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomBowNNType(), uNumber2Str(nnType)));
			parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomBowNNDR(), uNumber2Str(nndr)));
			parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomBowLocalHistorySize(), uNumber2Str(localHistory)));

			if(mono)
			{
				parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomPnPFlags(), uNumber2Str(cv::ITERATIVE)));
				parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomPnPReprojError(), "4.0"));
				parameters.insert(rtabmap::ParametersPair(rtabmap::Parameters::kOdomIterations(), "100"));
				odom = new rtabmap::OdometryMono(parameters);
			}
			else
			{
				odom = new rtabmap::OdometryBOW(parameters);
			}
		}
	}
	else if(icp) // ICP
	{
		UINFO("ICP maximum correspondences distance = %f", distance);
		UINFO("ICP iterations = %d", iterations);
		UINFO("Cloud decimation = %d", decimation);
		UINFO("Cloud voxel size = %f", voxel);
		UINFO("Cloud samples = %d", samples);
		UINFO("Cloud correspondence ratio = %f", ratio);
		UINFO("Cloud point to plane = %s", p2p?"false":"true");
		odom = new rtabmap::OdometryICP(decimation, voxel, samples, distance, iterations, ratio, !p2p);
	}

	// Wire the odometry thread and viewer together through the events manager.
	rtabmap::OdometryThread odomThread(odom);
	rtabmap::OdometryViewer odomViewer(maxClouds, 2, 0.0, 50);
	UEventsManager::addHandler(&odomThread);
	UEventsManager::addHandler(&odomViewer);

	odomViewer.setWindowTitle("Odometry view");
	odomViewer.resize(1280, 480+QPushButton().minimumHeight());

	if(inputDatabase.size())
	{
		// Replay a recorded database at the requested rate.
		rtabmap::DBReader camera(inputDatabase, rate, true);
		if(camera.init())
		{
			odomThread.start();

			if(sec > 0)
			{
				uSleep(sec*1000);
			}

			camera.start();

			app.exec();

			camera.join(true);
			odomThread.join(true);
		}
	}
	else
	{
		// Live camera: pick the driver selected with -driver (0..7). All share a
		// fixed optical-to-world transform t.
		rtabmap::CameraRGBD * camera = 0;
		rtabmap::Transform t=rtabmap::Transform(0,0,1,0,
												-1,0,0,0,
												0,-1,0,0);
		if(driver == 0)
		{
			camera = new rtabmap::CameraOpenni("", rate, t);
		}
		else if(driver == 1)
		{
			if(!rtabmap::CameraOpenNI2::available())
			{
				UERROR("Not built with OpenNI2 support...");
				exit(-1);
			}
			camera = new rtabmap::CameraOpenNI2("", rate, t);
		}
		else if(driver == 2)
		{
			if(!rtabmap::CameraFreenect::available())
			{
				UERROR("Not built with Freenect support...");
				exit(-1);
			}
			camera = new rtabmap::CameraFreenect(0, rate, t);
		}
		else if(driver == 3)
		{
			if(!rtabmap::CameraOpenNICV::available())
			{
				UERROR("Not built with OpenNI from OpenCV support...");
				exit(-1);
			}
			camera = new rtabmap::CameraOpenNICV(false, rate, t);
		}
		else if(driver == 4)
		{
			if(!rtabmap::CameraOpenNICV::available())
			{
				UERROR("Not built with OpenNI from OpenCV support...");
				exit(-1);
			}
			camera = new rtabmap::CameraOpenNICV(true, rate, t);
		}
		else if(driver == 5)
		{
			if(!rtabmap::CameraFreenect2::available())
			{
				UERROR("Not built with Freenect2 support...");
				exit(-1);
			}
			camera = new rtabmap::CameraFreenect2(0, rtabmap::CameraFreenect2::kTypeRGBDepthSD, rate, t);
		}
		else if(driver == 6)
		{
			if(!rtabmap::CameraStereoDC1394::available())
			{
				UERROR("Not built with dc1394 support...");
				exit(-1);
			}
			camera = new rtabmap::CameraStereoDC1394(rate, t);
		}
		else if(driver == 7)
		{
			if(!rtabmap::CameraStereoFlyCapture2::available())
			{
				UERROR("Not built with FlyCapture2/Triclops support...");
				exit(-1);
			}
			camera = new rtabmap::CameraStereoFlyCapture2(rate, t);
		}
		else
		{
			UFATAL("Camera driver (%d) not found!", driver);
		}

		//pcl::console::setVerbosityLevel(pcl::console::L_DEBUG);

		if(camera->init())
		{
			if(camera->isCalibrated())
			{
				rtabmap::CameraThread cameraThread(camera);

				odomThread.start();
				cameraThread.start();

				odomViewer.exec();

				cameraThread.join(true);
				odomThread.join(true);
			}
			else
			{
				printf("The camera is not calibrated! You should calibrate the camera first.\n");
				delete camera;
			}
		}
		else
		{
			printf("Failed to initialize the camera! Please select another driver (see \"--help\").\n");
			delete camera;
		}
	}

	return 0;
}
// Unit test: draw-batch rebuilds must be triggered only by changes that
// actually affect batching. A camera matrix update must NOT rebuild batches;
// changing a light's shadow collection must rebuild them exactly once, and
// re-setting the same collection data must not rebuild again.
static void CameraAndLightTest()
{
    // Minimal Hydra setup: render delegate, render index, and a unit-test
    // scene delegate feeding that index.
    HdStRenderDelegate renderDelegate;
    std::unique_ptr<HdRenderIndex> index(HdRenderIndex::New(&renderDelegate));
    TF_VERIFY(index);
    std::unique_ptr<Hdx_UnitTestDelegate> delegate(
        new Hdx_UnitTestDelegate(index.get()));
    HdChangeTracker& tracker = index->GetChangeTracker();

    // Perf logging must be enabled so VERIFY_PERF_COUNT below can read the
    // rebuildBatches counter.
    HdPerfLog& perfLog = HdPerfLog::GetInstance();
    perfLog.Enable();

    // One render pass over hull-repr geometry, driven by a single test task.
    HdRprimCollection collection(HdTokens->geometry, HdTokens->hull);
    HdRenderPassStateSharedPtr renderPassState(new HdRenderPassState());
    HdRenderPassSharedPtr renderPass(
        new HdSt_RenderPass(index.get(), collection));
    HdEngine engine;

    HdTaskSharedPtr drawTask = boost::make_shared<Hd_TestTask>(renderPass,
                                                               renderPassState);
    HdTaskSharedPtrVector tasks = { drawTask };

    // Scene content: one translated cube, a camera, and a light whose shadow
    // collection initially matches the draw collection (geometry/hull).
    GfMatrix4d tx(1.0f);
    tx.SetRow(3, GfVec4f(5, 0, 5, 1.0));
    SdfPath cube("geometry");
    delegate->AddCube(cube, tx);

    SdfPath camera("camera");
    SdfPath light("light");

    delegate->AddCamera(camera);
    delegate->AddLight(light, GlfSimpleLight());
    delegate->SetLight(light, HdStLightTokens->shadowCollection,
                       VtValue(HdRprimCollection(HdTokens->geometry,
                                                 HdTokens->hull)));

    // First execution builds the batches once.
    engine.Execute(*index, tasks);

    VERIFY_PERF_COUNT(HdPerfTokens->rebuildBatches, 1);

    // Update camera matrix
    delegate->SetCamera(camera, GfMatrix4d(2), GfMatrix4d(2));
    tracker.MarkSprimDirty(camera, HdStCamera::DirtyViewMatrix);
    tracker.MarkSprimDirty(camera, HdStCamera::DirtyProjMatrix);

    engine.Execute(*index, tasks);

    // batch should not be rebuilt (camera changes don't affect batching)
    VERIFY_PERF_COUNT(HdPerfTokens->rebuildBatches, 1);

    // Update shadow collection (hull -> refined repr)
    delegate->SetLight(light, HdStLightTokens->shadowCollection,
                       VtValue(HdRprimCollection(HdTokens->geometry,
                                                 HdTokens->refined)));
    tracker.MarkSprimDirty(light, HdStLight::DirtyCollection);

    engine.Execute(*index, tasks);

    // batch rebuilt: the shadow collection changed what must be drawn
    VERIFY_PERF_COUNT(HdPerfTokens->rebuildBatches, 2);

    // Update shadow collection again with the same data; a value-equal
    // collection must be detected and not cause another rebuild.
    delegate->SetLight(light, HdStLightTokens->shadowCollection,
                       VtValue(HdRprimCollection(HdTokens->geometry,
                                                 HdTokens->refined)));
    tracker.MarkSprimDirty(light, HdStLight::DirtyCollection);

    engine.Execute(*index, tasks);

    // batch should not be rebuilt
    VERIFY_PERF_COUNT(HdPerfTokens->rebuildBatches, 2);
}
void keyb(unsigned char key) #endif { static short fillCutPart = 0, drawFill = 1, drawStroke = 1, useBezier = 1; switch (key) { #ifndef USE_SDL case 'C': case 'c': #else case SDL_SCANCODE_C: #endif fillCutPart=!fillCutPart; glUniform1i(glGetUniformLocation(program,"fillCutPart"), fillCutPart); break; #ifndef USE_SDL case 'P': case 'p': #else case SDL_SCANCODE_MENU: case SDL_SCANCODE_P: #endif useBezier=!useBezier; glUniform1i(glGetUniformLocation(program,"useBezier"), useBezier); break; #ifndef USE_SDL case 'F': case 'f': #else case SDL_SCANCODE_F: #endif drawFill=!drawFill; glUniform1i(glGetUniformLocation(program,"drawFill"), drawFill); break; #ifndef USE_SDL case 'S': case 's': #else case SDL_SCANCODE_S: #endif drawStroke=!drawStroke; glUniform1i(glGetUniformLocation(program,"drawStroke"), drawStroke); break; #ifndef USE_SDL case 'Z': case 'z': #else case SDL_SCANCODE_Z: case SDL_SCANCODE_VOLUMEDOWN: #endif Camera.dist*= 1.1; camera(); break; #ifndef USE_SDL case 'X': case 'x': #else case SDL_SCANCODE_X: case SDL_SCANCODE_VOLUMEUP: #endif Camera.dist*= 0.9; camera(); break; #ifndef USE_SDL case 27: // Escape performanceReport(); exit(0); break; #endif } }
void GraphicsView::paintGL() { if(!d->scene){ return; } d->shader->bind(); // clear qglClearColor(d->backgroundColor); glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // setup camera (if necessary) if(camera()->changed()){ glMatrixMode(GL_MODELVIEW); Vector3f f = camera()->direction(); Vector3f s = f.cross(camera()->upVector()); Vector3f u = s.cross(f); Eigen::Matrix<float, 4, 4> transform; transform << s.x(), s.y(), s.z(), 0.0f, u.x(), u.y(), u.z(), 0.0f, -f.x(), -f.y(), -f.z(), 0.0f, 0.0f, 0.0f, 0.0f, 1.0f; d->modelViewTransform = GraphicsTransform(transform); d->modelViewTransform *= GraphicsTransform::translation(-camera()->position()); glLoadMatrixf(d->modelViewTransform.data()); camera()->setChanged(false); } // draw items GraphicsPainter painter; QList<GraphicsItem *> nonOpaqueItems; foreach(GraphicsItem *item, scene()->items()){ if(!item->isVisible()) continue; if(!item->isOpaque()){ nonOpaqueItems.append(item); } else{ glPushMatrix(); GraphicsTransform transform = item->transform(); glMultMatrixf(transform.data()); painter.setMaterial(item->material()); item->paint(&painter); glPopMatrix(); } } if(!nonOpaqueItems.isEmpty()){ glEnable(GL_BLEND); foreach(GraphicsItem *item, nonOpaqueItems){ glPushMatrix(); GraphicsTransform transform = item->transform(); glMultMatrixf(transform.data()); painter.setMaterial(item->material()); item->paint(&painter); glPopMatrix(); }
// Builds the demo scene: five physics-driven paper birds (the camera rides
// the first one), a textured room interior (walls, floor, roof, windows,
// door, fireplace), an outdoor backdrop, and a few textured cubes.
//
// NOTE(review): every Material/Texture/Node/Mesh created with raw `new` here
// is never deleted in this function — presumably the scene graph / components
// take ownership; confirm, otherwise these leak.
// NOTE(review): floorMesh and paperPlaneMesh are stack locals handed to
// MeshRenderer — this is only safe if MeshRenderer copies the mesh (or its
// GPU handles); confirm it does not keep a reference.
void Scene02::init(const Window& window)
{
    // Perspective camera: 50 FOV, window aspect ratio, near 0.1, far 100.
    Camera camera(50.0f, window.getAspectRatio(), 0.1f, 100.0f);
    CameraComponent* cameraComponent = new CameraComponent(camera);

    // One bounding sphere per bird, spaced 1 unit apart on x; all share the
    // same second argument (0.1, 0.1, 0) — presumably an initial velocity,
    // TODO confirm against PhysicsObject's constructor.
    PhysicsEngine physicsEngine;
    PhysicsObject physicsObject1(new BoundingSphere(glm::vec3(-1.0, 0.5, -5.0), 0.2f), glm::vec3(0.1f, 0.1f, 0.0f), true);
    PhysicsObject physicsObject2(new BoundingSphere(glm::vec3(-2.0, 0.5, -5.0), 0.2f), glm::vec3(0.1f, 0.1f, 0.0f), true);
    PhysicsObject physicsObject3(new BoundingSphere(glm::vec3(-3.0, 0.5, -5.0), 0.2f), glm::vec3(0.1f, 0.1f, 0.0f), true);
    PhysicsObject physicsObject4(new BoundingSphere(glm::vec3(-4.0, 0.5, -5.0), 0.2f), glm::vec3(0.1f, 0.1f, 0.0f), true);
    PhysicsObject physicsObject5(new BoundingSphere(glm::vec3(-5.0, 0.5, -5.0), 0.2f), glm::vec3(0.1f, 0.1f, 0.0f), true);
    //PhysicsObject physicsObject6(new BoundingSphere(glm::vec3(-6.0, 0.5, -5.0), 0.2f), glm::vec3(0.1f, 0.1f, 0.0f), true);
    //PhysicsObject physicsObject6(new BoundingSphere(glm::vec3(3.0, 0.0, -5.0), 0.1f), glm::vec3(0.0f, 0.0f, 0.0f), false);
    physicsEngine.addObject(physicsObject1);
    physicsEngine.addObject(physicsObject2);
    physicsEngine.addObject(physicsObject3);
    physicsEngine.addObject(physicsObject4);
    physicsEngine.addObject(physicsObject5);
    //physicsEngine.addObject(physicsObject6);

    // The engine is wrapped in a component on its own node; birds below point
    // back into it via getObject(index), so indices must match add order.
    PhysicsEngineComponent* physicsEngineComponent = new PhysicsEngineComponent(physicsEngine);
    Node* physicsEngineNode = new Node;
    physicsEngineNode->addComponent(physicsEngineComponent);
    addToScene(physicsEngineNode);

    // A single quad mesh reused for every flat surface; texcoords are set to
    // cover the whole texture once.
    IndexedMesh floorIndexedMesh("models/plane.obj");
    floorIndexedMesh.m_texCoords.push_back( glm::vec2(1.0, 1.0) );
    floorIndexedMesh.m_texCoords.push_back( glm::vec2(1.0, 0.0) );
    floorIndexedMesh.m_texCoords.push_back( glm::vec2(0.0, 0.0) );
    floorIndexedMesh.m_texCoords.push_back( glm::vec2(0.0, 1.0) );
    Mesh floorMesh;
    floorMesh.init(floorIndexedMesh);
    Material* floor = new Material( new Texture("textures/snow.jpg") , glm::vec3(1.0, 1.0, 1.0));

    // adding interior
    Material* back = new Material( new Texture("textures/Fireplace.jpg") , glm::vec3(1.0, 1.0, 1.0));
    Node* backNode = new Node(glm::vec3(-10.0, 8.0, -10.0), glm::vec3(-90.0f, 0.0f, 0.0f), glm::vec3(20.0f, 1.0f, 10.0f));
    backNode->addComponent(new MeshRenderer(floorMesh, *back));
    addToScene(backNode);

    // Window texture is rendered as two quads 0.02 apart on x so it is
    // visible from both sides.
    Material* win = new Material( new Texture("textures/s.jpg") , glm::vec3(1.0, 1.0, 1.0));
    Node* winNode = new Node(glm::vec3(9.99, 10.0, -10.0), glm::vec3(0.0f, -90.0f, -90.0f), glm::vec3(5.0f, 1.0f, 5.0f));
    winNode->addComponent(new MeshRenderer(floorMesh, *win));
    addToScene(winNode);
    Node* winNode2 = new Node(glm::vec3(10.01, 10.0, -10.0), glm::vec3(0.0f, -90.0f, -90.0f), glm::vec3(5.0f, 1.0f, 5.0f));
    winNode2->addComponent(new MeshRenderer(floorMesh, *win));
    addToScene(winNode2);

    Material* walls = new Material( new Texture("textures/carpet5.jpg") , glm::vec3(1.0, 1.0, 1.0));
    Node* roofNode = new Node(glm::vec3(-10.0, 18.0, -10.0), glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(20.0f, 1.0f, 20.0f));
    roofNode->addComponent(new MeshRenderer(floorMesh, *walls));
    addToScene(roofNode);
    Node* rwallNode = new Node(glm::vec3(10.0, 8.0, -10.0), glm::vec3(0.0f, 0.0f, -90.0f), glm::vec3(13.0f, 1.0f, 20.0f));
    rwallNode->addComponent(new MeshRenderer(floorMesh, *walls));
    addToScene(rwallNode);

    Material* fwall = new Material( new Texture("textures/Door.jpg") , glm::vec3(1.0, 1.0, 1.0));
    Node* fwallNode = new Node(glm::vec3(-30.0, 8.0, -10.0), glm::vec3(0.0f, 90.0f, -90.0f), glm::vec3(13.0f, 1.0f, 20.0f));
    fwallNode->addComponent(new MeshRenderer(floorMesh, *fwall));
    addToScene(fwallNode);

    // Interior floor sits just above the outdoor ground plane (y = -0.99
    // vs -1.0) to avoid z-fighting.
    Material* flor = new Material( new Texture("textures/carpet4.jpg") , glm::vec3(1.0, 1.0, 1.0));
    Node* florNode = new Node(glm::vec3(-10.0, -0.99, -10.0), glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(20.0f, 1.0f, 20.0f));
    florNode->addComponent(new MeshRenderer(floorMesh, *flor));
    addToScene(florNode);

    // Outdoor backdrop: three adjacent quads sharing one material.
    Material* bg1 = new Material( new Texture("textures/n.jpg") , glm::vec3(1.0, 1.0, 1.0));
    Node* bgNode = new Node(glm::vec3(0.0, 8.0, -10.0), glm::vec3(-90.0f, 0.0f, 0.0f), glm::vec3(30.0f, 1.0f, 10.0f));
    bgNode->addComponent(new MeshRenderer(floorMesh, *bg1));
    addToScene(bgNode);
    Node* bgNode2 = new Node(glm::vec3(30.0, 8.0, -10.0), glm::vec3(-90.0f, 0.0f, 0.0f), glm::vec3(30.0f, 1.0f, 10.0f));
    bgNode2->addComponent(new MeshRenderer(floorMesh, *bg1));
    addToScene(bgNode2);
    Node* bgNode3 = new Node(glm::vec3(90.0, 8.0, -10.0), glm::vec3(-90.0f, 0.0f, 0.0f), glm::vec3(30.0f, 1.0f, 10.0f));
    bgNode3->addComponent(new MeshRenderer(floorMesh, *bg1));
    addToScene(bgNode3);
    // finishing interior

    Node* floorNode = new Node(glm::vec3(0.0, -1.0, 0.0), glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(50.0f, 1.0f, 10.0f));
    floorNode->addComponent(new MeshRenderer(floorMesh, *floor));
    addToScene(floorNode);

    Material* bg = new Material( new Texture("textures/bg2.jpg") , glm::vec3(1.0, 1.0, 1.0));
    Node* bgNode1 = new Node(glm::vec3(0.0, 2.5, -10.0), glm::vec3(90.0f, 0.0f, 0.0f), glm::vec3(50.0f, 1.0f, 5.0f));
    bgNode1->addComponent(new MeshRenderer(floorMesh, *bg));
    addToScene(bgNode1);

    Mesh* cubeMesh = new Mesh;
    cubeMesh->initCube();
    //Material* building = new Material( new Texture("textures/igloo.jpg") , glm::vec3(1.0, 1.0, 1.0));
    Material* gift = new Material( new Texture("textures/xmas.jpg") , glm::vec3(1.0, 1.0, 1.0));

    // Node* cubeNode1 = new Node(glm::vec3(3.0, 0.0, -5.0), glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(1.0f, 1.0f, 1.0f));
    // cubeNode1->addComponent(new MeshRenderer(*cubeMesh, *building));
    // //cubeNode1->addComponent(new PhysicsObjectComponent(&physicsEngineComponent->getPhysicsEngine().getObject(5)));
    // addToScene(cubeNode1);
    //
    // Node* cubeNode2 = new Node(glm::vec3(13.0, 0.0, -6.0), glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(1.0f, 1.0f, 1.0f));
    // cubeNode2->addComponent(new MeshRenderer(*cubeMesh, *building));
    // addToScene(cubeNode2);

    Material* building = new Material( new Texture("textures/e.jpg") , glm::vec3(1.0, 1.0, 1.0));
    Material* outer = new Material( new Texture("textures/igloo.jpg") , glm::vec3(1.0, 1.0, 1.0));
    Node* cubeNode1 = new Node(glm::vec3(3.0, 0.0, -6.0), glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(1.0f, 1.0f, 1.0f));
    cubeNode1->addComponent(new MeshRenderer(*cubeMesh, *building));
    addToScene(cubeNode1);
    Node* cubeNode2 = new Node(glm::vec3(13.0, 0.0, -6.0), glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(1.0f, 1.0f, 1.0f));
    cubeNode2->addComponent(new MeshRenderer(*cubeMesh, *outer));
    addToScene(cubeNode2);
    Node* cubeNode3 = new Node(glm::vec3(23.0, 0.0, -5.0), glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(1.0f, 1.0f, 1.0f));
    cubeNode3->addComponent(new MeshRenderer(*cubeMesh, *outer));
    addToScene(cubeNode3);
    Node* cubeNode4 = new Node(glm::vec3(-3.0, 0.0, -5.0), glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(1.0f, 1.0f, 1.0f));
    cubeNode4->addComponent(new MeshRenderer(*cubeMesh, *building));
    addToScene(cubeNode4);

    // Node* cubeNode3 = new Node(glm::vec3(23.0, 0.0, -6.0), glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(1.0f, 1.0f, 1.0f));
    // cubeNode3->addComponent(new MeshRenderer(*cubeMesh, *building));
    // addToScene(cubeNode3);

    Node* xmasNode = new Node(glm::vec3(33.0, 0.0, -6.0), glm::vec3(90.0f, 0.0f, 0.0f), glm::vec3(3.0f, 1.0f, 1.0f));
    xmasNode->addComponent(new MeshRenderer(floorMesh, *gift));
    addToScene(xmasNode);

    // Paper birds: same mesh, per-bird paper tint, each bound to one of the
    // physics objects created above (index 0..4, matching add order).
    IndexedMesh paperPlaneIndexedMesh("models/paperbird.obj");
    Mesh paperPlaneMesh;
    paperPlaneMesh.init(paperPlaneIndexedMesh);

    Material* yellowPaper = new Material( new Texture("textures/paper.jpg") , glm::vec3(0.95, 0.95, 0.25));
    Node* birdNode1 = new Node(glm::vec3(-1.0, 0.5, -5.0), glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.5f, 0.5f, 0.5f));
    birdNode1->addComponent(new MeshRenderer(paperPlaneMesh, *yellowPaper));
    birdNode1->addComponent(new PhysicsObjectComponent(&physicsEngineComponent->getPhysicsEngine().getObject(0)));
    // The camera follows the first (yellow) bird.
    birdNode1->addComponent(cameraComponent);
    addToScene(birdNode1);

    Material* orangePaper = new Material( new Texture("textures/paper.jpg") , glm::vec3(0.93, 0.67, 0.09));
    Node* birdNode2 = new Node(glm::vec3(-2.0, 0.5, -5.0), glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.5f, 0.5f, 0.5f));
    birdNode2->addComponent(new MeshRenderer(paperPlaneMesh, *orangePaper));
    birdNode2->addComponent(new PhysicsObjectComponent(&physicsEngineComponent->getPhysicsEngine().getObject(1)));
    addToScene(birdNode2);

    Material* redPaper = new Material( new Texture("textures/paper.jpg") , glm::vec3(0.9, 0.28, 0.28));
    Node* birdNode3 = new Node(glm::vec3(-3.0, 0.5, -5.0), glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.5f, 0.5f, 0.5f));
    birdNode3->addComponent(new MeshRenderer(paperPlaneMesh, *redPaper));
    birdNode3->addComponent(new PhysicsObjectComponent(&physicsEngineComponent->getPhysicsEngine().getObject(2)));
    addToScene(birdNode3);

    Material* greenPaper = new Material( new Texture("textures/paper.jpg") , glm::vec3(0.43, 0.79, 0.43));
    Node* birdNode4 = new Node(glm::vec3(-4.0, 0.5, -5.0), glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.5f, 0.5f, 0.5f));
    birdNode4->addComponent(new MeshRenderer(paperPlaneMesh, *greenPaper));
    birdNode4->addComponent(new PhysicsObjectComponent(&physicsEngineComponent->getPhysicsEngine().getObject(3)));
    addToScene(birdNode4);

    Material* bluePaper = new Material( new Texture("textures/paper.jpg") , glm::vec3(0.27, 0.51, 0.78));
    Node* birdNode5 = new Node(glm::vec3(-5.0, 0.5, -5.0), glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.5f, 0.5f, 0.5f));
    birdNode5->addComponent(new MeshRenderer(paperPlaneMesh, *bluePaper));
    birdNode5->addComponent(new PhysicsObjectComponent(&physicsEngineComponent->getPhysicsEngine().getObject(4)));
    addToScene(birdNode5);

    // Material* whitePaper = new Material( new Texture("textures/paper.jpg") , glm::vec3(1.0, 1.0, 1.0));
    // Node* birdNode6 = new Node(glm::vec3(-6.0, 0.5, -5.0), glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.5f, 0.5f, 0.5f));
    // birdNode6->addComponent(new MeshRenderer(paperPlaneMesh, *whitePaper));
    // birdNode6->addComponent(new PhysicsObjectComponent(&physicsEngineComponent->getPhysicsEngine().getObject(5)));
    // addToScene(birdNode6);
}
void Painter::paint_3_2(float timef) { paint_3_1_scene(false, timef); paint_3_2_label(camera()->viewProjection(), timef); }
void View::mousePressEvent(QMouseEvent* event) { m_mousePos = event->pos(); camera()->setAnimatedRotation( QVector3D() ); }
Frustum& CameraProxy::frustum() { return camera()->frustum(); }