#include <SFML/Graphics.hpp>
#include <SFML/OpenGL.hpp>

// TomGine / Tracker headers from perception_blort (adjust the include paths to your checkout)
#include <blort/TomGine/tgCamera.h>
#include <blort/Tracker/TrackerModel.h>
#include <blort/Tracker/ModelLoader.h>
#include <blort/Tracker/Resources.h>

int main(int argc, char * argv[])
{
  sf::RenderWindow window(sf::VideoMode(640, 480), "OpenGL");

  // Basic OpenGL state: depth testing and back-face culling
  glEnable(GL_DEPTH_TEST);
  glDepthFunc(GL_LEQUAL);
  glEnable(GL_CULL_FACE);
  glCullFace(GL_BACK);

  g_Resources->SetShaderPath("/home/gergondet/ros/perception_blort/blort_ros/Tracker/shader/");

  // Camera intrinsics and pose (hard-coded here, normally read from the camera_info topic)
  TomGine::tgCamera::Parameter camPar;
  TomGine::tgCamera camera;
  camPar.width = 640;
  camPar.height = 480;
  camPar.fx = 568.168672;
  camPar.fy = 566.360327;
  camPar.cx = 349.631248;
  camPar.cy = 256.295344;
  camPar.k1 = -0.059636;
  camPar.k2 = 0.232352;
  camPar.k3 = 0;
  camPar.p1 = -0.003432;
  camPar.p2 = 0.017361;
  camPar.pos.x = 0.31188;
  camPar.pos.y = -0.3;
  camPar.pos.z = 0.243049;
  vec3 rotv;
  rotv.x = -1.917587;
  rotv.y = -0.453044;
  rotv.z = 0.389306;
  camPar.rot.fromRotVector(rotv);
  camera.Load(camPar);

  // Load the object model from a PLY file
  Tracking::TrackerModel * model = new Tracking::TrackerModel();
  Tracking::ModelLoader loader;
  loader.LoadPly(*model, "/home/gergondet/ros/perception_blort/blort_ros/Resources/ply/can.ply");

  bool running = true;
  while (running)
  {
    // handle events
    sf::Event event;
    while (window.pollEvent(event))
    {
      if (event.type == sf::Event::Closed)
      {
        // end the program
        running = false;
      }
      else if (event.type == sf::Event::Resized)
      {
        // adjust the viewport when the window is resized
        glViewport(0, 0, event.size.width, event.size.height);
      }
    }

    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    window.clear(sf::Color(0x77, 0x77, 0x77, 255));
    window.pushGLStates();
    window.popGLStates();

    // Render the model with the tracker camera active
    camera.Activate();
    model->drawPass();

    window.display();
  }

  delete model;
  return 0;
}
void BLORTObject::Display(sf::RenderTarget * app, unsigned int frameCount, sf::Clock & clock)
{
  if(!model)
  {
    /* TODO Get this from camera_info topic */
    camera.Load(manager.GetCameraParameter());
    model = new Tracking::TrackerModel();
    model->setBFC(false);
    Tracking::ModelLoader loader;
    loader.LoadPly(*model, filename.c_str());
    // Compute the model's axis-aligned bounding box from its vertices
    for(size_t i = 0; i < model->m_vertices.size(); ++i)
    {
      if(model->m_vertices[i].pos.x < bb.xmin) bb.xmin = model->m_vertices[i].pos.x;
      if(model->m_vertices[i].pos.x > bb.xmax) bb.xmax = model->m_vertices[i].pos.x;
      if(model->m_vertices[i].pos.y < bb.ymin) bb.ymin = model->m_vertices[i].pos.y;
      if(model->m_vertices[i].pos.y > bb.ymax) bb.ymax = model->m_vertices[i].pos.y;
      if(model->m_vertices[i].pos.z < bb.zmin) bb.zmin = model->m_vertices[i].pos.z;
      if(model->m_vertices[i].pos.z > bb.zmax) bb.zmax = model->m_vertices[i].pos.z;
    }
    bb.Init();
  }
#ifndef WIN32
  //if( (ros::Time::now() - last_update).sec < 2 )
#endif
  {
    // Name the lock so the mutex is actually held for the whole scope
    boost::mutex::scoped_lock lock(pose_mutex);
    camera.Activate();
    pose.Activate();
    // Restrict drawing to the image area centered in the window
    glViewport(vp.left, vp.bottom, vp.width, vp.height);
    glEnable(GL_SCISSOR_TEST);
    glScissor((wwidth - iwidth)/2, (wheight - iheight)/2, iwidth, iheight);
    Tracking::TrackerModel * m = model;
#ifdef HAS_CVEP_SUPPORT
    if( (ssvep_stim && ssvep_stim->DisplayActive(frameCount)) || (cvep_stim && cvep_stim->GetDisplay()) )
#else
    if( (ssvep_stim && ssvep_stim->DisplayActive(frameCount)) )
#endif
    {
#ifdef HAS_CVEP_SUPPORT
      if(cvep_stim)
      {
        glDisable(GL_TEXTURE_2D);
        glColor4f(color.r, color.g, color.b, color.a);
        m->drawFaces();
      }
      else
#endif
      {
        m->drawPass();
      }
    }
    else
    {
      glDisable(GL_TEXTURE_2D);
#ifdef HAS_CVEP_SUPPORT
      if(cvep_stim)
      {
        glColor4f(0, 0, 0, color.a);
      }
      else
#endif
      {
        glColor4f(color.r, color.g, color.b, color.a);
      }
      m->drawFaces();
    }
    if(highlight)
    {
      bb.Draw();
    }
    glDisable(GL_SCISSOR_TEST);
    pose.Deactivate();
  }
}