int
main (int argc, char** argv)
{
  camera_ = Camera::Ptr (new Camera ());
  scene_ = Scene::Ptr (new Scene ());
  range_likelihood_ = RangeLikelihood::Ptr (new RangeLikelihood (1, 1, 480, 640, scene_, 640));
  // range_likelihood_ = RangeLikelihood::Ptr (new RangeLikelihood (10, 10, 96, 96, scene_));
  // range_likelihood_ = RangeLikelihood::Ptr (new RangeLikelihood (1, 1, 480, 640, scene_));

  window_width_ = range_likelihood_->width () * 2;
  window_height_ = range_likelihood_->height ();

  glutInit (&argc, argv);
  glutInitDisplayMode (GLUT_DEPTH | GLUT_DOUBLE | GLUT_RGB);
  glutInitWindowPosition (10, 10);
  glutInitWindowSize (window_width_, window_height_);
  glutCreateWindow ("OpenGL range likelihood");

  initialize (argc, argv);

  glutReshapeFunc (on_reshape);
  glutDisplayFunc (display);
  glutIdleFunc (display);
  glutKeyboardFunc (on_keyboard);
  glutMouseFunc (on_mouse);
  glutMotionFunc (on_motion);
  glutPassiveMotionFunc (on_passive_motion);
  glutEntryFunc (on_entry);
  glutMainLoop ();
}
void
depthBufferToMM (const float* depth_buffer, unsigned short* depth_img)
{
  int npixels = range_likelihood_->getWidth () * range_likelihood_->getHeight ();
  // unsigned short* depth_img = new unsigned short[npixels];
  for (int y = 0; y < 480; ++y)
  {
    for (int x = 0; x < 640; ++x)
    {
      int i = y * 640 + x;
      int i_in = (480 - 1 - y) * 640 + x;  // flip up-down
      float zn = 0.7;
      float zf = 20.0;
      float d = depth_buffer[i_in];
      // Linearize the OpenGL depth value into metric depth (mm). Clamp in float before
      // the unsigned cast, so out-of-range values cannot wrap around.
      float z_mm = floor (1000 * (-zf * zn / ((zf - zn) * (d - zf / (zf - zn)))));
      unsigned short z_new;
      if (z_mm < 0)
        z_new = 0;
      // else if (z_mm > 65535) z_new = 65535;
      else if (z_mm > 5000)
        z_new = 0;
      else
        z_new = (unsigned short) z_mm;
      // if (z_new < 18000) { cout << z_new << " " << d << " " << x << "\n"; }
      depth_img[i] = z_new;
    }
  }
}
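// Added note (not part of the original tool): depthBufferToMM () above inverts the
// non-linear OpenGL depth value d in [0, 1] back to metric depth using the near/far
// planes zn = 0.7 and zf = 20.0. A minimal standalone sketch of the same linearization,
// kept separate so the formula can be sanity-checked on single values (the helper name
// is ours, not from the sources):
//
//   z = zf * zn / (zf - d * (zf - zn))      // d = 0 -> zn,  d = 1 -> zf
//
static float
linearizeDepth (float d, float zn = 0.7f, float zf = 20.0f)
{
  return zf * zn / (zf - d * (zf - zn));
}
// e.g. linearizeDepth (0.0f) gives 0.7 m and linearizeDepth (1.0f) gives 20.0 m.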
void
write_rgb_image (const uint8_t* rgb_buffer)
{
  int npixels = range_likelihood_->getWidth () * range_likelihood_->getHeight ();
  uint8_t* rgb_img = new uint8_t[npixels * 3];

  for (int y = 0; y < 480; ++y)
  {
    for (int x = 0; x < 640; ++x)
    {
      int px = y * 640 + x;
      int px_in = (480 - 1 - y) * 640 + x;  // flip up-down
      rgb_img[3 * px + 0] = rgb_buffer[3 * px_in + 0];
      rgb_img[3 * px + 1] = rgb_buffer[3 * px_in + 1];
      rgb_img[3 * px + 2] = rgb_buffer[3 * px_in + 2];
    }
  }

  // Write to file:
  IplImage* cv_ipl = cvCreateImage (cvSize (640, 480), 8, 3);
  cv::Mat cv_mat (cv_ipl);
  cv_mat.data = rgb_img;

  // cv::imwrite ("rgb_image.png", cv_mat);
  std::stringstream ss;
  ss << "rgb_image.png";
  cv::imwrite (ss.str (), cv_mat);

  delete [] rgb_img;
}
void
display_score_image (const float* score_buffer)
{
  int npixels = range_likelihood_->getWidth () * range_likelihood_->getHeight ();
  uint8_t* score_img = new uint8_t[npixels * 3];

  float min_score = score_buffer[0];
  float max_score = score_buffer[0];
  for (int i = 1; i < npixels; i++)
  {
    if (score_buffer[i] < min_score) min_score = score_buffer[i];
    if (score_buffer[i] > max_score) max_score = score_buffer[i];
  }

  for (int i = 0; i < npixels; i++)
  {
    float d = (score_buffer[i] - min_score) / (max_score - min_score);
    score_img[3 * i + 0] = 0;
    score_img[3 * i + 1] = static_cast<unsigned char> (d * 255);
    score_img[3 * i + 2] = 0;
  }

  textured_quad_->setTexture (score_img);
  textured_quad_->render ();

  delete [] score_img;
}
void
display_score_image (const float* score_buffer)
{
  int npixels = range_likelihood_->getWidth () * range_likelihood_->getHeight ();
  uint8_t* score_img = new uint8_t[npixels * 3];

  float min_score = score_buffer[0];
  float max_score = score_buffer[0];
  for (int i = 1; i < npixels; i++)
  {
    if (score_buffer[i] < min_score) min_score = score_buffer[i];
    if (score_buffer[i] > max_score) max_score = score_buffer[i];
  }

  for (int i = 0; i < npixels; i++)
  {
    float d = (score_buffer[i] - min_score) / (max_score - min_score);
    score_img[3 * i + 0] = 0;
    score_img[3 * i + 1] = static_cast<uint8_t> (d * 255);
    score_img[3 * i + 2] = 0;
  }

  glRasterPos2i (-1, -1);
  glDrawPixels (range_likelihood_->getWidth (), range_likelihood_->getHeight (),
                GL_RGB, GL_UNSIGNED_BYTE, score_img);

  delete [] score_img;
}
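// Added note (not from the original tools): both display_score_image () variants above
// divide by (max_score - min_score), which is zero when every score is identical. A
// minimal standalone sketch of the same min/max normalization with that case guarded
// (the helper name is ours):
static void
scoresToGreenImage (const float* score_buffer, int npixels, uint8_t* rgb_out)
{
  float min_score = score_buffer[0];
  float max_score = score_buffer[0];
  for (int i = 1; i < npixels; i++)
  {
    if (score_buffer[i] < min_score) min_score = score_buffer[i];
    if (score_buffer[i] > max_score) max_score = score_buffer[i];
  }
  float range = max_score - min_score;
  for (int i = 0; i < npixels; i++)
  {
    // Map each score to [0, 1]; a constant buffer maps to 0 instead of dividing by zero.
    float d = (range > 0.0f) ? (score_buffer[i] - min_score) / range : 0.0f;
    rgb_out[3 * i + 0] = 0;
    rgb_out[3 * i + 1] = static_cast<uint8_t> (d * 255.0f);
    rgb_out[3 * i + 2] = 0;
  }
}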
int
main (int argc, char** argv)
{
  print_info ("Manually generate a simulated RGB-D point cloud using pcl::simulation. For more information, use: %s -h\n", argv[0]);
  if (argc < 2)
  {
    printHelp (argc, argv);
    return (-1);
  }

  for (int i = 0; i < 2048; i++)
  {
    float v = i / 2048.0;
    v = powf (v, 3) * 6;
    t_gamma[i] = v * 6 * 256;
  }

  camera_ = Camera::Ptr (new Camera ());
  scene_ = Scene::Ptr (new Scene ());
  range_likelihood_ = RangeLikelihood::Ptr (new RangeLikelihood (1, 1, 480, 640, scene_, 640));
  // range_likelihood_ = RangeLikelihood::Ptr (new RangeLikelihood (10, 10, 96, 96, scene_));
  // range_likelihood_ = RangeLikelihood::Ptr (new RangeLikelihood (1, 1, 480, 640, scene_));

  // Actually corresponds to default parameters:
  range_likelihood_->set_CameraIntrinsicsParameters (640, 480, 576.09757860, 576.09757860, 321.06398107, 242.97676897);

  window_width_ = range_likelihood_->width () * 2;
  window_height_ = range_likelihood_->height ();

  glutInit (&argc, argv);
  glutInitDisplayMode (GLUT_DEPTH | GLUT_DOUBLE | GLUT_RGB);  // was GLUT_RGBA
  glutInitWindowPosition (10, 10);
  glutInitWindowSize (window_width_, window_height_);
  glutCreateWindow ("OpenGL range likelihood");

  initialize (argc, argv);

  glutReshapeFunc (on_reshape);
  glutDisplayFunc (display);
  glutIdleFunc (display);
  glutKeyboardFunc (on_keyboard);
  glutMouseFunc (on_mouse);
  glutMotionFunc (on_motion);
  glutPassiveMotionFunc (on_passive_motion);
  glutEntryFunc (on_entry);
  glutMainLoop ();
}
void
capture (Eigen::Isometry3d pose_in)
{
  // No reference image - but this is kept for compatibility with range_test_v2:
  float* reference = new float[range_likelihood_->getRowHeight () * range_likelihood_->getColWidth ()];
  const float* depth_buffer = range_likelihood_->getDepthBuffer ();
  // Copy one image from our last as a reference.
  for (int i = 0, n = 0; i < range_likelihood_->getRowHeight (); ++i)
  {
    for (int j = 0; j < range_likelihood_->getColWidth (); ++j)
    {
      reference[n++] = depth_buffer[i * range_likelihood_->getWidth () + j];
    }
  }

  std::vector<Eigen::Isometry3d, Eigen::aligned_allocator<Eigen::Isometry3d> > poses;
  std::vector<float> scores;
  poses.push_back (pose_in);
  range_likelihood_->computeLikelihoods (reference, poses, scores);

  std::cout << "score: ";
  for (size_t i = 0; i < scores.size (); ++i)
  {
    std::cout << " " << scores[i];
  }
  std::cout << std::endl;

  std::cout << "camera: " << camera_->getX ()
            << " " << camera_->getY ()
            << " " << camera_->getZ ()
            << " " << camera_->getRoll ()
            << " " << camera_->getPitch ()
            << " " << camera_->getYaw ()
            << std::endl;

  delete [] reference;

  // Benchmark values for
  //  27840 triangle faces
  //  13670 vertices
  //  45.00Hz: simulation only
  //   1.28Hz: simulation, addNoise?, getPointCloud, writeASCII
  //  33.33Hz: simulation, getPointCloud
  //  23.81Hz: simulation, getPointCloud, writeBinary
  //  14.28Hz: simulation, addNoise, getPointCloud, writeBinary
  //
  // MODULE          TIME      FRACTION
  // simulation      0.02222   31%
  // addNoise        0.03      41%
  // getPointCloud   0.008     11%
  // writeBinary     0.012     16%
  // total           0.07222
  pcl::PointCloud<pcl::PointXYZRGB>::Ptr pc_out (new pcl::PointCloud<pcl::PointXYZRGB>);
}
void
display ()
{
  float* reference = new float[range_likelihood_->getRowHeight () * range_likelihood_->getColWidth ()];
  const float* depth_buffer = range_likelihood_->getDepthBuffer ();
  // Copy one image from our last as a reference.
  for (int i = 0, n = 0; i < range_likelihood_->getRowHeight (); ++i)
  {
    for (int j = 0; j < range_likelihood_->getColWidth (); ++j)
    {
      reference[n++] = depth_buffer[(i + range_likelihood_->getRowHeight () * range_likelihood_->getRows () / 2) * range_likelihood_->getWidth ()
                                    + j + range_likelihood_->getColWidth () * range_likelihood_->getCols () / 2];
    }
  }

  float* reference_vis = new float[range_likelihood_visualization_->getRowHeight () * range_likelihood_visualization_->getColWidth ()];
  const float* depth_buffer_vis = range_likelihood_visualization_->getDepthBuffer ();
  // Copy one image from our last as a reference.
  for (int i = 0, n = 0; i < range_likelihood_visualization_->getRowHeight (); ++i)
  {
    for (int j = 0; j < range_likelihood_visualization_->getColWidth (); ++j)
    {
      reference_vis[n++] = depth_buffer_vis[i * range_likelihood_visualization_->getWidth () + j];
    }
  }

  std::vector<Eigen::Isometry3d, Eigen::aligned_allocator<Eigen::Isometry3d> > poses;
  std::vector<float> scores;

  // Render a single pose for visualization
  poses.clear ();
  poses.push_back (camera_->getPose ());
  range_likelihood_visualization_->computeLikelihoods (reference_vis, poses, scores);

  glDrawBuffer (GL_BACK);
  glReadBuffer (GL_BACK);

  // Draw the resulting images from the range_likelihood
  glViewport (range_likelihood_visualization_->getWidth (), 0,
              range_likelihood_visualization_->getWidth (), range_likelihood_visualization_->getHeight ());
  glMatrixMode (GL_PROJECTION);
  glLoadIdentity ();
  glMatrixMode (GL_MODELVIEW);
  glLoadIdentity ();

  // Draw the color image
  glColorMask (true, true, true, true);
  glClearColor (0, 0, 0, 0);
  glClearDepth (1);
  glClear (GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
  glDisable (GL_DEPTH_TEST);
  glRasterPos2i (-1, -1);
  glDrawPixels (range_likelihood_visualization_->getWidth (), range_likelihood_visualization_->getHeight (),
                GL_RGB, GL_UNSIGNED_BYTE, range_likelihood_visualization_->getColorBuffer ());

  // Draw the depth image
  glViewport (0, 0, range_likelihood_visualization_->getWidth (), range_likelihood_visualization_->getHeight ());
  glMatrixMode (GL_PROJECTION);
  glLoadIdentity ();
  glMatrixMode (GL_MODELVIEW);
  glLoadIdentity ();
  display_depth_image (range_likelihood_visualization_->getDepthBuffer (),
                       range_likelihood_visualization_->getWidth (), range_likelihood_visualization_->getHeight ());

  poses.clear ();
  for (int i = 0; i < range_likelihood_->getRows (); ++i)
  {
    for (int j = 0; j < range_likelihood_->getCols (); ++j)
    {
      Camera camera (*camera_);
      camera.move ((j - range_likelihood_->getCols () / 2.0) * 0.1,
                   (i - range_likelihood_->getRows () / 2.0) * 0.1, 0.0);
      poses.push_back (camera.getPose ());
    }
  }
  std::cout << std::endl;

  TicToc tt;
  tt.tic ();
  range_likelihood_->computeLikelihoods (reference, poses, scores);
  tt.toc ();
  tt.toc_print ();

  if (gllib::getGLError () != GL_NO_ERROR)
  {
    std::cerr << "GL Error: RangeLikelihood::computeLikelihoods: finished" << std::endl;
  }

#if 0
  std::cout << "score: ";
  for (size_t i = 0; i < scores.size (); ++i)
  {
    std::cout << " " << scores[i];
  }
  std::cout << std::endl;
#endif

  std::cout << "camera: " << camera_->getX ()
            << " " << camera_->getY ()
            << " " << camera_->getZ ()
            << " " << camera_->getRoll ()
            << " " << camera_->getPitch ()
            << " " << camera_->getYaw ()
            << std::endl;

  delete [] reference_vis;
  delete [] reference;

  if (gllib::getGLError () != GL_NO_ERROR)
  {
    std::cerr << "GL Error: before buffers" << std::endl;
  }

  glBindFramebuffer (GL_FRAMEBUFFER, 0);
  glDrawBuffer (GL_BACK);
  glReadBuffer (GL_BACK);

  if (gllib::getGLError () != GL_NO_ERROR)
  {
    std::cerr << "GL Error: after buffers" << std::endl;
  }

  glMatrixMode (GL_PROJECTION);
  glLoadIdentity ();
  glMatrixMode (GL_MODELVIEW);
  glLoadIdentity ();

  if (gllib::getGLError () != GL_NO_ERROR)
  {
    std::cerr << "GL Error: before viewport" << std::endl;
  }

  // Draw the score image for the particles
  glViewport (0, range_likelihood_visualization_->getHeight (),
              range_likelihood_visualization_->getWidth (), range_likelihood_visualization_->getHeight ());

  if (gllib::getGLError () != GL_NO_ERROR)
  {
    std::cerr << "GL Error: after viewport" << std::endl;
  }

  display_score_image (range_likelihood_->getScoreBuffer ());

  // Draw the depth image for the particles
  glViewport (range_likelihood_visualization_->getWidth (), range_likelihood_visualization_->getHeight (),
              range_likelihood_visualization_->getWidth (), range_likelihood_visualization_->getHeight ());
  display_score_image (range_likelihood_->getDepthBuffer ());

  glutSwapBuffers ();
}
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// SIMSTART
void
write_depth_image (const float* depth_buffer)
{
  int npixels = range_likelihood_->getWidth () * range_likelihood_->getHeight ();
  uint8_t* depth_img = new uint8_t[npixels * 3];

  float min_depth = depth_buffer[0];
  float max_depth = depth_buffer[0];
  for (int i = 1; i < npixels; i++)
  {
    if (depth_buffer[i] < min_depth) min_depth = depth_buffer[i];
    if (depth_buffer[i] > max_depth) max_depth = depth_buffer[i];
  }

  for (int y = 0; y < 480; ++y)
  {
    for (int x = 0; x < 640; ++x)
    {
      int i = y * 640 + x;
      int i_in = (480 - 1 - y) * 640 + x;  // flip up-down
      float zn = 0.7;
      float zf = 20.0;
      float d = depth_buffer[i_in];
      float z = -zf * zn / ((zf - zn) * (d - zf / (zf - zn)));
      float b = 0.075;
      float f = 580.0;
      // Convert to a Kinect-style raw disparity value; clamp before the unsigned cast
      // so that out-of-range values cannot wrap around.
      float kd_f = 1090 - b * f / z * 8;
      if (kd_f < 0) kd_f = 0;
      else if (kd_f > 2047) kd_f = 2047;
      uint16_t kd = static_cast<uint16_t> (kd_f);

      int pval = t_gamma[kd];
      int lb = pval & 0xff;
      switch (pval >> 8)
      {
        case 0:  depth_img[3*i+2] = 255;      depth_img[3*i+1] = 255 - lb; depth_img[3*i+0] = 255 - lb; break;
        case 1:  depth_img[3*i+2] = 255;      depth_img[3*i+1] = lb;       depth_img[3*i+0] = 0;        break;
        case 2:  depth_img[3*i+2] = 255 - lb; depth_img[3*i+1] = 255;      depth_img[3*i+0] = 0;        break;
        case 3:  depth_img[3*i+2] = 0;        depth_img[3*i+1] = 255;      depth_img[3*i+0] = lb;       break;
        case 4:  depth_img[3*i+2] = 0;        depth_img[3*i+1] = 255 - lb; depth_img[3*i+0] = 255;      break;
        case 5:  depth_img[3*i+2] = 0;        depth_img[3*i+1] = 0;        depth_img[3*i+0] = 255 - lb; break;
        default: depth_img[3*i+2] = 0;        depth_img[3*i+1] = 0;        depth_img[3*i+0] = 0;        break;
      }
    }
  }

  // Write to file:
  IplImage* cv_ipl = cvCreateImage (cvSize (640, 480), 8, 3);
  cv::Mat cv_mat (cv_ipl);
  cv_mat.data = depth_img;

  std::stringstream ss;
  ss << "depth_image.png";
  cv::imwrite (ss.str (), cv_mat);

  delete [] depth_img;
}
int
main (int argc, char** argv)
{
  srand (time (0));

  print_info ("The viewer window provides interactive commands; for help, press 'h' or 'H' from within the window.\n");
  if (argc < 2)
  {
    printHelp (argc, argv);
    return (-1);
  }

  // Command line parsing
  double bcolor[3] = {0, 0, 0};
  pcl::console::parse_3x_arguments (argc, argv, "-bc", bcolor[0], bcolor[1], bcolor[2]);

  std::vector<double> fcolor_r, fcolor_b, fcolor_g;
  bool fcolorparam = pcl::console::parse_multiple_3x_arguments (argc, argv, "-fc", fcolor_r, fcolor_g, fcolor_b);

  std::vector<int> psize;
  pcl::console::parse_multiple_arguments (argc, argv, "-ps", psize);

  std::vector<double> opaque;
  pcl::console::parse_multiple_arguments (argc, argv, "-opaque", opaque);

  int mview = 0;
  pcl::console::parse_argument (argc, argv, "-multiview", mview);

  int normals = 0;
  pcl::console::parse_argument (argc, argv, "-normals", normals);
  double normals_scale = NORMALS_SCALE;
  pcl::console::parse_argument (argc, argv, "-normals_scale", normals_scale);

  int pc = 0;
  pcl::console::parse_argument (argc, argv, "-pc", pc);
  double pc_scale = PC_SCALE;
  pcl::console::parse_argument (argc, argv, "-pc_scale", pc_scale);

  // Parse the command line arguments for .pcd and .vtk files
  std::vector<int> p_file_indices = pcl::console::parse_file_extension_argument (argc, argv, ".pcd");
  std::vector<int> vtk_file_indices = pcl::console::parse_file_extension_argument (argc, argv, ".vtk");
  if (p_file_indices.size () == 0 && vtk_file_indices.size () == 0)
  {
    print_error ("No .PCD or .VTK file given. Nothing to visualize.\n");
    return (-1);
  }

  // Multiview enabled?
  int y_s = 0, x_s = 0;
  double x_step = 0, y_step = 0;
  if (mview)
  {
    print_highlight ("Multi-viewport rendering enabled.\n");
    if (p_file_indices.size () != 0)
    {
      y_s = static_cast<int> (floor (sqrt (static_cast<float> (p_file_indices.size ()))));
      x_s = y_s + static_cast<int> (ceil ((p_file_indices.size () / static_cast<double> (y_s)) - y_s));
      print_highlight ("Preparing to load "); print_value ("%d", p_file_indices.size ());
    }
    else if (vtk_file_indices.size () != 0)
    {
      y_s = static_cast<int> (floor (sqrt (static_cast<float> (vtk_file_indices.size ()))));
      x_s = y_s + static_cast<int> (ceil ((vtk_file_indices.size () / static_cast<double> (y_s)) - y_s));
      print_highlight ("Preparing to load "); print_value ("%d", vtk_file_indices.size ());
    }
    x_step = static_cast<double> (1.0 / static_cast<double> (x_s));
    y_step = static_cast<double> (1.0 / static_cast<double> (y_s));
    print_info (" files ("); print_value ("%d", x_s); print_info ("x"); print_value ("%d", y_s);
    print_info (" / "); print_value ("%f", x_step); print_info ("x"); print_value ("%f", y_step);
    print_info (")\n");
  }

  // Fix invalid multiple arguments
  if (psize.size () != p_file_indices.size () && psize.size () > 0)
    for (size_t i = psize.size (); i < p_file_indices.size (); ++i)
      psize.push_back (1);
  if (opaque.size () != p_file_indices.size () && opaque.size () > 0)
    for (size_t i = opaque.size (); i < p_file_indices.size (); ++i)
      opaque.push_back (1.0);

  // Create the PCLVisualizer object
  boost::shared_ptr<pcl::visualization::PCLHistogramVisualizer> ph;
  // Using min_p, max_p to set the global Y min/max range for the histogram
  float min_p = FLT_MAX;
  float max_p = -FLT_MAX;

  int k = 0, l = 0, viewport = 0;
  // Load the data files
  pcl::PCDReader pcd;
  pcl::console::TicToc tt;
  ColorHandlerPtr color_handler;
  GeometryHandlerPtr geometry_handler;

  // Go through VTK files
  for (size_t i = 0; i < vtk_file_indices.size (); ++i)
  {
    // Load file
    tt.tic ();
    print_highlight (stderr, "Loading "); print_value (stderr, "%s ", argv[vtk_file_indices.at (i)]);
    vtkPolyDataReader* reader = vtkPolyDataReader::New ();
    reader->SetFileName (argv[vtk_file_indices.at (i)]);
    reader->Update ();
    vtkSmartPointer<vtkPolyData> polydata = reader->GetOutput ();
    if (!polydata)
      return (-1);
    print_info ("[done, "); print_value ("%g", tt.toc ()); print_info (" ms : "); print_value ("%d", polydata->GetNumberOfPoints ()); print_info (" points]\n");

    // Create the PCLVisualizer object here on the first encountered XYZ file
    if (!p)
      p.reset (new pcl::visualization::PCLVisualizer (argc, argv, "PCD viewer"));

    // Multiview enabled?
    if (mview)
    {
      p->createViewPort (k * x_step, l * y_step, (k + 1) * x_step, (l + 1) * y_step, viewport);
      k++;
      if (k >= x_s)
      {
        k = 0;
        l++;
      }
    }

    // Add as actor
    std::stringstream cloud_name ("vtk-");
    cloud_name << argv[vtk_file_indices.at (i)] << "-" << i;
    p->addModelFromPolyData (polydata, cloud_name.str (), viewport);

    // Change the shape rendered color
    if (fcolorparam && fcolor_r.size () > i && fcolor_g.size () > i && fcolor_b.size () > i)
      p->setShapeRenderingProperties (pcl::visualization::PCL_VISUALIZER_COLOR, fcolor_r[i], fcolor_g[i], fcolor_b[i], cloud_name.str ());

    // Change the shape rendered point size
    if (psize.size () > 0)
      p->setShapeRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, psize.at (i), cloud_name.str ());

    // Change the shape rendered opacity
    if (opaque.size () > 0)
      p->setShapeRenderingProperties (pcl::visualization::PCL_VISUALIZER_OPACITY, opaque.at (i), cloud_name.str ());
  }

  sensor_msgs::PointCloud2::Ptr cloud;
  // Go through PCD files
  for (size_t i = 0; i < p_file_indices.size (); ++i)
  {
    cloud.reset (new sensor_msgs::PointCloud2);
    Eigen::Vector4f origin;
    Eigen::Quaternionf orientation;
    int version;

    print_highlight (stderr, "Loading "); print_value (stderr, "%s ", argv[p_file_indices.at (i)]);

    tt.tic ();
    if (pcd.read (argv[p_file_indices.at (i)], *cloud, origin, orientation, version) < 0)
      return (-1);

    std::stringstream cloud_name;

    // ---[ Special check for 1-point multi-dimension histograms
    if (cloud->fields.size () == 1 && isMultiDimensionalFeatureField (cloud->fields[0]))
    {
      cloud_name << argv[p_file_indices.at (i)];
      if (!ph)
        ph.reset (new pcl::visualization::PCLHistogramVisualizer);
      print_info ("[done, "); print_value ("%g", tt.toc ()); print_info (" ms : "); print_value ("%d", cloud->fields[0].count); print_info (" points]\n");

      pcl::getMinMax (*cloud, 0, cloud->fields[0].name, min_p, max_p);
      ph->addFeatureHistogram (*cloud, cloud->fields[0].name, cloud_name.str ());
      continue;
    }

    cloud_name << argv[p_file_indices.at (i)] << "-" << i;

    // Create the PCLVisualizer object here on the first encountered XYZ file
    if (!p)
    {
      p.reset (new pcl::visualization::PCLVisualizer (argc, argv, "PCD viewer"));
      p->registerPointPickingCallback (&pp_callback, (void*)&cloud);
      Eigen::Matrix3f rotation;
      rotation = orientation;
      p->setCameraPosition (origin[0], origin[1], origin[2],
                            origin[0] + rotation (0, 2), origin[1] + rotation (1, 2), origin[2] + rotation (2, 2),
                            rotation (0, 1), rotation (1, 1), rotation (2, 1));
    }

    // Multiview enabled?
    if (mview)
    {
      p->createViewPort (k * x_step, l * y_step, (k + 1) * x_step, (l + 1) * y_step, viewport);
      k++;
      if (k >= x_s)
      {
        k = 0;
        l++;
      }
    }

    if (cloud->width * cloud->height == 0)
    {
      print_error ("[error: no points found!]\n");
      return (-1);
    }
    print_info ("[done, "); print_value ("%g", tt.toc ()); print_info (" ms : "); print_value ("%d", (int)cloud->width * cloud->height); print_info (" points]\n");
    print_info ("Available dimensions: "); print_value ("%s\n", pcl::getFieldsList (*cloud).c_str ());

    // If no color was given, get random colors
    if (fcolorparam)
    {
      if (fcolor_r.size () > i && fcolor_g.size () > i && fcolor_b.size () > i)
        color_handler.reset (new pcl::visualization::PointCloudColorHandlerCustom<sensor_msgs::PointCloud2> (cloud, fcolor_r[i], fcolor_g[i], fcolor_b[i]));
      else
        color_handler.reset (new pcl::visualization::PointCloudColorHandlerRandom<sensor_msgs::PointCloud2> (cloud));
    }
    else
      color_handler.reset (new pcl::visualization::PointCloudColorHandlerRandom<sensor_msgs::PointCloud2> (cloud));

    // Add the dataset with a XYZ and a random handler
    geometry_handler.reset (new pcl::visualization::PointCloudGeometryHandlerXYZ<sensor_msgs::PointCloud2> (cloud));
    // Add the cloud to the renderer
    //p->addPointCloud<pcl::PointXYZ> (cloud_xyz, geometry_handler, color_handler, cloud_name.str (), viewport);
    p->addPointCloud (cloud, geometry_handler, color_handler, origin, orientation, cloud_name.str (), viewport);

    // If normal lines are enabled
    if (normals != 0)
    {
      int normal_idx = pcl::getFieldIndex (*cloud, "normal_x");
      if (normal_idx == -1)
      {
        print_error ("Normal information requested but not available.\n");
        continue; //return (-1);
      }
      // Convert from blob to pcl::PointCloud
      pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_xyz (new pcl::PointCloud<pcl::PointXYZ>);
      pcl::fromROSMsg (*cloud, *cloud_xyz);
      cloud_xyz->sensor_origin_ = origin;
      cloud_xyz->sensor_orientation_ = orientation;

      pcl::PointCloud<pcl::Normal>::Ptr cloud_normals (new pcl::PointCloud<pcl::Normal>);
      pcl::fromROSMsg (*cloud, *cloud_normals);

      std::stringstream cloud_name_normals;
      cloud_name_normals << argv[p_file_indices.at (i)] << "-" << i << "-normals";
      p->addPointCloudNormals<pcl::PointXYZ, pcl::Normal> (cloud_xyz, cloud_normals, normals, normals_scale, cloud_name_normals.str (), viewport);
    }

    // If principal curvature lines are enabled
    if (pc != 0)
    {
      if (normals == 0)
        normals = pc;

      int normal_idx = pcl::getFieldIndex (*cloud, "normal_x");
      if (normal_idx == -1)
      {
        print_error ("Normal information requested but not available.\n");
        continue; //return (-1);
      }
      int pc_idx = pcl::getFieldIndex (*cloud, "principal_curvature_x");
      if (pc_idx == -1)
      {
        print_error ("Principal Curvature information requested but not available.\n");
        continue; //return (-1);
      }
      // Convert from blob to pcl::PointCloud
      pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_xyz (new pcl::PointCloud<pcl::PointXYZ>);
      pcl::fromROSMsg (*cloud, *cloud_xyz);
      cloud_xyz->sensor_origin_ = origin;
      cloud_xyz->sensor_orientation_ = orientation;
      pcl::PointCloud<pcl::Normal>::Ptr cloud_normals (new pcl::PointCloud<pcl::Normal>);
      pcl::fromROSMsg (*cloud, *cloud_normals);
      pcl::PointCloud<pcl::PrincipalCurvatures>::Ptr cloud_pc (new pcl::PointCloud<pcl::PrincipalCurvatures>);
      pcl::fromROSMsg (*cloud, *cloud_pc);

      std::stringstream cloud_name_normals_pc;
      cloud_name_normals_pc << argv[p_file_indices.at (i)] << "-" << i << "-normals";
      int factor = (std::min) (normals, pc);
      p->addPointCloudNormals<pcl::PointXYZ, pcl::Normal> (cloud_xyz, cloud_normals, factor, normals_scale, cloud_name_normals_pc.str (), viewport);
      p->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_COLOR, 1.0, 0.0, 0.0, cloud_name_normals_pc.str ());
      p->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_LINE_WIDTH, 3, cloud_name_normals_pc.str ());
      cloud_name_normals_pc << "-pc";
      p->addPointCloudPrincipalCurvatures (cloud_xyz, cloud_normals, cloud_pc, factor, pc_scale, cloud_name_normals_pc.str (), viewport);
      p->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_LINE_WIDTH, 3, cloud_name_normals_pc.str ());
    }

    // Add every dimension as a possible color
    if (!fcolorparam)
    {
      for (size_t f = 0; f < cloud->fields.size (); ++f)
      {
        if (cloud->fields[f].name == "rgb" || cloud->fields[f].name == "rgba")
          color_handler.reset (new pcl::visualization::PointCloudColorHandlerRGBField<sensor_msgs::PointCloud2> (cloud));
        else
        {
          if (!isValidFieldName (cloud->fields[f].name))
            continue;
          color_handler.reset (new pcl::visualization::PointCloudColorHandlerGenericField<sensor_msgs::PointCloud2> (cloud, cloud->fields[f].name));
        }
        // Add the cloud to the renderer
        //p->addPointCloud<pcl::PointXYZ> (cloud_xyz, color_handler, cloud_name.str (), viewport);
        p->addPointCloud (cloud, color_handler, origin, orientation, cloud_name.str (), viewport);
      }
    }

    // Additionally, add normals as a handler
    geometry_handler.reset (new pcl::visualization::PointCloudGeometryHandlerSurfaceNormal<sensor_msgs::PointCloud2> (cloud));
    if (geometry_handler->isCapable ())
      //p->addPointCloud<pcl::PointXYZ> (cloud_xyz, geometry_handler, cloud_name.str (), viewport);
      p->addPointCloud (cloud, geometry_handler, origin, orientation, cloud_name.str (), viewport);

    // Set immediate mode rendering ON
    p->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_IMMEDIATE_RENDERING, 1.0, cloud_name.str ());

    // Change the cloud rendered point size
    if (psize.size () > 0)
      p->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, psize.at (i), cloud_name.str ());

    // Change the cloud rendered opacity
    if (opaque.size () > 0)
      p->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_OPACITY, opaque.at (i), cloud_name.str ());

    // Reset camera viewpoint to center of cloud if camera parameters were not passed manually and this is the first loaded cloud
    //if (i == 0 && !p->cameraParamsSet ())
    //  p->resetCameraViewpoint (cloud_name.str ());
  }

  ////////////////////////////////////////////////////////////////
  // Key binding for saving simulated point cloud:
  if (p)
    p->registerKeyboardCallback (simulate_callback, (void*)&p);

  int width = 640;
  int height = 480;
  window_width_ = width * 2;
  window_height_ = height * 2;

  print_info ("Manually generate a simulated RGB-D point cloud using pcl::simulation. For more information, use: %s -h\n", argv[0]);

  for (int i = 0; i < 2048; i++)
  {
    float v = i / 2048.0;
    v = powf (v, 3) * 6;
    t_gamma[i] = v * 6 * 256;
  }

  GLenum err = glewInit ();
  if (GLEW_OK != err)
  {
    std::cerr << "Error: " << glewGetErrorString (err) << std::endl;
    exit (-1);
  }
  std::cout << "Status: Using GLEW " << glewGetString (GLEW_VERSION) << std::endl;

  if (glewIsSupported ("GL_VERSION_2_0"))
    std::cout << "OpenGL 2.0 supported" << std::endl;
  else
  {
    std::cerr << "Error: OpenGL 2.0 not supported" << std::endl;
    exit (1);
  }

  camera_ = Camera::Ptr (new Camera ());
  scene_ = Scene::Ptr (new Scene ());

  range_likelihood_ = RangeLikelihood::Ptr (new RangeLikelihood (1, 1, height, width, scene_));
  // range_likelihood_ = RangeLikelihood::Ptr (new RangeLikelihood (10, 10, 96, 96, scene_));
  // range_likelihood_ = RangeLikelihood::Ptr (new RangeLikelihood (1, 1, 480, 640, scene_));

  // Actually corresponds to default parameters:
  range_likelihood_->setCameraIntrinsicsParameters (640, 480, 576.09757860, 576.09757860, 321.06398107, 242.97676897);
  range_likelihood_->setComputeOnCPU (false);
  range_likelihood_->setSumOnCPU (true);
  range_likelihood_->setUseColor (true);
  initialize (argc, argv);

  if (p)
    p->setBackgroundColor (bcolor[0], bcolor[1], bcolor[2]);

  // Read axes settings
  double axes = 0.0;
  pcl::console::parse_argument (argc, argv, "-ax", axes);
  if (axes != 0.0 && p)
  {
    double ax_x = 0.0, ax_y = 0.0, ax_z = 0.0;
    pcl::console::parse_3x_arguments (argc, argv, "-ax_pos", ax_x, ax_y, ax_z, false);
    // Draw XYZ axes if command-line enabled
    p->addCoordinateSystem (axes, ax_x, ax_y, ax_z);
  }

  // Clean up the memory used by the binary blob
  // Note: avoid resetting the cloud, otherwise the PointPicking callback will fail
  //cloud.reset ();

  if (ph)
  {
    print_highlight ("Setting the global Y range for all histograms to: ");
    print_value ("%f -> %f\n", min_p, max_p);
    ph->setGlobalYRange (min_p, max_p);
    ph->updateWindowPositions ();
    if (p)
      p->spin ();
    else
      ph->spin ();
  }
  else if (p)
    p->spin ();
}
void
capture (Eigen::Isometry3d pose_in, string point_cloud_fname)
{
  // No reference image - but this is kept for compatibility with range_test_v2:
  float* reference = new float[range_likelihood_->getRowHeight () * range_likelihood_->getColWidth ()];
  const float* depth_buffer = range_likelihood_->getDepthBuffer ();
  // Copy one image from our last as a reference.
  for (int i = 0, n = 0; i < range_likelihood_->getRowHeight (); ++i)
  {
    for (int j = 0; j < range_likelihood_->getColWidth (); ++j)
    {
      reference[n++] = depth_buffer[i * range_likelihood_->getWidth () + j];
    }
  }

  std::vector<Eigen::Isometry3d, Eigen::aligned_allocator<Eigen::Isometry3d> > poses;
  std::vector<float> scores;
  poses.push_back (pose_in);
  range_likelihood_->computeLikelihoods (reference, poses, scores);

  std::cout << "score: ";
  for (size_t i = 0; i < scores.size (); ++i)
  {
    std::cout << " " << scores[i];
  }
  std::cout << std::endl;

  std::cout << "camera: " << camera_->getX ()
            << " " << camera_->getY ()
            << " " << camera_->getZ ()
            << " " << camera_->getRoll ()
            << " " << camera_->getPitch ()
            << " " << camera_->getYaw ()
            << std::endl;

  delete [] reference;

  // Benchmark values for
  //  27840 triangle faces
  //  13670 vertices
  //  45.00Hz: simulation only
  //   1.28Hz: simulation, addNoise?, getPointCloud, writeASCII
  //  33.33Hz: simulation, getPointCloud
  //  23.81Hz: simulation, getPointCloud, writeBinary
  //  14.28Hz: simulation, addNoise, getPointCloud, writeBinary
  //
  // MODULE          TIME      FRACTION
  // simulation      0.02222   31%
  // addNoise        0.03      41%
  // getPointCloud   0.008     11%
  // writeBinary     0.012     16%
  // total           0.07222
  pcl::PointCloud<pcl::PointXYZRGB>::Ptr pc_out (new pcl::PointCloud<pcl::PointXYZRGB>);

  bool write_cloud = true;
  if (write_cloud)
  {
    // Read the color buffer from the GPU before creating the point cloud:
    // by default the buffers are not read back from the GPU
    range_likelihood_->getColorBuffer ();
    range_likelihood_->getDepthBuffer ();

    // Add noise directly to the CPU depth buffer
    range_likelihood_->addNoise ();

    // Optional argument to save point cloud in global frame:
    // Save camera relative:
    //range_likelihood_->getPointCloud (pc_out);
    // Save in global frame - applying the camera frame:
    //range_likelihood_->getPointCloud (pc_out, true, camera_->pose ());
    // Save in local frame
    range_likelihood_->getPointCloud (pc_out, false, camera_->getPose ());
    // TODO: what to do when there is more than one simulated view?

    std::cout << pc_out->points.size () << " points written to file\n";

    pcl::PCDWriter writer;
    //writer.write (point_cloud_fname, *pc_out, false);  // ASCII
    writer.writeBinary (point_cloud_fname, *pc_out);
    //cout << "finished writing file\n";
  }

  // Disabled all OpenCV stuff for now: don't want the dependency
  /*
  bool demo_other_stuff = false;
  if (demo_other_stuff && write_cloud)
  {
    write_score_image (range_likelihood_->getScoreBuffer ());
    write_rgb_image (range_likelihood_->getColorBuffer ());
    write_depth_image (range_likelihood_->getDepthBuffer ());

    // Demo interaction with RangeImage:
    pcl::RangeImagePlanar rangeImage;
    range_likelihood_->getRangeImagePlanar (rangeImage);

    // Display viewer (currently segfaults on exit of viewer):
    if (1 == 0)
    {
      boost::shared_ptr<pcl::visualization::PCLVisualizer> viewer;
      viewer = simpleVis (pc_out);
      while (!viewer->wasStopped ())
      {
        viewer->spinOnce (100);
        boost::this_thread::sleep (boost::posix_time::microseconds (100000));
      }
    }
  }
  */
}
int
main (int argc, char** argv)
{
  int width = 640;
  int height = 480;
  window_width_ = width * 2;
  window_height_ = height * 2;

  int cols = 30;
  int rows = 30;
  int col_width = 20;
  int row_height = 15;

  print_info ("Range likelihood performance tests using pcl::simulation. For more information, use: %s -h\n", argv[0]);

  if (argc < 2)
  {
    printHelp (argc, argv);
    return (-1);
  }

  for (int i = 0; i < 2048; ++i)
  {
    float v = static_cast<float> (i / 2048.0);
    v = powf (v, 3) * 6;
    t_gamma[i] = static_cast<uint16_t> (v * 6 * 256);
  }

  glutInit (&argc, argv);
  glutInitDisplayMode (GLUT_DEPTH | GLUT_DOUBLE | GLUT_RGB);
  glutInitWindowPosition (10, 10);
  glutInitWindowSize (window_width_, window_height_);
  glutCreateWindow ("OpenGL range likelihood");

  GLenum err = glewInit ();
  if (GLEW_OK != err)
  {
    std::cerr << "Error: " << glewGetErrorString (err) << std::endl;
    exit (-1);
  }
  std::cout << "Status: Using GLEW " << glewGetString (GLEW_VERSION) << std::endl;

  if (glewIsSupported ("GL_VERSION_2_0"))
    std::cout << "OpenGL 2.0 supported" << std::endl;
  else
  {
    std::cerr << "Error: OpenGL 2.0 not supported" << std::endl;
    exit (1);
  }

  std::cout << "GL_MAX_VIEWPORTS: " << GL_MAX_VIEWPORTS << std::endl;

  camera_ = Camera::Ptr (new Camera ());
  scene_ = Scene::Ptr (new Scene ());

  range_likelihood_visualization_ = RangeLikelihood::Ptr (new RangeLikelihood (1, 1, height, width, scene_));
  range_likelihood_ = RangeLikelihood::Ptr (new RangeLikelihood (rows, cols, row_height, col_width, scene_));

  // Actually corresponds to default parameters:
  range_likelihood_visualization_->setCameraIntrinsicsParameters (
      640, 480, 576.09757860f, 576.09757860f, 321.06398107f, 242.97676897f);
  range_likelihood_visualization_->setComputeOnCPU (false);
  range_likelihood_visualization_->setSumOnCPU (false);
  range_likelihood_visualization_->setUseColor (true);

  range_likelihood_->setCameraIntrinsicsParameters (
      640, 480, 576.09757860f, 576.09757860f, 321.06398107f, 242.97676897f);
  range_likelihood_->setComputeOnCPU (false);
  range_likelihood_->setSumOnCPU (false);
  range_likelihood_->setUseColor (false);

  textured_quad_ = TexturedQuad::Ptr (new TexturedQuad (range_likelihood_->getWidth (),
                                                        range_likelihood_->getHeight ()));

  initialize (argc, argv);

  glutDisplayFunc (display);
  glutIdleFunc (display);
  glutKeyboardFunc (on_keyboard);
  glutMainLoop ();
}
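// Added note (an assumption based on the tile indexing used in the display () function
// above, not a statement from the original sources): the particle-evaluation
// RangeLikelihood appears to be built as a rows x cols grid of small renders, so a
// single computeLikelihoods () call scores rows * cols = 30 * 30 = 900 poses, each
// rendered at col_width x row_height = 20 x 15 pixels, giving a tiled framebuffer of
// (cols * col_width) x (rows * row_height) = 600 x 450 pixels, while
// range_likelihood_visualization_ renders one full 640 x 480 view for display.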
void
capture (Eigen::Isometry3d pose_in, unsigned short* depth_buffer_mm, const uint8_t*& color_buffer) //, string point_cloud_fname)
{
  // No reference image - but this is kept for compatibility with range_test_v2:
  float* reference = new float[range_likelihood_->getRowHeight () * range_likelihood_->getColWidth ()];
  //const float* depth_buffer = range_likelihood_->getDepthBuffer ();
  // Copy one image from our last as a reference.
  /*
  for (int i = 0, n = 0; i < range_likelihood_->getRowHeight (); ++i)
  {
    for (int j = 0; j < range_likelihood_->getColWidth (); ++j)
    {
      reference[n++] = 0; //depth_buffer[i*range_likelihood_->getWidth() + j];
    }
  }
  */

  std::vector<Eigen::Isometry3d, Eigen::aligned_allocator<Eigen::Isometry3d> > poses;
  std::vector<float> scores;
  poses.push_back (pose_in);

  // HACK: mfallon modified computeLikelihoods to only call render () (which is currently private).
  // render () needs to be made public and used directly here;
  // for simulation it is used independently of the rest of range_likelihood_.
  range_likelihood_->computeLikelihoods (reference, poses, scores);

  // color_buffer is passed by reference so the caller receives the simulated color buffer
  // (a by-value pointer here would leave the caller's pointer unset).
  color_buffer = range_likelihood_->getColorBuffer ();
  const float* db_ptr = range_likelihood_->getDepthBuffer ();
  range_likelihood_->addNoise ();

  depthBufferToMM (db_ptr, depth_buffer_mm);

  // Writing these images is a small computational cost relative to KinFu itself:
  write_depth_image (db_ptr);
  //write_depth_image_uint (depth_buffer_mm);
  write_rgb_image (color_buffer);

  /*
  pcl::PointCloud<pcl::PointXYZRGB>::Ptr pc_out (new pcl::PointCloud<pcl::PointXYZRGB>);
  bool write_cloud = true;
  if (write_cloud)
  {
    // Read the color buffer from the GPU before creating the point cloud:
    // by default the buffers are not read back from the GPU
    range_likelihood_->getColorBuffer ();
    range_likelihood_->getDepthBuffer ();

    // Add noise directly to the CPU depth buffer
    range_likelihood_->addNoise ();

    // Optional argument to save point cloud in global frame:
    // Save camera relative:
    //range_likelihood_->getPointCloud (pc_out);
    // Save in global frame - applying the camera frame:
    //range_likelihood_->getPointCloud (pc_out, true, camera_->pose ());
    // Save in local frame
    range_likelihood_->getPointCloud (pc_out, false, camera_->pose ());
    // TODO: what to do when there is more than one simulated view?

    std::cout << pc_out->points.size () << " points written to file\n";

    pcl::PCDWriter writer;
    //writer.write (point_cloud_fname, *pc_out, false);  // ASCII
    writer.writeBinary (point_cloud_fname, *pc_out);
    //cout << "finished writing file\n";
  }
  */

  delete [] reference;
}
void
display ()
{
  glViewport (range_likelihood_->width (), 0, range_likelihood_->width (), range_likelihood_->height ());

  float* reference = new float[range_likelihood_->row_height () * range_likelihood_->col_width ()];
  const float* depth_buffer = range_likelihood_->depth_buffer ();
  // Copy one image from our last as a reference.
  for (int i = 0, n = 0; i < range_likelihood_->row_height (); ++i)
  {
    for (int j = 0; j < range_likelihood_->col_width (); ++j)
    {
      reference[n++] = depth_buffer[i * range_likelihood_->width () + j];
    }
  }

  std::vector<Eigen::Isometry3d, Eigen::aligned_allocator<Eigen::Isometry3d> > poses;
  std::vector<float> scores;
  int n = range_likelihood_->rows () * range_likelihood_->cols ();
  for (int i = 0; i < n; ++i)
  {
    Camera camera (*camera_);
    camera.move (0.0, i * 0.02, 0.0);
    poses.push_back (camera.pose ());
  }

  float* depth_field = NULL;
  bool do_depth_field = false;
  range_likelihood_->compute_likelihoods (reference, poses, scores, depth_field, do_depth_field);
  // range_likelihood_->compute_likelihoods (reference, poses, scores);
  delete [] reference;
  delete [] depth_field;

  std::cout << "score: ";
  for (size_t i = 0; i < scores.size (); ++i)
  {
    std::cout << " " << scores[i];
  }
  std::cout << std::endl;

  // Draw the depth image
  // glClear (GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
  // glColorMask (true, true, true, true);
  glDisable (GL_DEPTH_TEST);
  glViewport (0, 0, range_likelihood_->width (), range_likelihood_->height ());
  glMatrixMode (GL_PROJECTION);
  glLoadIdentity ();
  glMatrixMode (GL_MODELVIEW);
  glLoadIdentity ();
  //glRasterPos2i (-1, -1);
  //glDrawPixels (range_likelihood_->width (), range_likelihood_->height (), GL_LUMINANCE, GL_FLOAT, range_likelihood_->depth_buffer ());
  display_depth_image (range_likelihood_->depth_buffer ());

  glutSwapBuffers ();
}
void
display_depth_image (const float* depth_buffer)
{
  int npixels = range_likelihood_->width () * range_likelihood_->height ();
  uint8_t* depth_img = new uint8_t[npixels * 3];

  for (int i = 0; i < npixels; i++)
  {
    float zn = 0.7;
    float zf = 20.0;
    float d = depth_buffer[i];
    float z = -zf * zn / ((zf - zn) * (d - zf / (zf - zn)));
    float b = 0.075;
    float f = 580.0;
    // Convert to a Kinect-style raw disparity value; clamp before the unsigned cast
    // so that out-of-range values cannot wrap around.
    float kd_f = 1090 - b * f / z * 8;
    if (kd_f < 0) kd_f = 0;
    else if (kd_f > 2047) kd_f = 2047;
    uint16_t kd = static_cast<uint16_t> (kd_f);

    int pval = t_gamma[kd];
    int lb = pval & 0xff;
    switch (pval >> 8)
    {
      case 0:  depth_img[3*i+0] = 255;      depth_img[3*i+1] = 255 - lb; depth_img[3*i+2] = 255 - lb; break;
      case 1:  depth_img[3*i+0] = 255;      depth_img[3*i+1] = lb;       depth_img[3*i+2] = 0;        break;
      case 2:  depth_img[3*i+0] = 255 - lb; depth_img[3*i+1] = 255;      depth_img[3*i+2] = 0;        break;
      case 3:  depth_img[3*i+0] = 0;        depth_img[3*i+1] = 255;      depth_img[3*i+2] = lb;       break;
      case 4:  depth_img[3*i+0] = 0;        depth_img[3*i+1] = 255 - lb; depth_img[3*i+2] = 255;      break;
      case 5:  depth_img[3*i+0] = 0;        depth_img[3*i+1] = 0;        depth_img[3*i+2] = 255 - lb; break;
      default: depth_img[3*i+0] = 0;        depth_img[3*i+1] = 0;        depth_img[3*i+2] = 0;        break;
    }
  }

  // glRasterPos2i (-1, -1);
  // glDrawPixels (range_likelihood_->width (), range_likelihood_->height (), GL_LUMINANCE, GL_FLOAT, range_likelihood_->depth_buffer ());
  glRasterPos2i (-1, -1);
  glDrawPixels (range_likelihood_->width (), range_likelihood_->height (), GL_RGB, GL_UNSIGNED_BYTE, depth_img);

  delete [] depth_img;
}
int
main (int argc, char** argv)
{
  int width = 640;
  int height = 480;
  window_width_ = width * 2;
  window_height_ = height * 2;

  print_info ("Manually generate a simulated RGB-D point cloud using pcl::simulation. For more information, use: %s -h\n", argv[0]);
  if (argc < 2)
  {
    printHelp (argc, argv);
    return (-1);
  }

  for (int i = 0; i < 2048; i++)
  {
    float v = i / 2048.0;
    v = powf (v, 3) * 6;
    t_gamma[i] = v * 6 * 256;
  }

  glutInit (&argc, argv);
  glutInitDisplayMode (GLUT_DEPTH | GLUT_DOUBLE | GLUT_RGB);  // was GLUT_RGBA
  glutInitWindowPosition (10, 10);
  glutInitWindowSize (window_width_, window_height_);
  glutCreateWindow ("OpenGL range likelihood");

  GLenum err = glewInit ();
  if (GLEW_OK != err)
  {
    std::cerr << "Error: " << glewGetErrorString (err) << std::endl;
    exit (-1);
  }
  std::cout << "Status: Using GLEW " << glewGetString (GLEW_VERSION) << std::endl;

  if (glewIsSupported ("GL_VERSION_2_0"))
    std::cout << "OpenGL 2.0 supported" << std::endl;
  else
  {
    std::cerr << "Error: OpenGL 2.0 not supported" << std::endl;
    exit (1);
  }

  std::cout << "GL_MAX_VIEWPORTS: " << GL_MAX_VIEWPORTS << std::endl;

  camera_ = Camera::Ptr (new Camera ());
  scene_ = Scene::Ptr (new Scene ());

  //range_likelihood_ = RangeLikelihoodGLSL::Ptr (new RangeLikelihoodGLSL (1, 1, height, width, scene_, 0));
  range_likelihood_ = RangeLikelihood::Ptr (new RangeLikelihood (2, 2, height / 2, width / 2, scene_));
  // range_likelihood_ = RangeLikelihood::Ptr (new RangeLikelihood (10, 10, 96, 96, scene_));
  // range_likelihood_ = RangeLikelihood::Ptr (new RangeLikelihood (1, 1, 480, 640, scene_));

  // Actually corresponds to default parameters:
  range_likelihood_->setCameraIntrinsicsParameters (640, 480, 576.09757860, 576.09757860, 321.06398107, 242.97676897);
  range_likelihood_->setComputeOnCPU (false);
  range_likelihood_->setSumOnCPU (true);
  range_likelihood_->setUseColor (true);

  initialize (argc, argv);

  glutReshapeFunc (on_reshape);
  glutDisplayFunc (display);
  glutIdleFunc (display);
  glutKeyboardFunc (on_keyboard);
  glutMouseFunc (on_mouse);
  glutMotionFunc (on_motion);
  glutPassiveMotionFunc (on_passive_motion);
  glutEntryFunc (on_entry);
  glutMainLoop ();
  return 0;
}
void
display ()
{
  float* reference = new float[range_likelihood_->getRowHeight () * range_likelihood_->getColWidth ()];
  const float* depth_buffer = range_likelihood_->getDepthBuffer ();
  // Copy one image from our last as a reference.
  for (int i = 0, n = 0; i < range_likelihood_->getRowHeight (); ++i)
  {
    for (int j = 0; j < range_likelihood_->getColWidth (); ++j)
    {
      reference[n++] = depth_buffer[i * range_likelihood_->getWidth () + j];
    }
  }

  std::vector<Eigen::Isometry3d, Eigen::aligned_allocator<Eigen::Isometry3d> > poses;
  std::vector<float> scores;
  int n = range_likelihood_->getRows () * range_likelihood_->getCols ();
  for (int i = 0; i < n; ++i)
  {
    Camera camera (*camera_);
    camera.move (0.0, i * 0.02, 0.0);
    //camera.move (0.0, i * 0.02, 0.0);
    poses.push_back (camera.getPose ());
  }

  range_likelihood_->computeLikelihoods (reference, poses, scores);

  std::cout << "score: ";
  for (size_t i = 0; i < scores.size (); ++i)
  {
    std::cout << " " << scores[i];
  }
  std::cout << std::endl;

  std::cout << "camera: " << camera_->getX ()
            << " " << camera_->getY ()
            << " " << camera_->getZ ()
            << " " << camera_->getRoll ()
            << " " << camera_->getPitch ()
            << " " << camera_->getYaw ()
            << std::endl;
  delete [] reference;

  glDrawBuffer (GL_BACK);
  glReadBuffer (GL_BACK);

  // Draw the resulting images from the range_likelihood
  glViewport (range_likelihood_->getWidth (), 0, range_likelihood_->getWidth (), range_likelihood_->getHeight ());
  glMatrixMode (GL_PROJECTION);
  glLoadIdentity ();
  glMatrixMode (GL_MODELVIEW);
  glLoadIdentity ();

  // Draw the color image
  glClear (GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
  glColorMask (true, true, true, true);
  glDisable (GL_DEPTH_TEST);
  glRasterPos2i (-1, -1);
  glDrawPixels (range_likelihood_->getWidth (), range_likelihood_->getHeight (),
                GL_RGB, GL_UNSIGNED_BYTE, range_likelihood_->getColorBuffer ());

  // Draw the depth image
  glViewport (0, 0, range_likelihood_->getWidth (), range_likelihood_->getHeight ());
  glMatrixMode (GL_PROJECTION);
  glLoadIdentity ();
  glMatrixMode (GL_MODELVIEW);
  glLoadIdentity ();
  // display_depth_image (range_likelihood_->getDepthBuffer ());
  display_depth_image (range_likelihood_->getDepthBuffer (), range_likelihood_->getWidth (), range_likelihood_->getHeight ());

  // Draw the score image
  glViewport (0, range_likelihood_->getHeight (), range_likelihood_->getWidth (), range_likelihood_->getHeight ());
  glMatrixMode (GL_PROJECTION);
  glLoadIdentity ();
  glMatrixMode (GL_MODELVIEW);
  glLoadIdentity ();
  display_score_image (range_likelihood_->getScoreBuffer ());

  glutSwapBuffers ();

  if (write_file_)
  {
    range_likelihood_->addNoise ();

    pcl::RangeImagePlanar rangeImage;
    range_likelihood_->getRangeImagePlanar (rangeImage);

    pcl::PointCloud<pcl::PointXYZRGB>::Ptr pc_out (new pcl::PointCloud<pcl::PointXYZRGB>);

    // Optional argument to save point cloud in global frame:
    // Save camera relative:
    //range_likelihood_->getPointCloud (pc_out);
    // Save in global frame - applying the camera frame:
    //range_likelihood_->getPointCloud (pc_out, true, camera_->pose ());
    // Save in local frame
    range_likelihood_->getPointCloud (pc_out, false, camera_->getPose ());
    // TODO: what to do when there is more than one simulated view?

    pcl::PCDWriter writer;
    writer.write ("simulated_range_image.pcd", *pc_out, false);
    cout << "finished writing file\n";

    // pcl::visualization::CloudViewer viewer ("Simple Cloud Viewer");
    // viewer.showCloud (pc_out);

    boost::shared_ptr<pcl::visualization::PCLVisualizer> viewer;
    viewer = simpleVis (pc_out);
    while (!viewer->wasStopped ())
    {
      viewer->spinOnce (100);
      boost::this_thread::sleep (boost::posix_time::microseconds (100000));
    }

    // Doesn't work:
    // viewer->~PCLVisualizer ();
    // viewer.reset ();

    cout << "done\n";

    // Problem: VTK and OpenGL don't seem to play very well together.
    // VTK seems to misbehave after a little while and won't keep the window on the screen.
    // Method 1: kill with [x] - but eventually it crashes:
    //while (!viewer.wasStopped ()) { }
    // Method 2: eventually starts ignoring cin and pops up on screen and closes almost immediately
    // cout << "enter 1 to cont\n";
    // cin >> pause;
    // viewer.wasStopped ();
    // Method 3: if you interact with the window with keys, the window is not closed properly.

    // TODO: use PCL methods, as this timing code is probably not cross-platform
    // struct timespec t;
    // t.tv_sec = 100;
    // //t.tv_nsec = (time_t)(20000000);  // short sleep
    // t.tv_nsec = (time_t)(0);  // long sleep - normal speed
    // nanosleep (&t, NULL);

    write_file_ = 0;
  }
}
void
execute (int argc, char** argv, std::string plyfile)
{
  PtrStepSz<const unsigned short> depth;
  PtrStepSz<const KinfuTracker::PixelRGB> rgb24;
  int time_ms = 0;
  bool has_image = false;

  // Create simulation environment:
  int width = 640;
  int height = 480;

  for (int i = 0; i < 2048; i++)
  {
    float v = i / 2048.0;
    v = powf (v, 3) * 6;
    t_gamma[i] = v * 6 * 256;
  }

  glutInit (&argc, argv);
  glutInitDisplayMode (GLUT_DEPTH | GLUT_DOUBLE | GLUT_RGB);  // was GLUT_RGBA
  glutInitWindowPosition (10, 10);
  glutInitWindowSize (10, 10);
  //glutInitWindowSize (window_width_, window_height_);
  glutCreateWindow ("OpenGL range likelihood");

  GLenum err = glewInit ();
  if (GLEW_OK != err)
  {
    std::cerr << "Error: " << glewGetErrorString (err) << std::endl;
    exit (-1);
  }
  std::cout << "Status: Using GLEW " << glewGetString (GLEW_VERSION) << std::endl;

  if (glewIsSupported ("GL_VERSION_2_0"))
    std::cout << "OpenGL 2.0 supported" << std::endl;
  else
  {
    std::cerr << "Error: OpenGL 2.0 not supported" << std::endl;
    exit (1);
  }
  std::cout << "GL_MAX_VIEWPORTS: " << GL_MAX_VIEWPORTS << std::endl;

  camera_ = Camera::Ptr (new Camera ());
  scene_ = Scene::Ptr (new Scene ());
  range_likelihood_ = RangeLikelihood::Ptr (new RangeLikelihood (1, 1, height, width, scene_));

  // Actually corresponds to default parameters:
  range_likelihood_->setCameraIntrinsicsParameters (640, 480, 576.09757860, 576.09757860, 321.06398107, 242.97676897);
  range_likelihood_->setComputeOnCPU (false);
  range_likelihood_->setSumOnCPU (true);
  range_likelihood_->setUseColor (true);

  camera_->set (0.471703, 1.59862, 3.10937, 0, 0.418879, -12.2129);
  camera_->set_pitch (0.418879);  // not sure why this is here

  cout << "About to read: " << plyfile << endl;
  load_PolygonMesh_model (plyfile);

  // Generate a series of poses:
  std::vector<Eigen::Isometry3d, Eigen::aligned_allocator<Eigen::Isometry3d> > poses;
  Eigen::Vector3d focus_center (0, 0, 1.3);
  // double halo_r = 4.0;
  double halo_r = 1.5;
  double halo_dz = 1.5;  // was 2
  // 20 is too quick when adding noise; 50 is ok though
  int n_poses = 50;
  int n_pose_stop = 10;
  // The above means: make a circle of 50 poses, stop after the 10th, i.e. 1/5 of a halo ring
  generate_halo (poses, focus_center, halo_r, halo_dz, n_poses);

  unsigned short* disparity_buf_ = new unsigned short[width * height];
  const KinfuTracker::PixelRGB* color_buf_;
  const uint8_t* color_buf_uint;

  // Loop through the poses and feed each simulated frame to KinFu:
  for (int i = 0; !exit_; ++i)
  {
    vector<double> tic_toc;
    tic_toc.push_back (getTime ());
    double tic_main = getTime ();

    Eigen::Vector3d t (poses[i].translation ());
    Eigen::Quaterniond r (poses[i].rotation ());
    std::stringstream ss;
    ss << t[0] << ", " << t[1] << ", " << t[2] << " | "
       << r.w () << ", " << r.x () << ", " << r.y () << ", " << r.z ();
    std::cout << i << ": " << ss.str () << " pose_simulatedposition\n";

    capture (poses[i], disparity_buf_, color_buf_uint); //, ss.str ());
    color_buf_ = (const KinfuTracker::PixelRGB*) color_buf_uint;

    PtrStepSz<const unsigned short> depth_sim = PtrStepSz<const unsigned short> (height, width, disparity_buf_, 2 * width);
    //cout << depth_sim.rows << " by " << depth_sim.cols << " | s: " << depth_sim.step << "\n";

    // RGB-KinFu currently disabled for now - problems with color in KinFu apparently,
    // but this constructor might not be right either: not sure about the step size
    integrate_colors_ = false;
    PtrStepSz<const KinfuTracker::PixelRGB> rgb24_sim = PtrStepSz<const KinfuTracker::PixelRGB> (height, width, color_buf_, width);
    tic_toc.push_back (getTime ());

    if (1 == 0)
    {
      // Live capture - probably doesn't work anymore, left in here for comparison:
      bool has_frame = evaluation_ptr_ ? evaluation_ptr_->grab (i, depth) : capture_.grab (depth, rgb24);
      if (!has_frame)
      {
        cout << "Can't grab" << endl;
        break;
      }

      depth_device_.upload (depth.data, depth.step, depth.rows, depth.cols);
      if (integrate_colors_)
        image_view_.colors_device_.upload (rgb24.data, rgb24.step, rgb24.rows, rgb24.cols);

      {
        SampledScopeTime fps (time_ms, i);
        // Run the KinFu algorithm
        if (integrate_colors_)
          has_image = kinfu_ (depth_device_, image_view_.colors_device_);
        else
          has_image = kinfu_ (depth_device_);
      }
    }
    else
    {
      // Simulate:
      cout << " color: " << integrate_colors_ << "\n";  // integrate_colors_ seems to be zero
      depth_device_.upload (depth_sim.data, depth_sim.step, depth_sim.rows, depth_sim.cols);
      if (integrate_colors_)
      {
        image_view_.colors_device_.upload (rgb24_sim.data, rgb24_sim.step, rgb24_sim.rows, rgb24_sim.cols);
      }
      tic_toc.push_back (getTime ());

      {
        SampledScopeTime fps (time_ms, i);
        // Run the KinFu algorithm
        if (integrate_colors_)
          has_image = kinfu_ (depth_device_, image_view_.colors_device_);
        else
          has_image = kinfu_ (depth_device_);
      }
    }
    tic_toc.push_back (getTime ());

    Eigen::Affine3f k_aff = kinfu_.getCameraPose ();
    Eigen::Matrix3f k_m;
    k_m = k_aff.rotation ();
    Eigen::Quaternionf k_r;
    k_r = Eigen::Quaternionf (k_m);
    std::stringstream ss_k;
    ss_k << k_aff (0, 3) << ", " << k_aff (1, 3) << ", " << k_aff (2, 3) << " | "
         << k_r.w () << ", " << k_r.x () << ", " << k_r.y () << ", " << k_r.z ();
    std::cout << i << ": " << ss_k.str () << " pose_kinect\n";

    // Everything below this is visualization or I/O:
    if (i > n_pose_stop)
    {
      int pause;
      cout << "Enter a key to write Mesh file\n";
      cin >> pause;

      scene_cloud_view_.showMesh (kinfu_, integrate_colors_);
      writeMesh (KinFuApp::MESH_VTK);
      // writeMesh (KinFuApp::MESH_PLY);

      if (scan_)
      {
        scan_ = false;
        scene_cloud_view_.show (kinfu_, integrate_colors_);

        if (scan_volume_)
        {
          // Download the TSDF volume
          {
            ScopeTimeT time ("tsdf volume download");
            cout << "Downloading TSDF volume from device ... " << flush;
            // kinfu_.volume ().downloadTsdfAndWeighs (tsdf_volume_.volumeWriteable (), tsdf_volume_.weightsWriteable ());
            kinfu_.volume ().downloadTsdfAndWeighsLocal ();
            // tsdf_volume_.setHeader (Eigen::Vector3i (pcl::device::VOLUME_X, pcl::device::VOLUME_Y, pcl::device::VOLUME_Z), kinfu_.volume ().getSize ());
            kinfu_.volume ().setHeader (Eigen::Vector3i (pcl::device::VOLUME_X, pcl::device::VOLUME_Y, pcl::device::VOLUME_Z), kinfu_.volume ().getSize ());
            // cout << "done [" << tsdf_volume_.size () << " voxels]" << endl << endl;
            cout << "done [" << kinfu_.volume ().size () << " voxels]" << endl << endl;
          }
          {
            ScopeTimeT time ("converting");
            cout << "Converting volume to TSDF cloud ... " << flush;
            // tsdf_volume_.convertToTsdfCloud (tsdf_cloud_ptr_);
            kinfu_.volume ().convertToTsdfCloud (tsdf_cloud_ptr_);
            cout << "done [" << tsdf_cloud_ptr_->size () << " points]" << endl << endl;
          }
        }
        else
          cout << "[!] tsdf volume download is disabled" << endl << endl;
      }

      if (scan_mesh_)
      {
        scan_mesh_ = false;
        scene_cloud_view_.showMesh (kinfu_, integrate_colors_);
      }

      if (has_image)
      {
        Eigen::Affine3f viewer_pose = getViewerPose (scene_cloud_view_.cloud_viewer_);
        // image_view_.showScene (kinfu_, rgb24, registration_, independent_camera_ ? &viewer_pose : 0);
        image_view_.showScene (kinfu_, rgb24_sim, registration_, independent_camera_ ? &viewer_pose : 0);
      }

      if (current_frame_cloud_view_)
        current_frame_cloud_view_->show (kinfu_);

      image_view_.showDepth (depth_sim);
      //image_view_.showDepth (depth);
      // image_view_.showGeneratedDepth (kinfu_, kinfu_.getCameraPose ());

      if (!independent_camera_)
        setViewerPose (scene_cloud_view_.cloud_viewer_, kinfu_.getCameraPose ());

      scene_cloud_view_.cloud_viewer_.spinOnce (3);

      // As of April 2012, entering a key will end this program...
      cout << "Paused after view\n";
      cin >> pause;
    }

    double elapsed = (getTime () - tic_main);
    cout << elapsed << " sec elapsed [" << (1 / elapsed) << "]\n";
    tic_toc.push_back (getTime ());
    display_tic_toc (tic_toc, "kinfu_app_sim");
  }
}
void
display ()
{
  glViewport (range_likelihood_->width (), 0, range_likelihood_->width (), range_likelihood_->height ());

  float* reference = new float[range_likelihood_->row_height () * range_likelihood_->col_width ()];
  const float* depth_buffer = range_likelihood_->depth_buffer ();
  // Copy one image from our last as a reference.
  for (int i = 0, n = 0; i < range_likelihood_->row_height (); ++i)
  {
    for (int j = 0; j < range_likelihood_->col_width (); ++j)
    {
      reference[n++] = depth_buffer[i * range_likelihood_->width () + j];
    }
  }

  std::vector<Eigen::Isometry3d, Eigen::aligned_allocator<Eigen::Isometry3d> > poses;
  std::vector<float> scores;
  int n = range_likelihood_->rows () * range_likelihood_->cols ();
  for (int i = 0; i < n; ++i)
  {
    // This is used when there is
    Camera camera (*camera_);
    camera.move (0.0, 0.0, 0.0);
    //camera.move (0.0, i * 0.02, 0.0);
    poses.push_back (camera.pose ());
  }

  float* depth_field = NULL;
  bool do_depth_field = false;
  range_likelihood_->compute_likelihoods (reference, poses, scores, depth_field, do_depth_field);
  // range_likelihood_->compute_likelihoods (reference, poses, scores);
  delete [] reference;
  delete [] depth_field;

  std::cout << "score: ";
  for (size_t i = 0; i < scores.size (); ++i)
  {
    std::cout << " " << scores[i];
  }
  std::cout << std::endl;

  // Draw the depth image
  // glClear (GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
  // glColorMask (true, true, true, true);
  glDisable (GL_DEPTH_TEST);
  glViewport (0, 0, range_likelihood_->width (), range_likelihood_->height ());
  //glViewport (0, 0, range_likelihood_->width (), range_likelihood_->height ());
  glMatrixMode (GL_PROJECTION);
  glLoadIdentity ();
  glMatrixMode (GL_MODELVIEW);
  glLoadIdentity ();
  //glRasterPos2i (-1, -1);
  //glDrawPixels (range_likelihood_->width (), range_likelihood_->height (), GL_LUMINANCE, GL_FLOAT, range_likelihood_->depth_buffer ());
  display_depth_image (range_likelihood_->depth_buffer ());

  glutSwapBuffers ();

  if (write_file_)
  {
    range_likelihood_->addNoise ();

    pcl::RangeImagePlanar rangeImage;
    range_likelihood_->getRangeImagePlanar (rangeImage);

    pcl::PointCloud<pcl::PointXYZRGB>::Ptr pc_out (new pcl::PointCloud<pcl::PointXYZRGB>);

    // Optional argument to save point cloud in global frame:
    // Save camera relative:
    //range_likelihood_->getPointCloud (pc_out);
    // Save in global frame - applying the camera frame:
    //range_likelihood_->getPointCloud (pc_out, true, camera_->pose ());
    // Save in local frame
    range_likelihood_->getPointCloud (pc_out, false, camera_->pose ());
    // TODO: what to do when there is more than one simulated view?

    pcl::PCDWriter writer;
    writer.write ("simulated_range_image.pcd", *pc_out, false);
    cout << "finished writing file\n";

    pcl::visualization::CloudViewer viewer ("Simple Cloud Viewer");
    viewer.showCloud (pc_out);

    // Problem: VTK and OpenGL don't seem to play very well together.
    // VTK seems to misbehave after a little while and won't keep the window on the screen.
    // Method 1: kill with [x] - but eventually it crashes:
    //while (!viewer.wasStopped ()) { }
    // Method 2: eventually starts ignoring cin and pops up on screen and closes almost immediately
    // cout << "enter 1 to cont\n";
    // cin >> pause;
    // viewer.wasStopped ();
    // Method 3: if you interact with the window with keys, the window is not closed properly.

    // TODO: use PCL methods, as this timing code is probably not cross-platform
    struct timespec t;
    t.tv_sec = 100;
    //t.tv_nsec = (time_t)(20000000);  // short sleep
    t.tv_nsec = (time_t)(0);  // long sleep - normal speed
    nanosleep (&t, NULL);

    write_file_ = 0;
  }
}