int main (int argc, char** argv)
{
  if (argc < 2)
  {
    std::cerr << "Usage: " << argv[0] << " [PCD file] [-s smoothness] [-c curvature] [-n neighbors]" << std::endl;
    return 1;
  }
  const char *filename = argv[1];

  // Default parameters; -s is given in degrees and converted to radians.
  double smoothness = 7.0 / 180.0 * M_PI;
  double curvature = 1.0;
  int neighbors = 30;
  for (int i = 2; i < argc; i++)
  {
    if (strcmp (argv[i], "-s") == 0)
      smoothness = atof (argv[++i]) / 180.0 * M_PI;
    else if (strcmp (argv[i], "-c") == 0)
      curvature = atof (argv[++i]);
    else if (strcmp (argv[i], "-n") == 0)
      neighbors = atoi (argv[++i]);
  }
  std::cout << "smoothness = " << smoothness * 180.0 / M_PI << "[deg]" << std::endl;
  std::cout << "curvature = " << curvature << std::endl;
  std::cout << "neighbors = " << neighbors << std::endl;

  // Load the input cloud.
  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZ>);
  pcl::PCDReader reader;
  reader.read (filename, *cloud);
  int npoint = cloud->points.size ();
  std::cout << "npoint = " << npoint << std::endl;

  // Estimate normals.
  pcl::search::Search<pcl::PointXYZ>::Ptr tree = boost::shared_ptr<pcl::search::Search<pcl::PointXYZ> > (new pcl::search::KdTree<pcl::PointXYZ>);
  pcl::PointCloud<pcl::Normal>::Ptr normals (new pcl::PointCloud<pcl::Normal>);
  pcl::NormalEstimation<pcl::PointXYZ, pcl::Normal> normal_estimator;
  normal_estimator.setSearchMethod (tree);
  normal_estimator.setInputCloud (cloud);
  normal_estimator.setKSearch (50);
  normal_estimator.compute (*normals);

  // Optional pass-through filter (the indices are currently unused, see setIndices below).
  pcl::IndicesPtr indices (new std::vector<int>);
  pcl::PassThrough<pcl::PointXYZ> pass;
  pass.setInputCloud (cloud);
  pass.setFilterFieldName ("z");
  pass.setFilterLimits (0.0, 1.0);
  pass.filter (*indices);

  // Region growing segmentation.
  pcl::RegionGrowing<pcl::PointXYZ, pcl::Normal> reg;
  reg.setMinClusterSize (100);
  reg.setMaxClusterSize (10000);
  reg.setSearchMethod (tree);
  reg.setNumberOfNeighbours (neighbors);
  reg.setInputCloud (cloud);
  //reg.setIndices (indices);
  reg.setInputNormals (normals);
  reg.setSmoothnessThreshold (smoothness);
  reg.setCurvatureThreshold (curvature);

  std::vector<pcl::PointIndices> clusters;
  reg.extract (clusters);

  std::cout << "Number of clusters is equal to " << clusters.size () << std::endl;
  if (clusters.empty ())
  {
    std::cerr << "No clusters found." << std::endl;
    return 1;
  }
  std::cout << "First cluster has " << clusters[0].indices.size () << " points." << std::endl;
  std::cout << "These are the indices of the points of the initial" << std::endl
            << "cloud that belong to the first cluster:" << std::endl;
  // Print at most the first five indices of the first cluster.
  std::size_t counter = 0;
  while (counter < 5 && counter < clusters[0].indices.size ())
  {
    std::cout << clusters[0].indices[counter] << std::endl;
    counter++;
  }

  // Visualize the colored segmentation result.
  pcl::PointCloud<pcl::PointXYZRGB>::Ptr colored_cloud = reg.getColoredCloud ();
  pcl::visualization::CloudViewer viewer ("Cluster viewer");
  viewer.showCloud (colored_cloud);
  while (!viewer.wasStopped ())
  {
  }
  return (0);
}
// --------------
// -----Main-----
// --------------
int main (int argc, char** argv)
{
  // --------------------------------------
  // -----Parse Command Line Arguments-----
  // --------------------------------------
  if (pcl::console::find_argument (argc, argv, "-h") >= 0)
  {
    printUsage (argv[0]);
    return 0;
  }
  if (pcl::console::find_argument (argc, argv, "-m") >= 0)
  {
    setUnseenToMaxRange = true;
    cout << "Setting unseen values in range image to maximum range readings.\n";
  }
  if (pcl::console::parse (argc, argv, "-o", rotation_invariant) >= 0)
    cout << "Switching rotation invariant feature version " << (rotation_invariant ? "on" : "off") << ".\n";
  int tmp_coordinate_frame;
  if (pcl::console::parse (argc, argv, "-c", tmp_coordinate_frame) >= 0)
  {
    coordinate_frame = pcl::RangeImage::CoordinateFrame (tmp_coordinate_frame);
    cout << "Using coordinate frame " << (int)coordinate_frame << ".\n";
  }
  if (pcl::console::parse (argc, argv, "-s", support_size) >= 0)
    cout << "Setting support size to " << support_size << ".\n";
  if (pcl::console::parse (argc, argv, "-r", angular_resolution) >= 0)
    cout << "Setting angular resolution to " << angular_resolution << "deg.\n";
  angular_resolution = pcl::deg2rad (angular_resolution);

  // ------------------------------------------------------------------
  // -----Read pcd file or create example point cloud if not given-----
  // ------------------------------------------------------------------
  pcl::PointCloud<PointType>::Ptr point_cloud_ptr (new pcl::PointCloud<PointType>);
  pcl::PointCloud<PointType>& point_cloud = *point_cloud_ptr;
  pcl::PointCloud<pcl::PointWithViewpoint> far_ranges;
  Eigen::Affine3f scene_sensor_pose (Eigen::Affine3f::Identity ());
  std::vector<int> pcd_filename_indices = pcl::console::parse_file_extension_argument (argc, argv, "pcd");
  if (!pcd_filename_indices.empty ())
  {
    std::string filename = argv[pcd_filename_indices[0]];
    if (pcl::io::loadPCDFile (filename, point_cloud) == -1)
    {
      cerr << "Was not able to open file \"" << filename << "\".\n";
      printUsage (argv[0]);
      return 0;
    }
    scene_sensor_pose = Eigen::Affine3f (Eigen::Translation3f (point_cloud.sensor_origin_[0],
                                                               point_cloud.sensor_origin_[1],
                                                               point_cloud.sensor_origin_[2])) *
                        Eigen::Affine3f (point_cloud.sensor_orientation_);
    std::string far_ranges_filename = pcl::getFilenameWithoutExtension (filename) + "_far_ranges.pcd";
    if (pcl::io::loadPCDFile (far_ranges_filename.c_str (), far_ranges) == -1)
      std::cout << "Far ranges file \"" << far_ranges_filename << "\" does not exist.\n";
  }
  else
  {
    setUnseenToMaxRange = true;
    cout << "\nNo *.pcd file given => Generating example point cloud.\n\n";
    for (float x = -0.5f; x <= 0.5f; x += 0.01f)
    {
      for (float y = -0.5f; y <= 0.5f; y += 0.01f)
      {
        PointType point;
        point.x = x;
        point.y = y;
        point.z = 2.0f - y;
        point_cloud.points.push_back (point);
      }
    }
    point_cloud.width = (int) point_cloud.points.size ();
    point_cloud.height = 1;
  }

  // -----------------------------------------------
  // -----Create RangeImage from the PointCloud-----
  // -----------------------------------------------
  float noise_level = 0.0;
  float min_range = 0.0f;
  int border_size = 1;
  boost::shared_ptr<pcl::RangeImage> range_image_ptr (new pcl::RangeImage);
  pcl::RangeImage& range_image = *range_image_ptr;
  range_image.createFromPointCloud (point_cloud, angular_resolution, pcl::deg2rad (360.0f), pcl::deg2rad (180.0f),
                                    scene_sensor_pose, coordinate_frame, noise_level, min_range, border_size);
  range_image.integrateFarRanges (far_ranges);
  if (setUnseenToMaxRange)
    range_image.setUnseenToMaxRange ();

  // --------------------------------------------
  // -----Open 3D viewer and add point cloud-----
  // --------------------------------------------
  pcl::visualization::PCLVisualizer viewer ("3D Viewer");
  viewer.setBackgroundColor (1, 1, 1);
  pcl::visualization::PointCloudColorHandlerCustom<pcl::PointWithRange> range_image_color_handler (range_image_ptr, 0, 0, 0);
  viewer.addPointCloud (range_image_ptr, range_image_color_handler, "range image");
  viewer.setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 1, "range image");
  //viewer.addCoordinateSystem (1.0f);
  //PointCloudColorHandlerCustom<PointType> point_cloud_color_handler (point_cloud_ptr, 150, 150, 150);
  //viewer.addPointCloud (point_cloud_ptr, point_cloud_color_handler, "original point cloud");
  viewer.initCameraParameters ();
  setViewerPose (viewer, range_image.getTransformationToWorldSystem ());

  // --------------------------
  // -----Show range image-----
  // --------------------------
  pcl::visualization::RangeImageVisualizer range_image_widget ("Range image");
  range_image_widget.showRangeImage (range_image);

  // --------------------------------
  // -----Extract NARF keypoints-----
  // --------------------------------
  pcl::RangeImageBorderExtractor range_image_border_extractor;
  pcl::NarfKeypoint narf_keypoint_detector;
  narf_keypoint_detector.setRangeImageBorderExtractor (&range_image_border_extractor);
  narf_keypoint_detector.setRangeImage (&range_image);
  narf_keypoint_detector.getParameters ().support_size = support_size;

  pcl::PointCloud<int> keypoint_indices;
  narf_keypoint_detector.compute (keypoint_indices);
  std::cout << "Found " << keypoint_indices.points.size () << " key points.\n";

  // ----------------------------------------------
  // -----Show keypoints in range image widget-----
  // ----------------------------------------------
  //for (size_t i=0; i<keypoint_indices.points.size (); ++i)
  //  range_image_widget.markPoint (keypoint_indices.points[i]%range_image.width,
  //                                keypoint_indices.points[i]/range_image.width);

  // -------------------------------------
  // -----Show keypoints in 3D viewer-----
  // -------------------------------------
  pcl::PointCloud<pcl::PointXYZ>::Ptr keypoints_ptr (new pcl::PointCloud<pcl::PointXYZ>);
  pcl::PointCloud<pcl::PointXYZ>& keypoints = *keypoints_ptr;
  keypoints.points.resize (keypoint_indices.points.size ());
  for (size_t i = 0; i < keypoint_indices.points.size (); ++i)
    keypoints.points[i].getVector3fMap () = range_image.points[keypoint_indices.points[i]].getVector3fMap ();
  pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZ> keypoints_color_handler (keypoints_ptr, 0, 255, 0);
  viewer.addPointCloud<pcl::PointXYZ> (keypoints_ptr, keypoints_color_handler, "keypoints");
  viewer.setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 7, "keypoints");

  // ------------------------------------------------------
  // -----Extract NARF descriptors for interest points-----
  // ------------------------------------------------------
  std::vector<int> keypoint_indices2;
  keypoint_indices2.resize (keypoint_indices.points.size ());
  for (unsigned int i = 0; i < keypoint_indices.size (); ++i)  // This step is necessary to get the right vector type
    keypoint_indices2[i] = keypoint_indices.points[i];
  pcl::NarfDescriptor narf_descriptor (&range_image, &keypoint_indices2);
  narf_descriptor.getParameters ().support_size = support_size;
  narf_descriptor.getParameters ().rotation_invariant = rotation_invariant;
  pcl::PointCloud<pcl::Narf36> narf_descriptors;
  narf_descriptor.compute (narf_descriptors);
  cout << "Extracted " << narf_descriptors.size () << " descriptors for "
       << keypoint_indices.points.size () << " keypoints.\n";

  //--------------------
  // -----Main loop-----
  //--------------------
  while (!viewer.wasStopped ())
  {
    range_image_widget.spinOnce ();  // process GUI events
    viewer.spinOnce ();
    pcl_sleep (0.01);
  }
}
int main (int argc, char *argv[]) { parseCommandLine (argc, argv); pcl::PointCloud<PointType>::Ptr model (new pcl::PointCloud<PointType> ()); pcl::PointCloud<PointType>::Ptr model_keypoints (new pcl::PointCloud<PointType> ()); pcl::PointCloud<PointType>::Ptr scene (new pcl::PointCloud<PointType> ()); pcl::PointCloud<PointType>::Ptr scene_keypoints (new pcl::PointCloud<PointType> ()); pcl::PointCloud<NormalType>::Ptr model_normals (new pcl::PointCloud<NormalType> ()); pcl::PointCloud<NormalType>::Ptr scene_normals (new pcl::PointCloud<NormalType> ()); pcl::PointCloud<DescriptorType>::Ptr model_descriptors (new pcl::PointCloud<DescriptorType> ()); pcl::PointCloud<DescriptorType>::Ptr scene_descriptors (new pcl::PointCloud<DescriptorType> ()); /** * Load Clouds */ if (pcl::io::loadPCDFile (model_filename_, *model) < 0) { std::cout << "Error loading model cloud." << std::endl; showHelp (argv[0]); return (-1); } if (pcl::io::loadPCDFile (scene_filename_, *scene) < 0) { std::cout << "Error loading scene cloud." << std::endl; showHelp (argv[0]); return (-1); } /** * Compute Normals */ pcl::NormalEstimationOMP<PointType, NormalType> norm_est; norm_est.setKSearch (10); norm_est.setInputCloud (model); norm_est.compute (*model_normals); norm_est.setInputCloud (scene); norm_est.compute (*scene_normals); /** * Downsample Clouds to Extract keypoints */ pcl::UniformSampling<PointType> uniform_sampling; uniform_sampling.setInputCloud (model); uniform_sampling.setRadiusSearch (model_ss_); uniform_sampling.filter (*model_keypoints); std::cout << "Model total points: " << model->size () << "; Selected Keypoints: " << model_keypoints->size () << std::endl; uniform_sampling.setInputCloud (scene); uniform_sampling.setRadiusSearch (scene_ss_); uniform_sampling.filter (*scene_keypoints); std::cout << "Scene total points: " << scene->size () << "; Selected Keypoints: " << scene_keypoints->size () << std::endl; /** * Compute Descriptor for keypoints */ pcl::SHOTEstimationOMP<PointType, NormalType, DescriptorType> descr_est; descr_est.setRadiusSearch (descr_rad_); descr_est.setInputCloud (model_keypoints); descr_est.setInputNormals (model_normals); descr_est.setSearchSurface (model); descr_est.compute (*model_descriptors); descr_est.setInputCloud (scene_keypoints); descr_est.setInputNormals (scene_normals); descr_est.setSearchSurface (scene); descr_est.compute (*scene_descriptors); /** * Find Model-Scene Correspondences with KdTree */ pcl::CorrespondencesPtr model_scene_corrs (new pcl::Correspondences ()); pcl::KdTreeFLANN<DescriptorType> match_search; match_search.setInputCloud (model_descriptors); std::vector<int> model_good_keypoints_indices; std::vector<int> scene_good_keypoints_indices; for (size_t i = 0; i < scene_descriptors->size (); ++i) { std::vector<int> neigh_indices (1); std::vector<float> neigh_sqr_dists (1); if (!pcl_isfinite (scene_descriptors->at (i).descriptor[0])) //skipping NaNs { continue; } int found_neighs = match_search.nearestKSearch (scene_descriptors->at (i), 1, neigh_indices, neigh_sqr_dists); if (found_neighs == 1 && neigh_sqr_dists[0] < 0.25f) { pcl::Correspondence corr (neigh_indices[0], static_cast<int> (i), neigh_sqr_dists[0]); model_scene_corrs->push_back (corr); model_good_keypoints_indices.push_back (corr.index_query); scene_good_keypoints_indices.push_back (corr.index_match); } } pcl::PointCloud<PointType>::Ptr model_good_kp (new pcl::PointCloud<PointType> ()); pcl::PointCloud<PointType>::Ptr scene_good_kp (new pcl::PointCloud<PointType> ()); pcl::copyPointCloud 
(*model_keypoints, model_good_keypoints_indices, *model_good_kp); pcl::copyPointCloud (*scene_keypoints, scene_good_keypoints_indices, *scene_good_kp); std::cout << "Correspondences found: " << model_scene_corrs->size () << std::endl; /** * Clustering */ std::vector<Eigen::Matrix4f, Eigen::aligned_allocator<Eigen::Matrix4f> > rototranslations; std::vector < pcl::Correspondences > clustered_corrs; if (use_hough_) { pcl::PointCloud<RFType>::Ptr model_rf (new pcl::PointCloud<RFType> ()); pcl::PointCloud<RFType>::Ptr scene_rf (new pcl::PointCloud<RFType> ()); pcl::BOARDLocalReferenceFrameEstimation<PointType, NormalType, RFType> rf_est; rf_est.setFindHoles (true); rf_est.setRadiusSearch (rf_rad_); rf_est.setInputCloud (model_keypoints); rf_est.setInputNormals (model_normals); rf_est.setSearchSurface (model); rf_est.compute (*model_rf); rf_est.setInputCloud (scene_keypoints); rf_est.setInputNormals (scene_normals); rf_est.setSearchSurface (scene); rf_est.compute (*scene_rf); // Clustering pcl::Hough3DGrouping<PointType, PointType, RFType, RFType> clusterer; clusterer.setHoughBinSize (cg_size_); clusterer.setHoughThreshold (cg_thresh_); clusterer.setUseInterpolation (true); clusterer.setUseDistanceWeight (false); clusterer.setInputCloud (model_keypoints); clusterer.setInputRf (model_rf); clusterer.setSceneCloud (scene_keypoints); clusterer.setSceneRf (scene_rf); clusterer.setModelSceneCorrespondences (model_scene_corrs); clusterer.recognize (rototranslations, clustered_corrs); } else { pcl::GeometricConsistencyGrouping<PointType, PointType> gc_clusterer; gc_clusterer.setGCSize (cg_size_); gc_clusterer.setGCThreshold (cg_thresh_); gc_clusterer.setInputCloud (model_keypoints); gc_clusterer.setSceneCloud (scene_keypoints); gc_clusterer.setModelSceneCorrespondences (model_scene_corrs); gc_clusterer.recognize (rototranslations, clustered_corrs); } /** * Stop if no instances */ if (rototranslations.size () <= 0) { cout << "*** No instances found! ***" << endl; return (0); } else { cout << "Recognized Instances: " << rototranslations.size () << endl << endl; } /** * Generates clouds for each instances found */ std::vector<pcl::PointCloud<PointType>::ConstPtr> instances; for (size_t i = 0; i < rototranslations.size (); ++i) { pcl::PointCloud<PointType>::Ptr rotated_model (new pcl::PointCloud<PointType> ()); pcl::transformPointCloud (*model, *rotated_model, rototranslations[i]); instances.push_back (rotated_model); } /** * ICP */ std::vector<pcl::PointCloud<PointType>::ConstPtr> registered_instances; if (true) { cout << "--- ICP ---------" << endl; for (size_t i = 0; i < rototranslations.size (); ++i) { pcl::IterativeClosestPoint<PointType, PointType> icp; icp.setMaximumIterations (icp_max_iter_); icp.setMaxCorrespondenceDistance (icp_corr_distance_); icp.setInputTarget (scene); icp.setInputSource (instances[i]); pcl::PointCloud<PointType>::Ptr registered (new pcl::PointCloud<PointType>); icp.align (*registered); registered_instances.push_back (registered); cout << "Instance " << i << " "; if (icp.hasConverged ()) { cout << "Aligned!" << endl; } else { cout << "Not Aligned!" 
<< endl; } } cout << "-----------------" << endl << endl; } /** * Hypothesis Verification */ cout << "--- Hypotheses Verification ---" << endl; std::vector<bool> hypotheses_mask; // Mask Vector to identify positive hypotheses pcl::GlobalHypothesesVerification<PointType, PointType> GoHv; GoHv.setSceneCloud (scene); // Scene Cloud GoHv.addModels (registered_instances, true); //Models to verify GoHv.setInlierThreshold (hv_inlier_th_); GoHv.setOcclusionThreshold (hv_occlusion_th_); GoHv.setRegularizer (hv_regularizer_); GoHv.setRadiusClutter (hv_rad_clutter_); GoHv.setClutterRegularizer (hv_clutter_reg_); GoHv.setDetectClutter (hv_detect_clutter_); GoHv.setRadiusNormals (hv_rad_normals_); GoHv.verify (); GoHv.getMask (hypotheses_mask); // i-element TRUE if hvModels[i] verifies hypotheses for (int i = 0; i < hypotheses_mask.size (); i++) { if (hypotheses_mask[i]) { cout << "Instance " << i << " is GOOD! <---" << endl; } else { cout << "Instance " << i << " is bad!" << endl; } } cout << "-------------------------------" << endl; /** * Visualization */ pcl::visualization::PCLVisualizer viewer ("Hypotheses Verification"); viewer.addPointCloud (scene, "scene_cloud"); pcl::PointCloud<PointType>::Ptr off_scene_model (new pcl::PointCloud<PointType> ()); pcl::PointCloud<PointType>::Ptr off_scene_model_keypoints (new pcl::PointCloud<PointType> ()); pcl::PointCloud<PointType>::Ptr off_model_good_kp (new pcl::PointCloud<PointType> ()); pcl::transformPointCloud (*model, *off_scene_model, Eigen::Vector3f (-1, 0, 0), Eigen::Quaternionf (1, 0, 0, 0)); pcl::transformPointCloud (*model_keypoints, *off_scene_model_keypoints, Eigen::Vector3f (-1, 0, 0), Eigen::Quaternionf (1, 0, 0, 0)); pcl::transformPointCloud (*model_good_kp, *off_model_good_kp, Eigen::Vector3f (-1, 0, 0), Eigen::Quaternionf (1, 0, 0, 0)); if (show_keypoints_) { CloudStyle modelStyle = style_white; pcl::visualization::PointCloudColorHandlerCustom<PointType> off_scene_model_color_handler (off_scene_model, modelStyle.r, modelStyle.g, modelStyle.b); viewer.addPointCloud (off_scene_model, off_scene_model_color_handler, "off_scene_model"); viewer.setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, modelStyle.size, "off_scene_model"); } if (show_keypoints_) { CloudStyle goodKeypointStyle = style_violet; pcl::visualization::PointCloudColorHandlerCustom<PointType> model_good_keypoints_color_handler (off_model_good_kp, goodKeypointStyle.r, goodKeypointStyle.g, goodKeypointStyle.b); viewer.addPointCloud (off_model_good_kp, model_good_keypoints_color_handler, "model_good_keypoints"); viewer.setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, goodKeypointStyle.size, "model_good_keypoints"); pcl::visualization::PointCloudColorHandlerCustom<PointType> scene_good_keypoints_color_handler (scene_good_kp, goodKeypointStyle.r, goodKeypointStyle.g, goodKeypointStyle.b); viewer.addPointCloud (scene_good_kp, scene_good_keypoints_color_handler, "scene_good_keypoints"); viewer.setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, goodKeypointStyle.size, "scene_good_keypoints"); } for (size_t i = 0; i < instances.size (); ++i) { std::stringstream ss_instance; ss_instance << "instance_" << i; CloudStyle clusterStyle = style_red; pcl::visualization::PointCloudColorHandlerCustom<PointType> instance_color_handler (instances[i], clusterStyle.r, clusterStyle.g, clusterStyle.b); viewer.addPointCloud (instances[i], instance_color_handler, ss_instance.str ()); 
viewer.setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, clusterStyle.size, ss_instance.str ()); CloudStyle registeredStyles = hypotheses_mask[i] ? style_green : style_cyan; ss_instance << "_registered" << endl; pcl::visualization::PointCloudColorHandlerCustom<PointType> registered_instance_color_handler (registered_instances[i], registeredStyles.r, registeredStyles.g, registeredStyles.b); viewer.addPointCloud (registered_instances[i], registered_instance_color_handler, ss_instance.str ()); viewer.setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, registeredStyles.size, ss_instance.str ()); } while (!viewer.wasStopped ()) { viewer.spinOnce (); } return (0); }
int main( int argc, char** argv )
{
  // use an ArgumentParser object to manage the program arguments.
  osg::ArgumentParser arguments( &argc, argv );
  arguments.getApplicationUsage()->setApplicationName( arguments.getApplicationName() );
  arguments.getApplicationUsage()->setDescription( arguments.getApplicationName() + " is the standard OpenSceneGraph example which loads and visualises 3d models." );
  arguments.getApplicationUsage()->setCommandLineUsage( arguments.getApplicationName() + " [options] filename ..." );
  arguments.getApplicationUsage()->addCommandLineOption( "--image <filename>", "Load an image and render it on a quad" );
  arguments.getApplicationUsage()->addCommandLineOption( "--dem <filename>", "Load an image/DEM and render it on a HeightField" );
  arguments.getApplicationUsage()->addCommandLineOption( "--login <url> <username> <password>", "Provide authentication information for http file access." );

  osgViewer::Viewer viewer( arguments );

  unsigned int helpType = 0;
  if ( ( helpType = arguments.readHelpType() ) )
  {
    arguments.getApplicationUsage()->write( std::cout, helpType );
    return 1;
  }

  // report any errors if they have occurred when parsing the program arguments.
  if ( arguments.errors() )
  {
    arguments.writeErrorMessages( std::cout );
    return 1;
  }

  if ( arguments.argc() <= 1 )
  {
    arguments.getApplicationUsage()->write( std::cout, osg::ApplicationUsage::COMMAND_LINE_OPTION );
    return 1;
  }

  std::string url, username, password;
  while( arguments.read( "--login", url, username, password ) )
  {
    if ( !osgDB::Registry::instance()->getAuthenticationMap() )
    {
      osgDB::Registry::instance()->setAuthenticationMap( new osgDB::AuthenticationMap );
      osgDB::Registry::instance()->getAuthenticationMap()->addAuthenticationDetails( url, new osgDB::AuthenticationDetails( username, password ) );
    }
  }

  // set up the camera manipulators.
  {
    osg::ref_ptr<osgGA::KeySwitchMatrixManipulator> keyswitchManipulator = new osgGA::KeySwitchMatrixManipulator;
    keyswitchManipulator->addMatrixManipulator( '1', "Trackball", new osgGA::TrackballManipulator() );
    keyswitchManipulator->addMatrixManipulator( '2', "Flight", new osgGA::FlightManipulator() );
    keyswitchManipulator->addMatrixManipulator( '3', "Drive", new osgGA::DriveManipulator() );
    keyswitchManipulator->addMatrixManipulator( '4', "Terrain", new osgGA::TerrainManipulator() );
    keyswitchManipulator->addMatrixManipulator( '5', "Orbit", new osgGA::OrbitManipulator() );
    keyswitchManipulator->addMatrixManipulator( '6', "FirstPerson", new osgGA::FirstPersonManipulator() );
    keyswitchManipulator->addMatrixManipulator( '7', "Spherical", new osgGA::SphericalManipulator() );

    std::string pathfile;
    double animationSpeed = 1.0;
    while( arguments.read( "--speed", animationSpeed ) ) {}
    char keyForAnimationPath = '8';
    while ( arguments.read( "-p", pathfile ) )
    {
      osgGA::AnimationPathManipulator* apm = new osgGA::AnimationPathManipulator( pathfile );
      if ( apm && apm->valid() )
      {
        apm->setTimeScale( animationSpeed );
        unsigned int num = keyswitchManipulator->getNumMatrixManipulators();
        keyswitchManipulator->addMatrixManipulator( keyForAnimationPath, "Path", apm );
        keyswitchManipulator->selectMatrixManipulator( num );
        ++keyForAnimationPath;
      }
    }
    viewer.setCameraManipulator( keyswitchManipulator.get() );
  }

  // add the state manipulator
  viewer.addEventHandler( new osgGA::StateSetManipulator( viewer.getCamera()->getOrCreateStateSet() ) );
  // add the thread model handler
  viewer.addEventHandler( new osgViewer::ThreadingHandler );
  // add the window size toggle handler
  viewer.addEventHandler( new osgViewer::WindowSizeHandler );
  // add the stats handler
  viewer.addEventHandler( new osgViewer::StatsHandler );
  // add the help handler
  viewer.addEventHandler( new osgViewer::HelpHandler( arguments.getApplicationUsage() ) );
  // add the record camera path handler
  viewer.addEventHandler( new osgViewer::RecordCameraPathHandler );
  // add the LOD Scale handler
  viewer.addEventHandler( new osgViewer::LODScaleHandler );
  // add the screen capture handler
  viewer.addEventHandler( new osgViewer::ScreenCaptureHandler );

  // load the data
  osg::ref_ptr<osg::Node> loadedModel = osgDB::readNodeFiles( arguments );
  if ( !loadedModel )
  {
    std::cout << arguments.getApplicationName() << ": No data loaded" << std::endl;
    return 1;
  }

  // any options left unread are converted into errors to write out later.
  arguments.reportRemainingOptionsAsUnrecognized();

  // report any errors if they have occurred when parsing the program arguments.
  if ( arguments.errors() )
  {
    arguments.writeErrorMessages( std::cout );
    return 1;
  }

  // optimize the scene graph, remove redundant nodes and state etc.
  osgUtil::Optimizer optimizer;
  optimizer.optimize( loadedModel.get() );

  viewer.setSceneData( loadedModel.get() );
  viewer.realize();
  return viewer.run();
}
int main(int argc, const char **argv) {
  const char *me;
  char *err;
  hestOpt *hopt = NULL;
  hestParm *hparm;
  airArray *mop;

  /* variables learned via hest */
  Nrrd *nin;
  float camfr[3], camat[3], camup[3], camnc, camfc, camFOV;
  int camortho, hitandquit;
  unsigned int camsize[2];
  double isovalue, sliso, isomin, isomax;

  /* boilerplate hest code */
  me = argv[0];
  mop = airMopNew();
  hparm = hestParmNew();
  hparm->respFileEnable = AIR_TRUE;
  airMopAdd(mop, hparm, (airMopper)hestParmFree, airMopAlways);

  /* setting up the command-line options */
  hestOptAdd(&hopt, "i", "volume", airTypeOther, 1, 1, &nin, NULL,
             "input volume to isosurface", NULL, NULL, nrrdHestNrrd);
  hestOptAdd(&hopt, "v", "isovalue", airTypeDouble, 1, 1, &isovalue, "nan",
             "isovalue at which to run Marching Cubes");
  hestOptAdd(&hopt, "fr", "x y z", airTypeFloat, 3, 3, camfr, "3 4 5",
             "look-from point");
  hestOptAdd(&hopt, "at", "x y z", airTypeFloat, 3, 3, camat, "0 0 0",
             "look-at point");
  hestOptAdd(&hopt, "up", "x y z", airTypeFloat, 3, 3, camup, "0 0 1",
             "up direction");
  hestOptAdd(&hopt, "nc", "dist", airTypeFloat, 1, 1, &(camnc), "-2",
             "at-relative near clipping distance");
  hestOptAdd(&hopt, "fc", "dist", airTypeFloat, 1, 1, &(camfc), "2",
             "at-relative far clipping distance");
  hestOptAdd(&hopt, "fov", "angle", airTypeFloat, 1, 1, &(camFOV), "20",
             "vertical field-of-view, in degrees. Full vertical "
             "extent of image plane subtends this angle.");
  hestOptAdd(&hopt, "sz", "s0 s1", airTypeUInt, 2, 2, &(camsize), "640 480",
             "# samples (horz vert) of image plane. ");
  hestOptAdd(&hopt, "ortho", NULL, airTypeInt, 0, 0, &(camortho), NULL,
             "use orthographic instead of (the default) "
             "perspective projection ");
  hestOptAdd(&hopt, "haq", NULL, airTypeBool, 0, 0, &(hitandquit), NULL,
             "save a screenshot rather than display the viewer");
  hestParseOrDie(hopt, argc-1, argv+1, hparm, me, "demo program",
                 AIR_TRUE, AIR_TRUE, AIR_TRUE);
  airMopAdd(mop, hopt, (airMopper)hestOptFree, airMopAlways);
  airMopAdd(mop, hopt, (airMopper)hestParseFree, airMopAlways);

  /* learn value range, and set initial isovalue if needed */
  NrrdRange *range = nrrdRangeNewSet(nin, AIR_FALSE);
  airMopAdd(mop, range, (airMopper)nrrdRangeNix, airMopAlways);
  isomin = range->min;
  isomax = range->max;
  if (!AIR_EXISTS(isovalue)) {
    isovalue = (isomin + isomax)/2;
  }

  /* first, make sure we can isosurface ok */
  limnPolyData *lpld = limnPolyDataNew();
  seekContext *sctx = seekContextNew();
  airMopAdd(mop, sctx, (airMopper)seekContextNix, airMopAlways);
  sctx->pldArrIncr = nrrdElementNumber(nin);
  seekVerboseSet(sctx, 0);
  seekNormalsFindSet(sctx, AIR_TRUE);
  if (seekDataSet(sctx, nin, NULL, 0)
      || seekTypeSet(sctx, seekTypeIsocontour)
      || seekIsovalueSet(sctx, isovalue)
      || seekUpdate(sctx)
      || seekExtract(sctx, lpld)) {
    airMopAdd(mop, err = biffGetDone(SEEK), airFree, airMopAlways);
    fprintf(stderr, "trouble with isosurfacing:\n%s", err);
    airMopError(mop);
    return 1;
  }
  if (!lpld->xyzwNum) {
    fprintf(stderr, "%s: warning: No isocontour generated at isovalue %g\n",
            me, isovalue);
  }

  /* then create empty scene */
  Hale::init();
  Hale::Scene scene;

  /* then create viewer (in order to create the OpenGL context) */
  Hale::Viewer viewer(camsize[0], camsize[1], "Iso", &scene);
  viewer.lightDir(glm::vec3(-1.0f, 1.0f, 3.0f));
  viewer.camera.init(glm::vec3(camfr[0], camfr[1], camfr[2]),
                     glm::vec3(camat[0], camat[1], camat[2]),
                     glm::vec3(camup[0], camup[1], camup[2]),
                     camFOV, (float)camsize[0]/camsize[1],
                     camnc, camfc, camortho);
  viewer.refreshCB((Hale::ViewerRefresher)render);
  viewer.refreshData(&viewer);
  sliso = isovalue;
  viewer.slider(&sliso, isomin, isomax);
  viewer.current();

  /* then create geometry, and add it to scene */
  Hale::Polydata hply(lpld, true,  /* hply now owns lpld */
                      Hale::ProgramLib(Hale::preprogramAmbDiff2SideSolid));
  scene.add(&hply);
  scene.drawInit();
  render(&viewer);

  if (hitandquit) {
    seekIsovalueSet(sctx, isovalue);
    seekUpdate(sctx);
    seekExtract(sctx, lpld);
    hply.rebuffer();
    render(&viewer);
    glfwWaitEvents();
    render(&viewer);
    viewer.snap();
    Hale::done();
    airMopOkay(mop);
    return 0;
  }

  while (!Hale::finishing) {
    glfwWaitEvents();
    if (viewer.sliding() && sliso != isovalue) {
      isovalue = sliso;
      printf("%s: isosurfacing at %g\n", me, isovalue);
      seekIsovalueSet(sctx, isovalue);
      seekUpdate(sctx);
      seekExtract(sctx, lpld);
      hply.rebuffer();
    }
    render(&viewer);
  }

  /* clean exit; all okay */
  Hale::done();
  airMopOkay(mop);
  return 0;
}
int main (int argc, char** argv)
{
  if (argc < 2) {
    std::cout << "need ply/pcd file. " << std::endl;
    return -1;
  }

  // Determine the file format from the extension
  char tmpStr[100];
  strcpy(tmpStr, argv[1]);
  char* pext = strrchr(tmpStr, '.');
  std::string extply("ply");
  std::string extpcd("pcd");
  if (pext) {
    *pext = '\0';
    pext++;
  }
  std::string ext(pext ? pext : "");
  // Exit if the file format is not supported
  if (!((ext == extply) || (ext == extpcd))) {
    std::cout << "File format not supported!" << std::endl;
    std::cout << "Supported formats: *.pcd and *.ply!" << std::endl;
    return (-1);
  }

  // Choose the loader according to the file format
  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZ>);  // point cloud object that stores the input
  if (ext == extply) {
    if (pcl::io::loadPLYFile(argv[1], *cloud) == -1) {
      PCL_ERROR("Could not read ply file!\n");
      return -1;
    }
  }
  else {
    if (pcl::io::loadPCDFile(argv[1], *cloud) == -1) {
      PCL_ERROR("Could not read pcd file!\n");
      return -1;
    }
  }

  // Estimate normals: x,y,z + normal + curvature
  pcl::NormalEstimation<pcl::PointXYZ, pcl::Normal> n;
  pcl::PointCloud<pcl::Normal>::Ptr normals (new pcl::PointCloud<pcl::Normal>);
  pcl::search::KdTree<pcl::PointXYZ>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZ>);
  tree->setInputCloud(cloud);
  n.setInputCloud(cloud);
  n.setSearchMethod(tree);
  n.setKSearch(20);       // use 20 nearest neighbours
  n.compute(*normals);    // compute the normals; the result is stored in normals

  // Concatenate the points and the normals
  pcl::PointCloud<pcl::PointNormal>::Ptr cloud_with_normals (new pcl::PointCloud<pcl::PointNormal>);
  pcl::concatenateFields(*cloud, *normals, *cloud_with_normals);  // cloud_with_normals = cloud + normals

  // Create a search tree for the combined cloud
  pcl::search::KdTree<pcl::PointNormal>::Ptr tree2 (new pcl::search::KdTree<pcl::PointNormal>);
  tree2->setInputCloud (cloud_with_normals);

  // Initialize the MarchingCubes object and set its parameters
  pcl::MarchingCubes<pcl::PointNormal> *mc;
  mc = new pcl::MarchingCubesHoppe<pcl::PointNormal> ();
  /*
  if (hoppe_or_rbf == 0)
    mc = new pcl::MarchingCubesHoppe<pcl::PointNormal> ();
  else {
    mc = new pcl::MarchingCubesRBF<pcl::PointNormal> ();
    (reinterpret_cast<pcl::MarchingCubesRBF<pcl::PointNormal>*> (mc))->setOffSurfaceDisplacement (off_surface_displacement);
  }
  */

  // Polygon mesh that stores the reconstruction result
  pcl::PolygonMesh mesh;

  // Set the MarchingCubes parameters
  mc->setIsoLevel (0.0f);
  mc->setGridResolution (50, 50, 50);
  mc->setPercentageExtendGrid (0.0f);

  // Set the input and run the reconstruction; the result is stored in mesh
  mc->setInputCloud (cloud_with_normals);
  mc->reconstruct (mesh);
  delete mc;

  // Save the mesh
  pcl::io::savePLYFile("result2.ply", mesh);

  // Display the result
  boost::shared_ptr<pcl::visualization::PCLVisualizer> viewer (new pcl::visualization::PCLVisualizer ("3D Viewer"));
  viewer->setBackgroundColor (0, 0, 0);   // background color
  viewer->addPolygonMesh(mesh, "my");     // mesh to display
  //viewer->addCoordinateSystem (1.0);    // coordinate system
  viewer->initCameraParameters ();
  while (!viewer->wasStopped ()) {
    viewer->spinOnce (100);
    boost::this_thread::sleep (boost::posix_time::microseconds (100000));
  }
  return (0);
}
int main (int argc, char *argv[]) { parseCommandLine (argc, argv); pcl::PointCloud<PointType>::Ptr model (new pcl::PointCloud<PointType> ()); pcl::PointCloud<PointType>::Ptr model_keypoints (new pcl::PointCloud<PointType> ()); pcl::PointCloud<PointType>::Ptr scene (new pcl::PointCloud<PointType> ()); pcl::PointCloud<PointType>::Ptr scene_keypoints (new pcl::PointCloud<PointType> ()); pcl::PointCloud<NormalType>::Ptr model_normals (new pcl::PointCloud<NormalType> ()); pcl::PointCloud<NormalType>::Ptr scene_normals (new pcl::PointCloud<NormalType> ()); pcl::PointCloud<DescriptorType>::Ptr model_descriptors (new pcl::PointCloud<DescriptorType> ()); pcl::PointCloud<DescriptorType>::Ptr scene_descriptors (new pcl::PointCloud<DescriptorType> ()); // // Load clouds // if (pcl::io::loadPCDFile (model_filename_, *model) < 0) { std::cout << "Error loading model cloud." << std::endl; showHelp (argv[0]); return (-1); } if (pcl::io::loadPCDFile (scene_filename_, *scene) < 0) { std::cout << "Error loading scene cloud." << std::endl; showHelp (argv[0]); return (-1); } // // Set up resolution invariance // if (use_cloud_resolution_) { float resolution = static_cast<float> (computeCloudResolution (model)); if (resolution != 0.0f) { model_ss_ *= resolution; scene_ss_ *= resolution; rf_rad_ *= resolution; descr_rad_ *= resolution; cg_size_ *= resolution; } std::cout << "Model resolution: " << resolution << std::endl; std::cout << "Model sampling size: " << model_ss_ << std::endl; std::cout << "Scene sampling size: " << scene_ss_ << std::endl; std::cout << "LRF support radius: " << rf_rad_ << std::endl; std::cout << "SHOT descriptor radius: " << descr_rad_ << std::endl; std::cout << "Clustering bin size: " << cg_size_ << std::endl << std::endl; } // // Compute Normals // pcl::NormalEstimationOMP<PointType, NormalType> norm_est; norm_est.setKSearch (10); norm_est.setInputCloud (model); norm_est.compute (*model_normals); norm_est.setInputCloud (scene); norm_est.compute (*scene_normals); // // Downsample Clouds to Extract keypoints // /* pcl::UniformSampling<PointType> uniform_sampling; uniform_sampling.setInputCloud (model); uniform_sampling.setRadiusSearch (model_ss_); uniform_sampling.filter (*model_keypoints); std::cout << "Model total points: " << model->size () << "; Selected Keypoints: " << model_keypoints->size () << std::endl; uniform_sampling.setInputCloud (scene); uniform_sampling.setRadiusSearch (scene_ss_); uniform_sampling.filter (*scene_keypoints); std::cout << "Scene total points: " << scene->size () << "; Selected Keypoints: " << scene_keypoints->size () << std::endl; */ pcl::UniformSampling<PointType> uniform_sampling; uniform_sampling.setInputCloud (model); uniform_sampling.setRadiusSearch (model_ss_); //uniform_sampling.filter (*model_keypoints); pcl::PointCloud<int> keypointIndices1; uniform_sampling.compute(keypointIndices1); pcl::copyPointCloud(*model, keypointIndices1.points, *model_keypoints); std::cout << "Model total points: " << model->size () << "; Selected Keypoints: " << model_keypoints->size () << std::endl; uniform_sampling.setInputCloud (scene); uniform_sampling.setRadiusSearch (scene_ss_); //uniform_sampling.filter (*scene_keypoints); pcl::PointCloud<int> keypointIndices2; uniform_sampling.compute(keypointIndices2); pcl::copyPointCloud(*scene, keypointIndices2.points, *scene_keypoints); std::cout << "Scene total points: " << scene->size () << "; Selected Keypoints: " << scene_keypoints->size () << std::endl; // // Compute Descriptor for keypoints // 
pcl::SHOTEstimationOMP<PointType, NormalType, DescriptorType> descr_est; descr_est.setRadiusSearch (descr_rad_); descr_est.setInputCloud (model_keypoints); descr_est.setInputNormals (model_normals); descr_est.setSearchSurface (model); descr_est.compute (*model_descriptors); descr_est.setInputCloud (scene_keypoints); descr_est.setInputNormals (scene_normals); descr_est.setSearchSurface (scene); descr_est.compute (*scene_descriptors); // // Find Model-Scene Correspondences with KdTree // pcl::CorrespondencesPtr model_scene_corrs (new pcl::Correspondences ()); pcl::KdTreeFLANN<DescriptorType> match_search; match_search.setInputCloud (model_descriptors); // For each scene keypoint descriptor, find nearest neighbor into the model keypoints descriptor cloud and add it to the correspondences vector. for (size_t i = 0; i < scene_descriptors->size (); ++i) { std::vector<int> neigh_indices (1); std::vector<float> neigh_sqr_dists (1); if (!pcl_isfinite (scene_descriptors->at (i).descriptor[0])) //skipping NaNs { continue; } int found_neighs = match_search.nearestKSearch (scene_descriptors->at (i), 1, neigh_indices, neigh_sqr_dists); if(found_neighs == 1 && neigh_sqr_dists[0] < 0.25f) // add match only if the squared descriptor distance is less than 0.25 (SHOT descriptor distances are between 0 and 1 by design) { pcl::Correspondence corr (neigh_indices[0], static_cast<int> (i), neigh_sqr_dists[0]); model_scene_corrs->push_back (corr); } } std::cout << "Correspondences found: " << model_scene_corrs->size () << std::endl; // // Actual Clustering // std::vector<Eigen::Matrix4f, Eigen::aligned_allocator<Eigen::Matrix4f> > rototranslations; std::vector<pcl::Correspondences> clustered_corrs; // Using Hough3D if (use_hough_) { // // Compute (Keypoints) Reference Frames only for Hough // pcl::PointCloud<RFType>::Ptr model_rf (new pcl::PointCloud<RFType> ()); pcl::PointCloud<RFType>::Ptr scene_rf (new pcl::PointCloud<RFType> ()); pcl::BOARDLocalReferenceFrameEstimation<PointType, NormalType, RFType> rf_est; rf_est.setFindHoles (true); rf_est.setRadiusSearch (rf_rad_); rf_est.setInputCloud (model_keypoints); rf_est.setInputNormals (model_normals); rf_est.setSearchSurface (model); rf_est.compute (*model_rf); rf_est.setInputCloud (scene_keypoints); rf_est.setInputNormals (scene_normals); rf_est.setSearchSurface (scene); rf_est.compute (*scene_rf); // Clustering pcl::Hough3DGrouping<PointType, PointType, RFType, RFType> clusterer; clusterer.setHoughBinSize (cg_size_); clusterer.setHoughThreshold (cg_thresh_); clusterer.setUseInterpolation (true); clusterer.setUseDistanceWeight (false); clusterer.setInputCloud (model_keypoints); clusterer.setInputRf (model_rf); clusterer.setSceneCloud (scene_keypoints); clusterer.setSceneRf (scene_rf); clusterer.setModelSceneCorrespondences (model_scene_corrs); //clusterer.cluster (clustered_corrs); clusterer.recognize (rototranslations, clustered_corrs); } else // Using GeometricConsistency { pcl::GeometricConsistencyGrouping<PointType, PointType> gc_clusterer; gc_clusterer.setGCSize (cg_size_); gc_clusterer.setGCThreshold (cg_thresh_); gc_clusterer.setInputCloud (model_keypoints); gc_clusterer.setSceneCloud (scene_keypoints); gc_clusterer.setModelSceneCorrespondences (model_scene_corrs); //gc_clusterer.cluster (clustered_corrs); gc_clusterer.recognize (rototranslations, clustered_corrs); } // // Output results // std::cout << "Model instances found: " << rototranslations.size () << std::endl; for (size_t i = 0; i < rototranslations.size (); ++i) { std::cout << "\n Instance " << i 
+ 1 << ":" << std::endl; std::cout << " Correspondences belonging to this instance: " << clustered_corrs[i].size () << std::endl; // Print the rotation matrix and translation vector Eigen::Matrix3f rotation = rototranslations[i].block<3,3>(0, 0); Eigen::Vector3f translation = rototranslations[i].block<3,1>(0, 3); printf ("\n"); printf (" | %6.3f %6.3f %6.3f | \n", rotation (0,0), rotation (0,1), rotation (0,2)); printf (" R = | %6.3f %6.3f %6.3f | \n", rotation (1,0), rotation (1,1), rotation (1,2)); printf (" | %6.3f %6.3f %6.3f | \n", rotation (2,0), rotation (2,1), rotation (2,2)); printf ("\n"); printf (" t = < %0.3f, %0.3f, %0.3f >\n", translation (0), translation (1), translation (2)); } // // Visualization // pcl::visualization::PCLVisualizer viewer ("Correspondence Grouping"); viewer.addPointCloud (scene, "scene_cloud"); pcl::PointCloud<PointType>::Ptr off_scene_model (new pcl::PointCloud<PointType> ()); pcl::PointCloud<PointType>::Ptr off_scene_model_keypoints (new pcl::PointCloud<PointType> ()); if (show_correspondences_ || show_keypoints_) { // We are translating the model so that it doesn't end in the middle of the scene representation pcl::transformPointCloud (*model, *off_scene_model, Eigen::Vector3f (-1,0,0), Eigen::Quaternionf (1, 0, 0, 0)); pcl::transformPointCloud (*model_keypoints, *off_scene_model_keypoints, Eigen::Vector3f (-1,0,0), Eigen::Quaternionf (1, 0, 0, 0)); pcl::visualization::PointCloudColorHandlerCustom<PointType> off_scene_model_color_handler (off_scene_model, 255, 255, 128); viewer.addPointCloud (off_scene_model, off_scene_model_color_handler, "off_scene_model"); } if (show_keypoints_) { pcl::visualization::PointCloudColorHandlerCustom<PointType> scene_keypoints_color_handler (scene_keypoints, 0, 0, 255); viewer.addPointCloud (scene_keypoints, scene_keypoints_color_handler, "scene_keypoints"); viewer.setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 5, "scene_keypoints"); pcl::visualization::PointCloudColorHandlerCustom<PointType> off_scene_model_keypoints_color_handler (off_scene_model_keypoints, 0, 0, 255); viewer.addPointCloud (off_scene_model_keypoints, off_scene_model_keypoints_color_handler, "off_scene_model_keypoints"); viewer.setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 5, "off_scene_model_keypoints"); } for (size_t i = 0; i < rototranslations.size (); ++i) { pcl::PointCloud<PointType>::Ptr rotated_model (new pcl::PointCloud<PointType> ()); pcl::transformPointCloud (*model, *rotated_model, rototranslations[i]); std::stringstream ss_cloud; ss_cloud << "instance" << i; pcl::visualization::PointCloudColorHandlerCustom<PointType> rotated_model_color_handler (rotated_model, 255, 0, 0); viewer.addPointCloud (rotated_model, rotated_model_color_handler, ss_cloud.str ()); if (show_correspondences_) { for (size_t j = 0; j < clustered_corrs[i].size (); ++j) { std::stringstream ss_line; ss_line << "correspondence_line" << i << "_" << j; PointType& model_point = off_scene_model_keypoints->at (clustered_corrs[i][j].index_query); PointType& scene_point = scene_keypoints->at (clustered_corrs[i][j].index_match); // We are drawing a line for each pair of clustered correspondences found between the model and the scene viewer.addLine<PointType, PointType> (model_point, scene_point, 0, 255, 0, ss_line.str ()); } } } while (!viewer.wasStopped ()) { viewer.spinOnce (); } return (0); }
int main( int argc, char** argv )
{
  // use an ArgumentParser object to manage the program arguments.
  osg::ArgumentParser arguments(&argc, argv);

  // read the scene from the list of file specified command line arguments.
  osg::ref_ptr<osg::Node> loadedModel = osgDB::readNodeFiles(arguments);

  // if not loaded assume no arguments passed in, try use default cow model instead.
  if (!loadedModel)
  {
    loadedModel = osgDB::readNodeFile("cow.osgt");
  }

  // Still no loaded model, then exit
  if (!loadedModel)
  {
    osg::notify(osg::ALWAYS) << "No model could be loaded and didn't find cow.osgt, terminating.." << std::endl;
    return 0;
  }

  // Create Trackball manipulator
  osg::ref_ptr<osgGA::CameraManipulator> cameraManipulator = new osgGA::TrackballManipulator;
  const osg::BoundingSphere& bs = loadedModel->getBound();
  if (bs.valid())
  {
    // Adjust view to object view
    /* This caused a problem with the head tracking on the Vive
    osg::Vec3 modelCenter = bs.center();
    osg::Vec3 eyePos = bs.center() + osg::Vec3(0, bs.radius(), 0);
    cameraManipulator->setHomePosition(eyePos, modelCenter, osg::Vec3(0, 0, 1));
    */
  }

  // Exit if we do not have an HMD present
  if (!OpenVRDevice::hmdPresent())
  {
    osg::notify(osg::FATAL) << "Error: No valid HMD present!" << std::endl;
    return 1;
  }

  // Open the HMD
  float nearClip = 0.01f;
  float farClip = 10000.0f;
  float worldUnitsPerMetre = 1.0f;
  int samples = 4;
  osg::ref_ptr<OpenVRDevice> openvrDevice = new OpenVRDevice(nearClip, farClip, worldUnitsPerMetre, samples);

  // Exit if we fail to initialize the HMD device
  if (!openvrDevice->hmdInitialized())
  {
    // The reason for failure was reported by the constructor.
    return 1;
  }

  // Get the suggested context traits
  osg::ref_ptr<osg::GraphicsContext::Traits> traits = openvrDevice->graphicsContextTraits();
  traits->windowName = "OsgOpenVRViewerExample";

  // Create a graphic context based on our desired traits
  osg::ref_ptr<osg::GraphicsContext> gc = osg::GraphicsContext::createGraphicsContext(traits);
  if (!gc)
  {
    osg::notify(osg::NOTICE) << "Error, GraphicsWindow has not been created successfully" << std::endl;
    return 1;
  }
  if (gc.valid())
  {
    gc->setClearColor(osg::Vec4(0.2f, 0.2f, 0.4f, 1.0f));
    gc->setClearMask(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
  }

  GraphicsWindowViewer viewer(arguments, dynamic_cast<osgViewer::GraphicsWindow*>(gc.get()));
  // Force single threaded to make sure that no other thread can use the GL context
  viewer.setThreadingModel(osgViewer::Viewer::SingleThreaded);
  viewer.getCamera()->setGraphicsContext(gc);
  viewer.getCamera()->setViewport(0, 0, traits->width, traits->height);
  // Disable automatic computation of near and far plane
  viewer.getCamera()->setComputeNearFarMode(osg::CullSettings::DO_NOT_COMPUTE_NEAR_FAR);
  viewer.setCameraManipulator(cameraManipulator);

  // Things to do when viewer is realized
  osg::ref_ptr<OpenVRRealizeOperation> openvrRealizeOperation = new OpenVRRealizeOperation(openvrDevice);
  viewer.setRealizeOperation(openvrRealizeOperation.get());

  osg::ref_ptr<OpenVRViewer> openvrViewer = new OpenVRViewer(&viewer, openvrDevice, openvrRealizeOperation);
  openvrViewer->addChild(loadedModel);
  viewer.setSceneData(openvrViewer);

  // Add statistics handler
  viewer.addEventHandler(new osgViewer::StatsHandler);
  viewer.addEventHandler(new OpenVREventHandler(openvrDevice));

  viewer.run();

  // Need to do this here to make it happen before destruction of the OSG Viewer, which destroys the OpenGL context.
  openvrDevice->shutdown(gc.get());
  return 0;
}
viewer_rt::viewer_rt (){ //Inicialization i = 0; key = 10; num_semaphore = 6; act = 0; run = true; turn_on = false; turn_view = 0; v1 = 0; v2 = 0; v3 = 0; v4 = 0; obj1 = 0; obj2 = 0; obj3 = 0; //Make semaphore if((semid = semget(key,num_semaphore,IPC_CREAT|0600)) == -1) std::cout << "[WIEWER_RT] ERROR CREATING SEMAPHORE" << std::endl; operation.sem_flg = 0; //Initialze semaphore semctl(semid,WRITE_PCD,SETVAL,1); semctl(semid,REQUEST_LOAD_CLOUD,SETVAL,0); semctl(semid,5,SETVAL,0); //Make and set pipes if (pipe(pipe_flags) == -1) std::cout << "[WIEWER_RT] ERROR CREATING THE PRINCIPAL PIPE" << std::endl; if (pipe(pipe_cloud) == -1) std::cout << "[WIEWER_RT] ERROR CREATING THE SECOND PIPE" << std::endl; //Fork the execcution pid = fork(); vector_pid.push_back(pid); name_process.push_back("VIEWER_RT"); if (pid == -1) std::cout << "[VIEWER_RT] ERROR MAKING FORK" << std::endl; //Child execution (paralel to the principal proccess) else if (pid == 0) { pcl::visualization::PCLVisualizer viewer ("vista 3D"); viewer.setSize(1360,768); //Configure viewer viewer.initCameraParameters (); /* //( xmin , ymin, xmax , ymax, viewport )// //two windows //viewer.createViewPort(0.0, 0.5, 1, 1, v1); //viewer.createViewPort(0.0, 0.0, 1, 0.5, v2); //three windows (A) //viewer.createViewPort(0.0, 0.66, 1, 1, v1); //viewer.createViewPort(0.0, 0.33, 1, 0.66, v2); //viewer.createViewPort(0.0, 0.0, 1, 0.33, v3); //three windows (B) //viewer.createViewPort(0, 0.5, 1, 1, v1); //viewer.createViewPort(0, 0, 0.5, 0.5, v2); //viewer.createViewPort(0.5, 0, 1, 0.5, v3); //viewer.setBackgroundColor (255, 255, 255); //viewer.createViewPort(0, 0, 1, 1, v1); //four windows //viewer.createViewPort(0.0, 0.5, 0.5, 1.0, v1); //viewer.createViewPort(0.5, 0.5, 1, 1, v2); //viewer.createViewPort(0.0, 0.0, 0.5, 0.5, v3); //viewer.createViewPort(0.5, 0.0, 1.0, 0.5, v4); */ viewer.setCameraPosition( 10 , 10, 10 , 10 , 10 , 10 ); viewer.addCoordinateSystem (1); //Principal loop (child execution), no ends until end of program while (run){ //Refresh the viewer display viewer.spinOnce (100); boost::this_thread::sleep (boost::posix_time::microseconds (10000)); turn_view=turn_view + 0.25; if (turn_on) {viewer.setCameraPosition( 20* cos(turn_view*(3.1416/180)) , 20* sin(turn_view*(3.1416/180)) ,10 , 20* cos(turn_view*(3.1416/180)) , 20 * sin(turn_view*(3.1416/180)) , 20 );} //Check if it has received signal if (semctl(semid,5,GETVAL,0) > 0){ //Reset the switch possibilities operation.sem_num = 5; operation.sem_op = -1; semop(semid,&operation,1); //Read instruction from pipe read(pipe_flags[0],&act,1); //SWITCH POSSIBILITIES //Load simple cloud if (act >= LOAD_XYZ_CLOUD && act < LOAD_XYZ_CLOUD + 10 && diff == false){ pcl::PCDReader reader; boost::shared_ptr<pcl::PointCloud<pcl::PointXYZ> > cloud (new pcl::PointCloud<pcl::PointXYZ> ); reader.read ("temp.pcd", *cloud); if (act-LOAD_XYZ_CLOUD == 1){ obj1++; sprintf(cloud_name,"object_v1 %d",obj1); viewer.addPointCloud<pcl::PointXYZ> (cloud, cloud_name,v1); } else if (act-LOAD_XYZ_CLOUD == 2){ obj2++; sprintf(cloud_name,"object_v2 %d",obj2); viewer.addPointCloud<pcl::PointXYZ> (cloud, cloud_name,v2); } else if (act-LOAD_XYZ_CLOUD == 3){ obj3++; sprintf(cloud_name,"object_v3 %d",obj3); viewer.addPointCloud<pcl::PointXYZ> (cloud, cloud_name,v3); } //Reset semaphore and load variables operation.sem_num = REQUEST_LOAD_CLOUD; operation.sem_op = 1; semop(semid,&operation,1); act = 0; } //Load simple cloud (diff) else if (act >= LOAD_XYZ_CLOUD && act < LOAD_XYZ_CLOUD + 10 && diff == true){ pcl::PCDReader 
reader; boost::shared_ptr<pcl::PointCloud<pcl::PointXYZ> > cloud (new pcl::PointCloud<pcl::PointXYZ> ); reader.read ("temp.pcd", *cloud); //temp double r,g,b; r = 0; b = 0; g = 0; diff = false; if (act-LOAD_XYZ_CLOUD == 1){ obj1++; sprintf(cloud_name,"object_v1 %d",obj1); if (obj1 == 1) r = 255; if (obj1 == 2) b = 255; if (obj1 == 3) g = 255; if (obj1 > 3) { r = 255 - 100 * obj1; b = 255 - 100 * obj1; g = 255 - 100 * obj1; } pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZ> single_color(cloud, r, g, b); viewer.addPointCloud<pcl::PointXYZ> (cloud,single_color, cloud_name,v1); } else if (act-LOAD_XYZ_CLOUD == 2){ obj2++; sprintf(cloud_name,"object_v2 %d",obj2); if (obj2 == 1) r = 255; if (obj2 == 2) b = 255; if (obj2 == 3) g = 255; if (obj2 > 3) { r = 255 - 100 * obj2; b = 255 - 100 * obj2; g = 255 - 100 * obj2; } pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZ> single_color(cloud, r, g, b); viewer.addPointCloud<pcl::PointXYZ> (cloud,single_color, cloud_name,v2); } else if (act-LOAD_XYZ_CLOUD == 3){ obj3++; sprintf(cloud_name,"object_v3 %d",obj3); if (obj3 == 1) r = 255; if (obj3 == 2) b = 255; if (obj3 == 3) g = 255; if (obj3 > 3) { r = 255 - 100 * obj3; b = 255 - 100 * obj3; g = 255 - 100 * obj3; } pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZ> single_color(cloud, r, g, b); viewer.addPointCloud<pcl::PointXYZ> (cloud,single_color, cloud_name,v3); } //reset semaphore and load variables operation.sem_num = REQUEST_LOAD_CLOUD; operation.sem_op = 1; semop(semid,&operation,1); act = 0; } //Load RBG cloud if (act >= LOAD_XYZRBG_CLOUD && act < LOAD_XYZRBG_CLOUD + 10){ pcl::PCDReader reader; pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloudRBG (new pcl::PointCloud<pcl::PointXYZRGB>); reader.read ("temp.pcd", *cloudRBG); if (act-LOAD_XYZRBG_CLOUD == 1){ obj1++; sprintf(cloud_name,"object_v1 %d",obj1); viewer.addPointCloud<pcl::PointXYZRGB> (cloudRBG, cloud_name,v1); } else if (act-LOAD_XYZRBG_CLOUD == 2){ obj2++; sprintf(cloud_name,"object_v2 %d",obj2); viewer.addPointCloud<pcl::PointXYZRGB> (cloudRBG, cloud_name,v2); } else if (act-LOAD_XYZRBG_CLOUD == 3){ obj3++; sprintf(cloud_name,"object_v3 %d",obj3); viewer.addPointCloud<pcl::PointXYZRGB> (cloudRBG, cloud_name,v3); } //Reset semaphore and load variables operation.sem_num = REQUEST_LOAD_CLOUD; operation.sem_op = 1; semop(semid,&operation,1); act = 0; } //Load mesh if (act >= LOAD_MESH && act < LOAD_MESH + 10){ pcl::PolygonMesh polymesh; pcl::io::loadPolygonFile ("temp.vtk", polymesh); if (act-LOAD_MESH == 1){ obj1++; sprintf(cloud_name,"object_v1 %d",obj1); viewer.addPolygonMesh (polymesh, cloud_name,v1); } else if (act-LOAD_MESH == 2){ obj2++; sprintf(cloud_name,"object_v2 %d",obj2); viewer.addPolygonMesh (polymesh, cloud_name,v2); } else if (act-LOAD_MESH == 3){ obj3++; sprintf(cloud_name,"object_v3 %d",obj3); viewer.addPolygonMesh (polymesh, cloud_name,v3); } //Reset semaphore and load variables operation.sem_num = REQUEST_LOAD_CLOUD; operation.sem_op = 1; semop(semid,&operation,1); act = 0; } //Delete last cloud added else if (act >= DELETE_LAST && act < DELETE_LAST + 10){ if (act-DELETE_LAST == 1){ sprintf(cloud_name,"object_v1 %d",obj1); if (viewer.removePointCloud(cloud_name,v1)){ obj1--; } } else if (act-DELETE_LAST == 2){ sprintf(cloud_name,"object_v2 %d",obj2); if (viewer.removePointCloud(cloud_name,v2)){ obj2--; } } else if (act-DELETE_LAST == 3){ sprintf(cloud_name,"object_v3 %d",obj3); if (viewer.removePointCloud(cloud_name,v3)){ obj3--; } } //Reset semaphore and load variables act=0; 
operation.sem_num = REQUEST_LOAD_CLOUD; operation.sem_op = 1; semop(semid,&operation,1); } //Delete all clouds else if (act >= DELETE_ALL && act < DELETE_ALL + 10 ){ if (act-DELETE_ALL == 1){ if (viewer.removeAllPointClouds(v1)){ obj1=0; } } else if (act-DELETE_ALL == 2){ if (viewer.removeAllPointClouds(v2)){ obj2=0; } } else if (act-DELETE_ALL == 3){ if (viewer.removeAllPointClouds(v3)){ obj3=0; } } else { if (viewer.removeAllPointClouds(v1)){ obj1=0; } if (viewer.removeAllPointClouds(v2)){ obj2=0; } if (viewer.removeAllPointClouds(v3)){ obj3=0; } } //Reset semaphore and load variables act=0; operation.sem_num = REQUEST_LOAD_CLOUD; operation.sem_op = 1; semop(semid,&operation,1); } //Activate/deactivate turn else if (act == ACT_GIRO){ if (turn_on) { turn_on = false; } else { turn_on = true; } //Reset semaphore and load variables operation.sem_num = REQUEST_LOAD_CLOUD; operation.sem_op = 1; semop(semid,&operation,1); act = 0; } //Activate diff cloud else if (act == SET_DIFF){ diff = true; //Reset semaphore and load variables operation.sem_num = REQUEST_LOAD_CLOUD; operation.sem_op = 1; semop(semid,&operation,1); act = 0; } }//END OF SWITCH POSSIBILITIES }// END OF PRINCIPAL LOOP }// END OF CHILD PROCCESS ///temporal //RGB cloud /* else if (pid > 0){ std::cout << "Prueba de carga de RGB" << std::endl; pcl::PointCloud<pcl::PointXYZRGB>::Ptr point_cloud_ptr (new pcl::PointCloud<pcl::PointXYZRGB>); uint8_t r(255), g(15), b(15); for (float z(-1.0); z <= 1.0; z += 0.05){ for (float angle(0.0); angle <= 360.0; angle += 5.0){ pcl::PointXYZRGB point; point.x = 0.5 * cosf (pcl::deg2rad(angle)); point.y = sinf (pcl::deg2rad(angle)); point.z = z; uint32_t rgb = (static_cast<uint32_t>(r) << 16 | static_cast<uint32_t>(g) << 8 | static_cast<uint32_t>(b)); point.rgb = *reinterpret_cast<float*>(&rgb); point_cloud_ptr->points.push_back (point); } if (z < 0.0){ r -= 12; g += 12; } else{ g -= 12; b += 12; } } point_cloud_ptr->width = (int) point_cloud_ptr->points.size (); point_cloud_ptr->height = 1; load_xyz_rbg (point_cloud_ptr, 1); getchar(); } */ //MESH /* else if (pid > 0){ std::cout << "Prueba de carga de mallado" << std::endl; pcl::PolygonMesh malla; pcl::io::loadPolygonFileVTK ("mesh.vtk", malla); load_mesh(malla, 1); getchar(); } */ ///temporal }
int main( int argc, char** argv ) { float nearClip = 0.01f; float farClip = 10000.0f; float pixelsPerDisplayPixel = 1.0; bool useTimewarp = true; osg::ref_ptr<OculusDevice> oculusDevice = new OculusDevice(nearClip, farClip, pixelsPerDisplayPixel, useTimewarp); // use an ArgumentParser object to manage the program arguments. osg::ArgumentParser arguments(&argc,argv); // read the scene from the list of file specified command line arguments. osg::ref_ptr<osg::Node> loadedModel = osgDB::readNodeFiles(arguments); // if not loaded assume no arguments passed in, try use default cow model instead. if (!loadedModel) loadedModel = osgDB::readNodeFile("cow.osgt"); // Still no loaded model, then exit if (!loadedModel) return 0; // Calculate the texture size const int textureWidth = oculusDevice->renderTargetWidth()/2; const int textureHeight = oculusDevice->renderTargetHeight(); // Setup textures for the RTT cameras osg::ref_ptr<osg::Texture2D> textureLeft = new osg::Texture2D; textureLeft->setTextureSize(textureWidth, textureHeight); textureLeft->setInternalFormat(GL_RGBA); osg::ref_ptr<osg::Texture2D> textureRight = new osg::Texture2D; textureRight->setTextureSize(textureWidth, textureHeight); textureRight->setInternalFormat(GL_RGBA); // Initialize RTT cameras for each eye osg::ref_ptr<osg::Camera> leftEyeRTTCamera = oculusDevice->createRTTCamera(textureLeft, OculusDevice::LEFT, osg::Camera::ABSOLUTE_RF); leftEyeRTTCamera->setComputeNearFarMode( osg::CullSettings::DO_NOT_COMPUTE_NEAR_FAR ); leftEyeRTTCamera->addChild( loadedModel ); osg::ref_ptr<osg::Camera> rightEyeRTTCamera = oculusDevice->createRTTCamera(textureRight, OculusDevice::RIGHT, osg::Camera::ABSOLUTE_RF); rightEyeRTTCamera->setComputeNearFarMode( osg::CullSettings::DO_NOT_COMPUTE_NEAR_FAR ); rightEyeRTTCamera->addChild( loadedModel ); // Create HUD cameras for each eye osg::ref_ptr<osg::Camera> leftCameraWarp = oculusDevice->createWarpOrthoCamera(0.0, 1.0, 0.0, 1.0); osg::ref_ptr<osg::Camera> rightCameraWarp = oculusDevice->createWarpOrthoCamera(0.0, 1.0, 0.0, 1.0); // Create shader program osg::ref_ptr<osg::Program> program = oculusDevice->createShaderProgram(); // Create distortionMesh for each camera osg::ref_ptr<osg::Geode> leftDistortionMesh = oculusDevice->distortionMesh(OculusDevice::LEFT, program, 0, 0, textureWidth, textureHeight, true); leftCameraWarp->addChild(leftDistortionMesh); osg::ref_ptr<osg::Geode> rightDistortionMesh = oculusDevice->distortionMesh(OculusDevice::RIGHT, program, 0, 0, textureWidth, textureHeight, true); rightCameraWarp->addChild(rightDistortionMesh); // Add pre draw camera to handle time warp leftCameraWarp->setPreDrawCallback(new WarpCameraPreDrawCallback(oculusDevice)); rightCameraWarp->setPreDrawCallback(new WarpCameraPreDrawCallback(oculusDevice)); // Attach shaders to each distortion mesh osg::ref_ptr<osg::StateSet> leftEyeStateSet = leftDistortionMesh->getOrCreateStateSet(); osg::ref_ptr<osg::StateSet> rightEyeStateSet = rightDistortionMesh->getOrCreateStateSet(); oculusDevice->applyShaderParameters(leftEyeStateSet, program, textureLeft, OculusDevice::LEFT); oculusDevice->applyShaderParameters(rightEyeStateSet, program, textureRight, OculusDevice::RIGHT); // Create Trackball manipulator osg::ref_ptr<osgGA::CameraManipulator> cameraManipulator = new osgGA::TrackballManipulator; const osg::BoundingSphere& bs = loadedModel->getBound(); if (bs.valid()) { // Adjust view to object view cameraManipulator->setHomePosition(osg::Vec3(0, bs.radius()*1.5, 0), osg::Vec3(0, 0, 0), osg::Vec3(0, 0, 1)); } // Add 
cameras to groups osg::ref_ptr<osg::Group> leftRoot = new osg::Group; osg::ref_ptr<osg::Group> rightRoot = new osg::Group; leftRoot->addChild(leftEyeRTTCamera); leftRoot->addChild(leftCameraWarp); rightRoot->addChild(rightEyeRTTCamera); rightRoot->addChild(rightCameraWarp); osg::ref_ptr<osg::GraphicsContext::Traits> traits = oculusDevice->graphicsContextTraits(); osg::ref_ptr<osg::GraphicsContext> gc = osg::GraphicsContext::createGraphicsContext(traits); // Attach to window, needed for direct mode oculusDevice->attachToWindow(gc); // Attach a callback to detect swap osg::ref_ptr<OculusSwapCallback> swapCallback = new OculusSwapCallback(oculusDevice); gc->setSwapCallback(swapCallback); // Create a composite viewer osgViewer::CompositeViewer viewer(arguments); // Create views and attach camera groups to them osg::ref_ptr<osgViewer::View> leftView = new osgViewer::View; leftView->setName("LeftEyeView"); viewer.addView(leftView); leftView->setSceneData(leftRoot); leftView->getCamera()->setName("LeftEyeCamera"); leftView->getCamera()->setClearColor(osg::Vec4(0.0f, 0.0f, 0.0f, 1.0f)); leftView->getCamera()->setViewport(new osg::Viewport(0, 0, oculusDevice->screenResolutionWidth() / 2, oculusDevice->screenResolutionHeight())); leftView->getCamera()->setGraphicsContext(gc); // Add statistics view to only one view leftView->addEventHandler(new osgViewer::StatsHandler); // Add Oculus Keyboard Handler to only one view leftView->addEventHandler(new OculusEventHandler(oculusDevice)); leftView->setCameraManipulator(cameraManipulator); osg::ref_ptr<osgViewer::View> rightView = new osgViewer::View; rightView->setName("RightEyeView"); viewer.addView(rightView); rightView->setSceneData(rightRoot); rightView->getCamera()->setClearColor(osg::Vec4(0.0f, 0.0f, 0.0f, 1.0f)); rightView->getCamera()->setName("RightEyeCamera"); rightView->getCamera()->setViewport(new osg::Viewport(oculusDevice->screenResolutionWidth() / 2, 0, oculusDevice->screenResolutionWidth() / 2, oculusDevice->screenResolutionHeight())); rightView->getCamera()->setGraphicsContext(gc); rightView->setCameraManipulator(cameraManipulator); // Realize viewer if (!viewer.isRealized()) { viewer.realize(); } // Create matrix for camera position and orientation & position (from HMD) osg::Matrix cameraManipulatorViewMatrix; osg::Matrix hmdMatrix; // Get the view matrix for each eye for later use osg::Matrix leftEyeViewMatrix = oculusDevice->viewMatrixLeft(); osg::Matrix rightEyeViewMatrix = oculusDevice->viewMatrixRight(); // Set the projection matrix for each eye leftEyeRTTCamera->setProjectionMatrix(oculusDevice->projectionMatrixLeft()); rightEyeRTTCamera->setProjectionMatrix(oculusDevice->projectionMatrixRight()); // Start Viewer while (!viewer.done()) { // Update the pose oculusDevice->updatePose(swapCallback->frameIndex()); osg::Vec3 position = oculusDevice->position(); osg::Quat orientation = oculusDevice->orientation(); hmdMatrix.makeRotate(orientation); hmdMatrix.preMultTranslate(position); leftEyeViewMatrix = oculusDevice->viewMatrixLeft(); rightEyeViewMatrix = oculusDevice->viewMatrixRight(); // Get camera matrix from manipulator cameraManipulatorViewMatrix = cameraManipulator->getInverseMatrix(); leftEyeRTTCamera->setViewMatrix(cameraManipulatorViewMatrix*hmdMatrix*leftEyeViewMatrix); rightEyeRTTCamera->setViewMatrix(cameraManipulatorViewMatrix*hmdMatrix*rightEyeViewMatrix); viewer.frame(); } return 0; }
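// Note on the render loop above: each frame the HMD pose is rebuilt from the tracker (hmdMatrix = rotation from
// orientation(), pre-multiplied by the translation from position()), and each eye's RTT view matrix is recomposed as
// cameraManipulatorViewMatrix * hmdMatrix * per-eye view matrix, so trackball navigation, the tracked head pose and
// the fixed left/right eye offsets are combined in that order. The two warp cameras only resample the RTT textures
// through the distortion meshes for final display on the Rift.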
int main_spark( int argc, char** argv ) { osg::ArgumentParser arguments( &argc, argv ); bool trackingModel = false; int effectType = 0; if ( arguments.read("--simple") ) effectType = 0; else if ( arguments.read("--explosion") ) effectType = 1; else if ( arguments.read("--fire") ) effectType = 2; else if ( arguments.read("--rain") ) effectType = 3; else if ( arguments.read("--smoke") ) effectType = 4; spark::init(); osg::ref_ptr<SparkDrawable> spark = new SparkDrawable; osg::ref_ptr<osg::Geode> geode = new osg::Geode; switch ( effectType ) { case 1: // Explosion spark->setBaseSystemCreator( &createExplosion ); spark->addParticleSystem(); spark->setSortParticles( true ); spark->addImage( "explosion", osgDB::readImageFile("data/explosion.bmp"), GL_ALPHA ); spark->addImage( "flash", osgDB::readImageFile("data/flash.bmp"), GL_RGB ); spark->addImage( "spark1", osgDB::readImageFile("data/spark1.bmp"), GL_RGB ); spark->addImage( "spark2", osgDB::readImageFile("data/point.bmp"), GL_ALPHA ); spark->addImage( "wave", osgDB::readImageFile("data/wave.bmp"), GL_RGBA ); break; case 2: // Fire spark->setBaseSystemCreator( /*&createFire*/&fire_creator(2).createFire ); spark->addParticleSystem(); spark->addImage( "fire", osgDB::readImageFile("data/fire2.bmp"), GL_ALPHA ); spark->addImage( "explosion", osgDB::readImageFile("data/explosion.bmp"), GL_ALPHA ); break; case 3: // Rain spark->setBaseSystemCreator( &createRain, true ); // Must use the proto type directly spark->addImage( "waterdrops", osgDB::readImageFile("data/waterdrops.bmp"), GL_ALPHA ); geode = new osg::Geode; break; case 4: // Smoke spark->setBaseSystemCreator( &createSmoke ); spark->addParticleSystem(); spark->addImage( "smoke", osgDB::readImageFile("data/smoke.png"), GL_RGBA ); trackingModel = true; break; default: // Simple spark->setBaseSystemCreator( &createSimpleSystem ); spark->addParticleSystem(); spark->addImage( "flare", osgDB::readImageFile("data/flare.bmp"), GL_ALPHA ); break; } geode->addDrawable( spark.get() ); geode->getOrCreateStateSet()->setRenderingHint( osg::StateSet::TRANSPARENT_BIN ); geode->getOrCreateStateSet()->setMode( GL_LIGHTING, osg::StateAttribute::OFF ); osg::ref_ptr<SparkUpdatingHandler> handler = new SparkUpdatingHandler; handler->addSpark( spark.get() ); osg::ref_ptr<osg::MatrixTransform> model = new osg::MatrixTransform; model->addChild( osgDB::readNodeFile("glider.osg") ); if ( trackingModel ) { handler->setTrackee( 0, model.get() ); osg::ref_ptr<osg::AnimationPathCallback> apcb = new osg::AnimationPathCallback; apcb->setAnimationPath( createAnimationPath(4.0f, 6.0f) ); model->setUpdateCallback( apcb.get() ); } osg::ref_ptr<osg::Group> root = new osg::Group; root->addChild( geode.get() ); root->addChild( model.get() ); osgViewer::Viewer viewer(arguments); viewer.getCamera()->setClearColor( osg::Vec4(0.0f, 0.0f, 0.0f, 1.0f) ); viewer.setSceneData( root.get() ); viewer.addEventHandler( new osgGA::StateSetManipulator(viewer.getCamera()->getOrCreateStateSet()) ); viewer.addEventHandler( new osgViewer::StatsHandler ); viewer.addEventHandler( new osgViewer::WindowSizeHandler ); viewer.addEventHandler( handler.get() ); return viewer.run(); }
int main(int argc, char **argv) { // use an ArgumentParser object to manage the program arguments. osg::ArgumentParser arguments(&argc, argv); // read the scene from the list of file specified commandline args. osg::ref_ptr<osg::Node> scene = osgDB::readNodeFiles(arguments); if (!scene) { std::cout << argv[0] << ": requires filename argument." << std::endl; return 1; } // construct the viewer. osgViewer::CompositeViewer viewer(arguments); if (arguments.read("-1")) { { osgViewer::View* view = new osgViewer::View; view->setName("Single view"); view->setSceneData(scene.get()); view->addEventHandler(new osgViewer::StatsHandler); view->setUpViewAcrossAllScreens(); view->setCameraManipulator(new osgGA::TrackballManipulator); viewer.addView(view); } } if (arguments.read("-2")) { // view one { osgViewer::View* view = new osgViewer::View; view->setName("View one"); viewer.addView(view); view->setUpViewOnSingleScreen(0); view->setSceneData(scene.get()); view->setCameraManipulator(new osgGA::TrackballManipulator); // add the state manipulator osg::ref_ptr<osgGA::StateSetManipulator> statesetManipulator = new osgGA::StateSetManipulator; statesetManipulator->setStateSet(view->getCamera()->getOrCreateStateSet()); view->addEventHandler(statesetManipulator.get()); } // view two { osgViewer::View* view = new osgViewer::View; view->setName("View two"); viewer.addView(view); view->setUpViewOnSingleScreen(1); view->setSceneData(scene.get()); view->setCameraManipulator(new osgGA::TrackballManipulator); view->addEventHandler(new osgViewer::StatsHandler); // add the handler for doing the picking view->addEventHandler(new PickHandler()); } } if (arguments.read("-3") || viewer.getNumViews() == 0) { osg::GraphicsContext::WindowingSystemInterface* wsi = osg::GraphicsContext::getWindowingSystemInterface(); if (!wsi) { osg::notify(osg::NOTICE) << "Error, no WindowSystemInterface available, cannot create windows." << std::endl; return 1; } unsigned int width, height; wsi->getScreenResolution(osg::GraphicsContext::ScreenIdentifier(0), width, height); osg::ref_ptr<osg::GraphicsContext::Traits> traits = new osg::GraphicsContext::Traits; traits->x = 100; traits->y = 100; traits->width = 1000; traits->height = 800; traits->windowDecoration = true; traits->doubleBuffer = true; traits->sharedContext = 0; osg::ref_ptr<osg::GraphicsContext> gc = osg::GraphicsContext::createGraphicsContext(traits.get()); if (gc.valid()) { osg::notify(osg::INFO) << " GraphicsWindow has been created successfully." << std::endl; // need to ensure that the window is cleared make sure that the complete window is set the correct colour // rather than just the parts of the window that are under the camera's viewports gc->setClearColor(osg::Vec4f(0.2f, 0.2f, 0.6f, 1.0f)); gc->setClearMask(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); } else { osg::notify(osg::NOTICE) << " GraphicsWindow has not been created successfully." 
<< std::endl; } // view one { osgViewer::View* view = new osgViewer::View; view->setName("View one"); viewer.addView(view); view->setSceneData(scene.get()); view->getCamera()->setName("Cam one"); view->getCamera()->setViewport(new osg::Viewport(0, 0, traits->width / 2, traits->height / 2)); view->getCamera()->setGraphicsContext(gc.get()); view->setCameraManipulator(new osgGA::TrackballManipulator); // add the state manipulator osg::ref_ptr<osgGA::StateSetManipulator> statesetManipulator = new osgGA::StateSetManipulator; statesetManipulator->setStateSet(view->getCamera()->getOrCreateStateSet()); view->addEventHandler(statesetManipulator.get()); view->addEventHandler(new osgViewer::HelpHandler); view->addEventHandler(new osgViewer::WindowSizeHandler); view->addEventHandler(new osgViewer::ThreadingHandler); view->addEventHandler(new osgViewer::RecordCameraPathHandler); osgVdpm::SRMeshUserData* userData = new osgVdpm::SRMeshUserData(); userData->setPause(true); view->setUserData(userData); } // view two { osgViewer::View* view = new osgViewer::View; view->setName("View two"); viewer.addView(view); view->setSceneData(scene.get()); view->getCamera()->setName("Cam two"); view->getCamera()->setViewport(new osg::Viewport(traits->width / 2, 0, traits->width / 2, traits->height / 2)); view->getCamera()->setGraphicsContext(gc.get()); view->setCameraManipulator(new osgGA::TrackballManipulator); // add the state manipulator osg::ref_ptr<osgGA::StateSetManipulator> statesetManipulator = new osgGA::StateSetManipulator; statesetManipulator->setStateSet(view->getCamera()->getOrCreateStateSet()); view->addEventHandler(statesetManipulator.get()); // add the handler for doing the picking view->addEventHandler(new PickHandler()); osgVdpm::SRMeshUserData* userData = new osgVdpm::SRMeshUserData(); userData->setPause(true); view->setUserData(userData); } // view three { osgViewer::View* view = new osgViewer::View; view->setName("View three"); viewer.addView(view); view->setSceneData(scene.get()); view->getCamera()->setName("Cam three"); view->getCamera()->setProjectionMatrixAsPerspective(30.0, double(traits->width) / double(traits->height / 2), 1.0, 1000.0); view->getCamera()->setViewport(new osg::Viewport(0, traits->height / 2, traits->width, traits->height / 2)); view->getCamera()->setGraphicsContext(gc.get()); view->setCameraManipulator(new osgGA::TrackballManipulator); // add the state manipulator osg::ref_ptr<osgGA::StateSetManipulator> statesetManipulator = new osgGA::StateSetManipulator; statesetManipulator->setStateSet(view->getCamera()->getOrCreateStateSet()); view->addEventHandler(statesetManipulator.get()); osgViewer::StatsHandler* statsHandler = new osgViewer::StatsHandler; osgVdpm::SRMeshUserStats::init(statsHandler); view->addEventHandler(statsHandler); osgVdpm::SRMeshUserData* userData = new osgVdpm::SRMeshUserData(); userData->setStats(viewer.getViewerStats()); float tau; if (arguments.read("--tau", tau)) userData->setTau(tau); unsigned int targetAFaceCount; if (arguments.read("--afaces", targetAFaceCount)) userData->setTargetAFaceCount(targetAFaceCount); unsigned int amortizeStep; if (arguments.read("--amortize", amortizeStep)) userData->setAmortizeStep(amortizeStep); unsigned int gtime; if (arguments.read("--gtime", gtime)) userData->setGTime(gtime); view->setUserData(userData); } } while (arguments.read("-s")) { viewer.setThreadingModel(osgViewer::CompositeViewer::SingleThreaded); } while (arguments.read("-g")) { viewer.setThreadingModel(osgViewer::CompositeViewer::CullDrawThreadPerContext); } 
while (arguments.read("-c")) { viewer.setThreadingModel(osgViewer::CompositeViewer::CullThreadPerCameraDrawThreadPerContext); } // run the viewer's main frame loop return viewer.run(); }
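// Summary of the modes above: "-1" creates a single view spanning all screens, "-2" creates one view per physical
// screen (screens 0 and 1, the second with a PickHandler), and "-3" (also the fallback when no view was requested)
// packs three views sharing one 1000x800 graphics context; the trailing "-s", "-g" and "-c" options select the
// CompositeViewer threading model (SingleThreaded, CullDrawThreadPerContext, CullThreadPerCameraDrawThreadPerContext)
// before run().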
void shot_detector::visualizeICP() { std::cerr << "verifying" << std::endl; if (rototranslations.size () <= 0) { cerr << "*** No instances found! ***" << endl; return; } else { cerr << "Recognized Instances: " << rototranslations.size () << endl << endl; } /** * Generates clouds for each instances found */ std::vector<pcl::PointCloud<PointType>::ConstPtr> instances; for (size_t i = 0; i < rototranslations.size (); ++i) { pcl::PointCloud<PointType>::Ptr rotated_model (new pcl::PointCloud<PointType> ()); pcl::transformPointCloud (*model, *rotated_model, rototranslations[i]); instances.push_back (rotated_model); } std::vector<Eigen::Matrix4f, Eigen::aligned_allocator<Eigen::Matrix4f> > final_transforms; /** * ICP */ std::vector<pcl::PointCloud<PointType>::ConstPtr> registered_instances; if (true) { cerr << "--- ICP ---------" << endl; for (size_t i = 0; i < rototranslations.size (); ++i) { pcl::IterativeClosestPoint<PointType, PointType> icp; icp.setMaximumIterations (icp_max_iter_); icp.setMaxCorrespondenceDistance (icp_corr_distance_); icp.setInputTarget (scene); icp.setInputSource (instances[i]); pcl::PointCloud<PointType>::Ptr registered (new pcl::PointCloud<PointType>); icp.align (*registered); icp.setMaxCorrespondenceDistance (.01); icp.align (*registered); icp.setMaxCorrespondenceDistance (.005); icp.align (*registered); registered_instances.push_back (registered); std::cerr << rototranslations[i] << std::endl; final_transforms.push_back(icp.getFinalTransformation()); std::cerr << "answer" << icp.getFinalTransformation()* rototranslations[i] << std::endl; cerr << "Instance " << i << std::endl; if (icp.hasConverged ()) { cerr << "Aligned!" << endl; } else { cerr << "Not Aligned!" << endl; } } cerr << "-----------------" << endl << endl; } /** * Hypothesis Verification */ cerr << "--- Hypotheses Verification ---" << endl; std::vector<bool> hypotheses_mask; // Mask Vector to identify positive hypotheses pcl::GlobalHypothesesVerification<PointType, PointType> GoHv; GoHv.setSceneCloud (scene); // Scene Cloud GoHv.addModels (registered_instances, true); //Models to verify GoHv.setInlierThreshold (hv_inlier_th_); GoHv.setOcclusionThreshold (hv_occlusion_th_); GoHv.setRegularizer (hv_regularizer_); GoHv.setRadiusClutter (hv_rad_clutter_); GoHv.setClutterRegularizer (hv_clutter_reg_); GoHv.setDetectClutter (hv_detect_clutter_); GoHv.setRadiusNormals (hv_rad_normals_); GoHv.verify (); GoHv.getMask (hypotheses_mask); // i-element TRUE if hvModels[i] verifies hypotheses for (int i = 0; i < hypotheses_mask.size (); i++) { if (hypotheses_mask[i]) { cerr << "Instance " << i << " is GOOD! <---" << endl; } else { cerr << "Instance " << i << " is bad!" 
<< endl; } } cerr << "-------------------------------" << endl; /** * Visualization */ pcl::visualization::PCLVisualizer viewer ("Hypotheses Verification"); viewer.addPointCloud (scene, "scene_cloud"); std::cerr << scene->points[1].x << scene->points[1].y << scene->points[1].z << std::endl; viewer.addCoordinateSystem (1.0); viewer.setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 5, "scene_cloud"); /* pcl::PointCloud<PointType>::Ptr off_scene_model (new pcl::PointCloud<PointType> ()); pcl::PointCloud<PointType>::Ptr off_scene_model_keypoints (new pcl::PointCloud<PointType> ()); pcl::PointCloud<PointType>::Ptr off_model_good_kp (new pcl::PointCloud<PointType> ()); pcl::transformPointCloud (*model, *off_scene_model, Eigen::Vector3f (-1, 0, 0), Eigen::Quaternionf (1, 0, 0, 0)); pcl::transformPointCloud (*model_keypoints, *off_scene_model_keypoints, Eigen::Vector3f (-1, 0, 0), Eigen::Quaternionf (1, 0, 0, 0)); pcl::transformPointCloud (*model_good_kp, *off_model_good_kp, Eigen::Vector3f (-1, 0, 0), Eigen::Quaternionf (1, 0, 0, 0));*/ /* //if (show_keypoints_) { CloudStyle modelStyle = style_white; pcl::visualization::PointCloudColorHandlerCustom<PointType> off_scene_model_color_handler (off_scene_model, modelStyle.r, modelStyle.g, modelStyle.b); viewer.addPointCloud (off_scene_model, off_scene_model_color_handler, "off_scene_model"); viewer.setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, modelStyle.size, "off_scene_model"); } // if (show_keypoints_) { CloudStyle goodKeypointStyle = style_violet; pcl::visualization::PointCloudColorHandlerCustom<PointType> model_good_keypoints_color_handler (off_model_good_kp, goodKeypointStyle.r, goodKeypointStyle.g, goodKeypointStyle.b); viewer.addPointCloud (off_model_good_kp, model_good_keypoints_color_handler, "model_good_keypoints"); viewer.setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, goodKeypointStyle.size, "model_good_keypoints"); pcl::visualization::PointCloudColorHandlerCustom<PointType> scene_good_keypoints_color_handler (scene_good_kp, goodKeypointStyle.r, goodKeypointStyle.g, goodKeypointStyle.b); viewer.addPointCloud (scene_good_kp, scene_good_keypoints_color_handler, "scene_good_keypoints"); viewer.setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, goodKeypointStyle.size, "scene_good_keypoints"); } */ for (size_t i = 0; i < instances.size (); ++i) { std::stringstream ss_instance; ss_instance << "instance_" << i; /* CloudStyle clusterStyle = style_red; pcl::visualization::PointCloudColorHandlerCustom<PointType> instance_color_handler (instances[i], clusterStyle.r, clusterStyle.g, clusterStyle.b); viewer.addPointCloud (instances[i], instance_color_handler, ss_instance.str ()); viewer.setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, clusterStyle.size, ss_instance.str ()); */ CloudStyle registeredStyles = hypotheses_mask[i] ? 
style_green : style_cyan; if (hypotheses_mask[i]) { std::cerr << final_transforms[i] << std::endl; ss_instance << "_registered"; pcl::visualization::PointCloudColorHandlerCustom<PointType> registered_instance_color_handler (registered_instances[i], registeredStyles.r, registeredStyles.g, registeredStyles.b); viewer.addPointCloud (registered_instances[i], registered_instance_color_handler, ss_instance.str ()); viewer.setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, registeredStyles.size, ss_instance.str ()); } } while (!viewer.wasStopped ()) { viewer.spinOnce (); } }
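// Note: the ICP stage above re-runs align() with progressively tighter maximum correspondence distances
// (icp_corr_distance_, then 0.01, then 0.005) and stores the getFinalTransformation() left after the last run;
// the full model-to-scene pose of hypothesis i is therefore icp.getFinalTransformation() * rototranslations[i],
// which is what gets printed as "answer".
// A minimal sketch (not part of the original class) of applying that composed pose, assuming the same PointType
// and model cloud used above:
static pcl::PointCloud<PointType>::Ptr applyRefinedPose (const pcl::PointCloud<PointType>::ConstPtr &model,
                                                         const Eigen::Matrix4f &icp_refinement, // icp.getFinalTransformation()
                                                         const Eigen::Matrix4f &hypothesis)     // rototranslations[i]
{
    pcl::PointCloud<PointType>::Ptr aligned (new pcl::PointCloud<PointType>);
    pcl::transformPointCloud (*model, *aligned, Eigen::Matrix4f (icp_refinement * hypothesis));
    return aligned;
}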
void shot_detector::visualizeCorrespondences() { std::cerr << "Model instances found: " << rototranslations.size () << std::endl; for (size_t i = 0; i < rototranslations.size (); ++i) { std::cerr << "\n Instance " << i + 1 << ":" << std::endl; std::cerr << " Correspondences belonging to this instance: " << clustered_corrs[i].size () << std::endl; // Print the rotation matrix and translation vector Eigen::Matrix3f rotation = rototranslations[i].block<3, 3>(0, 0); Eigen::Vector3f translation = rototranslations[i].block<3, 1>(0, 3); printf ("\n"); printf (" | %6.3f %6.3f %6.3f | \n", rotation (0, 0), rotation (0, 1), rotation (0, 2)); printf (" R = | %6.3f %6.3f %6.3f | \n", rotation (1, 0), rotation (1, 1), rotation (1, 2)); printf (" | %6.3f %6.3f %6.3f | \n", rotation (2, 0), rotation (2, 1), rotation (2, 2)); printf ("\n"); printf (" t = < %0.3f, %0.3f, %0.3f >\n", translation (0), translation (1), translation (2)); } // // Visualization // pcl::visualization::PCLVisualizer viewer ("Correspondence Grouping"); viewer.addCoordinateSystem (1.0); viewer.addPointCloud (scene, "scene_cloud"); pcl::PointCloud<PointType>::Ptr off_scene_model (new pcl::PointCloud<PointType> ()); pcl::PointCloud<PointType>::Ptr off_scene_model_keypoints (new pcl::PointCloud<PointType> ()); // if (show_correspondences_ || show_keypoints_) // { // We are translating the model so that it doesn't end in the middle of the scene representation pcl::transformPointCloud (*model, *off_scene_model, Eigen::Vector3f (-1, 0, 0), Eigen::Quaternionf (1, 0, 0, 0)); pcl::transformPointCloud (*model_keypoints, *off_scene_model_keypoints, Eigen::Vector3f (-1, 0, 0), Eigen::Quaternionf (1, 0, 0, 0)); pcl::visualization::PointCloudColorHandlerCustom<PointType> off_scene_model_color_handler (off_scene_model, 255, 255, 128); viewer.addPointCloud (off_scene_model, "off_scene_model"); // } //if (show_keypoints_) // { /*pcl::visualization::PointCloudColorHandlerCustom<PointType> scene_keypoints_color_handler (scene_keypoints, 0, 0, 255); viewer.addPointCloud (scene_keypoints, scene_keypoints_color_handler, "scene_keypoints"); viewer.setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 5, "scene_keypoints");*/ /* pcl::visualization::PointCloudColorHandlerCustom<PointType> off_scene_model_keypoints_color_handler (off_scene_model_keypoints, 0, 0, 255); viewer.addPointCloud (off_scene_model_keypoints, "off_scene_model_keypoints"); viewer.setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 5, "off_scene_model_keypoints");*/ // } pcl::PointCloud<PointType>::Ptr scene_corr (new pcl::PointCloud<PointType> ()); for (int idx = 0; idx < model_scene_corrs->size(); ++idx) { PointType temp = scene_keypoints->at(model_scene_corrs->at(idx).index_match); scene_corr->push_back(temp); } pcl::visualization::PointCloudColorHandlerCustom<PointType> scene_corr_color_handler (scene_corr, 255, 0, 0); viewer.addPointCloud (scene_corr, scene_corr_color_handler, "scene_corr"); viewer.setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 5, "scene_corr"); for (size_t i = 0; i < rototranslations.size (); ++i) { pcl::PointCloud<PointType>::Ptr rotated_model (new pcl::PointCloud<PointType> ()); pcl::transformPointCloud (*model, *rotated_model, rototranslations[i]); std::stringstream ss_cloud; ss_cloud << "instance" << i; pcl::visualization::PointCloudColorHandlerCustom<PointType> rotated_model_color_handler (rotated_model, 255, 0, 0); viewer.addPointCloud (rotated_model, 
rotated_model_color_handler, ss_cloud.str ()); //if (show_correspondences_) //{ for (size_t j = 0; j < clustered_corrs[i].size (); ++j) { std::stringstream ss_line; ss_line << "correspondence_line" << i << "_" << j; PointType& model_point = off_scene_model_keypoints->at (clustered_corrs[i][j].index_query); PointType& scene_point = scene_keypoints->at (clustered_corrs[i][j].index_match); // We are drawing a line for each pair of clustered correspondences found between the model and the scene viewer.addLine<PointType, PointType> (model_point, scene_point, 0, 255, 0, ss_line.str ()); } // } } while (!viewer.wasStopped ()) { viewer.spinOnce (); } }
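// Summary: visualizeCorrespondences() prints the rotation matrix and translation vector of every clustered
// hypothesis, shows the scene cloud together with the model shifted by (-1, 0, 0) so it sits beside the scene,
// marks the matched scene keypoints in red, overlays each recognized instance (the model transformed by its
// hypothesis) in red, and draws one green line per clustered model/scene correspondence.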
int main( int argc, char **argv ) { // use an ArgumentParser object to manage the program arguments. osg::ArgumentParser arguments(&argc,argv); // set up the usage document, in case we need to print out how to use this program. arguments.getApplicationUsage()->setApplicationName(arguments.getApplicationName()); arguments.getApplicationUsage()->setDescription(arguments.getApplicationName()+" is the application for presenting 3D interactive slide shows."); arguments.getApplicationUsage()->setCommandLineUsage(arguments.getApplicationName()+" [options] filename ..."); arguments.getApplicationUsage()->addCommandLineOption("-h or --help","Display this information"); arguments.getApplicationUsage()->addCommandLineOption("-a","Turn auto stepping on by default"); arguments.getApplicationUsage()->addCommandLineOption("-d <float>","Time duration in seconds between layers/slides"); arguments.getApplicationUsage()->addCommandLineOption("-s <float> <float> <float>","width, height, distance and of the screen away from the viewer"); arguments.getApplicationUsage()->addCommandLineOption("--viewer","Start Present3D as the viewer version."); arguments.getApplicationUsage()->addCommandLineOption("--authoring","Start Present3D as the authoring version, license required."); arguments.getApplicationUsage()->addCommandLineOption("--master","Start Present3D as the master version, license required."); arguments.getApplicationUsage()->addCommandLineOption("--slave","Start Present3D as the slave version, license required."); arguments.getApplicationUsage()->addCommandLineOption("--publishing","Start Present3D as the publishing version, license required."); arguments.getApplicationUsage()->addCommandLineOption("--timeDelayOnNewSlideWithMovies","Set the time delay on new slide with movies, done to allow movie threads to get in sync with rendering thread."); arguments.getApplicationUsage()->addCommandLineOption("--targetFrameRate","Set the target frame rate, defaults to 80Hz."); arguments.getApplicationUsage()->addCommandLineOption("--version","Report the Present3D version."); arguments.getApplicationUsage()->addCommandLineOption("--print <filename>","Print out slides to a series of image files."); arguments.getApplicationUsage()->addCommandLineOption("--html <filename>","Print out slides to a series of html & image files."); arguments.getApplicationUsage()->addCommandLineOption("--loop","Switch on looping of presentation."); arguments.getApplicationUsage()->addCommandLineOption("--devices","Print the Video input capability via QuickTime and exit."); // add alias from xml to p3d to provide backwards compatibility for old p3d files. osgDB::Registry::instance()->addFileExtensionAlias("xml","p3d"); // if user requests devices video capability. 
if (arguments.read("-devices") || arguments.read("--devices")) { // Force load QuickTime plugin, probe video capability, exit osgDB::readImageFile("devices.live"); return 1; } // read any env vars from presentations before we create viewer to make sure the viewer // utilises these env vars if (p3d::readEnvVars(arguments)) { osg::DisplaySettings::instance()->readEnvironmentalVariables(); } // set up any logins required for http access std::string url, username, password; while(arguments.read("--login",url, username, password)) { if (!osgDB::Registry::instance()->getAuthenticationMap()) { osgDB::Registry::instance()->setAuthenticationMap(new osgDB::AuthenticationMap); osgDB::Registry::instance()->getAuthenticationMap()->addAuthenticationDetails( url, new osgDB::AuthenticationDetails(username, password) ); } } #ifdef USE_SDL SDLIntegration sdlIntegration; osg::notify(osg::INFO)<<"USE_SDL"<<std::endl; #endif bool doSetViewer = true; std::string configurationFile; // check env vars for configuration file const char* str = getenv("PRESENT3D_CONFIG_FILE"); if (!str) str = getenv("OSG_CONFIG_FILE"); if (str) configurationFile = str; // check command line parameters for configuration file. while (arguments.read("-c",configurationFile)) {} osg::Vec4 clearColor(0.0f,0.0f,0.0f,0.0f); while (arguments.read("--clear-color",clearColor[0],clearColor[1],clearColor[2],clearColor[3])) {} std::string filename; if (arguments.read("--spell-check",filename)) { p3d::SpellChecker spellChecker; spellChecker.checkP3dXml(filename); return 1; } if (arguments.read("--strip-text",filename)) { p3d::XmlPatcher patcher; // patcher.stripP3dXml(filename, osg::notify(osg::NOTICE)); osg::ref_ptr<osgDB::XmlNode> newNode = patcher.simplifyP3dXml(filename); if (newNode.valid()) { newNode->write(std::cout); } return 1; } std::string lhs_filename, rhs_filename; if (arguments.read("--merge",lhs_filename, rhs_filename)) { p3d::XmlPatcher patcher; osg::ref_ptr<osgDB::XmlNode> newNode = patcher.mergeP3dXml(lhs_filename, rhs_filename); if (newNode.valid()) { newNode->write(std::cout); } return 1; } // construct the viewer. osgViewer::Viewer viewer(arguments); // set clear colour to black by default. 
viewer.getCamera()->setClearColor(clearColor); if (!configurationFile.empty()) { viewer.readConfiguration(configurationFile); doSetViewer = false; } const char* p3dDevice = getenv("P3D_DEVICE"); if (p3dDevice) { osgDB::StringList devices; osgDB::split(p3dDevice, devices); for(osgDB::StringList::iterator i = devices.begin(); i != devices.end(); ++i) { addDeviceTo(viewer, *i); } } std::string device; while (arguments.read("--device", device)) { addDeviceTo(viewer, device); } if (arguments.read("--http-control")) { std::string server_address = "localhost"; std::string server_port = "8080"; std::string document_root = "htdocs"; while (arguments.read("--http-server-address", server_address)) {} while (arguments.read("--http-server-port", server_port)) {} while (arguments.read("--http-document-root", document_root)) {} osg::ref_ptr<osgDB::Options> device_options = new osgDB::Options("documentRegisteredHandlers"); osg::ref_ptr<osgGA::Device> rest_http_device = osgDB::readFile<osgGA::Device>(server_address+":"+server_port+"/"+document_root+".resthttp", device_options.get()); if (rest_http_device.valid()) { viewer.addDevice(rest_http_device.get()); } } // set up stereo masks viewer.getCamera()->setCullMaskLeft(0x00000001); viewer.getCamera()->setCullMaskRight(0x00000002); bool assignLeftCullMaskForMono = true; if (assignLeftCullMaskForMono) { viewer.getCamera()->setCullMask(viewer.getCamera()->getCullMaskLeft()); } else { viewer.getCamera()->setCullMask(0xffffffff); } // set up the camera manipulators. { osg::ref_ptr<osgGA::KeySwitchMatrixManipulator> keyswitchManipulator = new osgGA::KeySwitchMatrixManipulator; keyswitchManipulator->addMatrixManipulator( '1', "Trackball", new osgGA::TrackballManipulator() ); keyswitchManipulator->addMatrixManipulator( '2', "Flight", new osgGA::FlightManipulator() ); keyswitchManipulator->addMatrixManipulator( '3', "Drive", new osgGA::DriveManipulator() ); keyswitchManipulator->addMatrixManipulator( '4', "Terrain", new osgGA::TerrainManipulator() ); std::string pathfile; char keyForAnimationPath = '5'; while (arguments.read("-p",pathfile)) { osgGA::AnimationPathManipulator* apm = new osgGA::AnimationPathManipulator(pathfile); if (apm && apm->valid()) { unsigned int num = keyswitchManipulator->getNumMatrixManipulators(); keyswitchManipulator->addMatrixManipulator( keyForAnimationPath, "Path", apm ); keyswitchManipulator->selectMatrixManipulator(num); ++keyForAnimationPath; } } viewer.setCameraManipulator( keyswitchManipulator.get() ); } // add the state manipulator osg::ref_ptr<osgGA::StateSetManipulator> ssManipulator = new osgGA::StateSetManipulator(viewer.getCamera()->getOrCreateStateSet()); ssManipulator->setKeyEventToggleTexturing('e'); viewer.addEventHandler( ssManipulator.get() ); // add the stats and window size handlers viewer.addEventHandler( new osgViewer::StatsHandler() ); viewer.addEventHandler( new osgViewer::WindowSizeHandler() ); // need to address. // viewer.getScene()->getUpdateVisitor()->setTraversalMode(osg::NodeVisitor::TRAVERSE_ACTIVE_CHILDREN); const char* p3dCursor = getenv("P3D_CURSOR"); std::string cursorFileName( p3dCursor ? p3dCursor : ""); while (arguments.read("--cursor",cursorFileName)) {} const char* p3dShowCursor = getenv("P3D_SHOW_CURSOR"); std::string showCursor( p3dShowCursor ?
p3dShowCursor : "YES"); while (arguments.read("--show-cursor")) { showCursor="YES"; } while (arguments.read("--hide-cursor")) { showCursor="NO"; } bool hideCursor = (showCursor=="No" || showCursor=="NO" || showCursor=="no"); while (arguments.read("--set-viewer")) { doSetViewer = true; } while (arguments.read("--no-set-viewer")) { doSetViewer = false; } // if we want to hide the cursor override the custom cursor. if (hideCursor) cursorFileName.clear(); // cluster related entries. int socketNumber=8100; while (arguments.read("-n",socketNumber)) {} float camera_fov=-1.0f; while (arguments.read("-f",camera_fov)) {} float camera_offset=45.0f; while (arguments.read("-o",camera_offset)) {} std::string exportName; while (arguments.read("--print",exportName)) {} while (arguments.read("--html",exportName)) {} // read any time delay argument. float timeDelayBetweenSlides = 1.0f; while (arguments.read("-d",timeDelayBetweenSlides)) {} bool autoSteppingActive = false; while (arguments.read("-a")) autoSteppingActive = true; bool loopPresentation = false; while (arguments.read("--loop")) loopPresentation = true; { // set update hte default traversal mode settings for update visitor // default to osg::NodeVisitor::TRAVERSE_ACTIVE_CHILDREN. osg::NodeVisitor::TraversalMode updateTraversalMode = osg::NodeVisitor::TRAVERSE_ACTIVE_CHILDREN; // viewer.getUpdateVisitor()->getTraversalMode(); const char* p3dUpdateStr = getenv("P3D_UPDATE"); if (p3dUpdateStr) { std::string updateStr(p3dUpdateStr); if (updateStr=="active" || updateStr=="Active" || updateStr=="ACTIVE") updateTraversalMode = osg::NodeVisitor::TRAVERSE_ACTIVE_CHILDREN; else if (updateStr=="all" || updateStr=="All" || updateStr=="ALL") updateTraversalMode = osg::NodeVisitor::TRAVERSE_ALL_CHILDREN; } while(arguments.read("--update-active")) updateTraversalMode = osg::NodeVisitor::TRAVERSE_ACTIVE_CHILDREN; while(arguments.read("--update-all")) updateTraversalMode = osg::NodeVisitor::TRAVERSE_ALL_CHILDREN; viewer.getUpdateVisitor()->setTraversalMode(updateTraversalMode); } // register the slide event handler - which moves the presentation from slide to slide, layer to layer. osg::ref_ptr<osgPresentation::SlideEventHandler> seh = new osgPresentation::SlideEventHandler(&viewer); viewer.addEventHandler(seh.get()); seh->setAutoSteppingActive(autoSteppingActive); seh->setTimeDelayBetweenSlides(timeDelayBetweenSlides); seh->setLoopPresentation(loopPresentation); double targetFrameRate = 80.0; while (arguments.read("--targetFrameRate",targetFrameRate)) {} // set the time delay float timeDelayOnNewSlideWithMovies = 0.4f; while (arguments.read("--timeDelayOnNewSlideWithMovies",timeDelayOnNewSlideWithMovies)) {} seh->setTimeDelayOnNewSlideWithMovies(timeDelayOnNewSlideWithMovies); // set up optimizer options unsigned int optimizer_options = osgUtil::Optimizer::DEFAULT_OPTIMIZATIONS; bool relase_and_compile = false; while (arguments.read("--release-and-compile")) { relase_and_compile = true; } seh->setReleaseAndCompileOnEachNewSlide(relase_and_compile); if (relase_and_compile) { // make sure that imagery stays around after being applied to textures. 
viewer.getDatabasePager()->setUnrefImageDataAfterApplyPolicy(true,false); optimizer_options &= ~osgUtil::Optimizer::OPTIMIZE_TEXTURE_SETTINGS; } // // osgDB::Registry::instance()->getOrCreateDatabasePager()->setUnrefImageDataAfterApplyPolicy(true,false); // optimizer_options &= ~osgUtil::Optimizer::OPTIMIZE_TEXTURE_SETTINGS; // osg::Texture::getTextureObjectManager()->setExpiryDelay(0.0f); // osgDB::Registry::instance()->getOrCreateDatabasePager()->setExpiryDelay(1.0f); // register the handler for modifying the point size osg::ref_ptr<PointsEventHandler> peh = new PointsEventHandler; viewer.addEventHandler(peh.get()); // add the screen capture handler std::string screenCaptureFilename = "screen_shot.jpg"; while(arguments.read("--screenshot", screenCaptureFilename)) {} osg::ref_ptr<osgViewer::ScreenCaptureHandler::WriteToFile> writeFile = new osgViewer::ScreenCaptureHandler::WriteToFile( osgDB::getNameLessExtension(screenCaptureFilename), osgDB::getFileExtension(screenCaptureFilename) ); osg::ref_ptr<osgViewer::ScreenCaptureHandler> screenCaptureHandler = new osgViewer::ScreenCaptureHandler(writeFile.get()); screenCaptureHandler->setKeyEventTakeScreenShot('m');//osgGA::GUIEventAdapter::KEY_Print); screenCaptureHandler->setKeyEventToggleContinuousCapture('M'); viewer.addEventHandler(screenCaptureHandler.get()); // osg::DisplaySettings::instance()->setSplitStereoAutoAjustAspectRatio(false); float width = osg::DisplaySettings::instance()->getScreenWidth(); float height = osg::DisplaySettings::instance()->getScreenHeight(); float distance = osg::DisplaySettings::instance()->getScreenDistance(); while (arguments.read("-s", width, height, distance)) { osg::DisplaySettings::instance()->setScreenDistance(distance); osg::DisplaySettings::instance()->setScreenHeight(height); osg::DisplaySettings::instance()->setScreenWidth(width); } std::string outputFileName; while(arguments.read("--output",outputFileName)) {} // get details on keyboard and mouse bindings used by the viewer. viewer.getUsage(*arguments.getApplicationUsage()); // if user request help write it out to cout. unsigned int helpType = 0; if ((helpType = arguments.readHelpType())) { arguments.getApplicationUsage()->write(std::cout, helpType); return 1; } P3DApplicationType P3DApplicationType = VIEWER; str = getenv("PRESENT3D_TYPE"); if (str) { if (strcmp(str,"viewer")==0) P3DApplicationType = VIEWER; else if (strcmp(str,"master")==0) P3DApplicationType = MASTER; else if (strcmp(str,"slave")==0) P3DApplicationType = SLAVE; } while (arguments.read("--viewer")) { P3DApplicationType = VIEWER; } while (arguments.read("--master")) { P3DApplicationType = MASTER; } while (arguments.read("--slave")) { P3DApplicationType = SLAVE; } while (arguments.read("--version")) { std::string appTypeName = "invalid"; switch(P3DApplicationType) { case(VIEWER): appTypeName = "viewer"; break; case(MASTER): appTypeName = "master"; break; case(SLAVE): appTypeName = "slave"; break; } osg::notify(osg::NOTICE)<<std::endl; osg::notify(osg::NOTICE)<<"Present3D "<<appTypeName<<" version : "<<s_version<<std::endl; osg::notify(osg::NOTICE)<<std::endl; return 0; } // any option left unread are converted into errors to write out later. //arguments.reportRemainingOptionsAsUnrecognized(); // report any errors if they have ocured when parsing the program aguments. if (arguments.errors()) { arguments.writeErrorMessages(osg::notify(osg::INFO)); return 1; } // read files name from arguments. 
p3d::FileNameList xmlFiles, normalFiles; if (!p3d::getFileNames(arguments, xmlFiles, normalFiles)) { osg::notify(osg::NOTICE)<<std::endl; osg::notify(osg::NOTICE)<<"No file specified, please specify and file to load."<<std::endl; osg::notify(osg::NOTICE)<<std::endl; return 1; } bool viewerInitialized = false; if (!xmlFiles.empty()) { osg::ref_ptr<osg::Node> holdingModel = p3d::readHoldingSlide(xmlFiles.front()); if (holdingModel.valid()) { viewer.setSceneData(holdingModel.get()); seh->selectSlide(0); if (!viewerInitialized) { // pass the global stateset to the point event handler so that it can // alter the point size of all points in the scene. peh->setStateSet(viewer.getCamera()->getOrCreateStateSet()); // create the windows and run the threads. viewer.realize(); if (doSetViewer) setViewer(viewer, width, height, distance); viewerInitialized = true; } seh->home(); // render a frame viewer.frame(); } } osg::Timer timer; osg::Timer_t start_tick = timer.tick(); osg::ref_ptr<osgDB::ReaderWriter::Options> cacheAllOption = new osgDB::ReaderWriter::Options; cacheAllOption->setObjectCacheHint(osgDB::ReaderWriter::Options::CACHE_ALL); osgDB::Registry::instance()->setOptions(cacheAllOption.get()); // read the scene from the list of file specified commandline args. osg::ref_ptr<osg::Node> loadedModel = p3d::readShowFiles(arguments,cacheAllOption.get()); // osgDB::readNodeFiles(arguments, cacheAllOption.get()); osgDB::Registry::instance()->setOptions( 0 ); // if no model has been successfully loaded report failure. if (!loadedModel) { osg::notify(osg::INFO) << arguments.getApplicationName() <<": No data loaded" << std::endl; return 1; } osg::Timer_t end_tick = timer.tick(); osg::notify(osg::INFO) << "Time to load = "<<timer.delta_s(start_tick,end_tick)<<std::endl; if (loadedModel->getNumDescriptions()>0) { for(unsigned int i=0; i<loadedModel->getNumDescriptions(); ++i) { const std::string& desc = loadedModel->getDescription(i); if (desc=="loop") { osg::notify(osg::NOTICE)<<"Enabling looping"<<std::endl; seh->setLoopPresentation(true); } else if (desc=="auto") { osg::notify(osg::NOTICE)<<"Enabling auto run"<<std::endl; seh->setAutoSteppingActive(true); } } } processLoadedModel(loadedModel, optimizer_options, cursorFileName); // set the scene to render viewer.setSceneData(loadedModel.get()); if (!viewerInitialized) { // pass the global stateset to the point event handler so that it can // alter the point size of all points in the scene. peh->setStateSet(viewer.getCamera()->getOrCreateStateSet()); // create the windows and run the threads. viewer.realize(); if (doSetViewer) setViewer(viewer, width, height, distance); viewerInitialized = true; } // pass the model to the slide event handler so it knows which to manipulate. seh->set(loadedModel.get()); seh->selectSlide(0); seh->home(); if (!outputFileName.empty()) { osgDB::writeNodeFile(*loadedModel,outputFileName); return 0; } if (!cursorFileName.empty() || hideCursor) { // have to add a frame in here to avoid problems with X11 threading issue on switching off the cursor // not yet sure why it makes a difference, but it at least fixes the crash that would otherwise occur // under X11. 
viewer.frame(); // switch off the cursor osgViewer::Viewer::Windows windows; viewer.getWindows(windows); for(osgViewer::Viewer::Windows::iterator itr = windows.begin(); itr != windows.end(); ++itr) { (*itr)->useCursor(false); } } osg::Timer_t startOfFrameTick = osg::Timer::instance()->tick(); double targetFrameTime = 1.0/targetFrameRate; if (exportName.empty()) { // objects for managing the broadcasting and recieving of camera packets. CameraPacket cp; Broadcaster bc; Receiver rc; bc.setPort(static_cast<short int>(socketNumber)); rc.setPort(static_cast<short int>(socketNumber)); bool masterKilled = false; DataConverter scratchPad(1024); while( !viewer.done() && !masterKilled) { // wait for all cull and draw threads to complete. viewer.advance(); osg::Timer_t currentTick = osg::Timer::instance()->tick(); double deltaTime = osg::Timer::instance()->delta_s(startOfFrameTick, currentTick); if (deltaTime<targetFrameTime) { OpenThreads::Thread::microSleep(static_cast<unsigned int>((targetFrameTime-deltaTime)*1000000.0)); } startOfFrameTick = osg::Timer::instance()->tick(); #if 0 if (kmcb) { double time = kmcb->getTime(); viewer.getFrameStamp()->setReferenceTime(time); } #endif #ifdef USE_SDL sdlIntegration.update(viewer); #endif if (P3DApplicationType==MASTER) { // take camera zero as the guide. osg::Matrix modelview(viewer.getCamera()->getViewMatrix()); cp.setPacket(modelview,viewer.getFrameStamp()); // cp.readEventQueue(viewer); scratchPad.reset(); scratchPad.write(cp); scratchPad.reset(); scratchPad.read(cp); bc.setBuffer(scratchPad.startPtr(), scratchPad.numBytes()); std::cout << "bc.sync()"<<scratchPad.numBytes()<<std::endl; bc.sync(); } else if (P3DApplicationType==SLAVE) { rc.setBuffer(scratchPad.startPtr(), scratchPad.numBytes()); rc.sync(); scratchPad.reset(); scratchPad.read(cp); // cp.writeEventQueue(viewer); if (cp.getMasterKilled()) { std::cout << "Received master killed."<<std::endl; // break out of while (!done) loop since we've now want to shut down. masterKilled = true; } } // update the scene by traversing it with the the update visitor which will // call all node update callbacks and animations. viewer.eventTraversal(); if (seh->getRequestReload()) { OSG_INFO<<"Reload requested"<<std::endl; seh->setRequestReload(false); int previous_ActiveSlide = seh->getActiveSlide(); int previous_ActiveLayer = seh->getActiveLayer(); // reset time so any event key generate loadedModel = p3d::readShowFiles(arguments,cacheAllOption.get()); processLoadedModel(loadedModel, optimizer_options, cursorFileName); if (!loadedModel) { return 0; } viewer.setSceneData(loadedModel.get()); seh->set(loadedModel.get()); seh->selectSlide(previous_ActiveSlide, previous_ActiveLayer); continue; } // update the scene by traversing it with the the update visitor which will // call all node update callbacks and animations. viewer.updateTraversal(); if (P3DApplicationType==SLAVE) { osg::Matrix modelview; cp.getModelView(modelview,camera_offset); viewer.getCamera()->setViewMatrix(modelview); } // fire off the cull and draw traversals of the scene. if(!masterKilled) viewer.renderingTraversals(); } } else { ExportHTML::write(seh.get(), viewer, exportName); } return 0; }
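// Summary of the cluster support above: when no --print/--html export is requested, the main loop caps the frame
// rate at targetFrameRate by sleeping off any spare time, then, in MASTER mode, serialises the camera view matrix
// and frame stamp into a CameraPacket and broadcasts it on socketNumber, while in SLAVE mode it receives that
// packet, applies the transmitted model-view matrix (offset by camera_offset) to its own camera, and leaves the
// loop when the master reports it has been killed. With an export name set, ExportHTML::write() dumps the slides
// instead of running the interactive loop.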
int main( int argc, char **argv ) { // use an ArgumentParser object to manage the program arguments. osg::ArgumentParser arguments(&argc,argv); // set up the usage document, in case we need to print out how to use this program. arguments.getApplicationUsage()->setApplicationName(arguments.getApplicationName()); arguments.getApplicationUsage()->setDescription(arguments.getApplicationName()+" is the a modified version of standard OpenSceneGraph example which loads and visualises 3d models and sounds."); arguments.getApplicationUsage()->setCommandLineUsage(arguments.getApplicationName()+" [options] filename ..."); arguments.getApplicationUsage()->addCommandLineOption("-h or --help","Display command line paramters"); arguments.getApplicationUsage()->addCommandLineOption("--help-env","Display environmental variables available"); arguments.getApplicationUsage()->addCommandLineOption("--help-keys","Display keyboard & mouse bindings available"); arguments.getApplicationUsage()->addCommandLineOption("--help-all","Display all command line, env vars and keyboard & mouse bindigs."); arguments.getApplicationUsage()->addCommandLineOption("--maxsounds <n>","Sets the maximum number of sounds allowed."); arguments.getApplicationUsage()->addCommandLineOption("--gain <n>","Sets the global gain (volume)"); arguments.getApplicationUsage()->addCommandLineOption("--dopplerfactor <n>","Sets the doppler factor"); arguments.getApplicationUsage()->addCommandLineOption("--distancemodel <mode>", "NONE | INVERSE_DISTANCE |INVERSE_DISTANCE_CLAMPED"); // construct the viewer. osgViewer::Viewer viewer(arguments); // get details on keyboard and mouse bindings used by the viewer. viewer.getUsage(*arguments.getApplicationUsage()); // if user request help write it out to cout. bool helpAll = arguments.read("--help-all"); unsigned int helpType = ((helpAll || arguments.read("-h") || arguments.read("--help"))? osg::ApplicationUsage::COMMAND_LINE_OPTION : 0 ) | ((helpAll || arguments.read("--help-env"))? osg::ApplicationUsage::ENVIRONMENTAL_VARIABLE : 0 ) | ((helpAll || arguments.read("--help-keys"))? osg::ApplicationUsage::KEYBOARD_MOUSE_BINDING : 0 ); if (helpType) { arguments.getApplicationUsage()->write(std::cout, helpType); return 1; } // Parsing the sound-related options unsigned int maxSounds = OSGAL_DEFAULT_MAX_SOUNDSOURCES_ALLOWED; arguments.read("--maxsounds", maxSounds); float gain = OSGAL_DEFAULT_GAIN; arguments.read("--gain", gain); float dopplerFactor = OSGAL_DEFAULT_DOPPLER_FACTOR; arguments.read("--dopplerfactor", dopplerFactor); openalpp::DistanceModel distanceModel = OSGAL_DEFAULT_DISTANCE_MODEL; std::string s_model; #undef None // Someone in Linux is defining it 8| if (arguments.read("--distancemodel", s_model)) { if (s_model == "NONE") distanceModel = openalpp::None; else if (s_model == "INVERSE_DISTANCE") distanceModel = openalpp::InverseDistance; else if (s_model == "INVERSE_DISTANCE_CLAMPED") distanceModel = openalpp::InverseDistanceClamped; else osg::notify(osg::WARN) << "WARNING: I do not understand that -distancemodel parameter" << std::endl; } // report any errors if they have occured when parsing the program aguments. 
if (arguments.errors()) { arguments.writeErrorMessages(std::cout); return 1; } if (arguments.argc()<=1) { arguments.getApplicationUsage()->write(std::cout,osg::ApplicationUsage::COMMAND_LINE_OPTION); return 1; } try { // here we init the SoundManager osgAL::SoundManager::instance()->init(maxSounds); osgAL::SoundManager::instance()->getEnvironment()->setGain(gain); osgAL::SoundManager::instance()->getEnvironment()->setDopplerFactor(dopplerFactor); osgAL::SoundManager::instance()->getEnvironment()->setDistanceModel(distanceModel); osg::Timer_t start_tick = osg::Timer::instance()->tick(); // read the scene from the list of file specified commandline args. osg::ref_ptr<osg::Node> loadedModel = osgDB::readNodeFiles(arguments); // if no model has been successfully loaded report failure. if (!loadedModel) { std::cout << arguments.getApplicationName() <<": No data loaded" << std::endl; return 1; } // any option left unread are converted into errors to write out later. arguments.reportRemainingOptionsAsUnrecognized(); // report any errors if they have occured when parsing the program aguments. if (arguments.errors()) { arguments.writeErrorMessages(std::cout); } osg::Timer_t end_tick = osg::Timer::instance()->tick(); std::cout << "Time to load = "<<osg::Timer::instance()->delta_s(start_tick,end_tick)<<std::endl; // optimize the scene graph, remove rendundent nodes and state etc. osgUtil::Optimizer optimizer; optimizer.optimize(loadedModel.get()); // *********** Sound Root Node // Used for update the listener position and direction // First, here we find out how many sound root nodes there are FindSoundRootNodesVisitor fsrnv; fsrnv.setMode(FindSoundRootNodesVisitor::SEARCH); loadedModel->accept(fsrnv); if (fsrnv.getHits() == 0) { // there is no osgAL::SoundRoot to update the listener, so we insert one osg::ref_ptr<osgAL::SoundRoot> soundRoot = new osgAL::SoundRoot(); osg::ref_ptr<osg::Group> rootGroup = static_cast<osg::Group*>(loadedModel.get()); rootGroup->addChild(soundRoot.get()); } else if (fsrnv.getHits() == 1) { // there is one, so no problem } else if (fsrnv.getHits() > 1) { // there are more than one SoundRoots !!! This is no critical, but // no useful. So, we warn about it... osg::notify(osg::WARN) << "WARNING!: the loaded model(s) has more than one sound root." << std::endl; osg::notify(osg::WARN) << " If you are using more than one model, maybe each one has its" << std::endl; osg::notify(osg::WARN) << " own osgAL::SoundRoot. I left in the loaded scene graph just one." << std::endl; // destroy all the SoundRoot nodes... fsrnv.setMode(FindSoundRootNodesVisitor::SEARCH_AND_DESTROY); loadedModel->accept(fsrnv); // and add just one SoundRoot node osg::ref_ptr<osgAL::SoundRoot> soundRoot = new osgAL::SoundRoot(); osg::ref_ptr<osg::Group> rootGroup = static_cast<osg::Group*>(loadedModel.get()); rootGroup->addChild(soundRoot.get()); } // pass the loaded scene graph to the viewer. viewer.setSceneData(loadedModel.get()); // create the windows and run the threads. viewer.realize(); viewer.run(); } catch(std::runtime_error& e) { std::cerr << "Caught: " << e.what() << std::endl; } // Very important to call before end of main! if (osg::Referenced::getDeleteHandler()) osg::Referenced::getDeleteHandler()->setNumFramesToRetainObjects(0); osgAL::SoundManager::instance()->shutdown(); return 0; }
int main(int argc, char *argv[]) { std::signal(SIGINT, sigHandler); std::string source; std::string file_name; std::string save_path; po::options_description visible_options("OPTIONS"); try { po::options_description options("INFO"); options.add_options() ("help", "Produce help message.") ("version,v", "Print version information.") ; po::options_description config("CONFIGURATION"); config.add_options() ("filename,n", po::value<std::string>(&file_name), "The base snapshot file name.\n" "The timestamp of the snapshot will be prepended to this name." "If not provided, the SOURCE name will be used.\n") ("folder,f", po::value<std::string>(&save_path), "The folder in which snapshots will be saved.") ; po::options_description hidden("HIDDEN OPTIONS"); hidden.add_options() ("source", po::value<std::string>(&source), "The name of the frame SOURCE that supplies frames to view.\n") ; po::positional_options_description positional_options; positional_options.add("source", -1); po::options_description all_options("ALL OPTIONS"); all_options.add(options).add(hidden); visible_options.add(options).add(config); po::variables_map variable_map; po::store(po::command_line_parser(argc, argv) .options(all_options) .positional(positional_options) .run(), variable_map); po::notify(variable_map); // Use the parsed options if (variable_map.count("help")) { printUsage(visible_options); return 0; } if (variable_map.count("version")) { std::cout << "Oat Frame Viewer version " << Oat_VERSION_MAJOR << "." << Oat_VERSION_MINOR << "\n"; std::cout << "Written by Jonathan P. Newman in the MWL@MIT.\n"; std::cout << "Licensed under the GPL3.0.\n"; return 0; } if (!variable_map.count("source")) { printUsage(visible_options); std::cerr << oat::Error("A SOURCE must be specified. Exiting.\n"); return -1; } if (!variable_map.count("folder")) { save_path = "."; } if (!variable_map.count("filename")) { file_name = ""; } } catch (std::exception& e) { std::cerr << oat::Error(e.what()) << "\n"; return -1; } catch (...) { std::cerr << oat::Error("Exception of unknown type.\n"); return -1; } // Make the viewer Viewer viewer(source, save_path, file_name); // Tell user std::cout << oat::whoMessage(viewer.get_name(), "Listening to source " + oat::sourceText(source) + ".\n") << oat::whoMessage(viewer.get_name(), "Press 's' on the viewer window to take a snapshot.\n") << oat::whoMessage(viewer.get_name(), "Press CTRL+C to exit.\n"); try { // Infinite loop until ctrl-c or end of stream signal run(&viewer); // Tell user std::cout << oat::whoMessage(viewer.get_name(), "Exiting.\n"); // Exit return 0; } catch (const std::runtime_error& ex) { std::cerr << oat::whoError(viewer.get_name(), ex.what()) << "\n"; } catch (const cv::Exception& ex) { std::cerr << oat::whoError(viewer.get_name(), ex.msg) << "\n"; } catch (...) { std::cerr << oat::whoError(viewer.get_name(), "Unknown exception.\n"); } // Exit failure return -1; }
void Light(float colorOut[4], const float colorIn[4], Vec3 pos, Vec3 normal, float dots[4]) { // could cache a lot of stuff, such as ambient, across vertices... bool doShadeMapping = (gstate.texmapmode & 0x3) == 2; if (!doShadeMapping && !(gstate.lightEnable[0]&1) && !(gstate.lightEnable[1]&1) && !(gstate.lightEnable[2]&1) && !(gstate.lightEnable[3]&1)) { memcpy(colorOut, colorIn, sizeof(float) * 4); return; } Color4 emissive; emissive.GetFromRGB(gstate.materialemissive); Color4 globalAmbient; globalAmbient.GetFromRGB(gstate.ambientcolor); globalAmbient.GetFromA(gstate.ambientalpha); Vec3 norm = normal.Normalized(); Color4 in(colorIn); Color4 ambient; if (gstate.materialupdate & 1) { ambient = in; } else { ambient.GetFromRGB(gstate.materialambient); ambient.a=1.0f; } Color4 diffuse; if (gstate.materialupdate & 2) { diffuse = in; } else { diffuse.GetFromRGB(gstate.materialdiffuse); diffuse.a=1.0f; } Color4 specular; if (gstate.materialupdate & 4) { specular = in; } else { specular.GetFromRGB(gstate.materialspecular); specular.a=1.0f; } float specCoef = getFloat24(gstate.materialspecularcoef); Vec3 viewer(-gstate.viewMatrix[9], -gstate.viewMatrix[10], -gstate.viewMatrix[11]); Color4 lightSum = globalAmbient * ambient + emissive; // Try lights.elf - there's something wrong with the lighting for (int l = 0; l < 4; l++) { // can we skip this light? //if ((gstate.lightEnable[l] & 1) == 0) // && !doShadeMapping) // continue; GELightComputation comp = (GELightComputation)(gstate.ltype[l]&3); GELightType type = (GELightType)((gstate.ltype[l]>>8)&3); Vec3 toLight; if (type == GE_LIGHTTYPE_DIRECTIONAL) toLight = Vec3(gstate.lightpos[l]); // lightdir is for spotlights else toLight = Vec3(gstate.lightpos[l]) - pos; bool doSpecular = (comp != GE_LIGHTCOMP_ONLYDIFFUSE); bool poweredDiffuse = comp == GE_LIGHTCOMP_BOTHWITHPOWDIFFUSE; float lightScale = 1.0f; if (type != GE_LIGHTTYPE_DIRECTIONAL) { float distance = toLight.Normalize(); lightScale = 1.0f / (gstate.lightatt[l][0] + gstate.lightatt[l][1]*distance + gstate.lightatt[l][2]*distance*distance); if (lightScale > 1.0f) lightScale = 1.0f; } float dot = toLight * norm; // Clamp dot to zero. if (dot < 0.0f) dot = 0.0f; if (poweredDiffuse) dot = powf(dot, specCoef); Color4 diff = (gstate.lightColor[1][l] * diffuse) * (dot * lightScale); Color4 spec(0,0,0,0); // Real PSP specular Vec3 toViewer(0,0,1); // Better specular //Vec3 toViewer = (viewer - pos).Normalized(); if (doSpecular) { Vec3 halfVec = toLight; halfVec += toViewer; halfVec.Normalize(); dot = halfVec * norm; if (dot >= 0) { spec += (gstate.lightColor[2][l] * specular * (powf(dot, specCoef)*lightScale)); } } dots[l] = dot; if (gstate.lightEnable[l] & 1) { lightSum += gstate.lightColor[0][l]*ambient + diff + spec; } } for (int i = 0; i < 3; i++) colorOut[i] = lightSum[i]; }
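// In summary, Light() evaluates a fixed-function style lighting model per vertex:
//   att  = 1 for directional lights, otherwise min(1, 1 / (k0 + k1*d + k2*d^2))
//   diff = lightDiffuse[l]  * materialDiffuse  * max(N.L, 0)^p * att   (p = specCoef only for BOTHWITHPOWDIFFUSE, else 1)
//   spec = lightSpecular[l] * materialSpecular * (N.H)^specCoef * att when N.H >= 0, with H = normalize(L + (0,0,1))
//   colorOut.rgb = globalAmbient*materialAmbient + materialEmissive
//                + sum over enabled lights of (lightAmbient[l]*materialAmbient + diff + spec)
// where the material ambient/diffuse/specular terms come from the vertex colour when the corresponding
// materialupdate bit is set (note the directional-light vector is used unnormalised, as in the code above), and
// only the RGB channels of the result are written back to colorOut.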
int main( int argc, char **argv ) { // use an ArgumentParser object to manage the program arguments. osg::ArgumentParser arguments(&argc,argv); // set up the usage document, in case we need to print out how to use this program. arguments.getApplicationUsage()->setApplicationName(arguments.getApplicationName()); arguments.getApplicationUsage()->setCommandLineUsage(arguments.getApplicationName()+" [options] filename ..."); arguments.getApplicationUsage()->addCommandLineOption("--image <filename>","Load an image and render it on a quad"); arguments.getApplicationUsage()->addCommandLineOption("--dem <filename>","Load an image/DEM and render it on a HeightField"); arguments.getApplicationUsage()->addCommandLineOption("-h or --help","Display command line parameters"); arguments.getApplicationUsage()->addCommandLineOption("--help-env","Display environmental variables available"); arguments.getApplicationUsage()->addCommandLineOption("--help-keys","Display keyboard & mouse bindings available"); arguments.getApplicationUsage()->addCommandLineOption("--help-all","Display all command line, env vars and keyboard & mouse bindings."); arguments.getApplicationUsage()->addCommandLineOption("--dragger <draggername>","Use the specified dragger for manipulation [TabPlaneDragger, TabPlaneTrackballDragger, TrackballDragger, Translate1DDragger, Translate2DDragger, TranslateAxisDragger, TabBoxDragger, TranslatePlaneDragger, Scale1DDragger, Scale2DDragger, RotateCylinderDragger, RotateSphereDragger]"); arguments.getApplicationUsage()->addCommandLineOption("--fixedDraggerSize","Fix the size of the dragger geometry in the screen space"); bool fixedSizeInScreen = false; while (arguments.read("--fixedDraggerSize")) { fixedSizeInScreen = true; } // construct the viewer. osgViewer::Viewer viewer(arguments); // add the window size toggle handler viewer.addEventHandler(new osgViewer::WindowSizeHandler); // get details on keyboard and mouse bindings used by the viewer. viewer.getUsage(*arguments.getApplicationUsage()); if (arguments.read("--test-NodeMask")) { const osg::ref_ptr<osg::Group> group = new osg::Group(); group->setNodeMask(0); const osg::ref_ptr<osgManipulator::AntiSquish> antiSquish = new osgManipulator::AntiSquish(); group->addChild(antiSquish.get()); const osg::ref_ptr<osg::Node> node = new osg::Node(); node->setInitialBound(osg::BoundingSphere(osg::Vec3(0.0, 0.0, 0.0), 1.0)); antiSquish->addChild(node.get()); group->getBound(); return 1; } // if user request help write it out to cout. bool helpAll = arguments.read("--help-all"); unsigned int helpType = ((helpAll || arguments.read("-h") || arguments.read("--help"))? osg::ApplicationUsage::COMMAND_LINE_OPTION : 0 ) | ((helpAll || arguments.read("--help-env"))? osg::ApplicationUsage::ENVIRONMENTAL_VARIABLE : 0 ) | ((helpAll || arguments.read("--help-keys"))? osg::ApplicationUsage::KEYBOARD_MOUSE_BINDING : 0 ); if (helpType) { arguments.getApplicationUsage()->write(std::cout, helpType); return 1; } // report any errors if they have occurred when parsing the program arguments. if (arguments.errors()) { arguments.writeErrorMessages(std::cout); return 1; } std::string dragger_name = "TabBoxDragger"; arguments.read("--dragger", dragger_name); osg::Timer_t start_tick = osg::Timer::instance()->tick(); // read the scene from the list of file specified command line args. osg::ref_ptr<osg::Node> loadedModel = osgDB::readRefNodeFiles(arguments); // if no model has been successfully loaded report failure. 
bool dragger2Scene(true); if (!loadedModel) { //std::cout << arguments.getApplicationName() <<": No data loaded" << std::endl; //return 1; loadedModel = createDemoScene(fixedSizeInScreen); dragger2Scene=false; } // any option left unread is converted into an error to write out later. arguments.reportRemainingOptionsAsUnrecognized(); // report any errors if they have occurred when parsing the program arguments. if (arguments.errors()) { arguments.writeErrorMessages(std::cout); } osg::Timer_t end_tick = osg::Timer::instance()->tick(); std::cout << "Time to load = "<<osg::Timer::instance()->delta_s(start_tick,end_tick)<<std::endl; // optimize the scene graph, remove redundant nodes and state etc. osgUtil::Optimizer optimizer; optimizer.optimize(loadedModel.get()); // pass the loaded scene graph to the viewer. if ( dragger2Scene ) { viewer.setSceneData(addDraggerToScene(loadedModel.get(), dragger_name, fixedSizeInScreen)); } else { viewer.setSceneData(loadedModel.get()); } return viewer.run(); }
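// ---------------------------------------------------------------------------
// Rough sketch of what a helper like addDraggerToScene() typically does; the
// real helper (and createDemoScene()) are defined elsewhere in this example
// and are not shown here, so this is the standard osgManipulator pattern
// rather than the example's own code.
// ---------------------------------------------------------------------------
#include <osg/Group>
#include <osg/MatrixTransform>
#include <osgManipulator/TabBoxDragger>

osg::Node* attachTabBoxDragger(osg::Node* scene)
{
    // The part of the graph that the dragger will move.
    osg::MatrixTransform* selection = new osg::MatrixTransform;
    selection->addChild(scene);

    osgManipulator::TabBoxDragger* dragger = new osgManipulator::TabBoxDragger;
    dragger->setupDefaultGeometry();

    // Make the dragger slightly larger than the scene and centre it on it.
    float scale = scene->getBound().radius() * 1.6f;
    dragger->setMatrix(osg::Matrix::scale(scale, scale, scale) *
                       osg::Matrix::translate(scene->getBound().center()));

    // Let the dragger process events itself and push the resulting motion
    // into the selection transform.
    dragger->setHandleEvents(true);
    dragger->addTransformUpdating(selection);

    osg::Group* root = new osg::Group;
    root->addChild(selection);
    root->addChild(dragger);
    return root;
}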
int main(int argc, char **argv) { // Prepare the environment int sys_return_val = 0; std::string cmd = "l3dclient_mklocal_dir.sh"; sys_return_val = std::system(cmd.c_str()); std::cout<<std::endl<<"SYSTEM -> "<<sys_return_val<<std::endl; try { std::string name = "NONAME"; std::string ip = "localhost"; std::string port = "8080"; QApplication app(argc, argv); // SoQt::init ... only after QApplication... SoQt::init(argv[0]); // Log IN login_dialog ldia; int dialogCode = ldia.exec(); if( dialogCode == QDialog::Rejected ) { return 0; } else { name = ldia.lineEdit_name->text().toStdString(); ip = ldia.lineEdit_ip->text().toStdString(); port = ldia.lineEdit_port->text().toStdString(); } // END LOG IN boost::asio::io_service io_service; boost::asio::ip::tcp::resolver resolver(io_service); boost::asio::ip::tcp::resolver::query query(ip.c_str(), port.c_str()); boost::asio::ip::tcp::resolver::iterator iterator = resolver.resolve(query); client c(io_service, iterator); std::string executors_name = name.c_str(); command_executor client_exec(executors_name); c.add_observer(&client_exec); global_client = &c; qDebug()<<"==============================\n"; // DOWNLOAD WORLD RESOURCES c.send(std::string("@get_resources")); qDebug()<<"==============================\n"; int avatar_gender = ldia.selected_avatar; avatar my_avatar(executors_name, c, avatar_gender ); user_avatar = & my_avatar; //start BOOST_ASIO boost::thread asio_th(boost::bind(&boost::asio::io_service::run, &io_service)); if(asio_th.joinable()) { cout << "Detaching thread" << endl; asio_th.detach(); } // The scene SoSeparator *root = startUpScene( my_avatar.get3d_model() ); root->ref(); gui viewer(root, app, camera); client_exec.set_vrml_tree_node( root ); viewer.showMaximized(); return app.exec(); } catch(std::exception &e) { std::cout<<std::endl<<e.what(); exit(EXIT_FAILURE); } catch(std::string msg) { std::cout<<std::endl<<msg; exit(EXIT_FAILURE); } return EXIT_SUCCESS; }
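// ---------------------------------------------------------------------------
// Generic sketch (independent of the project-specific client class above) of
// the resolve-then-connect pattern used with Boost.Asio; the host and port
// strings are placeholders.
// ---------------------------------------------------------------------------
#include <iostream>
#include <boost/asio.hpp>

int main()
{
    try {
        boost::asio::io_service io_service;
        boost::asio::ip::tcp::resolver resolver(io_service);
        boost::asio::ip::tcp::resolver::query query("localhost", "8080");
        boost::asio::ip::tcp::resolver::iterator endpoints = resolver.resolve(query);

        boost::asio::ip::tcp::socket socket(io_service);
        boost::asio::connect(socket, endpoints);   // tries each resolved endpoint in turn

        std::cout << "connected to " << socket.remote_endpoint() << std::endl;
    } catch (const std::exception& e) {
        std::cerr << e.what() << std::endl;
        return 1;
    }
    return 0;
}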
int main(int argc, char** argv) { osg::ArgumentParser arguments(&argc, argv); osgViewer::Viewer viewer(arguments); #if 0 typedef std::list< osg::ref_ptr<osg::Script> > Scripts; Scripts scripts; osg::ref_ptr<osg::Group> model = new osg::Group; std::string filename; while(arguments.read("--script",filename)) { osg::ref_ptr<osg::Script> script = osgDB::readScriptFile(filename); if (script.valid()) scripts.push_back(script.get()); } // assgin script engine to scene graphs model->getOrCreateUserDataContainer()->addUserObject(osgDB::readFile<osg::ScriptEngine>("ScriptEngine.lua")); model->getOrCreateUserDataContainer()->addUserObject(osgDB::readFile<osg::ScriptEngine>("ScriptEngine.python")); model->getOrCreateUserDataContainer()->addUserObject(osgDB::readFile<osg::ScriptEngine>("ScriptEngine.js")); // assign scripts to scene graph for(Scripts::iterator itr = scripts.begin(); itr != scripts.end(); ++itr) { model->addUpdateCallback(new osg::ScriptNodeCallback(itr->get())); } std::string str; osg::ref_ptr<osg::ScriptEngine> luaScriptEngine = osgDB::readFile<osg::ScriptEngine>("ScriptEngine.lua"); if (luaScriptEngine.valid()) { while (arguments.read("--lua", str)) { osg::ref_ptr<osg::Script> script = osgDB::readScriptFile(str); if (script.valid()) { luaScriptEngine->run(script.get()); } } } osg::ref_ptr<osg::ScriptEngine> v8ScriptEngine = osgDB::readFile<osg::ScriptEngine>("ScriptEngine.V8"); if (v8ScriptEngine.valid()) { while (arguments.read("--js",str)) { osg::ref_ptr<osg::Script> script = osgDB::readScriptFile(str); if (script.valid()) { v8ScriptEngine->run(script.get()); } } } osg::ref_ptr<osg::ScriptEngine> pythonScriptEngine = osgDB::readFile<osg::ScriptEngine>("ScriptEngine.python"); if (pythonScriptEngine.valid()) { while (arguments.read("--python",str)) { osg::ref_ptr<osg::Script> script = osgDB::readScriptFile(str); if (script.valid()) { pythonScriptEngine->run(script.get()); } } } return 0; #endif #if 1 osg::ref_ptr<osgPresentation::Presentation> presentation = new osgPresentation::Presentation; osg::ref_ptr<osgPresentation::Slide> slide = new osgPresentation::Slide; osg::ref_ptr<osgPresentation::Layer> layer = new osgPresentation::Layer; osg::ref_ptr<osgPresentation::Group> group = new osgPresentation::Group; osg::ref_ptr<osgPresentation::Element> element = new osgPresentation::Element; osg::ref_ptr<osgPresentation::Text> text = new osgPresentation::Text; osg::ref_ptr<osgPresentation::Model> model = new osgPresentation::Model; osg::ref_ptr<osgPresentation::Audio> audio = new osgPresentation::Audio; osg::ref_ptr<osgPresentation::Image> image = new osgPresentation::Image; osg::ref_ptr<osgPresentation::Movie> movie = new osgPresentation::Movie; osg::ref_ptr<osgPresentation::Volume> volume = new osgPresentation::Volume; presentation->addChild(slide.get()); slide->addChild(layer.get()); //layer->addChild(element.get()); //layer->addChild(group.get()); layer->addChild(element.get()); // layer->addChild(model.get()); layer->addChild(text.get()); layer->addChild(audio.get()); layer->addChild(image.get()); layer->addChild(movie.get()); layer->addChild(volume.get()); text->setProperty("string",std::string("This is a first test")); text->setProperty("font",std::string("times.ttf")); text->setProperty("character_size",2.2); text->setProperty("width",std::string("103.2")); model->setProperty("filename", std::string("dumptruck.osgt")); image->setProperty("filename", std::string("Images/lz.rgb")); image->setProperty("scale",0.75); movie->setProperty("filename", 
std::string("/home/robert/Data/Movie/big_buck_bunny_1080p_stereo.ogg")); movie->setProperty("scale",0.75); volume->setProperty("filename", std::string("/home/robert/Data/MaleVisibleHumanHead")); volume->setProperty("scale",0.75); volume->setProperty("technique",std::string("iso-surface")); presentation->setProperty("scale",1.0); #if 0 osgPresentation::PrintSupportedProperties psp(std::cout); presentation->accept(psp); osgPresentation::PrintProperties pp(std::cout); presentation->accept(pp); #endif osgPresentation::LoadAction load; presentation->accept( load ); viewer.setSceneData( presentation.get() ); osgDB::writeNodeFile(*presentation, "pres.osgt"); osgDB::ClassInterface pi; pi.getWhiteList()["osgPresentation::Presentation"]["filename"]=osgDB::BaseSerializer::RW_STRING; pi.getBlackList()["osgPresentation::Presentation"]["Children"]; pi.getBlackList()["osgPresentation::Presentation"]["UserDataContainer"]; pi.getBlackList()["osgPresentation::Presentation"]["UserData"]; pi.getBlackList()["osgPresentation::Presentation"]["CullCallback"]; pi.getBlackList()["osgPresentation::Presentation"]["ComputeBoundingSphereCallback"]; #if 0 osgDB::ObjectWrapperManager* owm = osgDB::Registry::instance()->getObjectWrapperManager(); if (owm) { const osgDB::ObjectWrapperManager::WrapperMap& wrapperMap = owm->getWrapperMap(); for(osgDB::ObjectWrapperManager::WrapperMap::const_iterator itr = wrapperMap.begin(); itr != wrapperMap.end(); ++itr) { osgDB::ObjectWrapper* ow = itr->second.get(); OSG_NOTICE<<std::endl<<"Wrapper : "<<itr->first<<", Domain="<<ow->getDomain()<<", Name="<<ow->getName()<<std::endl; const osgDB::StringList& associates = ow->getAssociates(); for(osgDB::StringList::const_iterator aitr = associates.begin(); aitr != associates.end(); ++aitr) { OSG_NOTICE<<" associate = "<<*aitr<<std::endl; } osgDB::StringList properties; osgDB::ObjectWrapper::TypeList types; ow->writeSchema(properties, types); OSG_NOTICE<<" properties.size() = "<<properties.size()<<", types.size() = "<<types.size()<<std::endl; unsigned int numProperties = std::min(properties.size(), types.size()); for(unsigned int i=0; i<numProperties; ++i) { OSG_NOTICE<<" property = "<<properties[i]<<", type = "<<types[i]<<", typeName = "<<pi.getTypeName(types[i])<<std::endl; } } #if 1 osgDB::ObjectWrapperManager::IntLookupMap& intLookupMap = owm->getLookupMap(); for(osgDB::ObjectWrapperManager::IntLookupMap::iterator itr = intLookupMap.begin(); itr != intLookupMap.end(); ++itr) { OSG_NOTICE<<std::endl<<"IntLookMap["<<itr->first<<"]"<<std::endl; osgDB::IntLookup::StringToValue& stv = itr->second.getStringToValue(); for(osgDB::IntLookup::StringToValue::iterator sitr = stv.begin(); sitr != stv.end(); ++sitr) { OSG_NOTICE<<" "<<sitr->first<<", "<<sitr->second<<std::endl; } } #endif } #endif presentation->setName("[this is a test]"); #if 0 if (pi.setProperty(presentation.get(), "Name", std::string("[this is new improved test]"))) { OSG_NOTICE<<"setProperty(presentation.get(), Name) succeeded."<<std::endl; } else { OSG_NOTICE<<"setProperty(presentation.get(), Name) failed."<<std::endl; } std::string name; if (pi.getProperty(presentation.get(), "Name", name)) { OSG_NOTICE<<"getProperty(presentation.get(), Name) succeeded, Name = "<<name<<std::endl; } else { OSG_NOTICE<<"getProperty(presentation.get(), Name) failed."<<std::endl; } OSG_NOTICE<<std::endl; // presentation->setDataVariance(osg::Object::DYNAMIC); int variance = 1234; if (pi.getProperty(presentation.get(), "DataVariance", variance)) { OSG_NOTICE<<"getProperty(presentation.get(), 
DataVariance) succeeded, variance = "<<variance<<std::endl; } else { OSG_NOTICE<<"getProperty(presentation.get(), DataVariance) failed."<<std::endl; } OSG_NOTICE<<std::endl; if (pi.setProperty(presentation.get(), "DataVariance", 1)) { OSG_NOTICE<<"setProperty(presentation.get(), DataVariance) succeeded."<<std::endl; } else { OSG_NOTICE<<"setProperty(presentation.get(), DataVariance) failed."<<std::endl; } OSG_NOTICE<<std::endl; if (pi.getProperty(presentation.get(), "DataVariance", variance)) { OSG_NOTICE<<"2nd getProperty(presentation.get(), DataVariance) succeeded, variance = "<<variance<<std::endl; } else { OSG_NOTICE<<"2nd getProperty(presentation.get(), DataVariance) failed."<<std::endl; } OSG_NOTICE<<std::endl; presentation->setMatrix(osg::Matrixd::translate(osg::Vec3d(1.0,2.0,3.0))); // if (pi.setProperty(presentation.get(), "Matrix", osg::Matrixd::scale(1.0,2.0,2.0))) if (pi.setProperty(presentation.get(), "Matrix", osg::Matrixd::scale(2.0,2.0,2.0))) { OSG_NOTICE<<"setProperty(..,Matrix) succeeded."<<std::endl; } else { OSG_NOTICE<<"setProperty(..,Matrix) failed."<<std::endl; } osg::Matrixd matrix; if (pi.getProperty(presentation.get(), "Matrix", matrix)) { OSG_NOTICE<<"getProperty(presentation.get(), ...) succeeded, Matrix = "<<matrix<<std::endl; } else { OSG_NOTICE<<"getProperty(presentation.get(), ...) failed."<<std::endl; } #if 1 osg::ref_ptr<osg::Geometry> geometry = new osg::Geometry; osg::ref_ptr<osg::Node> node = new osg::Node; osgDB::ClassInterface::PropertyMap properties; if (pi.getSupportedProperties(presentation.get(), properties, true)) { OSG_NOTICE<<"Have supported properites found."<<std::endl; for(osgDB::ClassInterface::PropertyMap::iterator itr = properties.begin(); itr != properties.end(); ++itr) { OSG_NOTICE<<" Property "<<itr->first<<", "<<pi.getTypeName(itr->second)<<std::endl; } } else { OSG_NOTICE<<"No supported properites found."<<std::endl; } OSG_NOTICE<<"Type(float) = "<<osgDB::getTypeEnum<float>()<<", "<<osgDB::getTypeString<float>()<<std::endl; OSG_NOTICE<<"Type(bool) = "<<osgDB::getTypeEnum<bool>()<<", "<<osgDB::getTypeString<bool>()<<std::endl; OSG_NOTICE<<"Type(osg::Vec3) = "<<osgDB::getTypeEnum<osg::Vec3>()<<", "<<osgDB::getTypeString<osg::Vec3>()<<std::endl; OSG_NOTICE<<"Type(osg::Matrixd) = "<<osgDB::getTypeEnum<osg::Matrixd>()<<", "<<osgDB::getTypeString<osg::Matrixd>()<<std::endl; OSG_NOTICE<<"Type(osg::Vec2ui) = "<<osgDB::getTypeEnum<osg::Vec2ui>()<<", "<<osgDB::getTypeString<osg::Vec2ui>()<<std::endl; OSG_NOTICE<<"Type(GLenum) = "<<osgDB::getTypeEnum<GLenum>()<<", "<<osgDB::getTypeString<GLenum>()<<std::endl; OSG_NOTICE<<"Type(int) = "<<osgDB::getTypeEnum<int>()<<", "<<osgDB::getTypeString<int>()<<std::endl; OSG_NOTICE<<"Type(osg::Image*) = "<<osgDB::getTypeEnum<osg::Image*>()<<", "<<osgDB::getTypeString<osg::Image*>()<<std::endl; OSG_NOTICE<<"Type(osg::Object*) = "<<osgDB::getTypeEnum<osg::Object*>()<<", "<<osgDB::getTypeString<osg::Object*>()<<std::endl; OSG_NOTICE<<"Type(osg::Referenced*) = "<<osgDB::getTypeEnum<osg::Referenced*>()<<", "<<osgDB::getTypeString<osg::Referenced*>()<<std::endl; osg::Object* ptr = presentation.get(); OSG_NOTICE<<"Type(ptr) = "<<osgDB::getTypeEnumFromPtr(ptr)<<", "<<osgDB::getTypeStringFromPtr(ptr)<<std::endl; OSG_NOTICE<<"Type(presentation) = "<<osgDB::getTypeEnumFromPtr(presentation.get())<<", "<<osgDB::getTypeStringFromPtr(presentation.get())<<std::endl; osg::Image* image2 = 0; OSG_NOTICE<<"Type(image) = "<<osgDB::getTypeEnumFromPtr(image2)<<", "<<osgDB::getTypeStringFromPtr(image2)<<std::endl; osg::Vec3 pos; 
OSG_NOTICE<<"Type(pos) = "<<osgDB::getTypeEnumFrom(pos)<<", "<<osgDB::getTypeStringFrom(pos)<<std::endl; OSG_NOTICE<<"Type(std::string) = "<<osgDB::getTypeEnum<std::string>()<<", "<<osgDB::getTypeString<std::string>()<<std::endl; osgDB::BaseSerializer::Type type; if (pi.getPropertyType(presentation.get(), "Name", type)) { OSG_NOTICE<<"Property Type, Name = "<< type<<std::endl; } #endif osg::Matrixd mymatrix = osg::Matrix::translate(-1,2,3); pi.setProperty(presentation.get(), "mymatrix", mymatrix); osg::Matrixd copyofmatrix; if (pi.getProperty(presentation.get(), "mymatrix", copyofmatrix)) { OSG_NOTICE<<"mymatrix = "<<copyofmatrix<<std::endl; } if (pi.getProperty(presentation.get(), "Matrix", copyofmatrix)) { OSG_NOTICE<<"Matrix = "<<copyofmatrix<<std::endl; } std::string teststring="Another test"; pi.setProperty(presentation.get(),"mystring",teststring); std::string astring; if (pi.getProperty(presentation.get(),"mystring",astring)) { OSG_NOTICE<<"mystring = "<<astring<<std::endl; } else { OSG_NOTICE<<"failed to get mystring"<<std::endl; } #define PRINT_TYPE(O,PN) \ { \ osgDB::BaseSerializer::Type type; \ if (pi.getPropertyType(O, #PN, type)) \ { \ OSG_NOTICE<<#PN<<" : type "<<type<<", "<<pi.getTypeName(type)<<std::endl; \ } \ else \ { \ OSG_NOTICE<<#PN<<" : failed to get type"<<std::endl; \ } \ } PRINT_TYPE(presentation.get(), Name) PRINT_TYPE(presentation.get(), Matrix) PRINT_TYPE(presentation.get(), DataVariance) PRINT_TYPE(presentation.get(), mystring) PRINT_TYPE(presentation.get(), mymatrix) osg::ref_ptr<osgGA::GUIEventAdapter> event = new osgGA::GUIEventAdapter; if (pi.getSupportedProperties(event.get(), properties, true)) { OSG_NOTICE<<"Have supported properites found."<<std::endl; for(osgDB::ClassInterface::PropertyMap::iterator itr = properties.begin(); itr != properties.end(); ++itr) { OSG_NOTICE<<" Property "<<itr->first<<", "<<pi.getTypeName(itr->second)<<std::endl; } } else { OSG_NOTICE<<"No supported properites found."<<std::endl; } #endif osg::Vec3f pos(1.5,3.0,4.5); presentation->setProperty("position",pos); osg::Vec2f texcoord(0.5f,0.20f); presentation->setProperty("texcoord",texcoord); osg::ref_ptr<osg::ScriptEngine> luaScriptEngine = osgDB::readFile<osg::ScriptEngine>("ScriptEngine.lua"); if (luaScriptEngine.valid()) { presentation->getOrCreateUserDataContainer()->addUserObject(luaScriptEngine.get()); std::string str; while (arguments.read("--lua", str)) { osg::ref_ptr<osg::Script> script = osgDB::readScriptFile(str); if (script.valid()) { presentation->addUpdateCallback(new osg::ScriptNodeCallback(script.get(),"update")); } } if (arguments.read("--test", str)) { osg::ref_ptr<osg::Script> script = osgDB::readScriptFile(str); if (script.valid()) { osg::Parameters inputParameters; osg::Parameters outputParameters; inputParameters.push_back(new osg::StringValueObject("string","my very first string input")); inputParameters.push_back(new osg::DoubleValueObject("double",1.234)); inputParameters.push_back(new osg::MatrixfValueObject("matrix",osg::Matrixf())); osg::ref_ptr<osg::MatrixdValueObject> svo = new osg::MatrixdValueObject("return", osg::Matrixd()); outputParameters.push_back(svo.get()); if (luaScriptEngine->run(script.get(), "test", inputParameters, outputParameters)) { OSG_NOTICE<<"Successfully ran script : return value = "<<svo->getValue()<<std::endl; } else { OSG_NOTICE<<"script run failed"<<std::endl; } } } } osg::ref_ptr<osg::Object> obj = pi.createObject("osgVolume::VolumeTile"); if (obj.valid()) { OSG_NOTICE<<"obj created 
"<<obj->getCompoundClassName()<<std::endl; } else { OSG_NOTICE<<"obj creation failed "<<std::endl; } osgDB::ClassInterface::PropertyMap properties; if (pi.getSupportedProperties(obj.get(), properties, true)) { OSG_NOTICE<<"Have supported properites found."<<std::endl; for(osgDB::ClassInterface::PropertyMap::iterator itr = properties.begin(); itr != properties.end(); ++itr) { OSG_NOTICE<<" Property "<<itr->first<<", "<<pi.getTypeName(itr->second)<<std::endl; } } else { OSG_NOTICE<<"No supported properites found."<<std::endl; } //return 0; return viewer.run(); #endif }
// -------------- // -----Main----- // -------------- int main (int argc, char** argv) { // -------------------------------------- // -----Parse Command Line Arguments----- // -------------------------------------- if (pcl::console::find_argument (argc, argv, "-h") >= 0) { printUsage (argv[0]); return 0; } if (pcl::console::find_argument (argc, argv, "-m") >= 0) { setUnseenToMaxRange = true; cout << "Setting unseen values in range image to maximum range readings.\n"; } int tmp_coordinate_frame; if (pcl::console::parse (argc, argv, "-c", tmp_coordinate_frame) >= 0) { coordinate_frame = pcl::RangeImage::CoordinateFrame (tmp_coordinate_frame); cout << "Using coordinate frame "<< (int)coordinate_frame<<".\n"; } if (pcl::console::parse (argc, argv, "-r", angular_resolution) >= 0) cout << "Setting angular resolution to "<<angular_resolution<<"deg.\n"; angular_resolution = pcl::deg2rad (angular_resolution); // ------------------------------------------------------------------ // -----Read pcd file or create example point cloud if not given----- // ------------------------------------------------------------------ pcl::PointCloud<PointType>::Ptr point_cloud_ptr (new pcl::PointCloud<PointType>); pcl::PointCloud<PointType>& point_cloud = *point_cloud_ptr; pcl::PointCloud<pcl::PointWithViewpoint> far_ranges; Eigen::Affine3f scene_sensor_pose (Eigen::Affine3f::Identity ()); std::vector<int> pcd_filename_indices = pcl::console::parse_file_extension_argument (argc, argv, "pcd"); if (!pcd_filename_indices.empty ()) { std::string filename = argv[pcd_filename_indices[0]]; if (pcl::io::loadPCDFile (filename, point_cloud) == -1) { cout << "Was not able to open file \""<<filename<<"\".\n"; printUsage (argv[0]); return 0; } scene_sensor_pose = Eigen::Affine3f (Eigen::Translation3f (point_cloud.sensor_origin_[0], point_cloud.sensor_origin_[1], point_cloud.sensor_origin_[2])) * Eigen::Affine3f (point_cloud.sensor_orientation_); std::string far_ranges_filename = pcl::getFilenameWithoutExtension (filename)+"_far_ranges.pcd"; if (pcl::io::loadPCDFile(far_ranges_filename.c_str(), far_ranges) == -1) std::cout << "Far ranges file \""<<far_ranges_filename<<"\" does not exists.\n"; } else { cout << "\nNo *.pcd file given => Generating example point cloud.\n\n"; for (float x=-0.5f; x<=0.5f; x+=0.01f) { for (float y=-0.5f; y<=0.5f; y+=0.01f) { PointType point; point.x = x; point.y = y; point.z = 2.0f - y; point_cloud.points.push_back (point); } } point_cloud.width = (int) point_cloud.points.size (); point_cloud.height = 1; } // ----------------------------------------------- // -----Create RangeImage from the PointCloud----- // ----------------------------------------------- float noise_level = 0.0; float min_range = 0.0f; int border_size = 1; boost::shared_ptr<pcl::RangeImage> range_image_ptr (new pcl::RangeImage); pcl::RangeImage& range_image = *range_image_ptr; range_image.createFromPointCloud (point_cloud, angular_resolution, pcl::deg2rad (360.0f), pcl::deg2rad (180.0f), scene_sensor_pose, coordinate_frame, noise_level, min_range, border_size); range_image.integrateFarRanges (far_ranges); if (setUnseenToMaxRange) range_image.setUnseenToMaxRange (); // -------------------------------------------- // -----Open 3D viewer and add point cloud----- // -------------------------------------------- pcl::visualization::PCLVisualizer viewer ("3D Viewer"); viewer.setBackgroundColor (1, 1, 1); viewer.addCoordinateSystem (1.0f, "global"); pcl::visualization::PointCloudColorHandlerCustom<PointType> point_cloud_color_handler 
(point_cloud_ptr, 0, 0, 0); viewer.addPointCloud (point_cloud_ptr, point_cloud_color_handler, "original point cloud"); //PointCloudColorHandlerCustom<pcl::PointWithRange> range_image_color_handler (range_image_ptr, 150, 150, 150); //viewer.addPointCloud (range_image_ptr, range_image_color_handler, "range image"); //viewer.setPointCloudRenderingProperties (PCL_VISUALIZER_POINT_SIZE, 2, "range image"); // ------------------------- // -----Extract borders----- // ------------------------- pcl::RangeImageBorderExtractor border_extractor (&range_image); pcl::PointCloud<pcl::BorderDescription> border_descriptions; border_extractor.compute (border_descriptions); // ---------------------------------- // -----Show points in 3D viewer----- // ---------------------------------- pcl::PointCloud<pcl::PointWithRange>::Ptr border_points_ptr(new pcl::PointCloud<pcl::PointWithRange>), veil_points_ptr(new pcl::PointCloud<pcl::PointWithRange>), shadow_points_ptr(new pcl::PointCloud<pcl::PointWithRange>); pcl::PointCloud<pcl::PointWithRange>& border_points = *border_points_ptr, & veil_points = * veil_points_ptr, & shadow_points = *shadow_points_ptr; for (int y=0; y< (int)range_image.height; ++y) { for (int x=0; x< (int)range_image.width; ++x) { if (border_descriptions.points[y*range_image.width + x].traits[pcl::BORDER_TRAIT__OBSTACLE_BORDER]) border_points.points.push_back (range_image.points[y*range_image.width + x]); if (border_descriptions.points[y*range_image.width + x].traits[pcl::BORDER_TRAIT__VEIL_POINT]) veil_points.points.push_back (range_image.points[y*range_image.width + x]); if (border_descriptions.points[y*range_image.width + x].traits[pcl::BORDER_TRAIT__SHADOW_BORDER]) shadow_points.points.push_back (range_image.points[y*range_image.width + x]); } } pcl::visualization::PointCloudColorHandlerCustom<pcl::PointWithRange> border_points_color_handler (border_points_ptr, 0, 255, 0); viewer.addPointCloud<pcl::PointWithRange> (border_points_ptr, border_points_color_handler, "border points"); viewer.setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 7, "border points"); pcl::visualization::PointCloudColorHandlerCustom<pcl::PointWithRange> veil_points_color_handler (veil_points_ptr, 255, 0, 0); viewer.addPointCloud<pcl::PointWithRange> (veil_points_ptr, veil_points_color_handler, "veil points"); viewer.setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 7, "veil points"); pcl::visualization::PointCloudColorHandlerCustom<pcl::PointWithRange> shadow_points_color_handler (shadow_points_ptr, 0, 255, 255); viewer.addPointCloud<pcl::PointWithRange> (shadow_points_ptr, shadow_points_color_handler, "shadow points"); viewer.setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 7, "shadow points"); //------------------------------------- // -----Show points on range image----- // ------------------------------------ pcl::visualization::RangeImageVisualizer* range_image_borders_widget = NULL; range_image_borders_widget = pcl::visualization::RangeImageVisualizer::getRangeImageBordersWidget (range_image, -std::numeric_limits<float>::infinity (), std::numeric_limits<float>::infinity (), false, border_descriptions, "Range image with borders"); // ------------------------------------- //-------------------- // -----Main loop----- //-------------------- while (!viewer.wasStopped ()) { range_image_borders_widget->spinOnce (); viewer.spinOnce (); pcl_sleep(0.01); } }
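// ---------------------------------------------------------------------------
// Small hypothetical helper (not part of the example above) showing the same
// row-major indexing used in the extraction loop: the border description for
// range-image pixel (x, y) lives at index y*width + x.
// ---------------------------------------------------------------------------
#include <pcl/point_types.h>
#include <pcl/range_image/range_image.h>

inline bool isObstacleBorder(const pcl::PointCloud<pcl::BorderDescription>& descriptions,
                             const pcl::RangeImage& range_image, int x, int y)
{
    const pcl::BorderDescription& d = descriptions.points[y * range_image.width + x];
    return d.traits[pcl::BORDER_TRAIT__OBSTACLE_BORDER];
}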
int main(int argc, char* argv[]) { double discountFactor; unsigned int maxNbEvaluations; char isTerminal = 0; char keepingTree = 0; int nbTimestep = -1; unsigned int branchingFactor = 0; #ifdef USE_SDL char isDisplayed = 1; char isFullscreen = 1; char verbose = 0; char resolution[255] = "640x480"; #else char verbose = 1; #endif uniform_instance* instance = NULL; state* crtState = NULL; state* nextState = NULL; double reward = 0.0; action* optimalAction = NULL; struct arg_dbl* g = arg_dbl1("g", "discountFactor", "<d>", "The discount factor for the problem"); struct arg_int* n = arg_int1("n", "nbEvaluations", "<n>", "The number of evaluations"); struct arg_int* s = arg_int0("s", "nbtimestep", "<n>", "The number of timestep"); struct arg_int* b = arg_int0("b", "branchingFactor", "<n>", "The branching factor of the problem"); struct arg_lit* k = arg_lit0("k", NULL, "Keep the subtree"); struct arg_str* i = arg_str0(NULL, "state", "<s>", "The initial state to use"); #ifdef USE_SDL struct arg_lit* d = arg_lit0("d", NULL, "Display the viewer"); struct arg_lit* f = arg_lit0("f", NULL, "Fullscreen"); struct arg_lit* v = arg_lit0("v", NULL, "Verbose"); struct arg_str* r = arg_str0(NULL, "resolution", "<s>", "The resolution of the display window"); void* argtable[11]; int nbArgs = 10; #else void* argtable[7]; int nbArgs = 6; #endif struct arg_end* end = arg_end(nbArgs+1); int nerrors = 0; s->ival[0] = -1; b->ival[0] = 0; argtable[0] = g; argtable[1] = n; argtable[2] = s; argtable[3] = k; argtable[4] = b; argtable[5] = i; #ifdef USE_SDL argtable[6] = d; argtable[7] = f; argtable[8] = v; argtable[9] = r; #endif argtable[nbArgs] = end; if(arg_nullcheck(argtable) != 0) { printf("error: insufficient memory\n"); arg_freetable(argtable, nbArgs+1); return EXIT_FAILURE; } nerrors = arg_parse(argc, argv, argtable); if(nerrors > 0) { printf("%s:", argv[0]); arg_print_syntax(stdout, argtable, "\n"); arg_print_errors(stdout, end, argv[0]); arg_freetable(argtable, nbArgs+1); return EXIT_FAILURE; } discountFactor = g->dval[0]; maxNbEvaluations = n->ival[0]; branchingFactor = b->ival[0]; initGenerativeModelParameters(); if(branchingFactor) K = branchingFactor; initGenerativeModel(); if(i->count) crtState = makeState(i->sval[0]); else crtState = initState(); #if USE_SDL isDisplayed = d->count; isFullscreen = f->count; verbose = v->count; if(r->count) strcpy(resolution, r->sval[0]); #endif nbTimestep = s->ival[0]; keepingTree = k->count; arg_freetable(argtable, nbArgs+1); instance = uniform_initInstance(crtState, discountFactor); #ifdef USE_SDL if(isDisplayed) { if(initViewer(resolution, uniform_drawingProcedure, isFullscreen) == -1) return EXIT_FAILURE; viewer(crtState, NULL, 0.0, instance); } #endif do { if(keepingTree) uniform_keepSubtree(instance); else uniform_resetInstance(instance, crtState); optimalAction = uniform_planning(instance, maxNbEvaluations); isTerminal = nextStateReward(crtState, optimalAction, &nextState, &reward); freeState(crtState); crtState = nextState; if(verbose) { printState(crtState); printAction(optimalAction); printf("reward: %f depth: %u\n", reward, uniform_getMaxDepth(instance)); } #ifdef USE_SDL } while(!isTerminal && (nbTimestep < 0 || --nbTimestep) && !viewer(crtState, optimalAction, reward, instance)); #else } while(!isTerminal && (nbTimestep < 0 || --nbTimestep));
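// The original listing is truncated here: the #ifdef USE_SDL / #else pair above
// is never closed. A minimal, assumed completion follows; cleanup of the
// generative model and planning instance is omitted because those functions are
// not shown in the snippet.
#endif
    freeState(crtState);
    return EXIT_SUCCESS;
}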
//---------------------------------------------------------------------------- int main( void ) { // uncomment to log dynamics Moby::Log<Moby::OutputToFile>::reporting_level = 7; boost::shared_ptr<Moby::TimeSteppingSimulator> sim( new Moby::TimeSteppingSimulator() ); boost::shared_ptr<Moby::GravityForce> g( new Moby::GravityForce() ); g->gravity = Ravelin::Vector3d( 0, 0, -9.8 ); Moby::RCArticulatedBodyPtr ab( new Moby::RCArticulatedBody() ); ab->id = "gripper"; ab->algorithm_type = Moby::RCArticulatedBody::eCRB; std::vector< Moby::RigidBodyPtr > links; Moby::RigidBodyPtr base( new Moby::RigidBody() ); { Moby::PrimitivePtr box( new Moby::BoxPrimitive(0.05,1.0,0.05) ); box->set_mass( 1 ); // static base->id = "base"; base->set_visualization_data( box->create_visualization() ); base->set_inertia( box->get_inertia() ); base->set_enabled( false ); base->set_pose( Ravelin::Pose3d( Ravelin::Quatd::normalize(Ravelin::Quatd(0,0,0,1)), Ravelin::Origin3d(0,0,0) ) ); links.push_back( base ); } Moby::RigidBodyPtr link( new Moby::RigidBody() ); { Moby::PrimitivePtr box( new Moby::BoxPrimitive(0.05,0.05,0.5) ); box->set_mass( 1 ); link->id = "link"; link->set_visualization_data( box->create_visualization() ); link->set_inertia( box->get_inertia() ); link->set_enabled( true ); link->get_recurrent_forces().push_back( g ); link->set_pose( Ravelin::Pose3d( Ravelin::Quatd::normalize(Ravelin::Quatd(0,0,0,1)), Ravelin::Origin3d(0,0,-0.275) ) ); links.push_back( link ); } std::vector< Moby::JointPtr > joints; boost::shared_ptr<Moby::PrismaticJoint> joint( new Moby::PrismaticJoint() ); { joint->id = "joint"; joint->set_location( Ravelin::Vector3d(0,0,0,base->get_pose()), base, link ); joint->set_axis( Ravelin::Vector3d(0,1,0,Moby::GLOBAL) ); joint->lolimit = -0.5; joint->hilimit = 0.5; joints.push_back( joint ); } ab->set_links_and_joints( links, joints ); ab->get_recurrent_forces().push_back( g ); ab->set_floating_base(false); ab->controller = &push_controller; sim->add_dynamic_body( ab ); Viewer viewer( sim, Ravelin::Origin3d(-5,0,-1), Ravelin::Origin3d(0,0,-1), Ravelin::Origin3d(0,0,1) ); while(true) { if( !viewer.update() ) break; sim->step( 0.001 ); Ravelin::Pose3d pose = *link->get_pose(); pose.update_relative_pose(Moby::GLOBAL); std::cout << "t: " << sim->current_time << " x: " << pose.x << std::endl; } return 0; }
template <typename PointT> Eigen::VectorXf open_ptrack::detection::GroundplaneEstimation<PointT>::compute () { Eigen::VectorXf ground_coeffs; ground_coeffs.resize(4); // Manual mode: if (ground_estimation_mode_ == 0) { std::cout << "Manual mode for ground plane estimation." << std::endl; // Initialize viewer: pcl::visualization::PCLVisualizer viewer("Pick 3 points"); // Create XYZ cloud: pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud_xyzrgb(new pcl::PointCloud<pcl::PointXYZRGB>); pcl::PointXYZRGB xyzrgb_point; cloud_xyzrgb->points.resize(cloud_->width * cloud_->height, xyzrgb_point); cloud_xyzrgb->width = cloud_->width; cloud_xyzrgb->height = cloud_->height; cloud_xyzrgb->is_dense = false; for (int i=0; i<cloud_->height; i++) { for (int j=0; j<cloud_->width; j++) { cloud_xyzrgb->at(j,i).x = cloud_->at(j,i).x; cloud_xyzrgb->at(j,i).y = cloud_->at(j,i).y; cloud_xyzrgb->at(j,i).z = cloud_->at(j,i).z; } } //#if (XYZRGB_CLOUDS) // pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZRGB> rgb(cloud_); // viewer.addPointCloud<pcl::PointXYZRGB> (cloud_, rgb, "input_cloud"); //#else // viewer.addPointCloud<pcl::PointXYZ> (cloud_, "input_cloud"); //#endif pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZRGB> rgb(cloud_xyzrgb, 255, 255, 255); viewer.addPointCloud<pcl::PointXYZRGB> (cloud_xyzrgb, rgb, "input_cloud"); viewer.setCameraPosition(0,0,-2,0,-1,0,0); // Add point picking callback to viewer: struct callback_args_color cb_args; //#if (XYZRGB_CLOUDS) // PointCloudPtr clicked_points_3d (new PointCloud); //#else // pcl::PointCloud<pcl::PointXYZ>::Ptr clicked_points_3d (new pcl::PointCloud<pcl::PointXYZ>); //#endif pcl::PointCloud<pcl::PointXYZRGB>::Ptr clicked_points_3d (new pcl::PointCloud<pcl::PointXYZRGB>); cb_args.clicked_points_3d = clicked_points_3d; cb_args.viewerPtr = &viewer; viewer.registerPointPickingCallback (GroundplaneEstimation::pp_callback, (void*)&cb_args); // Spin until 'Q' is pressed: viewer.spin(); viewer.setSize(1,1); // resize viewer in order to make it disappear viewer.spinOnce(); viewer.close(); // close method does not work std::cout << "done." << std::endl; // Keep only the last three clicked points: while(clicked_points_3d->points.size()>3) { clicked_points_3d->points.erase(clicked_points_3d->points.begin()); } // Ground plane estimation: std::vector<int> clicked_points_indices; for (unsigned int i = 0; i < clicked_points_3d->points.size(); i++) clicked_points_indices.push_back(i); // pcl::SampleConsensusModelPlane<PointT> model_plane(clicked_points_3d); pcl::SampleConsensusModelPlane<pcl::PointXYZRGB> model_plane(clicked_points_3d); model_plane.computeModelCoefficients(clicked_points_indices,ground_coeffs); std::cout << "Ground plane coefficients: " << ground_coeffs(0) << ", " << ground_coeffs(1) << ", " << ground_coeffs(2) << ", " << ground_coeffs(3) << "." << std::endl; } // Semi-automatic mode: if (ground_estimation_mode_ == 1) { std::cout << "Semi-automatic mode for ground plane estimation." 
<< std::endl; // Normals computation: pcl::IntegralImageNormalEstimation<PointT, pcl::Normal> ne; ne.setNormalEstimationMethod (ne.COVARIANCE_MATRIX); ne.setMaxDepthChangeFactor (0.03f); ne.setNormalSmoothingSize (20.0f); pcl::PointCloud<pcl::Normal>::Ptr normal_cloud (new pcl::PointCloud<pcl::Normal>); ne.setInputCloud (cloud_); ne.compute (*normal_cloud); // Multi plane segmentation initialization: std::vector<pcl::PlanarRegion<PointT>, Eigen::aligned_allocator<pcl::PlanarRegion<PointT> > > regions; pcl::OrganizedMultiPlaneSegmentation<PointT, pcl::Normal, pcl::Label> mps; mps.setMinInliers (500); mps.setAngularThreshold (2.0 * M_PI / 180); mps.setDistanceThreshold (0.2); mps.setInputNormals (normal_cloud); mps.setInputCloud (cloud_); mps.segmentAndRefine (regions); std::cout << "Found " << regions.size() << " planar regions." << std::endl; // Color planar regions with different colors: pcl::PointCloud<pcl::PointXYZRGB>::Ptr colored_cloud (new pcl::PointCloud<pcl::PointXYZRGB>); colored_cloud = colorRegions(regions); if (regions.size()>0) { // Viewer initialization: pcl::visualization::PCLVisualizer viewer("PCL Viewer"); pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZRGB> rgb(colored_cloud); viewer.addPointCloud<pcl::PointXYZRGB> (colored_cloud, rgb, "input_cloud"); viewer.setCameraPosition(0,0,-2,0,-1,0,0); // Add point picking callback to viewer: struct callback_args_color cb_args; typename pcl::PointCloud<pcl::PointXYZRGB>::Ptr clicked_points_3d (new pcl::PointCloud<pcl::PointXYZRGB>); cb_args.clicked_points_3d = clicked_points_3d; cb_args.viewerPtr = &viewer; viewer.registerPointPickingCallback (GroundplaneEstimation::pp_callback, (void*)&cb_args); std::cout << "Shift+click on a floor point, then press 'Q'..." << std::endl; // Spin until 'Q' is pressed: viewer.spin(); viewer.setSize(1,1); // resize viewer in order to make it disappear viewer.spinOnce(); viewer.close(); // close method does not work std::cout << "done." << std::endl; // Find plane closest to clicked point: unsigned int index = 0; float min_distance = FLT_MAX; float distance; float X = cb_args.clicked_points_3d->points[clicked_points_3d->points.size() - 1].x; float Y = cb_args.clicked_points_3d->points[clicked_points_3d->points.size() - 1].y; float Z = cb_args.clicked_points_3d->points[clicked_points_3d->points.size() - 1].z; for(unsigned int i = 0; i < regions.size(); i++) { float a = regions[i].getCoefficients()[0]; float b = regions[i].getCoefficients()[1]; float c = regions[i].getCoefficients()[2]; float d = regions[i].getCoefficients()[3]; distance = (float) (fabs((a*X + b*Y + c*Z + d)))/(sqrtf(a*a+b*b+c*c)); if(distance < min_distance) { min_distance = distance; index = i; } } ground_coeffs[0] = regions[index].getCoefficients()[0]; ground_coeffs[1] = regions[index].getCoefficients()[1]; ground_coeffs[2] = regions[index].getCoefficients()[2]; ground_coeffs[3] = regions[index].getCoefficients()[3]; std::cout << "Ground plane coefficients: " << regions[index].getCoefficients()[0] << ", " << regions[index].getCoefficients()[1] << ", " << regions[index].getCoefficients()[2] << ", " << regions[index].getCoefficients()[3] << "." << std::endl; } } // Automatic mode: if ((ground_estimation_mode_ == 2) || (ground_estimation_mode_ == 3)) { std::cout << "Automatic mode for ground plane estimation." 
<< std::endl; // Normals computation: // pcl::NormalEstimation<PointT, pcl::Normal> ne; //// ne.setNormalEstimationMethod (ne.COVARIANCE_MATRIX); //// ne.setMaxDepthChangeFactor (0.03f); //// ne.setNormalSmoothingSize (20.0f); // pcl::PointCloud<pcl::Normal>::Ptr normal_cloud (new pcl::PointCloud<pcl::Normal>); // pcl::search::KdTree<pcl::PointXYZRGB>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZRGB> ()); // ne.setSearchMethod (tree); // ne.setRadiusSearch (0.2); // ne.setInputCloud (cloud_); // ne.compute (*normal_cloud); pcl::IntegralImageNormalEstimation<PointT, pcl::Normal> ne; ne.setNormalEstimationMethod (ne.COVARIANCE_MATRIX); ne.setMaxDepthChangeFactor (0.03f); ne.setNormalSmoothingSize (20.0f); pcl::PointCloud<pcl::Normal>::Ptr normal_cloud (new pcl::PointCloud<pcl::Normal>); ne.setInputCloud (cloud_); ne.compute (*normal_cloud); // std::cout << "Normals estimated!" << std::endl; // // // Multi plane segmentation initialization: // std::vector<pcl::PlanarRegion<PointT>, Eigen::aligned_allocator<pcl::PlanarRegion<PointT> > > regions; // pcl::OrganizedMultiPlaneSegmentation<PointT, pcl::Normal, pcl::Label> mps; // mps.setMinInliers (500); // mps.setAngularThreshold (2.0 * M_PI / 180); // mps.setDistanceThreshold (0.2); // mps.setInputNormals (normal_cloud); // mps.setInputCloud (cloud_); // mps.segmentAndRefine (regions); // Multi plane segmentation initialization: std::vector<pcl::PlanarRegion<PointT>, Eigen::aligned_allocator<pcl::PlanarRegion<PointT> > > regions; pcl::OrganizedMultiPlaneSegmentation<PointT, pcl::Normal, pcl::Label> mps; mps.setMinInliers (500); mps.setAngularThreshold (2.0 * M_PI / 180); mps.setDistanceThreshold (0.2); mps.setInputNormals (normal_cloud); mps.setInputCloud (cloud_); mps.segmentAndRefine (regions); // std::cout << "Found " << regions.size() << " planar regions." << std::endl; // Removing planes not compatible with camera roll ~= 0: unsigned int i = 0; while(i < regions.size()) { // Check on the normal to the plane: if(fabs(regions[i].getCoefficients()[1]) < 0.70) { regions.erase(regions.begin()+i); } else ++i; } // Order planar regions according to height (y coordinate): std::sort(regions.begin(), regions.end(), GroundplaneEstimation::planeHeightComparator); // Color selected planar region in red: pcl::PointCloud<pcl::PointXYZRGB>::Ptr colored_cloud (new pcl::PointCloud<pcl::PointXYZRGB>); colored_cloud = colorRegions(regions, 0); // If at least a valid plane remained: if (regions.size()>0) { ground_coeffs[0] = regions[0].getCoefficients()[0]; ground_coeffs[1] = regions[0].getCoefficients()[1]; ground_coeffs[2] = regions[0].getCoefficients()[2]; ground_coeffs[3] = regions[0].getCoefficients()[3]; std::cout << "Ground plane coefficients: " << regions[0].getCoefficients()[0] << ", " << regions[0].getCoefficients()[1] << ", " << regions[0].getCoefficients()[2] << ", " << regions[0].getCoefficients()[3] << "." << std::endl; // Result visualization: if (ground_estimation_mode_ == 2) { // Viewer initialization: pcl::visualization::PCLVisualizer viewer("PCL Viewer"); pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZRGB> rgb(colored_cloud); viewer.addPointCloud<pcl::PointXYZRGB> (colored_cloud, rgb, "input_cloud"); viewer.setCameraPosition(0,0,-2,0,-1,0,0); // Spin until 'Q' is pressed: viewer.spin(); viewer.setSize(1,1); // resize viewer in order to make it disappear viewer.spinOnce(); viewer.close(); // close method does not work } } else { std::cout << "ERROR: no valid ground plane found!" << std::endl; } } return ground_coeffs; }
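// ---------------------------------------------------------------------------
// The "closest plane to the clicked point" test in the semi-automatic mode
// above uses the standard point-to-plane distance; a small standalone version
// of that formula:
//
//     distance = |a*X + b*Y + c*Z + d| / sqrt(a^2 + b^2 + c^2)
// ---------------------------------------------------------------------------
#include <cmath>

float pointToPlaneDistance(float a, float b, float c, float d,
                           float X, float Y, float Z)
{
    return std::fabs(a*X + b*Y + c*Z + d) / std::sqrt(a*a + b*b + c*c);
}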
int main(int argc, char* argv[]) { osg::ref_ptr<osg::Group> root(new osg::Group); osg::ref_ptr<osg::Geode> sphere(build_sphere()); root->addChild(sphere.get()); osg::ref_ptr<osg::Group> satellites_geometry(build_satellites()); osg::ref_ptr<osg::MatrixTransform> satellites(new osg::MatrixTransform); satellites->addChild(satellites_geometry.get()); satellites->setUpdateCallback(new RotateCallback(osg::Vec3(0., 1., 0.), 0.001)); root->addChild(satellites.get()); osg::ref_ptr<osg::Geode> plane(build_quad()); osg::ref_ptr<osg::MatrixTransform> plane_transform(new osg::MatrixTransform); plane_transform->addChild(plane.get()); osg::Matrix plane_transform_matrix; plane_transform_matrix.makeTranslate(0., -2., 0.); plane_transform->setMatrix(plane_transform_matrix); root->addChild(plane_transform.get()); osg::ref_ptr<osg::Light> light(new osg::Light); light->setLightNum(1); light->setPosition(osg::Vec4(0., 0., 0., 1.)); osg::ref_ptr<osg::LightSource> light_source(new osg::LightSource); light_source->setLight(light.get()); osg::ref_ptr<osg::MatrixTransform> transform(new osg::MatrixTransform); transform->addChild(light_source.get()); osg::Matrix matrix; matrix.makeTranslate(osg::Vec3(10., 0., 0.)); transform->setMatrix(matrix); root->addChild(transform.get()); osg::ref_ptr<osg::Texture2D> tex = new osg::Texture2D; int env_width = 1024; int env_height = 1024; tex->setTextureSize(env_width, env_height); tex->setInternalFormat(GL_RGBA); tex->setFilter(osg::Texture2D::MIN_FILTER, osg::Texture2D::LINEAR); tex->setFilter(osg::Texture2D::MAG_FILTER, osg::Texture2D::LINEAR); boost::shared_ptr<shade::osg::Texture> shade_tex(new shade::osg::Texture(tex.get())); osg::ref_ptr<osg::Camera> env_camera = new osg::Camera; env_camera->setClearMask(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); env_camera->setClearColor(osg::Vec4(0.5, 0.5, 0.5, 1.)); env_camera->setReferenceFrame(osg::Transform::ABSOLUTE_RF); env_camera->setViewport(0, 0, env_width, env_height); env_camera->setRenderOrder(osg::Camera::PRE_RENDER); env_camera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT, osg::Camera::FRAME_BUFFER); env_camera->attach(osg::Camera::COLOR_BUFFER, tex, 0, 0, true); env_camera->addChild(sphere); env_camera->addChild(satellites); osg::ref_ptr<osg::StateSet> env_camera_state(env_camera->getOrCreateStateSet()); osg::FrontFace* cf = new osg::FrontFace(osg::FrontFace::CLOCKWISE); env_camera_state->setAttributeAndModes(cf); root->addChild(env_camera); setup_plane_shading(plane->getOrCreateStateSet(), shade_tex); osg::ref_ptr<osg::TextureCubeMap> osg_cube_tex = new osg::TextureCubeMap; int cube_map_size = 512; osg_cube_tex->setTextureSize(cube_map_size, cube_map_size); osg_cube_tex->setInternalFormat(GL_RGBA); struct { osg::Vec3 center; osg::Vec3 up; } cube_cameras[] = { {osg::Vec3(+1., 0., 0.), osg::Vec3(0., -1., 0.)}, {osg::Vec3(-1., 0., 0.), osg::Vec3(0., -1., 0.)}, {osg::Vec3(0., +1., 0.), osg::Vec3(0., 0., +1.)}, {osg::Vec3(0., -1., 0.), osg::Vec3(0., 0., +1.)}, {osg::Vec3(0., 0., +1.), osg::Vec3(0., -1., 0.)}, {osg::Vec3(0., 0., -1.), osg::Vec3(0., -1., 0.)}, }; for(int i = 0; i != 6; ++i) { osg::ref_ptr<osg::Camera> cm_camera = new osg::Camera; cm_camera->setClearMask(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); cm_camera->setClearColor(osg::Vec4(0.3, 0.3, 0.3, 1.)); cm_camera->setReferenceFrame(osg::Transform::ABSOLUTE_RF); cm_camera->setViewport(0, 0, cube_map_size, cube_map_size); cm_camera->setRenderOrder(osg::Camera::PRE_RENDER); cm_camera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT, 
osg::Camera::FRAME_BUFFER); cm_camera->attach(osg::Camera::COLOR_BUFFER, osg_cube_tex, 0, i, true); cm_camera->setProjectionMatrixAsPerspective(90., 1., 0.1, 10.); cm_camera->setViewMatrixAsLookAt(osg::Vec3(0., 0., 0.), cube_cameras[i].center, cube_cameras[i].up); cm_camera->addChild(satellites); root->addChild(cm_camera); } boost::shared_ptr<shade::osg::Texture> cube_tex(new shade::osg::Texture(osg_cube_tex.get())); setup_sphere_shading(sphere->getOrCreateStateSet(), cube_tex); osg::ref_ptr<osgViewer::Viewer> viewer(build_viewer(root.get())); env_camera->setDataVariance(osg::Object::DYNAMIC); env_camera->setUpdateCallback(new MirrorCamera(viewer->getCamera(), plane_transform_matrix)); viewer->run(); }
int main(int argc, char** argv) { // use an ArgumentParser object to manage the program arguments. osg::ArgumentParser arguments(&argc, argv); // set up the usage document, in case we need to print out how to use this program. arguments.getApplicationUsage()->setDescription(arguments.getApplicationName() + " is the example which demonstrates using of GL_ARB_shadow extension implemented in osg::Texture class"); arguments.getApplicationUsage()->setCommandLineUsage(arguments.getApplicationName()); arguments.getApplicationUsage()->addCommandLineOption("-h or --help", "Display this information"); arguments.getApplicationUsage()->addCommandLineOption("--positionalLight", "Use a positional light."); arguments.getApplicationUsage()->addCommandLineOption("--directionalLight", "Use a direction light."); arguments.getApplicationUsage()->addCommandLineOption("--noUpdate", "Disable the updating the of light source."); arguments.getApplicationUsage()->addCommandLineOption("--castsShadowMask", "Override default castsShadowMask (default - 0x2)"); arguments.getApplicationUsage()->addCommandLineOption("--receivesShadowMask", "Override default receivesShadowMask (default - 0x1)"); arguments.getApplicationUsage()->addCommandLineOption("--base", "Add a base geometry to test shadows."); arguments.getApplicationUsage()->addCommandLineOption("--sv", "Select ShadowVolume implementation."); arguments.getApplicationUsage()->addCommandLineOption("--ssm", "Select SoftShadowMap implementation."); arguments.getApplicationUsage()->addCommandLineOption("--sm", "Select ShadowMap implementation."); arguments.getApplicationUsage()->addCommandLineOption("--pssm", "Select ParallelSplitShadowMap implementation.");//ADEGLI arguments.getApplicationUsage()->addCommandLineOption("--mapcount", "ParallelSplitShadowMap texture count.");//ADEGLI arguments.getApplicationUsage()->addCommandLineOption("--mapres", "ParallelSplitShadowMap texture resolution.");//ADEGLI arguments.getApplicationUsage()->addCommandLineOption("--debug-color", "ParallelSplitShadowMap display debugging color (only the first 3 maps are color r=0,g=1,b=2.");//ADEGLI arguments.getApplicationUsage()->addCommandLineOption("--minNearSplit", "ParallelSplitShadowMap shadow map near offset.");//ADEGLI arguments.getApplicationUsage()->addCommandLineOption("--maxFarDist", "ParallelSplitShadowMap max far distance to shadow.");//ADEGLI arguments.getApplicationUsage()->addCommandLineOption("--moveVCamFactor", "ParallelSplitShadowMap move the virtual frustum behind the real camera, (also back ground object can cast shadow).");//ADEGLI arguments.getApplicationUsage()->addCommandLineOption("--PolyOffset-Factor", "ParallelSplitShadowMap set PolygonOffset factor.");//ADEGLI arguments.getApplicationUsage()->addCommandLineOption("--PolyOffset-Unit", "ParallelSplitShadowMap set PolygonOffset unit.");//ADEGLI arguments.getApplicationUsage()->addCommandLineOption("--lispsm", "Select LightSpacePerspectiveShadowMap implementation."); arguments.getApplicationUsage()->addCommandLineOption("--msm", "Select MinimalShadowMap implementation."); arguments.getApplicationUsage()->addCommandLineOption("--ViewBounds", "MSM, LiSPSM optimize shadow for view frustum (weakest option)"); arguments.getApplicationUsage()->addCommandLineOption("--CullBounds", "MSM, LiSPSM optimize shadow for bounds of culled objects in view frustum (better option)."); arguments.getApplicationUsage()->addCommandLineOption("--DrawBounds", "MSM, LiSPSM optimize shadow for bounds of predrawn pixels in view frustum (best & 
default)."); arguments.getApplicationUsage()->addCommandLineOption("--mapres", "MSM, LiSPSM & texture resolution."); arguments.getApplicationUsage()->addCommandLineOption("--maxFarDist", "MSM, LiSPSM max far distance to shadow."); arguments.getApplicationUsage()->addCommandLineOption("--moveVCamFactor", "MSM, LiSPSM move the virtual frustum behind the real camera, (also back ground object can cast shadow)."); arguments.getApplicationUsage()->addCommandLineOption("--minLightMargin", "MSM, LiSPSM the same as --moveVCamFactor."); arguments.getApplicationUsage()->addCommandLineOption("-1", "Use test model one."); arguments.getApplicationUsage()->addCommandLineOption("-2", "Use test model two."); arguments.getApplicationUsage()->addCommandLineOption("-3", "Use test model three (default)."); arguments.getApplicationUsage()->addCommandLineOption("-4", "Use test model four - island scene."); arguments.getApplicationUsage()->addCommandLineOption("--two-sided", "Use two-sided stencil extension for shadow volumes."); arguments.getApplicationUsage()->addCommandLineOption("--two-pass", "Use two-pass stencil for shadow volumes."); arguments.getApplicationUsage()->addCommandLineOption("--near-far-mode","COMPUTE_NEAR_USING_PRIMITIVES, COMPUTE_NEAR_FAR_USING_PRIMITIVES, COMPUTE_NEAR_FAR_USING_BOUNDING_VOLUMES, DO_NOT_COMPUTE_NEAR_FAR"); arguments.getApplicationUsage()->addCommandLineOption("--max-shadow-distance","<float> Maximum distance that the shadow map should extend from the eye point."); // construct the viewer. osgViewer::Viewer viewer(arguments); // if user request help write it out to cout. if (arguments.read("-h") || arguments.read("--help")) { arguments.getApplicationUsage()->write(std::cout); return 1; } double zNear=1.0, zMid=10.0, zFar=1000.0; if (arguments.read("--depth-partition",zNear, zMid, zFar)) { // set up depth partitioning osg::ref_ptr<osgViewer::DepthPartitionSettings> dps = new osgViewer::DepthPartitionSettings; dps->_mode = osgViewer::DepthPartitionSettings::FIXED_RANGE; dps->_zNear = zNear; dps->_zMid = zMid; dps->_zFar = zFar; viewer.setUpDepthPartition(dps.get()); } if (arguments.read("--dp")) { // set up depth partitioning viewer.setUpDepthPartition(); } float fov = 0.0; while (arguments.read("--fov",fov)) {} osg::Vec4 lightpos(0.0,0.0,1,0.0); bool spotlight = false; while (arguments.read("--positionalLight")) { lightpos.set(0.5,0.5,1.5,1.0); } while (arguments.read("--directionalLight")) { lightpos.set(0.0,0.0,1,0.0); } while (arguments.read("--spotLight")) { lightpos.set(0.5,0.5,1.5,1.0); spotlight = true; } bool keepLightPos = false; osg::Vec3 spotLookat(0.0,0.0,0.0); while ( arguments.read("--light-pos", lightpos.x(), lightpos.y(), lightpos.z(), lightpos.w())) { keepLightPos = true; } while ( arguments.read("--light-pos", lightpos.x(), lightpos.y(), lightpos.z())) { lightpos.w()=1.0; keepLightPos = true; } while ( arguments.read("--light-dir", lightpos.x(), lightpos.y(), lightpos.z())) { lightpos.w()=0.0; keepLightPos = true; } while ( arguments.read("--spot-lookat", spotLookat.x(), spotLookat.y(), spotLookat.z())) { } while (arguments.read("--castsShadowMask", CastsShadowTraversalMask )); while (arguments.read("--receivesShadowMask", ReceivesShadowTraversalMask )); bool updateLightPosition = true; while (arguments.read("--noUpdate")) updateLightPosition = false; // set up the camera manipulators. 
{ osg::ref_ptr<osgGA::KeySwitchMatrixManipulator> keyswitchManipulator = new osgGA::KeySwitchMatrixManipulator; keyswitchManipulator->addMatrixManipulator( '1', "Trackball", new osgGA::TrackballManipulator() ); keyswitchManipulator->addMatrixManipulator( '2', "Flight", new osgGA::FlightManipulator() ); keyswitchManipulator->addMatrixManipulator( '3', "Drive", new osgGA::DriveManipulator() ); keyswitchManipulator->addMatrixManipulator( '4', "Terrain", new osgGA::TerrainManipulator() ); std::string pathfile; char keyForAnimationPath = '5'; while (arguments.read("-p",pathfile)) { osgGA::AnimationPathManipulator* apm = new osgGA::AnimationPathManipulator(pathfile); if (apm || !apm->valid()) { unsigned int num = keyswitchManipulator->getNumMatrixManipulators(); keyswitchManipulator->addMatrixManipulator( keyForAnimationPath, "Path", apm ); keyswitchManipulator->selectMatrixManipulator(num); ++keyForAnimationPath; } } viewer.setCameraManipulator( keyswitchManipulator.get() ); } // add the state manipulator viewer.addEventHandler( new osgGA::StateSetManipulator(viewer.getCamera()->getOrCreateStateSet()) ); // add stats viewer.addEventHandler( new osgViewer::StatsHandler() ); // add the record camera path handler viewer.addEventHandler(new osgViewer::RecordCameraPathHandler); // add the window size toggle handler viewer.addEventHandler(new osgViewer::WindowSizeHandler); // add the threading handler viewer.addEventHandler( new osgViewer::ThreadingHandler() ); osg::ref_ptr<osgShadow::ShadowedScene> shadowedScene = new osgShadow::ShadowedScene; osgShadow::ShadowSettings* settings = shadowedScene->getShadowSettings(); settings->setReceivesShadowTraversalMask(ReceivesShadowTraversalMask); settings->setCastsShadowTraversalMask(CastsShadowTraversalMask); std::string nearFarMode(""); if (arguments.read("--near-far-mode",nearFarMode)) { if (nearFarMode=="COMPUTE_NEAR_USING_PRIMITIVES") settings->setComputeNearFarModeOverride(osg::CullSettings::COMPUTE_NEAR_USING_PRIMITIVES); else if (nearFarMode=="COMPUTE_NEAR_FAR_USING_PRIMITIVES") settings->setComputeNearFarModeOverride(osg::CullSettings::COMPUTE_NEAR_FAR_USING_PRIMITIVES); else if (nearFarMode=="DO_NOT_COMPUTE_NEAR_FAR") settings->setComputeNearFarModeOverride(osg::CullSettings::DO_NOT_COMPUTE_NEAR_FAR); else if (nearFarMode=="COMPUTE_NEAR_FAR_USING_BOUNDING_VOLUMES") settings->setComputeNearFarModeOverride(osg::CullSettings::COMPUTE_NEAR_FAR_USING_BOUNDING_VOLUMES); OSG_NOTICE<<"ComputeNearFarModeOverride set to "; switch(settings->getComputeNearFarModeOverride()) { case(osg::CullSettings::COMPUTE_NEAR_FAR_USING_BOUNDING_VOLUMES): OSG_NOTICE<<"COMPUTE_NEAR_FAR_USING_BOUNDING_VOLUMES"; break; case(osg::CullSettings::COMPUTE_NEAR_USING_PRIMITIVES): OSG_NOTICE<<"COMPUTE_NEAR_USING_PRIMITIVES"; break; case(osg::CullSettings::COMPUTE_NEAR_FAR_USING_PRIMITIVES): OSG_NOTICE<<"COMPUTE_NEAR_FAR_USING_PRIMITIVES"; break; case(osg::CullSettings::DO_NOT_COMPUTE_NEAR_FAR): OSG_NOTICE<<"DO_NOT_COMPUTE_NEAR_FAR"; break; } OSG_NOTICE<<std::endl; } double distance; if (arguments.read("--max-shadow-distance",distance)) { settings->setMaximumShadowMapDistance(distance); OSG_NOTICE<<"MaximumShadowMapDistance set to "<<settings->getMaximumShadowMapDistance()<<std::endl; } osg::ref_ptr<osgShadow::MinimalShadowMap> msm = NULL; if (arguments.read("--no-shadows")) { OSG_NOTICE<<"Not using a ShadowTechnique"<<std::endl; shadowedScene->setShadowTechnique(0); } else if (arguments.read("--sv")) { // hint to tell viewer to request stencil buffer when setting up windows 
        osg::DisplaySettings::instance()->setMinimumNumStencilBits(8);

        osg::ref_ptr<osgShadow::ShadowVolume> sv = new osgShadow::ShadowVolume;
        sv->setDynamicShadowVolumes(updateLightPosition);
        while (arguments.read("--two-sided")) sv->setDrawMode(osgShadow::ShadowVolumeGeometry::STENCIL_TWO_SIDED);
        while (arguments.read("--two-pass")) sv->setDrawMode(osgShadow::ShadowVolumeGeometry::STENCIL_TWO_PASS);

        shadowedScene->setShadowTechnique(sv.get());
    }
    else if (arguments.read("--st"))
    {
        osg::ref_ptr<osgShadow::ShadowTexture> st = new osgShadow::ShadowTexture;
        shadowedScene->setShadowTechnique(st.get());
    }
    else if (arguments.read("--stsm"))
    {
        osg::ref_ptr<osgShadow::StandardShadowMap> st = new osgShadow::StandardShadowMap;
        shadowedScene->setShadowTechnique(st.get());
    }
    else if (arguments.read("--pssm"))
    {
        int mapcount = 3;
        while (arguments.read("--mapcount", mapcount));
        osg::ref_ptr<osgShadow::ParallelSplitShadowMap> pssm = new osgShadow::ParallelSplitShadowMap(NULL,mapcount);

        int mapres = 1024;
        while (arguments.read("--mapres", mapres)) pssm->setTextureResolution(mapres);

        while (arguments.read("--debug-color")) { pssm->setDebugColorOn(); }

        int minNearSplit=0;
        while (arguments.read("--minNearSplit", minNearSplit))
            if ( minNearSplit > 0 )
            {
                pssm->setMinNearDistanceForSplits(minNearSplit);
                std::cout << "ParallelSplitShadowMap : setMinNearDistanceForSplits(" << minNearSplit <<")" << std::endl;
            }

        int maxfardist = 0;
        while (arguments.read("--maxFarDist", maxfardist))
            if ( maxfardist > 0 )
            {
                pssm->setMaxFarDistance(maxfardist);
                std::cout << "ParallelSplitShadowMap : setMaxFarDistance(" << maxfardist<<")" << std::endl;
            }

        int moveVCamFactor = 0;
        while (arguments.read("--moveVCamFactor", moveVCamFactor))
            if ( moveVCamFactor > 0 )
            {
                pssm->setMoveVCamBehindRCamFactor(moveVCamFactor);
                std::cout << "ParallelSplitShadowMap : setMoveVCamBehindRCamFactor(" << moveVCamFactor<<")" << std::endl;
            }

        double polyoffsetfactor = pssm->getPolygonOffset().x();
        double polyoffsetunit = pssm->getPolygonOffset().y();
        while (arguments.read("--PolyOffset-Factor", polyoffsetfactor));
        while (arguments.read("--PolyOffset-Unit", polyoffsetunit));
        pssm->setPolygonOffset(osg::Vec2(polyoffsetfactor,polyoffsetunit));

        shadowedScene->setShadowTechnique(pssm.get());
    }
    else if (arguments.read("--ssm"))
    {
        osg::ref_ptr<osgShadow::SoftShadowMap> sm = new osgShadow::SoftShadowMap;
        shadowedScene->setShadowTechnique(sm.get());
    }
    else if( arguments.read("--vdsm") )
    {
        while( arguments.read("--debugHUD") ) settings->setDebugDraw( true );
        if (arguments.read("--persp")) settings->setShadowMapProjectionHint(osgShadow::ShadowSettings::PERSPECTIVE_SHADOW_MAP);
        if (arguments.read("--ortho")) settings->setShadowMapProjectionHint(osgShadow::ShadowSettings::ORTHOGRAPHIC_SHADOW_MAP);

        unsigned int unit=1;
        if (arguments.read("--unit",unit)) settings->setBaseShadowTextureUnit(unit);

        double n=0.0;
        if (arguments.read("-n",n)) settings->setMinimumShadowMapNearFarRatio(n);

        unsigned int numShadowMaps;
        if (arguments.read("--num-sm",numShadowMaps)) settings->setNumShadowMapsPerLight(numShadowMaps);

        if (arguments.read("--parallel-split") || arguments.read("--ps") ) settings->setMultipleShadowMapHint(osgShadow::ShadowSettings::PARALLEL_SPLIT);
        if (arguments.read("--cascaded")) settings->setMultipleShadowMapHint(osgShadow::ShadowSettings::CASCADED);

        int mapres = 1024;
        while (arguments.read("--mapres", mapres)) settings->setTextureSize(osg::Vec2s(mapres,mapres));

        osg::ref_ptr<osgShadow::ViewDependentShadowMap> vdsm = new osgShadow::ViewDependentShadowMap;
shadowedScene->setShadowTechnique(vdsm.get()); } else if ( arguments.read("--lispsm") ) { if( arguments.read( "--ViewBounds" ) ) msm = new osgShadow::LightSpacePerspectiveShadowMapVB; else if( arguments.read( "--CullBounds" ) ) msm = new osgShadow::LightSpacePerspectiveShadowMapCB; else // if( arguments.read( "--DrawBounds" ) ) // default msm = new osgShadow::LightSpacePerspectiveShadowMapDB; } else if( arguments.read("--msm") ) { if( arguments.read( "--ViewBounds" ) ) msm = new osgShadow::MinimalShadowMap; else if( arguments.read( "--CullBounds" ) ) msm = new osgShadow::MinimalCullBoundsShadowMap; else // if( arguments.read( "--DrawBounds" ) ) // default msm = new osgShadow::MinimalDrawBoundsShadowMap; } else /* if (arguments.read("--sm")) */ { osg::ref_ptr<osgShadow::ShadowMap> sm = new osgShadow::ShadowMap; shadowedScene->setShadowTechnique(sm.get()); int mapres = 1024; while (arguments.read("--mapres", mapres)) sm->setTextureSize(osg::Vec2s(mapres,mapres)); } if( msm )// Set common MSM & LISPSM arguments { shadowedScene->setShadowTechnique( msm.get() ); while( arguments.read("--debugHUD") ) msm->setDebugDraw( true ); float minLightMargin = 10.f; float maxFarPlane = 0; unsigned int texSize = 1024; unsigned int baseTexUnit = 0; unsigned int shadowTexUnit = 1; while ( arguments.read("--moveVCamFactor", minLightMargin ) ); while ( arguments.read("--minLightMargin", minLightMargin ) ); while ( arguments.read("--maxFarDist", maxFarPlane ) ); while ( arguments.read("--mapres", texSize )); while ( arguments.read("--baseTextureUnit", baseTexUnit) ); while ( arguments.read("--shadowTextureUnit", shadowTexUnit) ); msm->setMinLightMargin( minLightMargin ); msm->setMaxFarPlane( maxFarPlane ); msm->setTextureSize( osg::Vec2s( texSize, texSize ) ); msm->setShadowTextureCoordIndex( shadowTexUnit ); msm->setShadowTextureUnit( shadowTexUnit ); msm->setBaseTextureCoordIndex( baseTexUnit ); msm->setBaseTextureUnit( baseTexUnit ); } OSG_INFO<<"shadowedScene->getShadowTechnique()="<<shadowedScene->getShadowTechnique()<<std::endl; osg::ref_ptr<osg::Node> model = osgDB::readNodeFiles(arguments); if (model.valid()) { model->setNodeMask(CastsShadowTraversalMask | ReceivesShadowTraversalMask); } else { model = createTestModel(arguments); } // get the bounds of the model. 
osg::ComputeBoundsVisitor cbbv; model->accept(cbbv); osg::BoundingBox bb = cbbv.getBoundingBox(); if (lightpos.w()==1.0 && !keepLightPos) { lightpos.x() = bb.xMin()+(bb.xMax()-bb.xMin())*lightpos.x(); lightpos.y() = bb.yMin()+(bb.yMax()-bb.yMin())*lightpos.y(); lightpos.z() = bb.zMin()+(bb.zMax()-bb.zMin())*lightpos.z(); } if ( arguments.read("--base")) { osg::Geode* geode = new osg::Geode; osg::Vec3 widthVec(bb.radius(), 0.0f, 0.0f); osg::Vec3 depthVec(0.0f, bb.radius(), 0.0f); osg::Vec3 centerBase( (bb.xMin()+bb.xMax())*0.5f, (bb.yMin()+bb.yMax())*0.5f, bb.zMin()-bb.radius()*0.1f ); geode->addDrawable( osg::createTexturedQuadGeometry( centerBase-widthVec*1.5f-depthVec*1.5f, widthVec*3.0f, depthVec*3.0f) ); geode->setNodeMask(shadowedScene->getReceivesShadowTraversalMask()); geode->getOrCreateStateSet()->setTextureAttributeAndModes(0, new osg::Texture2D(osgDB::readImageFile("Images/lz.rgb"))); shadowedScene->addChild(geode); } osg::ref_ptr<osg::LightSource> ls = new osg::LightSource; ls->getLight()->setPosition(lightpos); if (spotlight) { osg::Vec3 center = spotLookat; osg::Vec3 lightdir = center - osg::Vec3(lightpos.x(), lightpos.y(), lightpos.z()); lightdir.normalize(); ls->getLight()->setDirection(lightdir); ls->getLight()->setSpotCutoff(25.0f); //set the LightSource, only for checking, there is only 1 light in the scene osgShadow::ShadowMap* shadowMap = dynamic_cast<osgShadow::ShadowMap*>(shadowedScene->getShadowTechnique()); if( shadowMap ) shadowMap->setLight(ls.get()); } if ( arguments.read("--coloured-light")) { ls->getLight()->setAmbient(osg::Vec4(1.0,0.0,0.0,1.0)); ls->getLight()->setDiffuse(osg::Vec4(0.0,1.0,0.0,1.0)); } else { ls->getLight()->setAmbient(osg::Vec4(0.2,0.2,0.2,1.0)); ls->getLight()->setDiffuse(osg::Vec4(0.8,0.8,0.8,1.0)); } shadowedScene->addChild(model.get()); shadowedScene->addChild(ls.get()); viewer.setSceneData(shadowedScene.get()); osg::ref_ptr< DumpShadowVolumesHandler > dumpShadowVolumes = new DumpShadowVolumesHandler; viewer.addEventHandler(new ChangeFOVHandler(viewer.getCamera())); viewer.addEventHandler( dumpShadowVolumes.get() ); // create the windows and run the threads. viewer.realize(); if (fov!=0.0) { double fovy, aspectRatio, zNear, zFar; viewer.getCamera()->getProjectionMatrix().getPerspective(fovy, aspectRatio, zNear, zFar); std::cout << "Setting FOV to " << fov << std::endl; viewer.getCamera()->getProjectionMatrix().makePerspective(fov, aspectRatio, zNear, zFar); } // it is done after viewer.realize() so that the windows are already initialized if ( arguments.read("--debugHUD")) { osgViewer::Viewer::Windows windows; viewer.getWindows(windows); if (windows.empty()) return 1; osgShadow::ShadowMap* sm = dynamic_cast<osgShadow::ShadowMap*>(shadowedScene->getShadowTechnique()); if( sm ) { osg::ref_ptr<osg::Camera> hudCamera = sm->makeDebugHUD(); // set up cameras to rendering on the first window available. hudCamera->setGraphicsContext(windows[0]); hudCamera->setViewport(0,0,windows[0]->getTraits()->width, windows[0]->getTraits()->height); viewer.addSlave(hudCamera.get(), false); } } osg::ref_ptr<LightAnimationHandler> lightAnimationHandler = updateLightPosition ? 
new LightAnimationHandler : 0; if (lightAnimationHandler) viewer.addEventHandler(lightAnimationHandler.get()); // osgDB::writeNodeFile(*group,"test.osgt"); while (!viewer.done()) { { osgShadow::MinimalShadowMap * msm = dynamic_cast<osgShadow::MinimalShadowMap*>( shadowedScene->getShadowTechnique() ); if( msm ) { // If scene decorated by CoordinateSystemNode try to find localToWorld // and set modellingSpaceToWorld matrix to optimize scene bounds computation osg::NodePath np = viewer.getCoordinateSystemNodePath(); if( !np.empty() ) { osg::CoordinateSystemNode * csn = dynamic_cast<osg::CoordinateSystemNode *>( np.back() ); if( csn ) { osg::Vec3d pos = viewer.getCameraManipulator()->getMatrix().getTrans(); msm->setModellingSpaceToWorldTransform ( csn->computeLocalCoordinateFrame( pos ) ); } } } } if (lightAnimationHandler.valid() && lightAnimationHandler ->getAnimating()) { float t = viewer.getFrameStamp()->getSimulationTime(); if (lightpos.w()==1.0) { lightpos.set(bb.center().x()+sinf(t)*bb.radius(), bb.center().y() + cosf(t)*bb.radius(), bb.zMax() + bb.radius()*3.0f ,1.0f); } else { lightpos.set(sinf(t),cosf(t),1.0f,0.0f); } ls->getLight()->setPosition(lightpos); osg::Vec3f lightDir(-lightpos.x(),-lightpos.y(),-lightpos.z()); if(spotlight) lightDir = osg::Vec3(bb.center().x()+sinf(t)*bb.radius()/2.0, bb.center().y() + cosf(t)*bb.radius()/2.0, bb.center().z()) - osg::Vec3(lightpos.x(), lightpos.y(), lightpos.z()) ; lightDir.normalize(); ls->getLight()->setDirection(lightDir); } if( dumpShadowVolumes->get() ) { dumpShadowVolumes->set( false ); static int dumpFileNo = 0; dumpFileNo ++; char filename[256]; std::sprintf( filename, "shadowDump%d.osgt", dumpFileNo ); osgShadow::MinimalShadowMap * msm = dynamic_cast<osgShadow::MinimalShadowMap*>( shadowedScene->getShadowTechnique() ); if( msm ) msm->setDebugDump( filename ); } viewer.frame(); } return 0; }
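// The example above layers many command-line switches on top of one core pattern:
// a ShadowedScene node with a shadow technique, a light source, and traversal masks.
// The sketch below is a minimal, standalone illustration of that pattern; the model
// filename "cessna.osgt", the mask values, and the light position are placeholders,
// not values taken from the example.
#include <osg/LightSource>
#include <osgDB/ReadFile>
#include <osgShadow/ShadowedScene>
#include <osgShadow/ShadowMap>
#include <osgViewer/Viewer>

int main(int, char**)
{
    const unsigned int castsMask    = 0x2;   // illustrative mask values
    const unsigned int receivesMask = 0x1;

    osg::ref_ptr<osgShadow::ShadowedScene> shadowedScene = new osgShadow::ShadowedScene;
    shadowedScene->setCastsShadowTraversalMask(castsMask);
    shadowedScene->setReceivesShadowTraversalMask(receivesMask);

    // Use the basic ShadowMap technique with a 1024x1024 depth texture.
    osg::ref_ptr<osgShadow::ShadowMap> shadowMap = new osgShadow::ShadowMap;
    shadowMap->setTextureSize(osg::Vec2s(1024, 1024));
    shadowedScene->setShadowTechnique(shadowMap.get());

    // The model both casts and receives shadows.
    osg::ref_ptr<osg::Node> model = osgDB::readNodeFile("cessna.osgt");
    if (!model) return 1;
    model->setNodeMask(castsMask | receivesMask);
    shadowedScene->addChild(model.get());

    // Directional light (w == 0.0 makes the position a direction).
    osg::ref_ptr<osg::LightSource> ls = new osg::LightSource;
    ls->getLight()->setPosition(osg::Vec4(0.5f, 0.25f, 0.8f, 0.0f));
    shadowedScene->addChild(ls.get());

    osgViewer::Viewer viewer;
    viewer.setSceneData(shadowedScene.get());
    return viewer.run();
}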
int main(int argc, char **argv)
{
    osg::ArgumentParser arguments(&argc, argv);

    // initialize the viewer.
    osgViewer::Viewer viewer(arguments);

    osg::DisplaySettings *ds = viewer.getDisplaySettings() ? viewer.getDisplaySettings() : osg::DisplaySettings::instance().get();
    ds->readCommandLine(arguments);

    osg::ref_ptr<osg::Node> model = osgDB::readNodeFiles(arguments);
    if (!model)
    {
        OSG_NOTICE << "No models loaded, please specify a model file on the command line" << std::endl;
        return 1;
    }

    OSG_NOTICE << "Stereo " << ds->getStereo() << std::endl;
    OSG_NOTICE << "StereoMode " << ds->getStereoMode() << std::endl;

    viewer.setSceneData(model.get());

    // add the state manipulator
    viewer.addEventHandler(new osgGA::StateSetManipulator(viewer.getCamera()->getOrCreateStateSet()));

    // add the stats handler
    viewer.addEventHandler(new osgViewer::StatsHandler);

    // add camera manipulator
    viewer.setCameraManipulator(new osgGA::TrackballManipulator());

    OSG_NOTICE << "KeystoneFileNames.size()=" << ds->getKeystoneFileNames().size() << std::endl;
    for (osg::DisplaySettings::FileNames::iterator itr = ds->getKeystoneFileNames().begin(); itr != ds->getKeystoneFileNames().end(); ++itr)
    {
        OSG_NOTICE << " keystone filename = " << *itr << std::endl;
    }

    ds->setKeystoneHint(true);

    if (!ds->getKeystoneFileNames().empty())
    {
        for (osg::DisplaySettings::Objects::iterator itr = ds->getKeystones().begin(); itr != ds->getKeystones().end(); ++itr)
        {
            osgViewer::Keystone *keystone = dynamic_cast<osgViewer::Keystone*>(itr->get());
            if (keystone)
            {
                std::string filename;
                keystone->getUserValue("filename", filename);
                OSG_NOTICE << "Loaded keystone " << filename << ", " << keystone << std::endl;
            }
        }
    }

    viewer.apply(new osgViewer::SingleScreen(0));

    viewer.realize();

    while (!viewer.done())
    {
        viewer.advance();
        viewer.eventTraversal();
        viewer.updateTraversal();
        viewer.renderingTraversals();
    }
    return 0;
}
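// The example above relies on DisplaySettings values supplied on the command line
// (stereo mode, keystone files) via ds->readCommandLine(arguments). The sketch below
// shows the same settings being forced programmatically instead; ANAGLYPHIC, the eye
// separation value, and the "cow.osgt" model are illustrative choices, not taken from
// the example.
#include <osg/DisplaySettings>
#include <osgDB/ReadFile>
#include <osgViewer/Viewer>

int main(int, char**)
{
    osg::DisplaySettings* ds = osg::DisplaySettings::instance().get();
    ds->setStereo(true);
    ds->setStereoMode(osg::DisplaySettings::ANAGLYPHIC); // red/cyan stereo
    ds->setEyeSeparation(0.06f);                          // metres, illustrative

    osgViewer::Viewer viewer;
    osg::ref_ptr<osg::Node> model = osgDB::readNodeFile("cow.osgt"); // placeholder model
    if (!model) return 1;
    viewer.setSceneData(model.get());
    return viewer.run();
}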
int main( int argc, char **argv )
{
    // use an ArgumentParser object to manage the program arguments.
    osg::ArgumentParser arguments(&argc,argv);

    // set up the usage document, in case we need to print out how to use this program.
    arguments.getApplicationUsage()->setDescription(arguments.getApplicationName()+" is the example which demonstrates the use of 3D textures.");
    arguments.getApplicationUsage()->setCommandLineUsage(arguments.getApplicationName()+" [options] filename ...");
    arguments.getApplicationUsage()->addCommandLineOption("-h or --help","Display this information");
    arguments.getApplicationUsage()->addCommandLineOption("--images [filenames]","Specify a stack of 2d images to build the 3d volume from.");
    arguments.getApplicationUsage()->addCommandLineOption("--shader","Use OpenGL Shading Language. (default)");
    arguments.getApplicationUsage()->addCommandLineOption("--no-shader","Disable use of OpenGL Shading Language.");
    arguments.getApplicationUsage()->addCommandLineOption("--gpu-tf","Apply the transfer function on the GPU. (default)");
    arguments.getApplicationUsage()->addCommandLineOption("--cpu-tf","Apply the transfer function on the CPU.");
    arguments.getApplicationUsage()->addCommandLineOption("--mip","Use Maximum Intensity Projection (MIP) filtering.");
    arguments.getApplicationUsage()->addCommandLineOption("--isosurface","Use isosurface rendering.");
    arguments.getApplicationUsage()->addCommandLineOption("--light","Use normals computed on the GPU to render a lit volume.");
    arguments.getApplicationUsage()->addCommandLineOption("-n","Use normals computed on the GPU to render a lit volume.");
    arguments.getApplicationUsage()->addCommandLineOption("--xSize <size>","Relative width of rendered brick.");
    arguments.getApplicationUsage()->addCommandLineOption("--ySize <size>","Relative length of rendered brick.");
    arguments.getApplicationUsage()->addCommandLineOption("--zSize <size>","Relative height of rendered brick.");
    arguments.getApplicationUsage()->addCommandLineOption("--maxTextureSize <size>","Set the texture maximum resolution in the s,t,r (x,y,z) dimensions.");
    arguments.getApplicationUsage()->addCommandLineOption("--s_maxTextureSize <size>","Set the texture maximum resolution in the s (x) dimension.");
    arguments.getApplicationUsage()->addCommandLineOption("--t_maxTextureSize <size>","Set the texture maximum resolution in the t (y) dimension.");
    arguments.getApplicationUsage()->addCommandLineOption("--r_maxTextureSize <size>","Set the texture maximum resolution in the r (z) dimension.");
    arguments.getApplicationUsage()->addCommandLineOption("--modulate-alpha-by-luminance","For each pixel multiply the alpha value by the luminance.");
    arguments.getApplicationUsage()->addCommandLineOption("--replace-alpha-with-luminance","For each pixel set the alpha value to the luminance.");
    arguments.getApplicationUsage()->addCommandLineOption("--replace-rgb-with-luminance","For each rgb pixel convert to the luminance.");
    arguments.getApplicationUsage()->addCommandLineOption("--num-components <num>","Set the number of components in the target image.");
    arguments.getApplicationUsage()->addCommandLineOption("--no-rescale","Disable the rescaling of the pixel data to the 0.0 to 1.0 range.");
    arguments.getApplicationUsage()->addCommandLineOption("--rescale","Enable the rescaling of the pixel data to the 0.0 to 1.0 range (default).");
    arguments.getApplicationUsage()->addCommandLineOption("--shift-min-to-zero","Shift the pixel data so min value is 0.0.");
arguments.getApplicationUsage()->addCommandLineOption("--sequence-length <num>","Set the length of time that a sequence of images with run for."); arguments.getApplicationUsage()->addCommandLineOption("--sd <num>","Short hand for --sequence-length"); arguments.getApplicationUsage()->addCommandLineOption("--sdwm <num>","Set the SampleDensityWhenMovingProperty to specified value"); arguments.getApplicationUsage()->addCommandLineOption("--lod","Enable techniques to reduce the level of detail when moving."); // arguments.getApplicationUsage()->addCommandLineOption("--raw <sizeX> <sizeY> <sizeZ> <numberBytesPerComponent> <numberOfComponents> <endian> <filename>","read a raw image data"); // construct the viewer. osgViewer::Viewer viewer(arguments); // add the window size toggle handler viewer.addEventHandler(new osgViewer::WindowSizeHandler); { osg::ref_ptr<osgGA::KeySwitchMatrixManipulator> keyswitchManipulator = new osgGA::KeySwitchMatrixManipulator; keyswitchManipulator->addMatrixManipulator( '1', "Trackball", new osgGA::TrackballManipulator() ); osgGA::FlightManipulator* flightManipulator = new osgGA::FlightManipulator(); flightManipulator->setYawControlMode(osgGA::FlightManipulator::NO_AUTOMATIC_YAW); keyswitchManipulator->addMatrixManipulator( '2', "Flight", flightManipulator ); viewer.setCameraManipulator( keyswitchManipulator.get() ); } // add the stats handler viewer.addEventHandler(new osgViewer::StatsHandler); viewer.getCamera()->setClearColor(osg::Vec4(0.0f,0.0f,0.0f,0.0f)); // if user request help write it out to cout. if (arguments.read("-h") || arguments.read("--help")) { arguments.getApplicationUsage()->write(std::cout); return 1; } std::string outputFile; while (arguments.read("-o",outputFile)) {} osg::ref_ptr<osg::TransferFunction1D> transferFunction; std::string tranferFunctionFile; while (arguments.read("--tf",tranferFunctionFile)) { transferFunction = readTransferFunctionFile(tranferFunctionFile); } while (arguments.read("--tf-255",tranferFunctionFile)) { transferFunction = readTransferFunctionFile(tranferFunctionFile,1.0f/255.0f); } while(arguments.read("--test")) { transferFunction = new osg::TransferFunction1D; transferFunction->setColor(0.0, osg::Vec4(1.0,0.0,0.0,0.0)); transferFunction->setColor(0.5, osg::Vec4(1.0,1.0,0.0,0.5)); transferFunction->setColor(1.0, osg::Vec4(0.0,0.0,1.0,1.0)); } while(arguments.read("--test2")) { transferFunction = new osg::TransferFunction1D; transferFunction->setColor(0.0, osg::Vec4(1.0,0.0,0.0,0.0)); transferFunction->setColor(0.5, osg::Vec4(1.0,1.0,0.0,0.5)); transferFunction->setColor(1.0, osg::Vec4(0.0,0.0,1.0,1.0)); transferFunction->assign(transferFunction->getColorMap()); } { // deprecated options bool invalidOption = false; unsigned int numSlices=500; while (arguments.read("-s",numSlices)) { OSG_NOTICE<<"Warning: -s option no longer supported."<<std::endl; invalidOption = true; } float sliceEnd=1.0f; while (arguments.read("--clip",sliceEnd)) { OSG_NOTICE<<"Warning: --clip option no longer supported."<<std::endl; invalidOption = true; } if (invalidOption) return 1; } float xMultiplier=1.0f; while (arguments.read("--xMultiplier",xMultiplier)) {} float yMultiplier=1.0f; while (arguments.read("--yMultiplier",yMultiplier)) {} float zMultiplier=1.0f; while (arguments.read("--zMultiplier",zMultiplier)) {} float alphaFunc=0.02f; while (arguments.read("--alphaFunc",alphaFunc)) {} ShadingModel shadingModel = Standard; while(arguments.read("--mip")) shadingModel = MaximumIntensityProjection; while (arguments.read("--isosurface") || 
arguments.read("--iso-surface")) shadingModel = Isosurface; while (arguments.read("--light") || arguments.read("-n")) shadingModel = Light; float xSize=0.0f, ySize=0.0f, zSize=0.0f; while (arguments.read("--xSize",xSize)) {} while (arguments.read("--ySize",ySize)) {} while (arguments.read("--zSize",zSize)) {} osg::ref_ptr<TestSupportOperation> testSupportOperation = new TestSupportOperation; viewer.setRealizeOperation(testSupportOperation.get()); viewer.realize(); int maximumTextureSize = testSupportOperation->maximumTextureSize; int s_maximumTextureSize = maximumTextureSize; int t_maximumTextureSize = maximumTextureSize; int r_maximumTextureSize = maximumTextureSize; while(arguments.read("--maxTextureSize",maximumTextureSize)) { s_maximumTextureSize = maximumTextureSize; t_maximumTextureSize = maximumTextureSize; r_maximumTextureSize = maximumTextureSize; } while(arguments.read("--s_maxTextureSize",s_maximumTextureSize)) {} while(arguments.read("--t_maxTextureSize",t_maximumTextureSize)) {} while(arguments.read("--r_maxTextureSize",r_maximumTextureSize)) {} // set up colour space operation. osg::ColorSpaceOperation colourSpaceOperation = osg::NO_COLOR_SPACE_OPERATION; osg::Vec4 colourModulate(0.25f,0.25f,0.25f,0.25f); while(arguments.read("--modulate-alpha-by-luminance")) { colourSpaceOperation = osg::MODULATE_ALPHA_BY_LUMINANCE; } while(arguments.read("--modulate-alpha-by-colour", colourModulate.x(),colourModulate.y(),colourModulate.z(),colourModulate.w() )) { colourSpaceOperation = osg::MODULATE_ALPHA_BY_COLOR; } while(arguments.read("--replace-alpha-with-luminance")) { colourSpaceOperation = osg::REPLACE_ALPHA_WITH_LUMINANCE; } while(arguments.read("--replace-rgb-with-luminance")) { colourSpaceOperation = osg::REPLACE_RGB_WITH_LUMINANCE; } enum RescaleOperation { NO_RESCALE, RESCALE_TO_ZERO_TO_ONE_RANGE, SHIFT_MIN_TO_ZERO }; RescaleOperation rescaleOperation = RESCALE_TO_ZERO_TO_ONE_RANGE; while(arguments.read("--no-rescale")) rescaleOperation = NO_RESCALE; while(arguments.read("--rescale")) rescaleOperation = RESCALE_TO_ZERO_TO_ONE_RANGE; while(arguments.read("--shift-min-to-zero")) rescaleOperation = SHIFT_MIN_TO_ZERO; bool resizeToPowerOfTwo = false; unsigned int numComponentsDesired = 0; while(arguments.read("--num-components", numComponentsDesired)) {} bool useManipulator = false; while(arguments.read("--manipulator") || arguments.read("-m")) { useManipulator = true; } bool useShader = true; while(arguments.read("--shader")) { useShader = true; } while(arguments.read("--no-shader")) { useShader = false; } bool gpuTransferFunction = true; while(arguments.read("--gpu-tf")) { gpuTransferFunction = true; } while(arguments.read("--cpu-tf")) { gpuTransferFunction = false; } double sampleDensityWhenMoving = 0.0; while(arguments.read("--sdwm", sampleDensityWhenMoving)) {} while(arguments.read("--lod")) { sampleDensityWhenMoving = 0.02; } double sequenceLength = 10.0; while(arguments.read("--sequence-duration", sequenceLength) || arguments.read("--sd", sequenceLength)) {} typedef std::list< osg::ref_ptr<osg::Image> > Images; Images images; std::string vh_filename; while (arguments.read("--vh", vh_filename)) { std::string raw_filename, transfer_filename; int xdim(0), ydim(0), zdim(0); osgDB::ifstream header(vh_filename.c_str()); if (header) { header >> raw_filename >> transfer_filename >> xdim >> ydim >> zdim >> xSize >> ySize >> zSize; } if (xdim*ydim*zdim==0) { std::cout<<"Error in reading volume header "<<vh_filename<<std::endl; return 1; } if (!raw_filename.empty()) { 
images.push_back(readRaw(xdim, ydim, zdim, 1, 1, "little", raw_filename)); } if (!transfer_filename.empty()) { osgDB::ifstream fin(transfer_filename.c_str()); if (fin) { osg::TransferFunction1D::ColorMap colorMap; float value = 0.0; while(fin && value<=1.0) { float red, green, blue, alpha; fin >> red >> green >> blue >> alpha; if (fin) { colorMap[value] = osg::Vec4(red/255.0f,green/255.0f,blue/255.0f,alpha/255.0f); std::cout<<"value = "<<value<<" ("<<red<<", "<<green<<", "<<blue<<", "<<alpha<<")"; std::cout<<" ("<<colorMap[value]<<")"<<std::endl; } value += 1/255.0; } if (colorMap.empty()) { std::cout<<"Error: No values read from transfer function file: "<<transfer_filename<<std::endl; return 0; } transferFunction = new osg::TransferFunction1D; transferFunction->assign(colorMap); } } }
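// The "--vh" branch above rebuilds an osg::TransferFunction1D from a text file of
// "red green blue alpha" rows sampled at 1/255 steps. The sketch below builds the
// same kind of object directly in code, which can be handy for quick tests; the
// colour stops chosen here are purely illustrative.
#include <osg/TransferFunction>

osg::ref_ptr<osg::TransferFunction1D> makeExampleTransferFunction()
{
    osg::TransferFunction1D::ColorMap colorMap;
    colorMap[0.0f] = osg::Vec4(0.0f, 0.0f, 0.0f, 0.0f); // fully transparent at the low end
    colorMap[0.5f] = osg::Vec4(1.0f, 0.5f, 0.0f, 0.5f); // semi-transparent orange mid-range
    colorMap[1.0f] = osg::Vec4(1.0f, 1.0f, 1.0f, 1.0f); // opaque white at the high end

    osg::ref_ptr<osg::TransferFunction1D> tf = new osg::TransferFunction1D;
    tf->assign(colorMap);
    return tf;
}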
int main( int argc, char **argv ) { // use an ArgumentParser object to manage the program arguments. osg::ArgumentParser arguments(&argc,argv); if (argc<2) { std::cout << argv[0] <<": requires filename argument." << std::endl; return 1; } unsigned int numRepeats = 2; if (arguments.read("--repeat",numRepeats) || arguments.read("-r",numRepeats) || arguments.read("--repeat") || arguments.read("-r")) { bool sharedModel = arguments.read("--shared"); bool enableVBO = arguments.read("--vbo"); osg::ref_ptr<osg::Node> model; if (sharedModel) { model = osgDB::readNodeFiles(arguments); if (!model) return 0; if (enableVBO) { EnableVBOVisitor enableVBOs; model->accept(enableVBOs); } } osgViewer::Viewer::ThreadingModel threadingModel = osgViewer::Viewer::AutomaticSelection; while (arguments.read("-s")) { threadingModel = osgViewer::Viewer::SingleThreaded; } while (arguments.read("-g")) { threadingModel = osgViewer::Viewer::CullDrawThreadPerContext; } while (arguments.read("-d")) { threadingModel = osgViewer::Viewer::DrawThreadPerContext; } while (arguments.read("-c")) { threadingModel = osgViewer::Viewer::CullThreadPerCameraDrawThreadPerContext; } for(unsigned int i=0; i<numRepeats; ++i) { osg::notify(osg::NOTICE)<<"+++++++++++++ New viewer ++++++++++++"<<std::endl; { osgViewer::Viewer viewer; viewer.setThreadingModel(threadingModel); if (sharedModel) viewer.setSceneData(model.get()); else { osg::ref_ptr<osg::Node> node = osgDB::readNodeFiles(arguments); if (!node) return 0; if (enableVBO) { EnableVBOVisitor enableVBOs; node->accept(enableVBOs); } viewer.setSceneData(node.get()); } viewer.run(); } osg::notify(osg::NOTICE)<<"------------ Viewer ended ----------"<<std::endl<<std::endl; } return 0; } std::string pathfile; osg::ref_ptr<osgGA::AnimationPathManipulator> apm = 0; while (arguments.read("-p",pathfile)) { apm = new osgGA::AnimationPathManipulator(pathfile); if (!apm.valid() || !(apm->valid()) ) { apm = 0; } } osgViewer::Viewer viewer(arguments); while (arguments.read("-s")) { viewer.setThreadingModel(osgViewer::Viewer::SingleThreaded); } while (arguments.read("-g")) { viewer.setThreadingModel(osgViewer::Viewer::CullDrawThreadPerContext); } while (arguments.read("-d")) { viewer.setThreadingModel(osgViewer::Viewer::DrawThreadPerContext); } while (arguments.read("-c")) { viewer.setThreadingModel(osgViewer::Viewer::CullThreadPerCameraDrawThreadPerContext); } bool limitNumberOfFrames = false; unsigned int maxFrames = 10; while (arguments.read("--run-till-frame-number",maxFrames)) { limitNumberOfFrames = true; } // alternative viewer window setups. 
while (arguments.read("-1")) { singleWindowMultipleCameras(viewer); } while (arguments.read("-2")) { multipleWindowMultipleCameras(viewer, false); } while (arguments.read("-3")) { multipleWindowMultipleCameras(viewer, true); } if (apm.valid()) viewer.setCameraManipulator(apm.get()); else viewer.setCameraManipulator( new osgGA::TrackballManipulator() ); viewer.addEventHandler(new osgViewer::StatsHandler); viewer.addEventHandler(new osgViewer::ThreadingHandler); std::string configfile; while (arguments.read("--config", configfile)) { osg::notify(osg::NOTICE)<<"Trying to read config file "<<configfile<<std::endl; osg::ref_ptr<osg::Object> object = osgDB::readObjectFile(configfile); osgViewer::View* view = dynamic_cast<osgViewer::View*>(object.get()); if (view) { osg::notify(osg::NOTICE)<<"Read config file succesfully"<<std::endl; } else { osg::notify(osg::NOTICE)<<"Failed to read config file : "<<configfile<<std::endl; return 1; } } while (arguments.read("--write-config", configfile)) { osgDB::writeObjectFile(viewer, configfile); } if (arguments.read("-m")) { ModelHandler* modelHandler = new ModelHandler; for(int i=1; i<arguments.argc();++i) { modelHandler->add(arguments[i]); } viewer.addEventHandler(modelHandler); } else { // load the scene. osg::ref_ptr<osg::Node> loadedModel = osgDB::readNodeFiles(arguments); if (!loadedModel) loadedModel = osgDB::readNodeFile("cow.osgt"); if (!loadedModel) { std::cout << argv[0] <<": No data loaded." << std::endl; return 1; } viewer.setSceneData(loadedModel.get()); } viewer.realize(); unsigned int numFrames = 0; while(!viewer.done() && !(limitNumberOfFrames && numFrames>=maxFrames)) { viewer.frame(); ++numFrames; } return 0; }