Example No. 1
0
	// Assembles the MOPED object-recognition pipeline: image undistortion,
	// SIFT feature extraction, ANN descriptor matching, mean-shift clustering,
	// then two pose-estimation passes (coarse POSE + FILTER, refined POSE2 +
	// FILTER2) and a status display. Compile-time flags
	// PERFORMANCE_TESTING_GPU / PERFORMANCE_TESTING_CPU select the SIFT
	// implementation; the GPU version is the default when neither is defined.
	void createPipeline( MopedPipeline &pipeline ) {
		
		pipeline.addAlg( "UNDISTORTED_IMAGE", new UTIL_UNDISTORT );
		
		//FEAT_SURF_GPU( Float Threshold ) 
		//pipeline.addAlg( "SURF", new FEAT_SURF_GPU(64) );
		
		// FEAT_SIFT_GPU( string ScaleOrigin, string Verbosity, string GPUDisplay ) 
		//pipeline.addAlg( "SIFT", new FEAT_SIFT_GPU("-1", "0", ":0.0") );

#ifdef PERFORMANCE_TESTING_GPU 
		pipeline.addAlg( "SIFT", new FEAT_SIFT_GPU("-1", "0", ":0.0") );

#else
#ifdef PERFORMANCE_TESTING_CPU
		pipeline.addAlg( "SIFT", new FEAT_SIFT_CPU("-1") );
#else
		// Default (no performance-testing flag defined): GPU SIFT.
		pipeline.addAlg( "SIFT", new FEAT_SIFT_GPU("-1", "0", ":0.0") );
#endif
#endif
		// FEAT_SIFT_CPU( string ScaleOrigin ) 

		
		//pipeline.addAlg( "FEAT_DISPLAY", new FEAT_DISPLAY( 2 ) );

		//MATCH_ANN_CPU( int DescriptorSize, string DescriptorType, float Quality, float Ratio )
		pipeline.addAlg( "MATCH_SIFT", new MATCH_ANN_CPU( 128, "SIFT", 5., 0.8) );
		//pipeline.addAlg( "MATCH_SURF", new MATCH_ANN_CPU(  64, "SURF", 5., 0.85) );
		//MATCH_FLANN_CPU( int DescriptorSize, string DescriptorType, float Precision, float Ratio )
		//pipeline.addAlg( "MATCH_SIFT", new MATCH_FLANN_CPU( 128, "SIFT", 0.01, 0.85) );
		//pipeline.addAlg( "MATCH_SURF", new MATCH_FLANN_CPU(  64, "SURF", 0.01, 0.85) );
		//pipeline.addAlg( "MATCH_SURF", new MATCH_FLANN_CPU(  64, "SURF", 0.8, 0.85) );
		//MATCH_HASH_CPU( int DescriptorSize, string DescriptorType, int NHashTables, int NDims, float Ratio, char Threshold, float SizeLimit )
		//pipeline.addAlg( "MATCH_SIFT", new MATCH_HASH_CPU( 128, "SIFT128", 64, 22, 0.8, 3, 16) );
		//MATCH_HASH_CPU( int DescriptorSize, string DescriptorType, int NHashTables, int NDims, float Ratio, char Threshold, float SizeLimit )
		//pipeline.addAlg( "MATCH_SURF", new MATCH_HASH_CPU( 64, "SURF", 64, 22, 0.8, 3, 16) );
		
		
		//MATCH_BRUTE_CPU( int DescriptorSize, string DescriptorType, float Ratio )
		//pipeline.addAlg( "CHECK_MATCH_SIFT128", new MATCH_BRUTE_CPU( 128, "SIFT128", 0.9) );
		
		//NO-DISP		pipeline.addAlg( "MATCH_DISPLAY", new MATCH_DISPLAY( DEFAULT_DISPLAY_LEVEL ) );

		//CLUSTER_MEAN_SHIFT_CPU( float Radius, float Merge, unsigned int MinPts, unsigned int MaxIterations ) 
		pipeline.addAlg( "CLUSTER", new CLUSTER_MEAN_SHIFT_CPU( 200, 20, 7, 100) );
		//NO-DISP		pipeline.addAlg( "CLUSTER_DISPLAY", new CLUSTER_DISPLAY( DEFAULT_DISPLAY_LEVEL ) );
		
		//POSE_RANSAC_LM_DIFF_CPU( int MaxRANSACTests, int MaxLMTests, int NPtsAlign, int MinNPtsObject, float ErrorThreshold )
		// NOTE(review): the active POSE call below passes 6 arguments, matching
		// the 6-parameter LBFGS signature documented further down (MaxRANSACTests,
		// MaxOptTests, NObjectsCluster, NPtsAlign, MinNPtsObject, ErrorThreshold)
		// rather than the 5-parameter list above — confirm against the
		// POSE_RANSAC_LM_DIFF_REPROJECTION_CPU header.
		//pipeline.addAlg( "POSE", new POSE_RANSAC_LM_DIFF_REPROJECTION_CPU( 24, 200, 5, 6, 100) );
		
		
		//POSE_RANSAC_LBFGS_REPROJECTION_CPU( int MaxRANSACTests, int MaxOptTests, int NObjectsCluster, int NPtsAlign, int MinNPtsObject, float ErrorThreshold )
		//pipeline.addAlg( "POSE", new POSE_RANSAC_LBFGS_REPROJECTION_CPU( 250, 200, 4, 5, 6, 10) );
		// Coarse pose estimation: many RANSAC tests, loose error threshold.
		pipeline.addAlg( "POSE", new POSE_RANSAC_LM_DIFF_REPROJECTION_CPU( 600, 200, 4, 5, 6, 10) );
		
		//NO-DISP		pipeline.addAlg( "POSE_DISPLAY", new POSE_DISPLAY( DEFAULT_DISPLAY_LEVEL ) );
		
		//FILTER_PROJECTION_CPU( int MinPoints, float FeatureDistance )
		// NOTE(review): calls below pass a third argument not listed above —
		// confirm its meaning against the FILTER_PROJECTION_CPU header.
		pipeline.addAlg( "FILTER", new FILTER_PROJECTION_CPU( 5, 4096., 2) );
		
		//pipeline.addAlg( "POSE2", new POSE_RANSAC_LBFGS_REPROJECTION_CPU( 24, 500, 4, 6, 8, 2) );
		// Refined pose estimation on the filtered hypotheses, then a stricter filter.
		pipeline.addAlg( "POSE2", new POSE_RANSAC_LM_DIFF_REPROJECTION_CPU( 100, 500, 4, 6, 8, 5) );
		
		pipeline.addAlg( "FILTER2", new FILTER_PROJECTION_CPU( 7, 4096., 3) );

#ifndef PERFORMANCE_TESTING		
		// Display stages are compiled out entirely during performance testing.
		//pipeline.addAlg( "POSE2_DISPLAY", new POSE_DISPLAY( DEFAULT_DISPLAY_LEVEL ) );
#endif
		
		// Prints per-stage timing/status information.
		pipeline.addAlg( "STATUS_DISPLAY", new STATUS_DISPLAY( DEFAULT_DISPLAY_LEVEL ) );
		
#ifndef PERFORMANCE_TESTING
		// Aggregate visualization of all pipeline stages (disabled).
		//pipeline.addAlg( "GLOBAL_DISPLAY", new GLOBAL_DISPLAY( 2 ) );
#endif
	}
Example No. 2
0
	// Depth-augmented MOPED pipeline: GPU SIFT extraction plus CPU stages that
	// use the depth map to filter matches and to back-project pose hypotheses.
	void createPipeline( MopedPipeline &pipeline ) {
		// Camera image undistortion.
		pipeline.addAlg( "UNDISTORTED_IMAGE", new UTIL_UNDISTORT );

		// Depth-fill stage (presumably patches gaps in the raw depth map
		// before any depth-dependent stage runs — confirm against header).
		pipeline.addAlg( "DEPTHFILL", new DEPTH_FILL_EXACT_CPU( 8, false ) );

		// GPU SIFT feature extraction.
		pipeline.addAlg( "SIFT", new FEAT_SIFT_GPU( "-1", "0", ":0.0" ) );

		// First depth-based filter pass over the extracted features.
		pipeline.addAlg( "DEPTHFILTER", new DEPTHFILTER_CPU( 64, 0.050000, 1 ) );

		// Adaptive FLANN matching of 128-dimensional SIFT descriptors.
		pipeline.addAlg( "MATCH_SIFT", new MATCH_ADAPTIVE_FLANN_CPU( 128, "SIFT", 8, 0.600000, 0.750000, 0.650000, 0.800000, 150, 50 ) );

		// Second, tighter depth-based filter pass after matching.
		pipeline.addAlg( "DEPTHFILTER2", new DEPTHFILTER_CPU( 64, 0.010000, 2 ) );

		// Depth-map propagation stage.
		pipeline.addAlg( "DEPTHPROP", new DEPTHMAP_PROP_CPU() );

		// Linkage-based clustering of matches into object candidates.
		pipeline.addAlg( "CLUSTER", new CLUSTER_LINKAGE_CPU( 0.100000, 7, 2, 1, 0.0, 1, -1, -1 ) );

		// Coarse pose estimation (RANSAC + LM with depth back-projection),
		// followed by a projection-based hypothesis filter.
		pipeline.addAlg( "POSE", new POSE_RANSAC_LM_DIFF_BACKPROJECTION_DEPTH_CPU( 192, 100, 4, 5, 6, 8, 0.5 ) );
		pipeline.addAlg( "FILTER", new FILTER_PROJECTION_CPU( 6, 4096., 2 ) );

		// Refined pose estimation and a final, stricter projection filter.
		pipeline.addAlg( "POSE2", new POSE_RANSAC_LM_DIFF_BACKPROJECTION_DEPTH_CPU( 64, 250, 4, 6, 8, 5, 0.5 ) );
		pipeline.addAlg( "FILTER2", new FILTER_PROJECTION_CPU( 8, 8192., 1e-4 ) );
	}
Example No. 3
0
    // Assembles the MOPED object-recognition pipeline: image undistortion,
    // SIFT feature extraction, ANN descriptor matching, mean-shift clustering,
    // two pose-estimation/filter passes, and a status display. Compile-time
    // flags USE_GPU / USE_CPU select the SIFT implementation; without either
    // flag the CPU implementation is used.
    void createPipeline(MopedPipeline &pipeline)
    {
        pipeline.addAlg("UNDISTORTED_IMAGE", new UTIL_UNDISTORT);


        // Feature Detector:
        // SURF_GPU, SIFT_GPU, or SIFT_CPU

        //FEAT_SURF_GPU(Float Threshold) 
        //pipeline.addAlg("SURF", new FEAT_SURF_GPU(64));
		
        // FEAT_SIFT_GPU(string ScaleOrigin, string Verbosity, string GPUDisplay) 
        //pipeline.addAlg("SIFT", new FEAT_SIFT_GPU("-1", "0", ":0.0"));

#ifdef USE_GPU 
        pipeline.addAlg("SIFT", new FEAT_SIFT_GPU("-1", "0", ":0.0"));
#else
#ifdef USE_CPU
        pipeline.addAlg("SIFT", new FEAT_SIFT_CPU("-1"));
#else
        // Use SIFT_CPU by default:
        // NOTE(review): this default branch is identical to the USE_CPU branch
        // above, so the inner #ifdef is currently redundant — possibly kept as
        // a placeholder for a different future default.
        pipeline.addAlg("SIFT", new FEAT_SIFT_CPU("-1"));
#endif
#endif
        // FEAT_SIFT_CPU(string ScaleOrigin) 


        // FEAT_DISPLAY:
        // 1 = print number of detected features to the terminal
        // 2 = display the camera image in a window, overlayed with feature points
        //pipeline.addAlg("FEAT_DISPLAY", new FEAT_DISPLAY(2));


        //MATCH_ANN_CPU(int DescriptorSize, string DescriptorType, float Quality, float Ratio)
        pipeline.addAlg("MATCH_SIFT", new MATCH_ANN_CPU(128, "SIFT", 5., 0.8));
        //pipeline.addAlg("MATCH_SURF", new MATCH_ANN_CPU(64, "SURF", 5., 0.85));

        //MATCH_FLANN_CPU(int DescriptorSize, string DescriptorType, float Precision, float Ratio)
        //pipeline.addAlg("MATCH_SIFT", new MATCH_FLANN_CPU(128, "SIFT", 0.01, 0.85));
        //pipeline.addAlg("MATCH_SURF", new MATCH_FLANN_CPU( 64, "SURF", 0.01, 0.85));
        //pipeline.addAlg("MATCH_SURF", new MATCH_FLANN_CPU( 64, "SURF", 0.8,  0.85));

        //MATCH_HASH_CPU(int DescriptorSize, string DescriptorType, int NHashTables, int NDims, float Ratio, char Threshold, float SizeLimit)
        //pipeline.addAlg("MATCH_SIFT", new MATCH_HASH_CPU(128, "SIFT128", 64, 22, 0.8, 3, 16));
        //MATCH_HASH_CPU(int DescriptorSize, string DescriptorType, int NHashTables, int NDims, float Ratio, char Threshold, float SizeLimit)
        //pipeline.addAlg("MATCH_SURF", new MATCH_HASH_CPU(64, "SURF", 64, 22, 0.8, 3, 16));

        //MATCH_BRUTE_CPU(int DescriptorSize, string DescriptorType, float Ratio)
        //pipeline.addAlg("CHECK_MATCH_SIFT128", new MATCH_BRUTE_CPU(128, "SIFT128", 0.9));


        // MATCH_DISPLAY:
        // 1 = print number of feature matches to the terminal
        // 2 = display the camera image in a window, overlayed with matched points
        //pipeline.addAlg("MATCH_DISPLAY", new MATCH_DISPLAY(2));


        //CLUSTER_MEAN_SHIFT_CPU(float Radius, float Merge, unsigned int MinPts, unsigned int MaxIterations) 
        pipeline.addAlg("CLUSTER", new CLUSTER_MEAN_SHIFT_CPU(200, 20, 7, 100));

        // CLUSTER_DISPLAY:
        // 1 = print number of found clusters to the terminal
        // 2 = display the camera image in a window, overlayed with cluster boundaries
        //pipeline.addAlg("CLUSTER_DISPLAY", new CLUSTER_DISPLAY(2));
		

        //POSE_RANSAC_LM_DIFF_CPU(int MaxRANSACTests, int MaxLMTests, int NPtsAlign, int MinNPtsObject, float ErrorThreshold)
        // NOTE(review): the active POSE call below passes 6 arguments, matching
        // the 6-parameter LBFGS signature below (MaxRANSACTests, MaxOptTests,
        // NObjectsCluster, NPtsAlign, MinNPtsObject, ErrorThreshold) rather than
        // the 5-parameter list above — confirm against the
        // POSE_RANSAC_LM_DIFF_REPROJECTION_CPU header.
        //pipeline.addAlg("POSE", new POSE_RANSAC_LM_DIFF_REPROJECTION_CPU(24, 200, 5, 6, 100));


        //POSE_RANSAC_LBFGS_REPROJECTION_CPU(int MaxRANSACTests, int MaxOptTests, int NObjectsCluster, int NPtsAlign, int MinNPtsObject, float ErrorThreshold)
        //pipeline.addAlg("POSE", new POSE_RANSAC_LBFGS_REPROJECTION_CPU(250, 200, 4, 5, 6, 10));
        // Coarse pose estimation: many RANSAC tests, loose error threshold.
        pipeline.addAlg("POSE", new POSE_RANSAC_LM_DIFF_REPROJECTION_CPU(600, 200, 4, 5, 6, 10));

        // POSE_DISPLAY:
        // 1 = print number of object pose hypotheses to the terminal
        // 2 = display the camera image in a window, overlayed with multiple pose hypotheses
        //pipeline.addAlg("POSE_DISPLAY", new POSE_DISPLAY(2));


        //FILTER_PROJECTION_CPU(int MinPoints, float FeatureDistance)
        // NOTE(review): calls below pass a third argument not listed above —
        // confirm its meaning against the FILTER_PROJECTION_CPU header.
        pipeline.addAlg("FILTER", new FILTER_PROJECTION_CPU(5, 4096., 2));

        //pipeline.addAlg("POSE2", new POSE_RANSAC_LBFGS_REPROJECTION_CPU(24, 500, 4, 6, 8, 2));
        // Refined pose estimation on the filtered hypotheses, then a stricter filter.
        pipeline.addAlg("POSE2", new POSE_RANSAC_LM_DIFF_REPROJECTION_CPU(100, 500, 4, 6, 8, 5));

        pipeline.addAlg("FILTER2", new FILTER_PROJECTION_CPU(7, 4096., 3));


        // POSE_DISPLAY 2:
        // 1 = print number of detected objects to the terminal
        // 2 = display the camera image in a window, overlayed with object wireframes
        //pipeline.addAlg("POSE2_DISPLAY", new POSE_DISPLAY(2));


        // STATUS_DISPLAY:
        // 1 = print frameData times to the terminal
        pipeline.addAlg("STATUS_DISPLAY", new STATUS_DISPLAY(1));


        // GLOBAL_DISPLAY:
        // <int> argument does not do anything.
        // Display a window containing views of various steps of the pipeline
        // and a graph of the processing load at each step.
        //pipeline.addAlg("GLOBAL_DISPLAY", new GLOBAL_DISPLAY(0));

    }