bool consoleFirstInput (int argc, char *argv[],
                        evg::SrcVideo& _inputVideo, std::string& _outputVideoPath,
                        DetectorType& _detectorType, const DetectorType defaultDetectorType,
                        TrackerType& _trackerType, const TrackerType defaultTrackerType,
                        double& _redetectionTime)
{
    try {
        ValuesConstraint<string> allowedDetectorVals( makeAllowedDetectorVals() );
        ValuesConstraint<string> allowedTrackerVals( makeAllowedTrackerVals() );

        // build the command-line parser
        CmdLine cmd("Set/reset all parameters", ' ', "0");
        ValueArg<string> cmdDetector ("d", "detector", "number or name of detector",
                                      false, "default", &allowedDetectorVals, cmd);
        ValueArg<string> cmdTracker  ("t", "tracker", "number or name of tracker",
                                      false, "default", &allowedTrackerVals, cmd);
        ValueArg<double> cmdPeriod   ("p", "period", "redetection period, in range [0; 4]",
                                      false, 2., "double", cmd);
        ValueArg<string> cmdInput    ("i", "input", "input video file path",
                                      false, "", "string");
        SwitchArg        cmdCamera   ("c", "camera", "indicate when using camera", false);
        // the input must come from exactly one of: file or camera
        cmd.xorAdd( cmdInput, cmdCamera );
        ValueArg<string> cmdOutput   ("o", "output", "output video path",
                                      false, "", "string", cmd);

        // parse
        cmd.parse(argc, argv);

        // parse inputVideo
        if (cmdCamera.isSet())
            _inputVideo = evg::SrcVideo(evg::SrcVideo::CAMERA, "");
        else if (cmdInput.isSet())
            _inputVideo = evg::SrcVideo(evg::SrcVideo::FILE, cmdInput.getValue());
        else
            assert(0);

        // parse outputVideoPath
        _outputVideoPath = cmdOutput.getValue();

        // parse detectorType
        if (cmdDetector.isSet())
        {
            if (cmdDetector.getValue() == "default")
                _detectorType = defaultDetectorType;
            else if (cmdDetector.getValue() == "1" || cmdDetector.getValue() == "opencv")
                _detectorType = OPENCV_DETECTOR;
            else if (cmdDetector.getValue() == "2" || cmdDetector.getValue() == "kyle")
                _detectorType = KYLE_DETECTOR;
            else
                assert(0);
        }
        else
            _detectorType = defaultDetectorType;

        // parse trackerType
        if (cmdTracker.isSet())
        {
            if (cmdTracker.getValue() == "default")
                _trackerType = defaultTrackerType;
            else if (cmdTracker.getValue() == "1" || cmdTracker.getValue() == "optical_flow")
                _trackerType = OPTICAL_FLOW_TRACKER;
            else if (cmdTracker.getValue() == "2" || cmdTracker.getValue() == "camshift")
                _trackerType = CAMSHIFT_TRACKER;
            else if (cmdTracker.getValue() == "3" || cmdTracker.getValue() == "compressive")
                _trackerType = COMPRESSIVE_TRACKER;
            else if (cmdTracker.getValue() == "4" || cmdTracker.getValue() == "kyle")
                _trackerType = KYLE_TRACKER;
            else
                assert(0);
        }
        else
            _trackerType = defaultTrackerType;

        // parse redetection time
        if (cmdPeriod.isSet())
            _redetectionTime = cmdPeriod.getValue();
        else
            _redetectionTime = 2.;

        return true;

    } catch (...) {
        std::cerr << "consoleFirstInput(): exception caught" << std::endl;
        return false;
    }
}
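/*
  Illustrative sketch (not part of the original source): consoleFirstInput() is
  intended to be called once at program start-up with the raw argc/argv.  The
  surrounding main() below is an assumption; the types evg::SrcVideo,
  DetectorType and TrackerType, and the default enum values passed in, come
  from the project headers.

      int main (int argc, char *argv[])
      {
          evg::SrcVideo inputVideo;
          std::string   outputVideoPath;
          DetectorType  detectorType;
          TrackerType   trackerType;
          double        redetectionTime;

          // e.g. invoked as:  ./tracker -i video.avi -d opencv -t camshift -p 1.5
          if (! consoleFirstInput (argc, argv, inputVideo, outputVideoPath,
                                   detectorType, OPENCV_DETECTOR,
                                   trackerType, CAMSHIFT_TRACKER,
                                   redetectionTime))
              return 1;

          // ... run the detection/tracking loop ...
      }
*/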
bool consoleInput (const std::vector<std::string>& _args,
                   evg::SrcVideo& _inputVideo, std::string& _outputVideoPath,
                   DetectorType& _detectorType, const DetectorType defaultDetectorType,
                   TrackerType& _trackerType, const TrackerType defaultTrackerType,
                   double& _redetectionTime, bool& _restart)
{
    try {
        ValuesConstraint<string> allowedDetectorVals( makeAllowedDetectorVals() );
        ValuesConstraint<string> allowedTrackerVals( makeAllowedTrackerVals() );

        // build the command-line parser
        // the 4th argument 'false' disables help -- help triggers exit() in TCLAP
        CmdLine cmd("Set/reset all parameters", ' ', "0", false);
        ValueArg<string> cmdDetector ("d", "detector", "number or name of detector",
                                      false, "default", &allowedDetectorVals, cmd);
        ValueArg<string> cmdTracker  ("t", "tracker", "number or name of tracker",
                                      false, "default", &allowedTrackerVals, cmd);
        ValueArg<double> cmdPeriod   ("p", "period", "redetection period, in range [0; 4]",
                                      false, 2., "double", cmd);
        ValueArg<string> cmdInput    ("i", "input", "input video file path",
                                      false, "", "string", cmd);
        SwitchArg        cmdCamera   ("c", "camera", "indicate when using camera", cmd, false);
        ValueArg<string> cmdOutput   ("o", "output", "output video path",
                                      false, "", "string", cmd);
        SwitchArg        cmdRestart  ("r", "restart", "restart the video", cmd, false);

        // parse
        // TCLAP::CmdLine::parse() does not take a const reference, so copy the arguments
        std::vector<std::string> args = _args;
        cmd.parse(args);

        // parse restart
        _restart = cmdRestart.isSet();

        // parse inputVideo
        // if an input method is set, we restart
        if (cmdCamera.isSet() || cmdInput.isSet())
            _restart = true;
        // set the new input method
        if (cmdCamera.isSet())
            _inputVideo = evg::SrcVideo(evg::SrcVideo::CAMERA, "");
        else if (cmdInput.isSet())
            _inputVideo = evg::SrcVideo(evg::SrcVideo::FILE, cmdInput.getValue());
        // TCLAP cannot express "mutually exclusive but not required" => check manually
        if (cmdCamera.isSet() && cmdInput.isSet())
        {
            cerr << "Input from both camera and file cannot be specified." << endl;
            return false;
        }

        // parse outputVideoPath
        if (cmdOutput.isSet())
            _outputVideoPath = cmdOutput.getValue();

        // parse detectorType
        if (cmdDetector.isSet())
        {
            if (cmdDetector.getValue() == "default")
                _detectorType = defaultDetectorType;
            else if (cmdDetector.getValue() == "1" || cmdDetector.getValue() == "opencv")
                _detectorType = OPENCV_DETECTOR;
            else if (cmdDetector.getValue() == "2" || cmdDetector.getValue() == "kyle")
                _detectorType = KYLE_DETECTOR;
            else
                assert(0);
        }

        // parse trackerType
        if (cmdTracker.isSet())
        {
            if (cmdTracker.getValue() == "default")
                _trackerType = defaultTrackerType;
            else if (cmdTracker.getValue() == "1" || cmdTracker.getValue() == "optical_flow")
                _trackerType = OPTICAL_FLOW_TRACKER;
            else if (cmdTracker.getValue() == "2" || cmdTracker.getValue() == "camshift")
                _trackerType = CAMSHIFT_TRACKER;
            else if (cmdTracker.getValue() == "3" || cmdTracker.getValue() == "compressive")
                _trackerType = COMPRESSIVE_TRACKER;
            else if (cmdTracker.getValue() == "4" || cmdTracker.getValue() == "kyle")
                _trackerType = KYLE_TRACKER;
            else
                assert(0);
        }

        // parse redetection time
        if (cmdPeriod.isSet())
            _redetectionTime = cmdPeriod.getValue();

        return true;

    } catch (...) {
        std::cerr << "consoleInput(): exception caught" << std::endl;
        return false;
    }
}
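/*
  Illustrative sketch (not part of the original source): consoleInput() is the
  run-time counterpart of consoleFirstInput(), assumed to be fed a command that
  was typed into an interactive console and already split into tokens.  The
  tokenisation below is only one possible way to build _args; reusing the
  variables from the main() sketch above.

      std::vector<std::string> args;
      args.push_back("console");        // first element plays the role of argv[0]
      args.push_back("-t");
      args.push_back("compressive");    // e.g. switch the tracker while running
      args.push_back("-r");             // and restart the video

      bool restart = false;
      if (! consoleInput (args, inputVideo, outputVideoPath,
                          detectorType, OPENCV_DETECTOR,
                          trackerType, CAMSHIFT_TRACKER,
                          redetectionTime, restart))
          std::cerr << "could not apply the new settings" << std::endl;
*/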
void parseOptions ( int argc, char** argv )
{
    try {
        CmdLine cmd ( "keypoints", ' ', kVersion );
        MyOutput my;
        cmd.setOutput ( &my );

        SwitchArg aArgFullScale ( "", "fullscale",
            "Uses full scale image to detect keypoints (default:false)\n", false );
        // SURF performs better than the other descriptors, so it is used by default when enabled
        ValueArg<int> aArgSurfScoreThreshold ( "", "surfscore",
            "Detection score threshold (default : 1000)\n", false, 1000, "int" );
        ValueArg<int> aArgSieve1Width ( "", "sievewidth",
            "Interest point sieve: Number of buckets on width (default : 10)", false, 10, "int" );
        ValueArg<int> aArgSieve1Height ( "", "sieveheight",
            "Interest point sieve: Number of buckets on height (default : 10)", false, 10, "int" );
        ValueArg<int> aArgSieve1Size ( "", "sievesize",
            "Interest point sieve: Max points per bucket (default : 10)\n", false, 10, "int" );
        ValueArg<std::string> aArgOutputFormat ( "", "format",
            "Output format (text, autopano-sift-xml, descperf), default text\n", false, "text", "string" );
        ValueArg<std::string> aArgOutputFile ( "o", "output",
            "Output file. If not specified, print to standard out\n", false, "", "string" );
        SwitchArg aArgInterestPoints ( "", "interestpoints",
            "Output only the interest points and the scale (default:false)\n", false );
        ValueArg<std::string> aArgFixedInterestPoint ( "", "ip",
            "Compute descriptor at x:y:scale:ori\n", false, "", "string" );

        cmd.add ( aArgSurfScoreThreshold );
        cmd.add ( aArgFullScale );
        cmd.add ( aArgSieve1Width );
        cmd.add ( aArgSieve1Height );
        cmd.add ( aArgSieve1Size );
        cmd.add ( aArgOutputFormat );
        cmd.add ( aArgOutputFile );
        cmd.add ( aArgInterestPoints );
        cmd.add ( aArgFixedInterestPoint );

        /*
        SwitchArg aArgTest("t","test", "Enables test mode\n", false);
        cmd.add( aArgTest );
        */

        UnlabeledMultiArg<string> aArgFiles ( "fileName", "Image files", true, "string" );
        cmd.add ( aArgFiles );

        cmd.parse ( argc, argv );

        //
        // Set variables
        //
        vector<string> aFiles = aArgFiles.getValue();
        if ( aFiles.size() != 1 )
        {
            std::cerr << "Exactly one image file must be given." << std::endl;
            exit ( 1 );
        }

        double surfScoreThreshold = 1000;
        if ( aArgSurfScoreThreshold.isSet() )
            surfScoreThreshold = aArgSurfScoreThreshold.getValue();

        bool downscale = true;
        if ( aArgFullScale.isSet() )
            downscale = false;

        int sieveWidth = 10;
        if ( aArgSieve1Width.isSet() )
            sieveWidth = aArgSieve1Width.getValue();

        int sieveHeight = 10;
        if ( aArgSieve1Height.isSet() )
            sieveHeight = aArgSieve1Height.getValue();

        int sieveSize = 10;
        if ( aArgSieve1Size.isSet() )
            sieveSize = aArgSieve1Size.getValue();

        bool onlyInterestPoints = false;
        if ( aArgInterestPoints.isSet() )
            onlyInterestPoints = true;

        // write either to the requested file or to standard out
        std::ostream* outstream;
        if ( aArgOutputFile.isSet() )
            outstream = new std::ofstream ( aArgOutputFile.getValue().c_str() );
        else
            outstream = &std::cout;

        KeypointWriter* writer = 0;
        std::string outputformat = "text";
        if ( aArgOutputFormat.isSet() )
            outputformat = aArgOutputFormat.getValue();

        if ( outputformat == "text" )
            writer = new SIFTFormatWriter ( *outstream );
        else if ( outputformat == "autopano-sift-xml" )
            writer = new AutopanoSIFTWriter ( *outstream );
        else if ( outputformat == "descperf" )
            writer = new DescPerfFormatWriter ( *outstream );
        else
        {
            std::cerr << "Unknown output format, valid values are text, autopano-sift-xml, descperf" << std::endl;
            exit ( 1 );
        }

        KeyPointPtr preKPPtr;
        if ( aArgFixedInterestPoint.isSet() )
        {
            preKPPtr = KeyPointPtr ( new KeyPoint() );
            // sentinel values so that missing fields can be detected
            preKPPtr->_x = -10001;
            preKPPtr->_ori = -10001;
            int nf = sscanf ( aArgFixedInterestPoint.getValue().c_str(), "%lf:%lf:%lf:%lf",
                              &(preKPPtr->_x), &(preKPPtr->_y),
                              &(preKPPtr->_scale), &(preKPPtr->_ori) );
            std::cerr << "passed orientation: " << preKPPtr->_ori << std::endl;
            if ( nf < 3 )
            {
                std::cerr << "Invalid value for --ip option, expected --ip x:y:scale:ori" << std::endl;
                exit ( 1 );
            }
        }

        DetectKeypoints ( aFiles[0], downscale, surfScoreThreshold, preKPPtr,
                          onlyInterestPoints, sieveWidth, sieveHeight, sieveSize, *writer );

        if ( aArgOutputFile.isSet() )
            delete outstream;

    } catch ( ArgException& e ) {
        cout << "ERROR: " << e.error() << " " << e.argId() << endl;
    }
}
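/*
  Illustrative invocations (an assumption, derived only from the options
  declared above; the executable name "keypoints" is taken from the CmdLine
  description and the actual binary name may differ):

      keypoints --surfscore 800 --sievewidth 5 --sieveheight 5 -o image0.key image0.jpg
      keypoints --ip 120.5:340.25:2.0:1.57 image0.jpg    # fixed interest point x:y:scale:ori
*/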