void read(const FileNode& fn) { clear(); read_params(fn["training_params"]); fn["weights"] >> weights; fn["means"] >> means; FileNode cfn = fn["covs"]; FileNodeIterator cfn_it = cfn.begin(); int i, n = (int)cfn.size(); covs.resize(n); for( i = 0; i < n; i++, ++cfn_it ) (*cfn_it) >> covs[i]; decomposeCovs(); computeLogWeightDivDet(); }
bool HaarEvaluator::Feature :: read( const FileNode& node ) { FileNode rnode = node[CC_RECTS]; FileNodeIterator it = rnode.begin(), it_end = rnode.end(); int ri; for( ri = 0; ri < RECT_NUM; ri++ ) { rect[ri].r = Rect(); rect[ri].weight = 0.f; } for(ri = 0; it != it_end; ++it, ri++) { FileNodeIterator it2 = (*it).begin(); it2 >> rect[ri].r.x >> rect[ri].r.y >> rect[ri].r.width >> rect[ri].r.height >> rect[ri].weight; } tilted = (int)node[CC_TILTED] != 0; return true; }
bool ChessCalibration::loadExtrinsics() { FileStorage fs(calibrationExtrinsicsFilePath, FileStorage::READ); if (!fs.isOpened()) return false; imagePoints.clear(); found = true; fs["rvec"] >> rvec; fs["tvec"] >> tvec; FileNode features = fs["features"]; for(FileNodeIterator it = features.begin(); it != features.end(); it++) { vector<Point2f> cur; (*it) >> cur; imagePoints.push_back(cur[0]); } fs["mapping"] >> mapping; cout << "Extrinsics loaded from file" << endl; // createTransformImageExtrinsicMap(camMat); // fs.release(); // FileStorage fs2(calibrationExtrinsicsFilePath, FileStorage::APPEND); // fs2 << "mapping" << mapping; return true; }
void Calibration::load(string filename, bool absolute) { imagePoints.clear(); FileStorage fs(ofToDataPath(filename, absolute), FileStorage::READ); cv::Size imageSize, sensorSize; Mat cameraMatrix; fs["cameraMatrix"] >> cameraMatrix; fs["imageSize_width"] >> imageSize.width; fs["imageSize_height"] >> imageSize.height; fs["sensorSize_width"] >> sensorSize.width; fs["sensorSize_height"] >> sensorSize.height; fs["distCoeffs"] >> distCoeffs; fs["reprojectionError"] >> reprojectionError; FileNode features = fs["features"]; for(FileNodeIterator it = features.begin(); it != features.end(); it++) { vector<Point2f> cur; (*it) >> cur; imagePoints.push_back(cur); } addedImageSize = imageSize; distortedIntrinsics.setup(cameraMatrix, imageSize, sensorSize); updateUndistortion(); }
/**Reads board info from a file */ void BoardConfiguration::readFromFile(cv::FileStorage &fs) throw(cv::Exception) { int aux = 0; // look for the nmarkers if (fs["aruco_bc_nmarkers"].name() != "aruco_bc_nmarkers") throw cv::Exception(81818, "BoardConfiguration::readFromFile", "invalid file type", __FILE__, __LINE__); fs["aruco_bc_nmarkers"] >> aux; resize(aux); fs["aruco_bc_mInfoType"] >> mInfoType; cv::FileNode markers = fs["aruco_bc_markers"]; int i = 0; for (FileNodeIterator it = markers.begin(); it != markers.end(); ++it, i++) { at(i).id = (*it)["id"]; FileNode FnCorners = (*it)["corners"]; for (FileNodeIterator itc = FnCorners.begin(); itc != FnCorners.end(); ++itc) { vector< float > coordinates3d; (*itc) >> coordinates3d; if (coordinates3d.size() != 3) throw cv::Exception(81818, "BoardConfiguration::readFromFile", "invalid file type 3", __FILE__, __LINE__); cv::Point3f point(coordinates3d[0], coordinates3d[1], coordinates3d[2]); at(i).push_back(point); } } }
int CV_MLBaseTest::read_params( CvFileStorage* __fs ) { FileStorage _fs(__fs, false); if( !_fs.isOpened() ) test_case_count = -1; else { FileNode fn = _fs.getFirstTopLevelNode()["run_params"][modelName]; test_case_count = (int)fn.size(); if( test_case_count <= 0 ) test_case_count = -1; if( test_case_count > 0 ) { dataSetNames.resize( test_case_count ); FileNodeIterator it = fn.begin(); for( int i = 0; i < test_case_count; i++, ++it ) { dataSetNames[i] = (string)*it; } } } return cvtest::TS::OK; }
static bool readStringList( const string& filename, const string& tempFile, vector<string>& l ) { /* // Immediately generate the file name that we will read from FileStorage fs(tempFile, FileStorage::WRITE); fs << "images" << "["; fs << string(filename); fs << "]"; */ l.resize(0); FileStorage fs(filename, FileStorage::READ); if( !fs.isOpened() ) return false; FileNode n = fs.getFirstTopLevelNode(); if( n.type() != FileNode::SEQ ) return false; FileNodeIterator it = n.begin(), it_end = n.end(); for( ; it != it_end; ++it ) l.push_back((string)*it); return true; }
void CameraCalibration::ReadMonoCalibParams( string &img_xml ) { calib_params = new CalibParams; FileStorage fs(img_xml, FileStorage::READ); // Read the settings if (!fs.isOpened()) { cout << "Could not open the configuration file: \"" << img_xml << "\"" << endl; exit(EXIT_FAILURE); } BoardSize = Size((int)fs["BoardSizeWidth"], (int)fs["BoardSizeHeight"]); SquareSize = (float)fs["SquareSize"]; BoardTexWdith = SquareSize*BoardSize.width; BoardTexHeight = SquareSize*BoardSize.height; FileNode imgs = fs["Images"]; for(FileNodeIterator itr = imgs.begin(); itr != imgs.end(); itr++) calib_params->ImageList.push_back((string)*itr); // Check if all image data have the same size. Mat img = imread(calib_params->ImageList.front(), CV_LOAD_IMAGE_GRAYSCALE); ImageSize = Size(img.cols, img.rows); for(int i=1; i<calib_params->ImageList.size(); i++) { img = imread(calib_params->ImageList.at(i), CV_LOAD_IMAGE_GRAYSCALE); assert(ImageSize == Size(img.cols, img.rows)); ImageSize = Size(img.cols, img.rows); } NumFrames = calib_params->ImageList.size(); fs.release(); }
/* * Run tests to make sure that quantization is working. */ void runQuantTests() { ColonyCounter colonyCounter; colonyCounter.loadTraining("svm_params.yml"); FileStorage fs("samples/tests.yml", FileStorage::READ); FileNode features = fs["tests"]; FileNodeIterator it = features.begin(), it_end = features.end(); int idx = 0; for( ; it != it_end; ++it, idx++ ) { string path; (*it)["path"] >> path; Mat img = imread("samples/" + path); // Find petri img Rect petriRect = findPetriRect(img); Mat petri = img(petriRect); // Preprocess image petri = colonyCounter.preprocessImage(petri); colonyCounter.testQuantization(petri, quants); // Classify image Mat debugImg, debugImgq; colonyCounter.classifyImage(petri, true, &debugImg); colonyCounter.classifyImageQuant(petri, true, &debugImgq, quants); imshow("normal", debugImg); imshow("quant", debugImgq); waitKey(0); } fs.release(); }
int imageDB::readImgInfoMap(const FileStorage& cvfs, const FileNode& node) { imgInfo_map.clear(); releaseImgVoteMap(); int img_id; imageInfo img_info; FileNodeIterator it = node.begin(); while(it != node.end()){ img_id = (int)(*it)[0]; img_info.feature_num = (int)(*it)[1]; img_info.img_size = Size((int)(*it)[2],(int)(*it)[3]); imgInfo_map.insert(pair<int,imageInfo>(img_id,img_info)); // create voteTable vector<featureVote>* voteTable = new vector<featureVote>; imgVote_map.insert(pair<int,vector<featureVote>*>(img_id, voteTable)); it++; } return 0; }
/**Read this from a file */ void Board::readFromFile(string filePath) throw(cv::Exception) { cv::FileStorage fs(filePath, cv::FileStorage::READ); if (fs["aruco_bo_nmarkers"].name() != "aruco_bo_nmarkers") throw cv::Exception(81818, "Board::readFromFile", "invalid file type:", __FILE__, __LINE__); int aux = 0; // look for the nmarkers fs["aruco_bo_nmarkers"] >> aux; resize(aux); fs["aruco_bo_rvec"] >> Rvec; fs["aruco_bo_tvec"] >> Tvec; cv::FileNode markers = fs["aruco_bo_markers"]; int i = 0; for (FileNodeIterator it = markers.begin(); it != markers.end(); ++it, i++) { at(i).id = (*it)["id"]; int ncorners = (*it)["ncorners"]; at(i).resize(ncorners); FileNode FnCorners = (*it)["corners"]; int c = 0; for (FileNodeIterator itc = FnCorners.begin(); itc != FnCorners.end(); ++itc, c++) { vector< float > coordinates2d; (*itc) >> coordinates2d; if (coordinates2d.size() != 2) throw cv::Exception(81818, "Board::readFromFile", "invalid file type 2", __FILE__, __LINE__); cv::Point2f point; point.x = coordinates2d[0]; point.y = coordinates2d[1]; at(i)[c] = point; } } conf.readFromFile(fs); }
void visualize(){ cout << endl << "Reading: " << endl; FileStorage fs("PointCloudFoun.xml", FileStorage::READ ); //PointCloud_SURF_Foun_1K.xml if (fs.isOpened()) cout<<"File is opened\n"; else cout << "error in opening" << endl; FileNode n = fs["CloudPoint"]; int row; row = (int) (n["rows"]); cout << "Row:" << row << endl; unsigned int count = 0; FileNode nd = n["data"]; vector< double > pointcloud; FileNodeIterator it = nd.begin(), it_end = nd.end(); // Go through the node for (; it != it_end; ++it) { pointcloud.push_back( (double)*it ); // cout << (double)*it; count += 1; } cout << count/3 <<endl; fs.release(); cout << "\n" << pointcloud.size()/3 <<endl; //-------------------- // Visualization //-------------------- pcl::PointCloud<pcl::PointXYZ>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZ>); std::cout << "Generating point clouds.\n\n"; cloud.reset(new pcl::PointCloud<pcl::PointXYZ>); for(unsigned int i = 0; i < pointcloud.size() ; i = i+3) { if(isnan( pointcloud[i] ) ) continue; pcl::PointXYZ p; p.x = pointcloud[i]; p.y = pointcloud[i+1]; p.z = pointcloud[i+2]; cloud->push_back(p); //cloud->points.push_back(p); } cloud->width = (int) cloud->points.size(); cloud->height = 1; for (size_t i = 0; i < cloud->points.size (); ++i) std::cerr << " " << cloud->points[i].x << " " << cloud->points[i].y << " " << cloud->points[i].z << std::endl; boost::shared_ptr<pcl::visualization::PCLVisualizer> viewer; viewer = customColourVis(cloud); while (!viewer->wasStopped ()) { viewer->spinOnce (100); boost::this_thread::sleep (boost::posix_time::microseconds (100000)); } }
static Fields* parseCascade(const FileNode &root, const float mins, const float maxs, const int totals, const int method) { static const char *const SC_STAGE_TYPE = "stageType"; static const char *const SC_BOOST = "BOOST"; static const char *const SC_FEATURE_TYPE = "featureType"; static const char *const SC_ICF = "ICF"; static const char *const SC_ORIG_W = "width"; static const char *const SC_ORIG_H = "height"; static const char *const SC_FEATURE_FORMAT = "featureFormat"; static const char *const SC_SHRINKAGE = "shrinkage"; static const char *const SC_OCTAVES = "octaves"; static const char *const SC_OCT_SCALE = "scale"; static const char *const SC_OCT_WEAKS = "weaks"; static const char *const SC_TREES = "trees"; static const char *const SC_WEAK_THRESHOLD = "treeThreshold"; static const char *const SC_FEATURES = "features"; static const char *const SC_INTERNAL = "internalNodes"; static const char *const SC_LEAF = "leafValues"; static const char *const SC_F_CHANNEL = "channel"; static const char *const SC_F_RECT = "rect"; // only Ada Boost supported std::string stageTypeStr = (std::string)root[SC_STAGE_TYPE]; CV_Assert(stageTypeStr == SC_BOOST); // only HOG-like integral channel features supported std::string featureTypeStr = (std::string)root[SC_FEATURE_TYPE]; CV_Assert(featureTypeStr == SC_ICF); int origWidth = (int)root[SC_ORIG_W]; int origHeight = (int)root[SC_ORIG_H]; std::string fformat = (std::string)root[SC_FEATURE_FORMAT]; bool useBoxes = (fformat == "BOX"); ushort shrinkage = cv::saturate_cast<ushort>((int)root[SC_SHRINKAGE]); FileNode fn = root[SC_OCTAVES]; if (fn.empty()) return 0; std::vector<device::Octave> voctaves; std::vector<float> vstages; std::vector<device::Node> vnodes; std::vector<float> vleaves; FileNodeIterator it = fn.begin(), it_end = fn.end(); for (ushort octIndex = 0; it != it_end; ++it, ++octIndex) { FileNode fns = *it; float scale = powf(2.f,saturate_cast<float>((int)fns[SC_OCT_SCALE])); bool isUPOctave = scale >= 1; ushort nweaks = saturate_cast<ushort>((int)fns[SC_OCT_WEAKS]); ushort2 size; size.x = cvRound(origWidth * scale); size.y = cvRound(origHeight * scale); device::Octave octave(octIndex, nweaks, shrinkage, size, scale); CV_Assert(octave.stages > 0); voctaves.push_back(octave); FileNode ffs = fns[SC_FEATURES]; if (ffs.empty()) return 0; std::vector<cv::Rect> feature_rects; std::vector<int> feature_channels; FileNodeIterator ftrs = ffs.begin(), ftrs_end = ffs.end(); int feature_offset = 0; for (; ftrs != ftrs_end; ++ftrs, ++feature_offset ) { cv::FileNode ftn = (*ftrs)[SC_F_RECT]; cv::FileNodeIterator r_it = ftn.begin(); int x = (int)*(r_it++); int y = (int)*(r_it++); int w = (int)*(r_it++); int h = (int)*(r_it++); if (useBoxes) { if (isUPOctave) { w -= x; h -= y; } } else { if (!isUPOctave) { w += x; h += y; } } feature_rects.push_back(cv::Rect(x, y, w, h)); feature_channels.push_back((int)(*ftrs)[SC_F_CHANNEL]); } fns = fns[SC_TREES]; if (fns.empty()) return 0; // for each stage (~ decision tree with H = 2) FileNodeIterator st = fns.begin(), st_end = fns.end(); for (; st != st_end; ++st ) { FileNode octfn = *st; float threshold = (float)octfn[SC_WEAK_THRESHOLD]; vstages.push_back(threshold); FileNode intfns = octfn[SC_INTERNAL]; FileNodeIterator inIt = intfns.begin(), inIt_end = intfns.end(); for (; inIt != inIt_end;) { inIt +=2; int featureIdx = (int)(*(inIt++)); float orig_threshold = (float)(*(inIt++)); unsigned int th = saturate_cast<unsigned int>((int)orig_threshold); cv::Rect& r = feature_rects[featureIdx]; uchar4 rect; rect.x = 
saturate_cast<uchar>(r.x); rect.y = saturate_cast<uchar>(r.y); rect.z = saturate_cast<uchar>(r.width); rect.w = saturate_cast<uchar>(r.height); unsigned int channel = saturate_cast<unsigned int>(feature_channels[featureIdx]); vnodes.push_back(device::Node(rect, channel, th)); } intfns = octfn[SC_LEAF]; inIt = intfns.begin(), inIt_end = intfns.end(); for (; inIt != inIt_end; ++inIt) { vleaves.push_back((float)(*inIt)); } } } cv::Mat hoctaves(1, (int) (voctaves.size() * sizeof(device::Octave)), CV_8UC1, (uchar*)&(voctaves[0])); CV_Assert(!hoctaves.empty()); cv::Mat hstages(cv::Mat(vstages).reshape(1,1)); CV_Assert(!hstages.empty()); cv::Mat hnodes(1, (int) (vnodes.size() * sizeof(device::Node)), CV_8UC1, (uchar*)&(vnodes[0]) ); CV_Assert(!hnodes.empty()); cv::Mat hleaves(cv::Mat(vleaves).reshape(1,1)); CV_Assert(!hleaves.empty()); Fields* fields = new Fields(mins, maxs, totals, origWidth, origHeight, shrinkage, 0, hoctaves, hstages, hnodes, hleaves, method); fields->voctaves = voctaves; fields->createLevels(DEFAULT_FRAME_HEIGHT, DEFAULT_FRAME_WIDTH); return fields; }
bool fill(const FileNode &root) { // cascade properties static const char *const SC_STAGE_TYPE = "stageType"; static const char *const SC_BOOST = "BOOST"; static const char *const SC_FEATURE_TYPE = "featureType"; static const char *const SC_ICF = "ICF"; static const char *const SC_ORIG_W = "width"; static const char *const SC_ORIG_H = "height"; static const char *const SC_OCTAVES = "octaves"; static const char *const SC_TREES = "trees"; static const char *const SC_FEATURES = "features"; static const char *const SC_INTERNAL = "internalNodes"; static const char *const SC_LEAF = "leafValues"; static const char *const SC_SHRINKAGE = "shrinkage"; static const char *const FEATURE_FORMAT = "featureFormat"; // only Ada Boost supported std::string stageTypeStr = (string)root[SC_STAGE_TYPE]; CV_Assert(stageTypeStr == SC_BOOST); std::string fformat = (string)root[FEATURE_FORMAT]; bool useBoxes = (fformat == "BOX"); // only HOG-like integral channel features supported string featureTypeStr = (string)root[SC_FEATURE_TYPE]; CV_Assert(featureTypeStr == SC_ICF); origObjWidth = (int)root[SC_ORIG_W]; origObjHeight = (int)root[SC_ORIG_H]; shrinkage = (int)root[SC_SHRINKAGE]; FileNode fn = root[SC_OCTAVES]; if (fn.empty()) return false; // for each octave FileNodeIterator it = fn.begin(), it_end = fn.end(); for (int octIndex = 0; it != it_end; ++it, ++octIndex) { FileNode fns = *it; Octave octave(octIndex, cv::Size(origObjWidth, origObjHeight), fns); CV_Assert(octave.weaks > 0); octaves.push_back(octave); FileNode ffs = fns[SC_FEATURES]; if (ffs.empty()) return false; fns = fns[SC_TREES]; if (fns.empty()) return false; FileNodeIterator st = fns.begin(), st_end = fns.end(); for (; st != st_end; ++st ) { weaks.push_back(Weak(*st)); fns = (*st)[SC_INTERNAL]; FileNodeIterator inIt = fns.begin(), inIt_end = fns.end(); for (; inIt != inIt_end;) nodes.push_back(Node(features.size(), inIt)); fns = (*st)[SC_LEAF]; inIt = fns.begin(), inIt_end = fns.end(); for (; inIt != inIt_end; ++inIt) leaves.push_back((float)(*inIt)); } st = ffs.begin(), st_end = ffs.end(); for (; st != st_end; ++st ) features.push_back(Feature(*st, useBoxes)); } return true; }
void System3d::readCalibrationXML(string filename) { if(filename.length() > 5) { if( filename.substr(filename.length()-4) == ".xml" ) { pmdPoints.clear(); webcamPoints.clear(); patternCount = 0; cout << "opening file \"" << filename << "\" in progress..."; FileStorage xmlfile(filename, FileStorage::READ); FileNode chessboardNode = xmlfile["chessboard_pattern_parameters"]; float vertical = (float)chessboardNode["vertical"]; float horizontal = (float)chessboardNode["horizontal"]; FileNode IFMcamNode = xmlfile["detected_corners"]; FileNodeIterator it = IFMcamNode.begin(), it_end = IFMcamNode.end(); int idx = 0; // iterate through a sequence using FileNodeIterator for( ; it != it_end; ++it, idx++ ) { vector<Point2f> pts_pmd; (*it)["IFMcam_corners"] >> pts_pmd; pmdPoints.push_back(pts_pmd); vector<Point2f> pts_web; (*it)["webcam_corners"] >> pts_web; webcamPoints.push_back(pts_web); } xmlfile.release(); patternCount = idx; // write points coordinates cout << "\n*****************************************************\n"; for(int k=0; k<pmdPoints.size(); k++) { cout << "pmd feature #" << k << ": "; cout << "\n( "; for(int i=0; i<pmdPoints[k].size(); i++) { cout << pmdPoints[k][i].x << " " << pmdPoints[k][i].y << " || "; } cout << ")\n"; } // write points coordinates cout << "\n*****************************************************\n"; for(int k=0; k<webcamPoints.size(); k++) { cout << "web feature #" << k << ": "; cout << "\n( "; for(int i=0; i<webcamPoints[k].size(); i++) { cout << webcamPoints[k][i].x << " " << webcamPoints[k][i].y << " || "; } cout << ")\n"; } } else cout << "Error: wrong file name\n"; } }
int main() { //leap motion data Controller controller; controller.setPolicy(Leap::Controller::POLICY_IMAGES); controller.setPolicy(Leap::Controller::POLICY_BACKGROUND_FRAMES); //process variables int updateRate; int frameAmount; //amount of frame to record int counter; bool done; FileStorage fs; vector<string> imagelist; FileNode n; FileNodeIterator it, it_begin, it_end; Size boardSize; int64 t1, t2; double time; Vector slopes_left, slopes_right, position; float cameraZ, cameraY, cameraX; //behavior options Behaviour behaviour; start: //initialization updateRate = 100; frameAmount = 20; //amount of frame to record counter = 0; done = false; imagelist.clear(); time = 0; behaviour = CheckBehaviour(); if (behaviour == Quit) return 0; else { system("cls"); std::cout << "Press 'q' to quit, 'r' to restart (set focus on image window)\n"; } char key = ' '; while (key != 'q' && key != 'r') { key = waitKey(updateRate); //refresh rate //image acquisition Frame frame = controller.frame(); if (!frame.isValid()) { //std::cout << "Frame is Invalid" << std::endl; continue; } ImageList images = frame.images(); if (images.isEmpty() || images.count() == 1) { //std::cout << "imageList.isEmpty()" << std::endl; continue; } Image imageLeft = images[0]; Image imageRight = images[1]; cvImgLeft = Mat(imageLeft.height(), imageLeft.width(), CV_8UC1); cvImgRight = Mat(imageRight.height(), imageRight.width(), CV_8UC1); cvImgLeft.data = (unsigned char*)imageLeft.data(); cvImgRight.data = (unsigned char*)imageRight.data(); //image output imshow("Left image", cvImgLeft); imshow("Right image", cvImgRight); //behaviour check switch (behaviour) { case Show_images: //do nothing break; case Undistort_images: //use Leap Motion distortion map UndistortLeap(imageLeft, "Undistorted left image", true); UndistortLeap(imageRight, "Undistorted right image", true); break; case Calib_image_recording: //record calibration images counter++; //done = imgSave(cvImgLeft, cvImgRight, frameAmount, counter); done = imgSave(UndistortLeap(imageLeft, "Undistorted left image", true), UndistortLeap(imageRight, "Undistorted right image", true), frameAmount, counter); if (done) { system("cls"); std::cout << "Images for calibration recorded!" << "\nPress 'q' to quit, 'r' to restart (set focus on image window)"; behaviour = Show_images; } break; case Stereo_calibration: //read calibration names to list fs.open(calibFileNames, FileStorage::READ); n = fs["strings"]; // Read string sequence - Get node if (n.type() != FileNode::SEQ) { cerr << "strings is not a sequence! 
FAIL" << endl; behaviour = Show_images; break; } it = n.begin(); it_end = n.end(); // Go through the node for (; it != it_end; ++it) { imagelist.push_back((string)*it); cout << (string)*it << endl; } fs.release(); //provide board size boardSize.width = 9; boardSize.height = 6; //apply calibration, save parameters StereoCalibration(imagelist, boardSize, true, true); //exit calibration, revert to showing images behaviour = Show_images; break; case Hough_circle_transform: //apply calibration //rectify calibrated images //apply threshold //Hough circle transform detection t1 = getTickCount(); TrackingHoughCircles(cvImgLeft); t2 = getTickCount(); time = (t2 - t1) / getTickFrequency(); cout << "\nExecution time (ms): " << time * 1000.0f; break; case Tracking_blobs: //uncalibrated //threshold //simple blob detector t1 = getTickCount(); TrackingBlobs(cvImgLeft); t2 = getTickCount(); time = (t2 - t1) / getTickFrequency(); cout << "\nExecution time (ms): " << time * 1000.0f; break; case Tracking_contours: //uncalibrated //threshold //blur + contours + enclosing circle t1 = getTickCount(); TrackingContours(cvImgLeft); t2 = getTickCount(); time = (t2 - t1) / getTickFrequency(); cout << "\nExecution time (ms): " << time * 1000.0f; break; case Triangulation: //triangulation based on two tracked points and Leap Motion rectify() function t1 = getTickCount(); //get the direction to the centere of object from left and right images //since there is only one tracked object, points should correspond slopes_left = imageLeft.rectify(GetTrackedPoint(imageLeft)); slopes_right = imageRight.rectify(GetTrackedPoint(imageRight)); //Do the triangulation from the rectify() slopes //40 mm camera separation cameraZ = 40 / (slopes_right.x - slopes_left.x); cameraY = cameraZ * slopes_right.y; cameraX = cameraZ * slopes_right.x - 20; position = Vector(cameraX, -cameraZ, cameraY); t2 = getTickCount(); time = (t2 - t1) / getTickFrequency(); cout << "\nPosition: " << position; cout << "\nExecution time (ms): " << time * 1000.0f; break; default: //do nothing break; } } //restart if (key == 'r') { destroyAllWindows(); goto start; } return 0; }
int main(int ac, char** av) { if (ac != 2) { help(av); return 1; } string filename = av[1]; //write { FileStorage fs(filename, FileStorage::WRITE); cout << "writing images\n"; fs << "images" << "["; fs << "image1.jpg" << "myfi.png" << "baboon.jpg"; cout << "image1.jpg" << " myfi.png" << " baboon.jpg" << endl; fs << "]"; cout << "writing mats\n"; Mat R =Mat_<double>::eye(3, 3),T = Mat_<double>::zeros(3, 1); cout << "R = " << R << "\n"; cout << "T = " << T << "\n"; fs << "R" << R; fs << "T" << T; cout << "writing MyData struct\n"; MyData m(1); fs << "mdata" << m; cout << m << endl; } //read { FileStorage fs(filename, FileStorage::READ); if (!fs.isOpened()) { cerr << "failed to open " << filename << endl; help(av); return 1; } FileNode n = fs["images"]; if (n.type() != FileNode::SEQ) { cerr << "images is not a sequence! FAIL" << endl; return 1; } cout << "reading images\n"; FileNodeIterator it = n.begin(), it_end = n.end(); for (; it != it_end; ++it) { cout << (string)*it << "\n"; } Mat R, T; cout << "reading R and T" << endl; fs["R"] >> R; fs["T"] >> T; cout << "R = " << R << "\n"; cout << "T = " << T << endl; MyData m; fs["mdata"] >> m; cout << "read mdata\n"; cout << m << endl; cout << "attempting to read mdata_b\n"; //Show default behavior for empty matrix fs["mdata_b"] >> m; cout << "read mdata_b\n"; cout << m << endl; } cout << "Try opening " << filename << " to see the serialized data." << endl << endl; //read from string { cout << "Read data from string\n"; string dataString = "%YAML:1.0\n" "mdata:\n" " A: 97\n" " X: 3.1415926535897931e+00\n" " id: mydata1234\n"; MyData m; FileStorage fs(dataString, FileStorage::READ | FileStorage::MEMORY); cout << "attempting to read mdata_b from string\n"; //Show default behavior for empty matrix fs["mdata"] >> m; cout << "read mdata\n"; cout << m << endl; } //write to string { cout << "Write data to string\n"; FileStorage fs(filename, FileStorage::WRITE | FileStorage::MEMORY | FileStorage::FORMAT_YAML); cout << "writing MyData struct\n"; MyData m(1); fs << "mdata" << m; cout << m << endl; string createdString = fs.releaseAndGetString(); cout << "Created string:\n" << createdString << "\n"; } return 0; }
int main() { //vars for 120 fps time_duration td, td1; ptime nextFrameTimestamp, currentFrameTimestamp, initialLoopTimestamp, finalLoopTimestamp; int delayFound = 0; //Setting up communication with arduino HANDLE hSerial; hSerial = CreateFile("COM4",GENERIC_READ|GENERIC_WRITE,0,0,OPEN_EXISTING,FILE_ATTRIBUTE_NORMAL,0); if(hSerial==INVALID_HANDLE_VALUE) { if(GetLastError()==ERROR_FILE_NOT_FOUND) { cout<<"File not found"<<endl;//serial port does not exist. Inform user. } cout<<"error :/"<<endl;//some other error occurred. Inform user. } DCB dcbSerialParams = {0}; dcbSerialParams.DCBlength=sizeof(dcbSerialParams); if (!GetCommState(hSerial, &dcbSerialParams)) { printf("error getting state\n"); } dcbSerialParams.BaudRate=CBR_9600; dcbSerialParams.ByteSize=8; dcbSerialParams.StopBits=ONESTOPBIT; dcbSerialParams.Parity=NOPARITY; if(!SetCommState(hSerial, &dcbSerialParams)) { printf("error setting serial port state\n"); } int n=1; DWORD bytesWritten; ifstream infile("..//..//..//data.csv"); //Takes input from basic(GUI) for reading patient data and stores in the data.csv file infile>>patdat; //reading patients first name infile>>patdat1; //reading patients last name infile>>patdat2; //reading patients age infile>>patdat3; //reading patients gender //Snippet for reading camera settings from xml file FileStorage fs; fs.open("file1.xml", FileStorage::READ); FileNode q = fs.root(); for (FileNodeIterator current = q.begin(); current != q.end(); current++) { FileNode item = *current; //cout<<"Success"<<endl; item["Exposure"] >> exposure; item["FPS"] >> framerate; item["Framewidth"] >> framewidth; item["Frameheight"] >> frameheight; //cout << exposure << endl; } VideoCapture capture(0); //Opens the camera of the device connected VideoCapture capture1(1); capture.set(CV_CAP_PROP_FRAME_WIDTH, framewidth); capture.set(CV_CAP_PROP_FRAME_HEIGHT,frameheight); capture.set(CV_CAP_PROP_EXPOSURE, exposure); capture1.set(CV_CAP_PROP_FRAME_WIDTH, framewidth); capture1.set(CV_CAP_PROP_FRAME_HEIGHT,frameheight); capture1.set(CV_CAP_PROP_EXPOSURE, exposure); capture>>image; //Extract a frame and store in image matrix. 
capture1>>image1; strcpy(fname, "..//..//Videos//"); //declaring path for storing video strcat(fname, patdat); //appending patients first name on the video strcat(fname, "_"); //appending patients name on the video strcat(fname, patdat1); //appending patients last name on the video strcat(fname, "_"); strcat(fname, patdat2); strcat(fname, "_"); strcat(fname, patdat3); strcat(fname, "_"); strcat(fname, "left"); strcat(fname,".avi"); strcpy(fname1, "..//..//Videos//"); //declaring path for storing video strcat(fname1, patdat); //appending patients first name on the video strcat(fname1, "_"); //appending patients name on the video strcat(fname1, patdat1); //appending patients last name on the video strcat(fname1, "_"); strcat(fname1, patdat2); strcat(fname1, "_"); strcat(fname1, patdat3); strcat(fname1, "_"); strcat(fname1, "right"); strcat(fname1,".avi"); int fps1=20; /*Define VideoWriter object for storing the video*/ VideoWriter video(fname,CV_FOURCC('M','J','P','G'),fps1,cvSize(framewidth, frameheight)); //CV_FOURCC('M','J','P','G') is a motion-jpeg codec VideoWriter video1(fname1,CV_FOURCC('M','J','P','G'),fps1,cvSize(framewidth, frameheight)); // initialize initial timestamps nextFrameTimestamp = microsec_clock::local_time(); currentFrameTimestamp = nextFrameTimestamp; td = (currentFrameTimestamp - nextFrameTimestamp); //Starting timer DWORD start = GetTickCount(); int i=1; for(i=1;;i++) { // wait for X microseconds until 1second/framerate time has passed after previous frame write while(td.total_microseconds() < 1000000/framerate) { //determine current elapsed time currentFrameTimestamp = microsec_clock::local_time(); td = (currentFrameTimestamp - nextFrameTimestamp); } // determine time at start of write initialLoopTimestamp = microsec_clock::local_time(); capture>>image; capture1>>image1; Size sz1 = image.size(); Size sz2 = image1.size(); Mat im3(sz1.height, sz1.width+sz2.width, CV_8UC3); Mat left(im3, Rect(0, 0, sz1.width, sz1.height)); image.copyTo(left); Mat right(im3, Rect(sz1.width, 0, sz2.width, sz2.height)); image1.copyTo(right); namedWindow("Video",WINDOW_NORMAL); imshow("Video",im3); // add 1second/framerate time for next loop pause nextFrameTimestamp = nextFrameTimestamp + microsec(1000000/framerate); // reset time_duration so while loop engages td = (currentFrameTimestamp - nextFrameTimestamp); // cout<< (td) <<" "<<endl; //determine and print out delay in ms, should be less than 1000/FPS //occasionally, if delay is larger than said value, correction will occur //if delay is consistently larger than said value, then CPU is not powerful // enough to capture/decompress/record/compress that fast. 
finalLoopTimestamp = microsec_clock::local_time(); td1 = (finalLoopTimestamp - initialLoopTimestamp); delayFound = td1.total_milliseconds(); //cout << delayFound << endl; //output will be in following format //[TIMESTAMP OF PREVIOUS FRAME] [TIMESTAMP OF NEW FRAME] [TIME DELAY OF WRITING] key1 = waitKey(100); //Capture Keyboard stroke if (char(key1) == 32 ) { break; //If you hit the spacebar the loop will break and code will terminate } } DWORD elapsed = GetTickCount() - start; cout<<"for the "<<i<<"th frame "<<elapsed/1000<<" seconds are required"<<endl; cout<<"Therefore the framerate is "<<i/(elapsed/1000)<<endl; WriteFile(hSerial, "s",n, &bytesWritten, NULL); cout<<"No of bytes written are: "<<bytesWritten<<endl; cout<<"The test has started"<<endl; start = GetTickCount(); cout<<"Time = "<< start; i=1; for(i=1;;i++) { // wait for X microseconds until 1second/framerate time has passed after previous frame write while(td.total_microseconds() < 1000000/framerate) { //determine current elapsed time currentFrameTimestamp = microsec_clock::local_time(); td = (currentFrameTimestamp - nextFrameTimestamp); } // determine time at start of write initialLoopTimestamp = microsec_clock::local_time(); capture>>image; video<<image; capture1>>image1; video1<<image1; Size sz1 = image.size(); Size sz2 = image1.size(); Mat im3(sz1.height, sz1.width+sz2.width, CV_8UC3); Mat left(im3, Rect(0, 0, sz1.width, sz1.height)); image.copyTo(left); Mat right(im3, Rect(sz1.width, 0, sz2.width, sz2.height)); image1.copyTo(right); namedWindow("Video",WINDOW_NORMAL); imshow("Video",im3); // add 1second/framerate time for next loop pause nextFrameTimestamp = nextFrameTimestamp + microsec(1000000/framerate); // reset time_duration so while loop engages td = (currentFrameTimestamp - nextFrameTimestamp); // cout<< (td) <<" "<<endl; //determine and print out delay in ms, should be less than 1000/FPS //occasionally, if delay is larger than said value, correction will occur //if delay is consistently larger than said value, then CPU is not powerful // enough to capture/decompress/record/compress that fast. finalLoopTimestamp = microsec_clock::local_time(); td1 = (finalLoopTimestamp - initialLoopTimestamp); delayFound = td1.total_milliseconds(); //cout << delayFound << endl; //output will be in following format //[TIMESTAMP OF PREVIOUS FRAME] [TIMESTAMP OF NEW FRAME] [TIME DELAY OF WRITING] // video1<<im3; //DWORD dwRead; //BOOL fWaitingOnRead = FALSE; //OVERLAPPED osReader = {0}; // Create the overlapped event. Must be closed before exiting // to avoid a handle leak. //osReader.hEvent = CreateEvent(NULL, TRUE, FALSE, NULL); //if (osReader.hEvent == NULL) // Error creating overlapped event; abort. //if (!fWaitingOnRead) { // Issue read operation. //if (!ReadFile(hSerial, lpBuf, 1, &dwRead, &osReader)) { //if (GetLastError() != ERROR_IO_PENDING) ; // read not delayed? // Error in communications; report it. 
//else // fWaitingOnRead = TRUE; //} //else { // read completed immediately // ReadFile(hSerial, lpBuf, 1, &dwRead, &osReader); //HandleASuccessfulRead(lpBuf, dwRead); //cout<<"Arduino-Read: "<<lpBuf[0]<<" ->"<<"Excitation number"<<lpBuf[0]<<endl; //} //} elapsed = GetTickCount() - start; //cout<<"time elapsed ="<<elapsed/1000<<"seconds"<<endl; cout<<"time left for test to end = "<<39-(elapsed/1000)<<endl; key2 = waitKey(100); //Capture Keyboard stroke if (char(key2) == 27 || elapsed>=39000 ) { cout<<"Test is complete"<<endl; break; //If you hit ESC key loop will break and code will terminate || lpBuf[0]=='9' } } CloseHandle(hSerial); //Ending communication with arduino elapsed = GetTickCount() - start; cout<<"for the "<<i<<"th frame "<<elapsed/1000<<" seconds are required"<<endl; cout<<"Therefore the framerate is "<<i/(elapsed/1000)<<endl; capture.release(); capture1.release(); return 0; }
bool s_model::load() { FileStorage fs; fs.open(fname, FileStorage::READ); if(!fs.isOpened()){ return false; } FileNode fn; fn = fs["ModelName"]; string nameModel; if(fn.empty()){ cerr << "Cannot find node ModelName." << endl; return false; } fn >> name; // decode the model name. Some names are reserved for specific models // If model head is "chsbd", the name should have the format "chsbd_<x>_<y>_<p>". Here <x>, <y> are non-negative integer, <p> is fixed point number. if(par_chsbd.parse(name.c_str(), type, pts, edges)){ pts_deformed.resize(pts.size()); for(int i = 0; i < pts.size(); i++) pts_deformed[i] = pts[i]; calc_bounds(); return true; } int numPoints; fn = fs["NumPoints"]; if(fn.empty()){ cerr << "Cannot find node NumPoints." << endl; return false; } fn >> numPoints; int numEdges; fn = fs["NumEdges"]; if(fn.empty()){ cerr << "Cannot find node NumEdges." << endl; return false; } fn >> numEdges; int numParts; fn = fs["NumParts"]; if(fn.empty()){ cerr << "Cannot find node NumParts" << endl; } fn >> numParts; fn = fs["Points"]; if(fn.empty()){ cerr << "Cannot find node Points." << endl; return false; } char buf[64]; pts.resize(numPoints); pts_deformed.resize(numPoints); for(int ip = 0; ip < numPoints; ip++){ snprintf(buf, 63, "Point%05d", ip); FileNode fpt = fn[(const char*)buf]; if(fpt.empty()){ cerr << "Cannot find node " << buf << "." << endl; return false; } fpt["x"] >> pts[ip].x; fpt["y"] >> pts[ip].y; fpt["z"] >> pts[ip].z; pts_deformed[ip] = pts[ip]; } fn = fs["Edges"]; if(fn.empty()){ cerr << "Cannot find node Edges." << endl; return false; } edges.resize(numEdges); for(int ie =0; ie < numEdges; ie++){ snprintf(buf, 63, "Edge%05d", ie); FileNode fe = fn[(const char*)buf]; if(fe.empty()){ cerr << "Cannot find node " << buf << "." << endl; return false; } fe["s"] >> edges[ie].s; fe["e"] >> edges[ie].e; } fn = fs["Parts"]; parts.resize(numParts); for(int ipart = 0; ipart < numParts; ipart++){ snprintf(buf, 63, "Part%05d", ipart); FileNode fpart = fn[(const char*)buf]; if(fpart.empty()){ cerr << "Cannot find part " << buf << "." << endl; return false; } FileNode fpts = fpart["pts"]; if(fpts.empty()){ cerr << "Cannot find points in " << buf << "." << endl; return false; } FileNodeIterator itr = fpts.begin(); for(; itr != fpts.end(); itr++){ int val; *itr >> val; parts[ipart].pts.push_back(val); } FileNode faxis = fpart["axis"]; if(faxis.empty()){ cerr << "Cannot find axis in " << buf << "." << endl; return false; } Point3f & axis = parts[ipart].axis; faxis["x"] >> axis.x; faxis["y"] >> axis.y; faxis["z"] >> axis.z; FileNode ftrn = fpart["trn"]; if(ftrn.empty()){ cerr << "Cannot find trn in " << buf << "." << endl; return false; } ftrn >> parts[ipart].trn; FileNode frot = fpart["rot"]; if(frot.empty()){ cerr << "Cannot find rot in " << buf << "." << endl; return false; } frot >> parts[ipart].rot; if(parts[ipart].rot){ FileNode forg = fpart["org"]; if(forg.empty()){ cerr << "Cannot find org in " << buf << "." << endl; return false; } forg >> parts[ipart].org; } } return true; }
void TrajectoryFrames::load(const std::string& dirname) { cout << "TrajectoryFrames::load" << endl; clear(); vector<string> frameIndices; readFrameIndices(dirname, frameIndices); if(frameIndices.empty()) { cout << "Can not load the data from given directory of the base: " << dirname << endl; return; } frames.resize(frameIndices.size()); objectMasks.resize(frameIndices.size()); poses.resize(frameIndices.size()); #pragma omp parallel for for(size_t i = 0; i < frameIndices.size(); i++) { Mat image, depth; loadFrameData(dirname, frameIndices[i], image, depth); CV_Assert(!image.empty()); CV_Assert(!depth.empty()); Mat mask, objectMask; mask = imread(dirname + "/mask_" + frameIndices[i] + ".png", 0); objectMask = imread(dirname + "/object_mask_" + frameIndices[i] + ".png", 0); CV_Assert(!mask.empty()); CV_Assert(!objectMask.empty()); Mat normals; { FileStorage fs(dirname + "/normals_" + frameIndices[i] + ".xml.gz", FileStorage::READ); CV_Assert(fs.isOpened()); fs["normals"] >> normals; CV_Assert(!normals.empty()); } Mat pose; { FileStorage fs(dirname + "/pose" + frameIndices[i] + ".xml.gz", FileStorage::READ); CV_Assert(fs.isOpened()); fs["pose"] >> pose; CV_Assert(!pose.empty()); } Ptr<RgbdFrame> frame = new RgbdFrame(image, depth, mask, normals, atoi(frameIndices[i].c_str())); frames[i] = frame; objectMasks[i] = objectMask; poses[i] = pose; } resumeFrameState = TrajectoryFrames::KEYFRAME; frameStates.resize(frames.size(), TrajectoryFrames::KEYFRAME); FileStorage fs(dirname + "/poseLinks.xml.gz", FileStorage::READ); CV_Assert(fs.isOpened()); FileNode fn = fs["poseLinks"]; FileNodeIterator fnIt = fn.begin(), fnEnd = fn.end(); for(; fnIt != fnEnd; ++fnIt) { int srcIndex = -1, dstIndex = -1; Mat Rt; (*fnIt)["srcIndex"] >> srcIndex; (*fnIt)["dstIndex"] >> dstIndex; (*fnIt)["Rt"] >> Rt; CV_Assert(srcIndex >= 0); CV_Assert(dstIndex >= 0); keyframePosesLinks.push_back(PosesLink(srcIndex, dstIndex, Rt)); } }
bool loadGraph(const std::string graph_file) { LOG(INFO) << "loading graph " << graph_file; FileStorage fs; fs.open(graph_file, FileStorage::READ); if (!fs.isOpened()) { LOG(ERROR) << "couldn't open " << graph_file; return false; } FileNode nd = fs["nodes"]; if (nd.type() != FileNode::SEQ) { LOG(ERROR) << "no nodes"; return false; } for (FileNodeIterator it = nd.begin(); it != nd.end(); ++it) { string type_id = (*it)["typeid"]; string name; (*it)["name"] >> name; cv::Point loc; loc.x = (*it)["loc"][0]; loc.y = (*it)["loc"][1]; bool enable; (*it)["enable"] >> enable; Node* node; if (type_id.compare("bm::Webcam") == 0) { // TBD make a version of getNode that takes a type_id string Webcam* cam_in = getNode<Webcam>(name, loc); node = cam_in; test_im = cam_in->getImage("out").clone(); test_im = cv::Scalar(200,200,200); } else if (type_id.compare("bm::ScreenCap") == 0) { node = getNode<ScreenCap>(name, loc); node->update(); } else if (type_id.compare("bm::ImageNode") == 0) { node = getNode<ImageNode>(name, loc); } else if (type_id.compare("bm::Sobel") == 0) { node = getNode<Sobel>(name, loc); } else if (type_id.compare("bm::GaussianBlur") == 0) { node = getNode<GaussianBlur>(name, loc); } else if (type_id.compare("bm::Buffer") == 0) { node = getNode<Buffer>(name, loc); } else if (type_id.compare("bm::ImageDir") == 0) { node = getNode<ImageDir>(name, loc); } else if (type_id.compare("bm::Add") == 0) { node = getNode<Add>(name, loc); } else if (type_id.compare("bm::Multiply") == 0) { node = getNode<Multiply>(name, loc); } else if (type_id.compare("bm::AbsDiff") == 0) { node = getNode<AbsDiff>(name, loc); } else if (type_id.compare("bm::Greater") == 0) { node = getNode<Greater>(name, loc); } else if (type_id.compare("bm::Resize") == 0) { node = getNode<Resize>(name, loc); } else if (type_id.compare("bm::Flip") == 0) { node = getNode<Flip>(name, loc); } else if (type_id.compare("bm::Rot2D") == 0) { node = getNode<Rot2D>(name, loc); } else if (type_id.compare("bm::Signal") == 0) { node = getNode<Signal>(name, loc); } else if (type_id.compare("bm::Saw") == 0) { node = getNode<Saw>(name, loc); } else if (type_id.compare("bm::Tap") == 0) { node = getNode<Tap>(name, loc); } else if (type_id.compare("bm::TapInd") == 0) { node = getNode<TapInd>(name, loc); } else if (type_id.compare("bm::Bezier") == 0) { node = getNode<Bezier>(name, loc); } else if (type_id.compare("bm::Random") == 0) { node = getNode<Random>(name, loc); } else if (type_id.compare("bm::Mouse") == 0) { node = getNode<Mouse>(name, loc); input_node = (Mouse*) node; if (output_node) { input_node->display = output_node->display; input_node->win = output_node->win; input_node->opcode = output_node->opcode; } } else if (type_id.compare("bm::Output") == 0) { node = getNode<Output>(name, loc); output_node = (Output*)node; output_node->setup(Config::inst()->out_width, Config::inst()->out_height); // TBD need better way to share X11 info- Config probably if (input_node) { input_node->display = output_node->display; input_node->win = output_node->win; input_node->opcode = output_node->opcode; } } else { LOG(WARNING) << "unknown node type " << type_id << ", assuming imageNode"; node = getNode<ImageNode>(name, loc); } if (dynamic_cast<ImageNode*>(node)) { (dynamic_cast<ImageNode*> (node))->setImage("out", test_im); } node->load(it); if (name == "output") { output_node = (Output*)node; cv::Mat tmp; node->setImage("in", tmp); } LOG(INFO) << type_id << " " << CLTXT << name << CLVAL << " " << node << " " << loc << " " << enable << CLNRM; int ind; 
(*it)["ind"] >> ind; LOG(INFO) << CLTXT << "first pass inputs " << CLVAL << ind << CLNRM << " " << node->name; for (int i = 0; i < (*it)["inputs"].size(); i++) { int type; string port; (*it)["inputs"][i]["type"] >> type; (*it)["inputs"][i]["name"] >> port; LOG(INFO) << "input " << ind << " \"" << node->name << "\", type " << type << " " << port; // TBD make function for this /* conType con_type = NONE; if (type == "ImageNode") con_type = IMAGE; if (type == "ImageOut") con_type = IMAGE; if (type == "Signal") con_type = SIGNAL; if (type == "Buffer") con_type = BUFFER; */ node->setInputPort((conType)type, port, NULL, ""); } } // second pass for inputs (the first pass was necessary to create them // all in right order LOG(INFO) << "second pass inputs"; for (FileNodeIterator it = nd.begin(); it != nd.end(); ++it) { int ind; (*it)["ind"] >> ind; LOG(INFO) << "second pass inputs " << ind << " " << CLTXT << all_nodes[ind]->name << CLNRM; for (int i = 0; i < (*it)["inputs"].size(); i++) { int input_ind; int type; string port; string src_port; float value; (*it)["inputs"][i]["type"] >> type; (*it)["inputs"][i]["name"] >> port; (*it)["inputs"][i]["src_ind"] >> input_ind; (*it)["inputs"][i]["src_port"] >> src_port; (*it)["inputs"][i]["value"] >> value; if (input_ind >= 0) { LOG(INFO) << "input " << " " << input_ind << ", type " << type << " " << port << " " << input_ind << " " << src_port; all_nodes[ind]->setInputPort((conType)type, port, all_nodes[input_ind], src_port); } // input_ind > 0 if (type == SIGNAL) { all_nodes[ind]->setSignal(port, value); } } } // second input pass if (output_node == NULL) { LOG(WARNING) << CLWRN << "No output node found, setting it to " << all_nodes[all_nodes.size() - 1]->name << CLNRM; // TBD could make sure that this node is an output node output_node = (Output*) all_nodes[all_nodes.size() - 1]; } LOG(INFO) << all_nodes.size() << " nodes total"; //output_node->loc = cv::Point2f(graph.cols - (test_im.cols/2+100), 20); } // loadGraph
int main(int argc, char **argv) { //READ camera calibration parameters cout << endl << "Reading camera params file: "<< endl; const string inputSettingsFile = "camera_cal.yml"; FileStorage fs; fs.open(inputSettingsFile, FileStorage::READ); if (!fs.isOpened()) { cerr << "Failed to open " << inputSettingsFile << endl; //help(av); return 1; } // Read the settings int itNr; fs["iterationNr"] >> itNr; cout << itNr; FileNode n = fs["strings"]; // Read string sequence - Get node /*if (n.type() != FileNode::SEQ) { cerr << "strings is not a sequence! FAIL" << endl; return 1; }*/ FileNodeIterator itt = n.begin(), itt_end = n.end(); // Go through the node for (; itt != itt_end; ++itt) cout << (string)*itt << endl; //int image_width, int image_width,image_height; Mat cameraMatrix, distortion_coefficients, rectification_matrix, projection_matrix; cout << "Debug" << endl; fs["image_width"] >> image_width; fs["image_height"] >> image_height; fs["camera_matrix"] >> cameraMatrix; // Read cv::Mat fs["distortion_coefficients"] >> distortion_coefficients; fs["rectification_matrix"] >> rectification_matrix; fs["projection_matrix"] >> projection_matrix; cout << "image_width = " << image_width << endl << "image_height = " << image_height << endl << "cameraMatrix = " << endl << cameraMatrix << endl << "distortion_coefficients = " << endl << distortion_coefficients << endl << "rectification_matrix = " << endl << rectification_matrix << endl << "projection_matrix = " << endl << projection_matrix << endl; ///// END READING OF YAML FILE /////////// ros::init(argc, argv, "image_listener"); // Create ROS generic subscriber (or publisher) ros::NodeHandle nh; // Create OpenCV display window cv::namedWindow("Undistorted Image"); cv::namedWindow("view"); cv::startWindowThread(); //Initialize ImageTransport instance with NodeHandle image_transport::ImageTransport it(nh); //Subscribe to the camera/image base topic. Call imageCallback when new image arrives, with a queue size of 1. image_transport::Subscriber sub=it.subscribe("camera/image_raw", 1,imageCallback); //Process image: if (cv_ptr->image.rows>60 && cv_ptr->image.cols > 60) cv::circle(cv_ptr->image,cv::Point(50,50),10,CV_RGB(255,0,0)); //Display image in window: //cv::imshow("Undistorted Image", cv_ptr->image); //cv::imshow("Undistorted Image",imageRect); ros::spin(); cv::destroyWindow("view"); cv::destroyWindow("Undistorted Image"); }
int main( int argc, const char** argv ) { CommandLineParser parser(argc, argv, "{ help h usage ? | | show this message }" "{ image i | | (required) path to reference image }" "{ model m | | (required) path to cascade xml file }" "{ data d | | (optional) path to video output folder }" ); // Read in the input arguments if (parser.has("help")){ parser.printMessage(); printLimits(); return 0; } string model(parser.get<string>("model")); string output_folder(parser.get<string>("data")); string image_ref = (parser.get<string>("image")); if (model.empty() || image_ref.empty()){ parser.printMessage(); printLimits(); return -1; } // Value for timing // You can increase this to have a better visualisation during the generation int timing = 1; // Value for cols of storing elements int cols_prefered = 5; // Open the XML model FileStorage fs; bool model_ok = fs.open(model, FileStorage::READ); if (!model_ok){ cerr << "the cascade file '" << model << "' could not be loaded." << endl; return -1; } // Get a the required information // First decide which feature type we are using FileNode cascade = fs["cascade"]; string feature_type = cascade["featureType"]; bool haar = false, lbp = false; if (feature_type.compare("HAAR") == 0){ haar = true; } if (feature_type.compare("LBP") == 0){ lbp = true; } if ( feature_type.compare("HAAR") != 0 && feature_type.compare("LBP")){ cerr << "The model is not an HAAR or LBP feature based model!" << endl; cerr << "Please select a model that can be visualized by the software." << endl; return -1; } // We make a visualisation mask - which increases the window to make it at least a bit more visible int resize_factor = 10; int resize_storage_factor = 10; Mat reference_image = imread(image_ref, IMREAD_GRAYSCALE ); if (reference_image.empty()){ cerr << "the reference image '" << image_ref << "'' could not be loaded." << endl; return -1; } Mat visualization; resize(reference_image, visualization, Size(reference_image.cols * resize_factor, reference_image.rows * resize_factor)); // First recover for each stage the number of weak features and their index // Important since it is NOT sequential when using LBP features vector< vector<int> > stage_features; FileNode stages = cascade["stages"]; FileNodeIterator it_stages = stages.begin(), it_stages_end = stages.end(); int idx = 0; for( ; it_stages != it_stages_end; it_stages++, idx++ ){ vector<int> current_feature_indexes; FileNode weak_classifiers = (*it_stages)["weakClassifiers"]; FileNodeIterator it_weak = weak_classifiers.begin(), it_weak_end = weak_classifiers.end(); vector<int> values; for(int idy = 0; it_weak != it_weak_end; it_weak++, idy++ ){ (*it_weak)["internalNodes"] >> values; current_feature_indexes.push_back( (int)values[2] ); } stage_features.push_back(current_feature_indexes); } // If the output option has been chosen than we will store a combined image plane for // each stage, containing all weak classifiers for that stage. 
bool draw_planes = false; stringstream output_video; output_video << output_folder << "model_visualization.avi"; VideoWriter result_video; if( output_folder.compare("") != 0 ){ draw_planes = true; result_video.open(output_video.str(), VideoWriter::fourcc('X','V','I','D'), 15, Size(reference_image.cols * resize_factor, reference_image.rows * resize_factor), false); } if(haar){ // Grab the corresponding features dimensions and weights FileNode features = cascade["features"]; vector< vector< rect_data > > feature_data; FileNodeIterator it_features = features.begin(), it_features_end = features.end(); for(int idf = 0; it_features != it_features_end; it_features++, idf++ ){ vector< rect_data > current_feature_rectangles; FileNode rectangles = (*it_features)["rects"]; int nrects = (int)rectangles.size(); for(int k = 0; k < nrects; k++){ rect_data current_data; FileNode single_rect = rectangles[k]; current_data.x = (int)single_rect[0]; current_data.y = (int)single_rect[1]; current_data.w = (int)single_rect[2]; current_data.h = (int)single_rect[3]; current_data.weight = (float)single_rect[4]; current_feature_rectangles.push_back(current_data); } feature_data.push_back(current_feature_rectangles); } // Loop over each possible feature on its index, visualise on the mask and wait a bit, // then continue to the next feature. // If visualisations should be stored then do the in between calculations Mat image_plane; Mat metadata = Mat::zeros(150, 1000, CV_8UC1); vector< rect_data > current_rects; for(int sid = 0; sid < (int)stage_features.size(); sid ++){ if(draw_planes){ int features_nmbr = (int)stage_features[sid].size(); int cols = cols_prefered; int rows = features_nmbr / cols; if( (features_nmbr % cols) > 0){ rows++; } image_plane = Mat::zeros(reference_image.rows * resize_storage_factor * rows, reference_image.cols * resize_storage_factor * cols, CV_8UC1); } for(int fid = 0; fid < (int)stage_features[sid].size(); fid++){ stringstream meta1, meta2; meta1 << "Stage " << sid << " / Feature " << fid; meta2 << "Rectangles: "; Mat temp_window = visualization.clone(); Mat temp_metadata = metadata.clone(); int current_feature_index = stage_features[sid][fid]; current_rects = feature_data[current_feature_index]; Mat single_feature = reference_image.clone(); resize(single_feature, single_feature, Size(), resize_storage_factor, resize_storage_factor); for(int i = 0; i < (int)current_rects.size(); i++){ rect_data local = current_rects[i]; if(draw_planes){ if(local.weight >= 0){ rectangle(single_feature, Rect(local.x * resize_storage_factor, local.y * resize_storage_factor, local.w * resize_storage_factor, local.h * resize_storage_factor), Scalar(0), FILLED); }else{ rectangle(single_feature, Rect(local.x * resize_storage_factor, local.y * resize_storage_factor, local.w * resize_storage_factor, local.h * resize_storage_factor), Scalar(255), FILLED); } } Rect part(local.x * resize_factor, local.y * resize_factor, local.w * resize_factor, local.h * resize_factor); meta2 << part << " (w " << local.weight << ") "; if(local.weight >= 0){ rectangle(temp_window, part, Scalar(0), FILLED); }else{ rectangle(temp_window, part, Scalar(255), FILLED); } } imshow("features", temp_window); putText(temp_window, meta1.str(), Point(15,15), FONT_HERSHEY_SIMPLEX, 0.5, Scalar(255)); result_video.write(temp_window); // Copy the feature image if needed if(draw_planes){ single_feature.copyTo(image_plane(Rect(0 + (fid%cols_prefered)*single_feature.cols, 0 + (fid/cols_prefered) * single_feature.rows, single_feature.cols, 
single_feature.rows))); } putText(temp_metadata, meta1.str(), Point(15,15), FONT_HERSHEY_SIMPLEX, 0.5, Scalar(255)); putText(temp_metadata, meta2.str(), Point(15,40), FONT_HERSHEY_SIMPLEX, 0.5, Scalar(255)); imshow("metadata", temp_metadata); waitKey(timing); } //Store the stage image if needed if(draw_planes){ stringstream save_location; save_location << output_folder << "stage_" << sid << ".png"; imwrite(save_location.str(), image_plane); } } } if(lbp){ // Grab the corresponding features dimensions and weights FileNode features = cascade["features"]; vector<Rect> feature_data; FileNodeIterator it_features = features.begin(), it_features_end = features.end(); for(int idf = 0; it_features != it_features_end; it_features++, idf++ ){ FileNode rectangle = (*it_features)["rect"]; Rect current_feature ((int)rectangle[0], (int)rectangle[1], (int)rectangle[2], (int)rectangle[3]); feature_data.push_back(current_feature); } // Loop over each possible feature on its index, visualise on the mask and wait a bit, // then continue to the next feature. Mat image_plane; Mat metadata = Mat::zeros(150, 1000, CV_8UC1); for(int sid = 0; sid < (int)stage_features.size(); sid ++){ if(draw_planes){ int features_nmbr = (int)stage_features[sid].size(); int cols = cols_prefered; int rows = features_nmbr / cols; if( (features_nmbr % cols) > 0){ rows++; } image_plane = Mat::zeros(reference_image.rows * resize_storage_factor * rows, reference_image.cols * resize_storage_factor * cols, CV_8UC1); } for(int fid = 0; fid < (int)stage_features[sid].size(); fid++){ stringstream meta1, meta2; meta1 << "Stage " << sid << " / Feature " << fid; meta2 << "Rectangle: "; Mat temp_window = visualization.clone(); Mat temp_metadata = metadata.clone(); int current_feature_index = stage_features[sid][fid]; Rect current_rect = feature_data[current_feature_index]; Mat single_feature = reference_image.clone(); resize(single_feature, single_feature, Size(), resize_storage_factor, resize_storage_factor); // VISUALISATION // The rectangle is the top left one of a 3x3 block LBP constructor Rect resized(current_rect.x * resize_factor, current_rect.y * resize_factor, current_rect.width * resize_factor, current_rect.height * resize_factor); meta2 << resized; // Top left rectangle(temp_window, resized, Scalar(255), 1); // Top middle rectangle(temp_window, Rect(resized.x + resized.width, resized.y, resized.width, resized.height), Scalar(255), 1); // Top right rectangle(temp_window, Rect(resized.x + 2*resized.width, resized.y, resized.width, resized.height), Scalar(255), 1); // Middle left rectangle(temp_window, Rect(resized.x, resized.y + resized.height, resized.width, resized.height), Scalar(255), 1); // Middle middle rectangle(temp_window, Rect(resized.x + resized.width, resized.y + resized.height, resized.width, resized.height), Scalar(255), FILLED); // Middle right rectangle(temp_window, Rect(resized.x + 2*resized.width, resized.y + resized.height, resized.width, resized.height), Scalar(255), 1); // Bottom left rectangle(temp_window, Rect(resized.x, resized.y + 2*resized.height, resized.width, resized.height), Scalar(255), 1); // Bottom middle rectangle(temp_window, Rect(resized.x + resized.width, resized.y + 2*resized.height, resized.width, resized.height), Scalar(255), 1); // Bottom right rectangle(temp_window, Rect(resized.x + 2*resized.width, resized.y + 2*resized.height, resized.width, resized.height), Scalar(255), 1); if(draw_planes){ Rect resized_inner(current_rect.x * resize_storage_factor, current_rect.y * resize_storage_factor, 
current_rect.width * resize_storage_factor, current_rect.height * resize_storage_factor); // Top left rectangle(single_feature, resized_inner, Scalar(255), 1); // Top middle rectangle(single_feature, Rect(resized_inner.x + resized_inner.width, resized_inner.y, resized_inner.width, resized_inner.height), Scalar(255), 1); // Top right rectangle(single_feature, Rect(resized_inner.x + 2*resized_inner.width, resized_inner.y, resized_inner.width, resized_inner.height), Scalar(255), 1); // Middle left rectangle(single_feature, Rect(resized_inner.x, resized_inner.y + resized_inner.height, resized_inner.width, resized_inner.height), Scalar(255), 1); // Middle middle rectangle(single_feature, Rect(resized_inner.x + resized_inner.width, resized_inner.y + resized_inner.height, resized_inner.width, resized_inner.height), Scalar(255), FILLED); // Middle right rectangle(single_feature, Rect(resized_inner.x + 2*resized_inner.width, resized_inner.y + resized_inner.height, resized_inner.width, resized_inner.height), Scalar(255), 1); // Bottom left rectangle(single_feature, Rect(resized_inner.x, resized_inner.y + 2*resized_inner.height, resized_inner.width, resized_inner.height), Scalar(255), 1); // Bottom middle rectangle(single_feature, Rect(resized_inner.x + resized_inner.width, resized_inner.y + 2*resized_inner.height, resized_inner.width, resized_inner.height), Scalar(255), 1); // Bottom right rectangle(single_feature, Rect(resized_inner.x + 2*resized_inner.width, resized_inner.y + 2*resized_inner.height, resized_inner.width, resized_inner.height), Scalar(255), 1); single_feature.copyTo(image_plane(Rect(0 + (fid%cols_prefered)*single_feature.cols, 0 + (fid/cols_prefered) * single_feature.rows, single_feature.cols, single_feature.rows))); } putText(temp_metadata, meta1.str(), Point(15,15), FONT_HERSHEY_SIMPLEX, 0.5, Scalar(255)); putText(temp_metadata, meta2.str(), Point(15,40), FONT_HERSHEY_SIMPLEX, 0.5, Scalar(255)); imshow("metadata", temp_metadata); imshow("features", temp_window); putText(temp_window, meta1.str(), Point(15,15), FONT_HERSHEY_SIMPLEX, 0.5, Scalar(255)); result_video.write(temp_window); waitKey(timing); } //Store the stage image if needed if(draw_planes){ stringstream save_location; save_location << output_folder << "stage_" << sid << ".png"; imwrite(save_location.str(), image_plane); } } } return 0; }
bool CascadeClassifier::Data::read(const FileNode &root) { static const float THRESHOLD_EPS = 1e-5f; // load stage params String stageTypeStr = (String)root[CC_STAGE_TYPE]; if( stageTypeStr == CC_BOOST ) stageType = BOOST; else return false; String featureTypeStr = (String)root[CC_FEATURE_TYPE]; if( featureTypeStr == CC_HAAR ) featureType = FeatureEvaluator::HAAR; else if( featureTypeStr == CC_LBP ) featureType = FeatureEvaluator::LBP; else if( featureTypeStr == CC_HOG ) featureType = FeatureEvaluator::HOG; else return false; origWinSize.width = (int)root[CC_WIDTH]; origWinSize.height = (int)root[CC_HEIGHT]; CV_Assert( origWinSize.height > 0 && origWinSize.width > 0 ); isStumpBased = (int)(root[CC_STAGE_PARAMS][CC_MAX_DEPTH]) == 1 ? true : false; // load feature params FileNode fn = root[CC_FEATURE_PARAMS]; if( fn.empty() ) return false; ncategories = fn[CC_MAX_CAT_COUNT]; int subsetSize = (ncategories + 31)/32, nodeStep = 3 + ( ncategories>0 ? subsetSize : 1 ); // load stages fn = root[CC_STAGES]; if( fn.empty() ) return false; stages.reserve(fn.size()); classifiers.clear(); nodes.clear(); FileNodeIterator it = fn.begin(), it_end = fn.end(); for( int si = 0; it != it_end; si++, ++it ) { FileNode fns = *it; Stage stage; stage.threshold = (float)fns[CC_STAGE_THRESHOLD] - THRESHOLD_EPS; fns = fns[CC_WEAK_CLASSIFIERS]; if(fns.empty()) return false; stage.ntrees = (int)fns.size(); stage.first = (int)classifiers.size(); stages.push_back(stage); classifiers.reserve(stages[si].first + stages[si].ntrees); FileNodeIterator it1 = fns.begin(), it1_end = fns.end(); for( ; it1 != it1_end; ++it1 ) // weak trees { FileNode fnw = *it1; FileNode internalNodes = fnw[CC_INTERNAL_NODES]; FileNode leafValues = fnw[CC_LEAF_VALUES]; if( internalNodes.empty() || leafValues.empty() ) return false; DTree tree; tree.nodeCount = (int)internalNodes.size()/nodeStep; classifiers.push_back(tree); nodes.reserve(nodes.size() + tree.nodeCount); leaves.reserve(leaves.size() + leafValues.size()); if( subsetSize > 0 ) subsets.reserve(subsets.size() + tree.nodeCount*subsetSize); FileNodeIterator internalNodesIter = internalNodes.begin(), internalNodesEnd = internalNodes.end(); for( ; internalNodesIter != internalNodesEnd; ) // nodes { DTreeNode node; node.left = (int)*internalNodesIter; ++internalNodesIter; node.right = (int)*internalNodesIter; ++internalNodesIter; node.featureIdx = (int)*internalNodesIter; ++internalNodesIter; if( subsetSize > 0 ) { for( int j = 0; j < subsetSize; j++, ++internalNodesIter ) subsets.push_back((int)*internalNodesIter); node.threshold = 0.f; } else { node.threshold = (float)*internalNodesIter; ++internalNodesIter; } nodes.push_back(node); } internalNodesIter = leafValues.begin(), internalNodesEnd = leafValues.end(); for( ; internalNodesIter != internalNodesEnd; ++internalNodesIter ) // leaves leaves.push_back((float)*internalNodesIter); } } return true; }
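For reference, the layout that Data::read walks can be inspected directly with FileStorage. The sketch below is only an illustration: the literal node names ("stageType", "featureType", "width", "height", "stages") are assumed to be what the CC_* constants expand to in the usual cascade XML format, and dumpCascadeHeader is a hypothetical helper, not OpenCV API.

#include <iostream>
#include <opencv2/core.hpp>
using namespace cv;

// Hypothetical helper: print the top-level cascade fields that Data::read() consumes.
// Node names are assumptions based on the common cascade XML layout.
static bool dumpCascadeHeader(const std::string& path)
{
    FileStorage fs(path, FileStorage::READ);
    if (!fs.isOpened())
        return false;
    FileNode root = fs.getFirstTopLevelNode();   // usually the "cascade" mapping
    std::cout << "stageType:   " << (String)root["stageType"] << std::endl;
    std::cout << "featureType: " << (String)root["featureType"] << std::endl;
    std::cout << "window:      " << (int)root["width"] << "x" << (int)root["height"] << std::endl;
    std::cout << "stages:      " << (int)root["stages"].size() << std::endl;
    return true;
}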
int main(int ac, char** av) { if (ac != 2) { help(av); return 1; } string filename = av[1]; //write { FileStorage fs(filename, FileStorage::WRITE); cout << "writing images\n"; fs << "images" << "["; fs << "image1.jpg" << "myfi.png" << "baboon.jpg"; cout << "image1.jpg" << " myfi.png" << " baboon.jpg" << endl; fs << "]"; cout << "writing mats\n"; Mat R =Mat_<double>::eye(3, 3),T = Mat_<double>::zeros(3, 1); cout << "R = " << R << "\n"; cout << "T = " << T << "\n"; fs << "R" << R; fs << "T" << T; cout << "writing MyData struct\n"; MyData m(1); fs << "mdata" << m; cout << m << endl; } //read { FileStorage fs(filename, FileStorage::READ); if (!fs.isOpened()) { cerr << "failed to open " << filename << endl; help(av); return 1; } FileNode n = fs["images"]; if (n.type() != FileNode::SEQ) { cerr << "images is not a sequence! FAIL" << endl; return 1; } cout << "reading images\n"; FileNodeIterator it = n.begin(), it_end = n.end(); for (; it != it_end; ++it) { cout << (string)*it << "\n"; } Mat R, T; cout << "reading R and T" << endl; fs["R"] >> R; fs["T"] >> T; cout << "R = " << R << "\n"; cout << "T = " << T << endl; MyData m; fs["mdata"] >> m; cout << "read mdata\n"; cout << m << endl; cout << "attempting to read mdata_b\n"; //Show default behavior for empty matrix fs["mdata_b"] >> m; cout << "read mdata_b\n"; cout << m << endl; } cout << "Try opening " << filename << " to see the serialized data." << endl; return 0; }
int main(int ac, char** av) { if (ac != 2) { help(av); return 1; } string filename = av[1]; { //write Mat R = Mat_<uchar>::eye(3, 3), T = Mat_<double>::zeros(3, 1); MyData m(1); FileStorage fs(filename, FileStorage::WRITE); fs << "iterationNr" << 100; fs << "strings" << "["; // text - string sequence fs << "image1.jpg" << "Awesomeness" << "../data/baboon.jpg"; fs << "]"; // close sequence fs << "Mapping"; // text - mapping fs << "{" << "One" << 1; fs << "Two" << 2 << "}"; fs << "R" << R; // cv::Mat fs << "T" << T; fs << "MyData" << m; // your own data structures fs.release(); // explicit close cout << "Write Done." << endl; } {//read cout << endl << "Reading: " << endl; FileStorage fs; fs.open(filename, FileStorage::READ); int itNr; //fs["iterationNr"] >> itNr; itNr = (int) fs["iterationNr"]; cout << itNr; if (!fs.isOpened()) { cerr << "Failed to open " << filename << endl; help(av); return 1; } FileNode n = fs["strings"]; // Read string sequence - Get node if (n.type() != FileNode::SEQ) { cerr << "strings is not a sequence! FAIL" << endl; return 1; } FileNodeIterator it = n.begin(), it_end = n.end(); // Go through the node for (; it != it_end; ++it) cout << (string)*it << endl; n = fs["Mapping"]; // Read mappings from a sequence cout << "Two " << (int)(n["Two"]) << "; "; cout << "One " << (int)(n["One"]) << endl << endl; MyData m; Mat R, T; fs["R"] >> R; // Read cv::Mat fs["T"] >> T; fs["MyData"] >> m; // Read your own structure_ cout << endl << "R = " << R << endl; cout << "T = " << T << endl << endl; cout << "MyData = " << endl << m << endl << endl; //Show default behavior for non existing nodes cout << "Attempt to read NonExisting (should initialize the data structure with its default)."; fs["NonExisting"] >> m; cout << endl << "NonExisting = " << endl << m << endl; } cout << endl << "Tip: Open up " << filename << " with a text editor to see the serialized data." << endl; return 0; }
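Both samples above rely on fs << "MyData" << m and fs["MyData"] >> m (and fs["mdata"] >> m in the previous one), which only work because matching write() and read() free functions for MyData are in scope. The sketch below shows those hooks under the assumption that MyData holds an int, a double and a string (field names A, X, id are illustrative, not a definitive definition).

#include <opencv2/core.hpp>
#include <string>
using namespace cv;
using namespace std;

// Sketch of the serialization hooks the FileStorage << and >> operators use for a custom type.
class MyData
{
public:
    MyData() : A(0), X(0), id() {}
    explicit MyData(int) : A(97), X(CV_PI), id("mydata1234") {}   // sample values, assumed
    void write(FileStorage& fs) const                  // invoked by fs << "MyData" << m
    {
        fs << "{" << "A" << A << "X" << X << "id" << id << "}";
    }
    void read(const FileNode& node)                    // invoked by fs["MyData"] >> m
    {
        A = (int)node["A"];
        X = (double)node["X"];
        id = (string)node["id"];
    }
    int A;
    double X;
    string id;
};

// The free functions below are what the generic << and >> operators call for a custom type.
void write(FileStorage& fs, const string&, const MyData& x) { x.write(fs); }
void read(const FileNode& node, MyData& x, const MyData& default_value = MyData())
{
    if (node.empty()) x = default_value;               // covers the "NonExisting"/"mdata_b" cases above
    else x.read(node);
}

The samples also stream MyData to cout, which additionally needs an ostream operator<< overload, omitted here.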
int main(int argc, char ** argv) { initFeatures(); initClassifiers(); initDataProviders(); initNonMaximumSuppressors(); const string commandLineKeys = "{h|help|false|show help and exit}" "{c|config||.xml or .yml file containing detector " "configuration parameters, i.e. features to use " "and their parameters, classifier type " "and params of the training algorithm, " "general detection parameters}" "{|pos||path to annotation with positive examples}" "{|posname|annotation|name of the positive annotation file node to read from}" "{|neg||path to annotation with negative examples}" "{|negname|annotation|name of the negative annotation file node to read from}" "{|dump||prefix of files to save dataset to}" "{|rand|50|number of random samples to draw from each negative image}" "{|bi|3|number of bootstrap iterations}" "{|samples|0|number of samples to draw from all false detections at each iteration. All false positives are used by default}" "{|mirror|true|use horizontally flipped positives for training}" "{|mem|0|preallocate memory for specified " "number of samples in the dataset. " "If more samples are needed the dataset " "is resized, which may lead to additional memory usage}" "{|seed||seed to initialize RNG}"; CommandLineParser cmdParser(argc, argv, commandLineKeys.c_str()); if (cmdParser.get<bool>("help")) { cmdParser.printParams(); return 0; } if (cmdParser.get<string>("seed") == "") { theRNG() = RNG(time(0)); } else { theRNG() = RNG(cmdParser.get<uint64>("seed")); } cout << "rng state: " << theRNG().state << endl; FileStorage config(cmdParser.get<string>("config"), FileStorage::READ); CV_Assert(config.isOpened()); // read detection parameters DetectionParams detectorParams; FileNode detectorParamsFn = config["detector_params"]; if (detectorParamsFn.empty()) { cout << "Error: detector_params tag is missing in config file" << endl; return 2; } cout << "reading general detector parameters..." << flush; detectorParamsFn >> detectorParams; cout << "done" << endl; // configure feature descriptors Features features; FileNode featuresFn = config["features"]; for (FileNodeIterator i = featuresFn.begin(); i != featuresFn.end(); ++i) { FileNode featureType = (*i)["name"]; CV_Assert(!featureType.empty()); string featureName; featureType >> featureName; Ptr<Feature> feature = Algorithm::create<Feature>(featureName); feature->read(*i); features.featuresSet.push_back(feature); cout << featureName << " feature is used" << endl; } // configure classifier FileNode classifierFn = config["classifier"]; FileNode classifierType = classifierFn["name"]; CV_Assert(!classifierType.empty()); string classifierName; classifierType >> classifierName; Ptr<Classifier> classifier = Algorithm::create<Classifier>(classifierName); classifier->read(classifierFn); cout << classifierName << " classifier is used" << endl; string classifierModelFile = classifierFn["modelFileName"]; string classifierModelName = classifierFn["modelName"]; // configure nonmaximum suppressor FileNode nmsFn = config["nonmaximum_suppressor"]; Ptr<NonMaximumSuppressor> nms = 0; if (!nmsFn.empty()) { FileNode nmsType = nmsFn["name"]; CV_Assert(!nmsType.empty()); string nmsName; nmsType >> nmsName; nms = Algorithm::create<NonMaximumSuppressor>(nmsName); nms->read(nmsFn); cout << nmsName << " nonmaximum suppressor is used" << endl; }
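The tool above expects its config to contain a detector_params mapping, a features sequence whose entries each carry a name, a classifier mapping with name, modelFileName and modelName, and an optional nonmaximum_suppressor mapping. As a rough, hypothetical illustration of that layout, the sketch below writes such a skeleton with FileStorage; only the node names visited by the code come from the source, and every value (including the key inside detector_params) is a placeholder.

#include <opencv2/core.hpp>
#include <string>
using namespace cv;

// Hypothetical config skeleton for the training tool above.
// Node names mirror the lookups in main(); all values are placeholders.
static bool writeSampleConfig(const std::string& path)
{
    FileStorage config(path, FileStorage::WRITE);
    if (!config.isOpened())
        return false;
    config << "detector_params" << "{" << "placeholder_param" << 0 << "}";   // actual fields depend on DetectionParams
    config << "features" << "[";
    config << "{" << "name" << "SomeFeature" << "}";                         // one entry per feature descriptor
    config << "]";
    config << "classifier" << "{"
           << "name" << "SomeClassifier"
           << "modelFileName" << "model.yml"
           << "modelName" << "detector"
           << "}";
    config << "nonmaximum_suppressor" << "{" << "name" << "SomeNMS" << "}";  // optional block
    return true;
}

feature->read(*i), classifier->read(classifierFn) and nms->read(nmsFn) then pick up whatever algorithm-specific parameters are stored alongside each name node.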